Chromium Code Reviews

Unified Diff: src/incremental-marking.cc

Issue 11362246: Implement progress bar for large objects. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years, 1 month ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 170 matching lines...)
    MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
    if (Marking::IsBlack(mark_bit)) {
      MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
                                            -heap_obj->Size());
    }
    Marking::AnyToGrey(mark_bit);
  }
}


+static inline void MarkBlackOrKeepGrey(HeapObject* heap_object,
+                                       MarkBit mark_bit,
+                                       int size) {
+  ASSERT(!Marking::IsImpossible(mark_bit));
+  if (mark_bit.Get()) return;
+  mark_bit.Set();
+  MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
+  ASSERT(Marking::IsBlack(mark_bit));
+}
+
+
+static inline void MarkBlackOrKeepBlack(HeapObject* heap_object,
+                                        MarkBit mark_bit,
+                                        int size) {
+  ASSERT(!Marking::IsImpossible(mark_bit));
+  if (Marking::IsBlack(mark_bit)) return;
+  Marking::MarkBlack(mark_bit);
+  MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
+  ASSERT(Marking::IsBlack(mark_bit));
+}
+
+
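The two helpers added above maintain a single invariant: an object's size is added to its page's live-byte count exactly once, at the moment the object turns black, and an object that is already grey or black is left alone. A minimal standalone model of that invariant, not V8 code, using a hypothetical Color enum and a plain counter in place of the two-bit mark bitmap and MemoryChunk live bytes, might look like this:

#include <cassert>

enum Color { WHITE, GREY, BLACK };

struct FakeObject {
  Color color;
  int size;
};

static long live_bytes = 0;  // stand-in for a page's live-byte counter

// Like MarkBlackOrKeepGrey: only a white object transitions; anything already
// grey or black is left untouched, so its size is never counted twice.
static void ModelMarkBlackOrKeepGrey(FakeObject* obj) {
  if (obj->color != WHITE) return;
  obj->color = BLACK;
  live_bytes += obj->size;
}

// Like MarkBlackOrKeepBlack: grey objects are promoted to black and counted
// now (grey objects were not counted when they were pushed), but an object
// that is already black is not counted again.
static void ModelMarkBlackOrKeepBlack(FakeObject* obj) {
  if (obj->color == BLACK) return;
  obj->color = BLACK;
  live_bytes += obj->size;
}

int main() {
  FakeObject big = { GREY, 1000 };
  ModelMarkBlackOrKeepBlack(&big);  // first visit: 1000 live bytes counted
  ModelMarkBlackOrKeepBlack(&big);  // revisited, already black: no change
  assert(live_bytes == 1000);
}

This is presumably why VisitObject further down switches to MarkBlackOrKeepBlack: an object driven by the progress bar can already be black when it comes off the marking deque again, and its live bytes must not be counted a second time.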
class IncrementalMarkingMarkingVisitor
    : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
 public:
  static void Initialize() {
    StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
-
+    table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
    table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
    table_.Register(kVisitJSRegExp, &VisitJSRegExp);
  }

+  static const int kProgressBarScanningChunk = 32 * 1024;
+
+  static void VisitFixedArrayIncremental(Map* map, HeapObject* object) {
+    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+    if (FLAG_use_progress_bar && chunk->owner()->identity() == LO_SPACE) {
+      Heap* heap = map->GetHeap();
Hannes Payer (out of office) 2012/11/14 12:21:04 can you set that flag earlier e.g. at allocation t
Michael Starzinger 2012/11/15 15:12:50 Yes, that would be cleaner. I just couldn't find a
Michael Starzinger 2012/11/15 17:54:51 Left a TODO in the code about that. Currently ther
+      chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR);
+      // When using a progress bar for large fixed arrays, scan only a chunk of
+      // the array and try to push it onto the marking deque again until it is
+      // fully scanned. Fall back to scanning it through to the end in case this
+      // fails because of a full deque.
+      bool scan_until_end = false;
+      int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
+      do {
+        int start_offset = Max(FixedArray::BodyDescriptor::kStartOffset,
ulan 2012/11/15 09:54:23 Not sure if worthwhile as the loop rarely executes
Michael Starzinger 2012/11/15 15:12:50 Done.
+                               chunk->progress_bar());
+        int end_offset = Min(object_size,
+                             chunk->progress_bar() + kProgressBarScanningChunk);
ulan 2012/11/15 09:54:23 Shouldn't this be start_offset + kProgressBarScanningChunk?
Michael Starzinger 2012/11/15 15:12:50 Done.
+        chunk->set_progress_bar(end_offset);
+        VisitPointersWithAnchor(heap,
+                                HeapObject::RawField(object, 0),
+                                HeapObject::RawField(object, start_offset),
+                                HeapObject::RawField(object, end_offset));
+        scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
+        if (end_offset < object_size) {
Hannes Payer (out of office) 2012/11/14 12:21:04 why is this method called UnshiftGrey and not Unsh
ulan 2012/11/15 09:54:23 (end_offset < object_size && !scan_until_end) woul
Michael Starzinger 2012/11/15 15:12:50 Done.
Michael Starzinger 2012/11/15 15:12:50 The method is called UnshiftGrey because at the ti
Hannes Payer (out of office) 2012/11/15 16:25:54 There is probably no performance difference, I jus
+          heap->incremental_marking()->marking_deque()->UnshiftGrey(object);
+        }
+      } while (scan_until_end && chunk->progress_bar() < object_size);
+    } else {
+      FixedArrayVisitor::Visit(map, object);
+    }
+  }
+
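The comment block inside VisitFixedArrayIncremental above is the heart of the patch: scan only a bounded chunk of a large FixedArray per visit, remember how far scanning got in the page's progress bar, and push the object back onto the marking deque so the remainder is handled by a later increment; only when the deque is full does the loop keep scanning to the end. A minimal standalone sketch of that control flow, not V8 code (BigArray, the std::deque, and the plain progress counter are hypothetical stand-ins for FixedArray, the MarkingDeque, and MemoryChunk's progress bar), with the "&& !scan_until_end" check from ulan's comment folded in, might look like this:

#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <deque>
#include <vector>

struct BigArray {
  std::vector<int> slots;
  std::size_t progress = 0;  // stand-in for the per-page progress bar
};

const std::size_t kChunk = 4;          // stand-in for kProgressBarScanningChunk
const std::size_t kDequeCapacity = 8;  // the real deque is a fixed-size ring buffer

std::deque<BigArray*> marking_deque;

bool DequeIsFull() { return marking_deque.size() >= kDequeCapacity; }

void VisitSlot(int /* slot */) {
  // In V8 this would mark the referenced object and record the slot.
}

// Scan at most kChunk slots per round; if the array is not finished and the
// deque still has room, push it back (analogous to UnshiftGrey) so the rest is
// processed in a later marking step. If the deque is full, keep scanning to
// the end so no work is dropped.
bool ScanWithProgressBar(BigArray* obj) {
  bool scan_until_end = false;
  do {
    std::size_t start = obj->progress;
    std::size_t end = std::min(obj->slots.size(), start + kChunk);
    for (std::size_t i = start; i < end; i++) VisitSlot(obj->slots[i]);
    obj->progress = end;
    scan_until_end = DequeIsFull();
    if (end < obj->slots.size() && !scan_until_end) {
      marking_deque.push_front(obj);
      return false;  // partially scanned; resumed in a later step
    }
  } while (scan_until_end && obj->progress < obj->slots.size());
  return true;  // fully scanned
}

int main() {
  BigArray a;
  a.slots.assign(10, 0);
  // Drive the scan to completion; in V8 each round would happen in a separate
  // incremental marking step.
  while (!ScanWithProgressBar(&a)) marking_deque.pop_front();
  std::printf("scanned up to offset %zu\n", a.progress);
}

Each marking step thus pays for at most one kProgressBarScanningChunk-sized slice of a huge FixedArray instead of the whole object, which is presumably what keeps individual incremental-marking pauses bounded.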
  static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
    Context* context = Context::cast(object);

    // We will mark cache black with a separate pass
    // when we finish marking.
    MarkObjectGreyDoNotEnqueue(context->normalized_map_cache());
    VisitNativeContext(map, context);
  }

  static void VisitJSWeakMap(Map* map, HeapObject* object) {
(...skipping 16 matching lines...)
  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) {
      Object* obj = *p;
      if (obj->NonFailureIsHeapObject()) {
        heap->mark_compact_collector()->RecordSlot(start, p, obj);
        MarkObject(heap, obj);
      }
    }
  }

+  INLINE(static void VisitPointersWithAnchor(Heap* heap,
+                                             Object** anchor,
+                                             Object** start,
+                                             Object** end)) {
+    for (Object** p = start; p < end; p++) {
+      Object* obj = *p;
+      if (obj->NonFailureIsHeapObject()) {
+        heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
+        MarkObject(heap, obj);
+      }
+    }
+  }
+
  // Marks the object grey and pushes it on the marking stack.
Hannes Payer (out of office) 2012/11/14 12:21:04 update comment
Michael Starzinger 2012/11/15 15:12:50 I think the comment still applies. I didn't change
  INLINE(static void MarkObject(Heap* heap, Object* obj)) {
    HeapObject* heap_object = HeapObject::cast(obj);
    MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
    if (mark_bit.data_only()) {
-      if (heap->incremental_marking()->MarkBlackOrKeepGrey(mark_bit)) {
-        MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
-                                              heap_object->Size());
-      }
+      MarkBlackOrKeepGrey(heap_object, mark_bit, heap_object->Size());
    } else if (Marking::IsWhite(mark_bit)) {
      heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
    }
  }

  // Marks the object black without pushing it on the marking stack.
  // Returns true if object needed marking and false otherwise.
  INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
    HeapObject* heap_object = HeapObject::cast(obj);
    MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
(...skipping 25 matching lines...)
  }

 private:
  void MarkObjectByPointer(Object** p) {
    Object* obj = *p;
    if (!obj->IsHeapObject()) return;

    HeapObject* heap_object = HeapObject::cast(obj);
    MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
    if (mark_bit.data_only()) {
-      if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) {
-        MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
-                                              heap_object->Size());
-      }
+      MarkBlackOrKeepGrey(heap_object, mark_bit, heap_object->Size());
    } else {
      if (Marking::IsWhite(mark_bit)) {
        incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
      }
    }
  }

  Heap* heap_;
  IncrementalMarking* incremental_marking_;
};
(...skipping 304 matching lines...)
#endif
      }
    } else if (obj->map() != filler_map) {
      // Skip one word filler objects that appear on the
      // stack when we perform in place array shift.
      array[new_top] = obj;
      new_top = ((new_top + 1) & mask);
      ASSERT(new_top != marking_deque_.bottom());
#ifdef DEBUG
      MarkBit mark_bit = Marking::MarkBitFrom(obj);
+      MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
      ASSERT(Marking::IsGrey(mark_bit) ||
-             (obj->IsFiller() && Marking::IsWhite(mark_bit)));
+             (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
+             (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
+              Marking::IsBlack(mark_bit)));
#endif
    }
  }
  marking_deque_.set_top(new_top);

  steps_took_since_last_gc_ = 0;
  steps_count_since_last_gc_ = 0;
  longest_step_ = 0.0;
}


void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
  MarkBit map_mark_bit = Marking::MarkBitFrom(map);
  if (Marking::IsWhite(map_mark_bit)) {
    WhiteToGreyAndPush(map, map_mark_bit);
  }

  IncrementalMarkingMarkingVisitor::IterateBody(map, obj);

-  MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-  SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
-              (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
-  Marking::MarkBlack(obj_mark_bit);
-  MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+  MarkBit mark_bit = Marking::MarkBitFrom(obj);
+#ifdef DEBUG
+  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
+  SLOW_ASSERT(Marking::IsGrey(mark_bit) ||
+              (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
+              (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
+               Marking::IsBlack(mark_bit)));
+#endif
+  MarkBlackOrKeepBlack(obj, mark_bit, size);
}


void IncrementalMarking::ProcessMarkingDeque(intptr_t bytes_to_process) {
  Map* filler_map = heap_->one_pointer_filler_map();
  while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
    HeapObject* obj = marking_deque_.Pop();

    // Explicitly skip one word fillers. Incremental markbit patterns are
    // correct only for objects that occupy at least two words.
(...skipping 265 matching lines...)
  bytes_scanned_ = 0;
  write_barriers_invoked_since_last_step_ = 0;
}


int64_t IncrementalMarking::SpaceLeftInOldSpace() {
  return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
}

} }  // namespace v8::internal