Chromium Code Reviews

Unified Diff: src/incremental-marking.cc

Issue 10878047: Revert to code state of 3.13.1 plus r12350 (Closed)
Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 8 years, 3 months ago
    1  // Copyright 2012 the V8 project authors. All rights reserved.
    2  // Redistribution and use in source and binary forms, with or without
    3  // modification, are permitted provided that the following conditions are
    4  // met:
    5  //
    6  //     * Redistributions of source code must retain the above copyright
    7  //       notice, this list of conditions and the following disclaimer.
    8  //     * Redistributions in binary form must reproduce the above
    9  //       copyright notice, this list of conditions and the following
   10  //       disclaimer in the documentation and/or other materials provided
 (...skipping 611 matching lines...)
  622  void IncrementalMarking::Hurry() {
  623    if (state() == MARKING) {
  624      double start = 0.0;
  625      if (FLAG_trace_incremental_marking) {
  626        PrintF("[IncrementalMarking] Hurry\n");
  627        start = OS::TimeCurrentMillis();
  628      }
  629      // TODO(gc) hurry can mark objects it encounters black as mutator
  630      // was stopped.
  631      Map* filler_map = heap_->one_pointer_filler_map();
- 632      Map* native_context_map = heap_->native_context_map();
+ 632      Map* global_context_map = heap_->global_context_map();
  633      while (!marking_deque_.IsEmpty()) {
  634        HeapObject* obj = marking_deque_.Pop();
  635
  636        // Explicitly skip one word fillers. Incremental markbit patterns are
  637        // correct only for objects that occupy at least two words.
  638        Map* map = obj->map();
  639        if (map == filler_map) {
  640          continue;
- 641        } else if (map == native_context_map) {
- 642          // Native contexts have weak fields.
- 643          IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
+ 641        } else if (map == global_context_map) {
+ 642          // Global contexts have weak fields.
+ 643          IncrementalMarkingMarkingVisitor::VisitGlobalContext(map, obj);
  644        } else if (map->instance_type() == MAP_TYPE) {
  645          Map* map = Map::cast(obj);
  646          heap_->ClearCacheOnMap(map);
  647
  648          // When map collection is enabled we have to mark through map's
  649          // transitions and back pointers in a special way to make these links
  650          // weak. Only maps for subclasses of JSReceiver can have transitions.
  651          STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  652          if (FLAG_collect_maps &&
  653              map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
 (...skipping 25 matching lines...)
  679      }
  680    }
  681
  682    if (FLAG_cleanup_code_caches_at_gc) {
  683      PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
  684      Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache));
  685      MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(),
  686                                            PolymorphicCodeCache::kSize);
  687    }
  688
- 689    Object* context = heap_->native_contexts_list();
+ 689    Object* context = heap_->global_contexts_list();
  690    while (!context->IsUndefined()) {
  691      // GC can happen when the context is not fully initialized,
  692      // so the cache can be undefined.
  693      HeapObject* cache = HeapObject::cast(
  694          Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
  695      if (!cache->IsUndefined()) {
  696        MarkBit mark_bit = Marking::MarkBitFrom(cache);
  697        if (Marking::IsGrey(mark_bit)) {
  698          Marking::GreyToBlack(mark_bit);
  699          MemoryChunk::IncrementLiveBytesFromGC(cache->address(), cache->Size());
 (...skipping 89 matching lines...)
  789      start = OS::TimeCurrentMillis();
  790    }
  791
  792    if (state_ == SWEEPING) {
  793      if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) {
  794        bytes_scanned_ = 0;
  795        StartMarking(PREVENT_COMPACTION);
  796      }
  797    } else if (state_ == MARKING) {
  798      Map* filler_map = heap_->one_pointer_filler_map();
- 799      Map* native_context_map = heap_->native_context_map();
+ 799      Map* global_context_map = heap_->global_context_map();
  800      while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
  801        HeapObject* obj = marking_deque_.Pop();
  802
  803        // Explicitly skip one word fillers. Incremental markbit patterns are
  804        // correct only for objects that occupy at least two words.
  805        Map* map = obj->map();
  806        if (map == filler_map) continue;
  807
  808        int size = obj->SizeFromMap(map);
  809        bytes_to_process -= size;
  810        MarkBit map_mark_bit = Marking::MarkBitFrom(map);
  811        if (Marking::IsWhite(map_mark_bit)) {
  812          WhiteToGreyAndPush(map, map_mark_bit);
  813        }
  814
  815        // TODO(gc) switch to static visitor instead of normal visitor.
- 816        if (map == native_context_map) {
- 817          // Native contexts have weak fields.
+ 816        if (map == global_context_map) {
+ 817          // Global contexts have weak fields.
  818          Context* ctx = Context::cast(obj);
  819
  820          // We will mark cache black with a separate pass
  821          // when we finish marking.
  822          MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
  823
- 824          IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
+ 824          IncrementalMarkingMarkingVisitor::VisitGlobalContext(map, ctx);
  825        } else if (map->instance_type() == MAP_TYPE) {
  826          Map* map = Map::cast(obj);
  827          heap_->ClearCacheOnMap(map);
  828
  829          // When map collection is enabled we have to mark through map's
  830          // transitions and back pointers in a special way to make these links
  831          // weak. Only maps for subclasses of JSReceiver can have transitions.
  832          STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  833          if (FLAG_collect_maps &&
  834              map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
 (...skipping 109 matching lines...)
  944    allocation_marking_factor_ = kInitialAllocationMarkingFactor;
  945    bytes_scanned_ = 0;
  946  }
  947
  948
  949  int64_t IncrementalMarking::SpaceLeftInOldSpace() {
  950    return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
  951  }
  952
  953  } }  // namespace v8::internal
