
Unified Diff: src/heap.cc

Issue 15691017: Make assertion scopes thread safe. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 6 months ago
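
This patch replaces V8's per-heap allocation_allowed_ flag and the
AssertNoAllocation / DisableAssertNoAllocation helpers with RAII assertion
scopes (DisallowHeapAllocation, AllowHeapAllocation, DisallowHandleAllocation)
whose state is queried via IsAllowed(). Keeping that state per thread is what
makes the assertions safe once several threads touch the heap. The sketch
below is only an illustration of the idea: the class and member names are
hypothetical simplifications, not the actual implementation, which this patch
places in src/assert-scope.h in a more general, templated form. It assumes
C++11 thread_local; V8 of this era used its own per-thread storage.

// Minimal illustrative sketch of a thread-safe assertion scope.
class HeapAllocationAssert {
 public:
  static bool IsAllowed() { return allowed_; }

  // RAII scope that forbids heap allocation on the current thread.
  class Disallow {
   public:
    Disallow() : old_(allowed_) { allowed_ = false; }
    ~Disallow() { allowed_ = old_; }
   private:
    bool old_;  // saved state, restored on scope exit so scopes nest
  };

  // RAII scope that re-permits allocation, e.g. around GC callbacks.
  class Allow {
   public:
    Allow() : old_(allowed_) { allowed_ = true; }
    ~Allow() { allowed_ = old_; }
   private:
    bool old_;
  };

 private:
  // One flag per thread: a scope opened on one thread cannot trip
  // assertions running on another, which is the point of this change.
  static thread_local bool allowed_;
};

thread_local bool HeapAllocationAssert::allowed_ = true;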
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 95 matching lines...)
      map_space_(NULL),
      cell_space_(NULL),
      lo_space_(NULL),
      gc_state_(NOT_IN_GC),
      gc_post_processing_depth_(0),
      ms_count_(0),
      gc_count_(0),
      remembered_unmapped_pages_index_(0),
      unflattened_strings_length_(0),
#ifdef DEBUG
-      allocation_allowed_(true),
      allocation_timeout_(0),
      disallow_allocation_failure_(false),
#endif  // DEBUG
      new_space_high_promotion_mode_active_(false),
      old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
      size_of_old_gen_at_last_old_space_gc_(0),
      external_allocation_limit_(0),
      amount_of_external_allocated_memory_(0),
      amount_of_external_allocated_memory_at_last_global_gc_(0),
      old_gen_exhausted_(false),
(...skipping 299 matching lines...)
    mark_compact_collector()->EnableCodeFlushing(true);
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif

#ifdef DEBUG
-  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
-  allow_allocation(false);
+  ASSERT(gc_state_ == NOT_IN_GC);

  if (FLAG_gc_verbose) Print();

  ReportStatisticsBeforeGC();
#endif  // DEBUG

  store_buffer()->GCPrologue();
}


(...skipping 25 matching lines...)
    ZapFromSpace();
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif

#ifdef DEBUG
-  allow_allocation(true);
  if (FLAG_print_global_handles) isolate_->global_handles()->Print();
  if (FLAG_print_handles) PrintHandles();
  if (FLAG_gc_verbose) Print();
  if (FLAG_code_stats) ReportCodeStatistics("After GC");
#endif
  if (FLAG_deopt_every_n_garbage_collections > 0) {
    if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) {
      Deoptimizer::DeoptimizeAll(isolate());
      gcs_since_last_deopt_ = 0;
    }
(...skipping 141 matching lines...)
        PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
      }
      collector = SCAVENGER;
      collector_reason = "incremental marking delaying mark-sweep";
    }
  }

  bool next_gc_likely_to_collect_more = false;

  { GCTracer tracer(this, gc_reason, collector_reason);
+    ASSERT(AllowHandleAllocation::IsAllowed());
+    DisallowHandleAllocation no_allocation_during_gc;
    GarbageCollectionPrologue();
    // The GC count was incremented in the prologue.  Tell the tracer about
    // it.
    tracer.set_gc_count(gc_count_);

    // Tell the tracer which collector we've selected.
    tracer.set_collector(collector);

    {
      HistogramTimerScope histogram_timer_scope(
(...skipping 314 matching lines...)
      new_space_.Shrink();
    }

    isolate_->counters()->objs_since_last_young()->Set(0);

    // Callbacks that fire after this point might trigger nested GCs and
    // restart incremental marking, the assertion can't be moved down.
    ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());

    gc_post_processing_depth_++;
-    { DisableAssertNoAllocation allow_allocation;
+    { AllowHeapAllocation allow_allocation;
      GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
      next_gc_likely_to_collect_more =
          isolate_->global_handles()->PostGarbageCollectionProcessing(
              collector, tracer);
    }
    gc_post_processing_depth_--;

    // Update relocatables.
    Relocatable::PostGarbageCollectionProcessing();

(...skipping 619 matching lines...)
                         Heap::undefined_value(),
                         UPDATE_WRITE_BARRIER);
  }

  // Update the head of the list of contexts.
  native_contexts_list_ = head;
}


void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
-  AssertNoAllocation no_allocation;
+  DisallowHeapAllocation no_allocation;

  // Both the external string table and the string table may contain
  // external strings, but neither lists them exhaustively, nor is the
  // intersection set empty.  Therefore we iterate over the external string
  // table first, ignoring internalized strings, and then over the
  // internalized string table.

  class ExternalStringTableVisitorAdapter : public ObjectVisitor {
   public:
    explicit ExternalStringTableVisitorAdapter(
(...skipping 1038 matching lines...)
}


MaybeObject* Heap::AllocateHeapNumber(double value) {
  // Use general version, if we're forced to always allocate.
  if (always_allocate()) return AllocateHeapNumber(value, TENURED);

  // This version of AllocateHeapNumber is optimized for
  // allocation in new space.
  STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxNonCodeHeapObjectSize);
-  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  Object* result;
  { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
  HeapNumber::cast(result)->set_value(value);
  return result;
}


(...skipping 878 matching lines...)
  }

  Map* map = (is_one_byte || is_one_byte_data_in_two_byte_string) ?
      cons_ascii_string_map() : cons_string_map();

  Object* result;
  { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

-  AssertNoAllocation no_gc;
+  DisallowHeapAllocation no_gc;
  ConsString* cons_string = ConsString::cast(result);
  WriteBarrierMode mode = cons_string->GetWriteBarrierMode(no_gc);
  cons_string->set_length(length);
  cons_string->set_hash_field(String::kEmptyHashField);
  cons_string->set_first(first, mode);
  cons_string->set_second(second, mode);
  return result;
}


(...skipping 60 matching lines...)
  // indirect ASCII string is pointing to a two-byte string, the two-byte char
  // codes of the underlying string must still fit into ASCII (because
  // externalization must not change char codes).
  { Map* map = buffer->IsOneByteRepresentation()
        ? sliced_ascii_string_map()
        : sliced_string_map();
    MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

-  AssertNoAllocation no_gc;
+  DisallowHeapAllocation no_gc;
  SlicedString* sliced_string = SlicedString::cast(result);
  sliced_string->set_length(length);
  sliced_string->set_hash_field(String::kEmptyHashField);
  if (buffer->IsConsString()) {
    ConsString* cons = ConsString::cast(buffer);
    ASSERT(cons->second()->length() == 0);
    sliced_string->set_parent(cons->first());
    sliced_string->set_offset(start);
  } else if (buffer->IsSlicedString()) {
    // Prevent nesting sliced strings.
(...skipping 444 matching lines...)
        strict_mode_arguments_boilerplate();
    arguments_object_size = kArgumentsObjectSizeStrict;
  } else {
    boilerplate =
        isolate()->context()->native_context()->arguments_boilerplate();
    arguments_object_size = kArgumentsObjectSize;
  }

  // This calls Copy directly rather than using Heap::AllocateRaw so we
  // duplicate the check here.
-  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
+  ASSERT(AllowHandleAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);

  // Check that the size of the boilerplate matches our
  // expectations. The ArgumentsAccessStub::GenerateNewObject relies
  // on the size being a known constant.
  ASSERT(arguments_object_size == boilerplate->map()->instance_size());

  // Do the allocation.
  Object* result;
  { MaybeObject* maybe_result =
        AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE);
(...skipping 1185 matching lines...)
    CopyBlock(dst->address() + kPointerSize,
              src->address() + kPointerSize,
              FixedArray::SizeFor(len) - kPointerSize);
    return obj;
  }
  HeapObject::cast(obj)->set_map_no_write_barrier(map);
  FixedArray* result = FixedArray::cast(obj);
  result->set_length(len);

  // Copy the content
-  AssertNoAllocation no_gc;
+  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
  return result;
}


MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
                                               Map* map) {
  int len = src->length();
  Object* obj;
(...skipping 402 matching lines...)
}


bool Heap::IsHeapIterable() {
  return (!old_pointer_space()->was_swept_conservatively() &&
          !old_data_space()->was_swept_conservatively());
}


void Heap::EnsureHeapIsIterable() {
-  ASSERT(IsAllocationAllowed());
+  ASSERT(AllowHandleAllocation::IsAllowed());
  if (!IsHeapIterable()) {
    CollectAllGarbage(kMakeHeapIterableMask, "Heap::EnsureHeapIsIterable");
  }
  ASSERT(IsHeapIterable());
}


void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
  incremental_marking()->Step(step_size,
                              IncrementalMarking::NO_GC_VIA_STACK_GUARD);
(...skipping 1297 matching lines...)
    List<HeapObject*> marking_stack_;
  };

  void MarkReachableObjects() {
    Heap* heap = Isolate::Current()->heap();
    MarkingVisitor visitor;
    heap->IterateRoots(&visitor, VISIT_ALL);
    visitor.TransitiveClosure();
  }

-  AssertNoAllocation no_alloc;
+  DisallowHeapAllocation no_allocation_;
};


HeapIterator::HeapIterator(Heap* heap)
    : heap_(heap),
      filtering_(HeapIterator::kNoFiltering),
      filter_(NULL) {
  Init();
}

(...skipping 665 matching lines...)
  if (nested_ || list_.is_empty() || isolate->has_pending_exception()) return;
  nested_ = true;
  HandleScope scope(isolate);
  Handle<String> stack_key = isolate->factory()->stack_string();
  int write_index = 0;
  int budget = kBudgetPerGC;
  for (int i = 0; i < list_.length(); i++) {
    Object* object = list_[i];
    JSFunction* getter_fun;

-    { AssertNoAllocation assert;
+    { DisallowHeapAllocation no_gc;
      // Skip possible holes in the list.
      if (object->IsTheHole()) continue;
      if (isolate->heap()->InNewSpace(object) || budget == 0) {
        list_[write_index++] = object;
        continue;
      }

      // Check whether the stack property is backed by the original getter.
      LookupResult lookup(isolate);
      JSObject::cast(object)->LocalLookupRealNamedProperty(*stack_key, &lookup);
(...skipping 180 matching lines...)
  if (FLAG_parallel_recompilation) {
    heap_->relocation_mutex_->Lock();
#ifdef DEBUG
    heap_->relocation_mutex_locked_by_optimizer_thread_ =
        heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
#endif  // DEBUG
  }
}

} }  // namespace v8::internal
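
For context, a hedged usage sketch of how the paired scopes compose, mirroring
the GarbageCollection hunks above (allocation forbidden for the GC body,
locally re-allowed around post-processing callbacks). The names come from the
illustrative HeapAllocationAssert sketch near the top of this page, not from
V8 itself.

#include <cassert>

void CollectGarbageSketch() {
  // GC body: heap allocation is forbidden on this thread.
  HeapAllocationAssert::Disallow no_allocation_during_gc;
  assert(!HeapAllocationAssert::IsAllowed());

  {
    // Post-processing callbacks may allocate, so the restriction is
    // lifted locally, like the AllowHeapAllocation scope in the diff.
    HeapAllocationAssert::Allow allow_allocation;
    assert(HeapAllocationAssert::IsAllowed());
    // ... run embedder callbacks ...
  }

  // The Allow scope restored the saved per-thread state on exit.
  assert(!HeapAllocationAssert::IsAllowed());
}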