Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(108)

Side by Side Diff: src/heap.cc

Issue 11931013: Revert trunk to version 3.16.4. (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 7 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 532 matching lines...) Expand 10 before | Expand all | Expand 10 after
543 #undef UPDATE_COUNTERS_FOR_SPACE 543 #undef UPDATE_COUNTERS_FOR_SPACE
544 #undef UPDATE_FRAGMENTATION_FOR_SPACE 544 #undef UPDATE_FRAGMENTATION_FOR_SPACE
545 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE 545 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
546 546
547 #if defined(DEBUG) 547 #if defined(DEBUG)
548 ReportStatisticsAfterGC(); 548 ReportStatisticsAfterGC();
549 #endif // DEBUG 549 #endif // DEBUG
550 #ifdef ENABLE_DEBUGGER_SUPPORT 550 #ifdef ENABLE_DEBUGGER_SUPPORT
551 isolate_->debug()->AfterGarbageCollection(); 551 isolate_->debug()->AfterGarbageCollection();
552 #endif // ENABLE_DEBUGGER_SUPPORT 552 #endif // ENABLE_DEBUGGER_SUPPORT
553
554 error_object_list_.DeferredFormatStackTrace(isolate());
555 } 553 }
556 554
557 555
558 void Heap::CollectAllGarbage(int flags, const char* gc_reason) { 556 void Heap::CollectAllGarbage(int flags, const char* gc_reason) {
559 // Since we are ignoring the return value, the exact choice of space does 557 // Since we are ignoring the return value, the exact choice of space does
560 // not matter, so long as we do not specify NEW_SPACE, which would not 558 // not matter, so long as we do not specify NEW_SPACE, which would not
561 // cause a full GC. 559 // cause a full GC.
562 mark_compact_collector_.SetFlags(flags); 560 mark_compact_collector_.SetFlags(flags);
563 CollectGarbage(OLD_POINTER_SPACE, gc_reason); 561 CollectGarbage(OLD_POINTER_SPACE, gc_reason);
564 mark_compact_collector_.SetFlags(kNoGCFlags); 562 mark_compact_collector_.SetFlags(kNoGCFlags);
(...skipping 322 matching lines...) Expand 10 before | Expand all | Expand 10 after
887 885
888 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { 886 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
889 ASSERT(!allocation_allowed_); 887 ASSERT(!allocation_allowed_);
890 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 888 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
891 global_gc_prologue_callback_(); 889 global_gc_prologue_callback_();
892 } 890 }
893 891
894 GCType gc_type = 892 GCType gc_type =
895 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; 893 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
896 894
897 { 895 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
898 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 896 if (gc_type & gc_prologue_callbacks_[i].gc_type) {
899 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) { 897 gc_prologue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
900 if (gc_type & gc_prologue_callbacks_[i].gc_type) {
901 gc_prologue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
902 }
903 } 898 }
904 } 899 }
905 900
906 EnsureFromSpaceIsCommitted(); 901 EnsureFromSpaceIsCommitted();
907 902
908 int start_new_space_size = Heap::new_space()->SizeAsInt(); 903 int start_new_space_size = Heap::new_space()->SizeAsInt();
909 904
910 if (IsHighSurvivalRate()) { 905 if (IsHighSurvivalRate()) {
911 // We speed up the incremental marker if it is running so that it 906 // We speed up the incremental marker if it is running so that it
912 // does not fall behind the rate of promotion, which would cause a 907 // does not fall behind the rate of promotion, which would cause a
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
1000 995
1001 // Update relocatables. 996 // Update relocatables.
1002 Relocatable::PostGarbageCollectionProcessing(); 997 Relocatable::PostGarbageCollectionProcessing();
1003 998
1004 if (collector == MARK_COMPACTOR) { 999 if (collector == MARK_COMPACTOR) {
1005 // Register the amount of external allocated memory. 1000 // Register the amount of external allocated memory.
1006 amount_of_external_allocated_memory_at_last_global_gc_ = 1001 amount_of_external_allocated_memory_at_last_global_gc_ =
1007 amount_of_external_allocated_memory_; 1002 amount_of_external_allocated_memory_;
1008 } 1003 }
1009 1004
1010 { 1005 GCCallbackFlags callback_flags = kNoGCCallbackFlags;
1011 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 1006 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
1012 GCCallbackFlags callback_flags = kNoGCCallbackFlags; 1007 if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
1013 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { 1008 gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
1014 if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
1015 gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
1016 }
1017 } 1009 }
1018 } 1010 }
1019 1011
1020 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) { 1012 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
1021 ASSERT(!allocation_allowed_); 1013 ASSERT(!allocation_allowed_);
1022 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 1014 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
1023 global_gc_epilogue_callback_(); 1015 global_gc_epilogue_callback_();
1024 } 1016 }
1025 1017
1026 #ifdef VERIFY_HEAP 1018 #ifdef VERIFY_HEAP
(...skipping 351 matching lines...) Expand 10 before | Expand all | Expand 10 after
1378 1370
1379 isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles( 1371 isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
1380 &IsUnscavengedHeapObject); 1372 &IsUnscavengedHeapObject);
1381 isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots( 1373 isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
1382 &scavenge_visitor); 1374 &scavenge_visitor);
1383 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); 1375 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1384 1376
1385 UpdateNewSpaceReferencesInExternalStringTable( 1377 UpdateNewSpaceReferencesInExternalStringTable(
1386 &UpdateNewSpaceReferenceInExternalStringTableEntry); 1378 &UpdateNewSpaceReferenceInExternalStringTableEntry);
1387 1379
1388 error_object_list_.UpdateReferencesInNewSpace(this);
1389
1390 promotion_queue_.Destroy(); 1380 promotion_queue_.Destroy();
1391 1381
1392 LiveObjectList::UpdateReferencesForScavengeGC(); 1382 LiveObjectList::UpdateReferencesForScavengeGC();
1393 if (!FLAG_watch_ic_patching) { 1383 if (!FLAG_watch_ic_patching) {
1394 isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); 1384 isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
1395 } 1385 }
1396 incremental_marking()->UpdateMarkingDequeAfterScavenge(); 1386 incremental_marking()->UpdateMarkingDequeAfterScavenge();
1397 1387
1398 ScavengeWeakObjectRetainer weak_object_retainer(this); 1388 ScavengeWeakObjectRetainer weak_object_retainer(this);
1399 ProcessWeakReferences(&weak_object_retainer); 1389 ProcessWeakReferences(&weak_object_retainer);
(...skipping 4562 matching lines...) Expand 10 before | Expand all | Expand 10 after
5962 } 5952 }
5963 5953
5964 5954
5965 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { 5955 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
5966 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); 5956 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
5967 v->Synchronize(VisitorSynchronization::kSymbolTable); 5957 v->Synchronize(VisitorSynchronization::kSymbolTable);
5968 if (mode != VISIT_ALL_IN_SCAVENGE && 5958 if (mode != VISIT_ALL_IN_SCAVENGE &&
5969 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { 5959 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
5970 // Scavenge collections have special processing for this. 5960 // Scavenge collections have special processing for this.
5971 external_string_table_.Iterate(v); 5961 external_string_table_.Iterate(v);
5972 error_object_list_.Iterate(v);
5973 } 5962 }
5974 v->Synchronize(VisitorSynchronization::kExternalStringsTable); 5963 v->Synchronize(VisitorSynchronization::kExternalStringsTable);
5975 } 5964 }
5976 5965
5977 5966
5978 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { 5967 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
5979 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); 5968 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
5980 v->Synchronize(VisitorSynchronization::kStrongRootList); 5969 v->Synchronize(VisitorSynchronization::kStrongRootList);
5981 5970
5982 v->VisitPointer(BitCast<Object**>(&hidden_symbol_)); 5971 v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
(...skipping 353 matching lines...) Expand 10 before | Expand all | Expand 10 after
6336 PrintF("min_in_mutator=%d ", get_min_in_mutator()); 6325 PrintF("min_in_mutator=%d ", get_min_in_mutator());
6337 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", 6326 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
6338 get_max_alive_after_gc()); 6327 get_max_alive_after_gc());
6339 PrintF("\n\n"); 6328 PrintF("\n\n");
6340 } 6329 }
6341 6330
6342 isolate_->global_handles()->TearDown(); 6331 isolate_->global_handles()->TearDown();
6343 6332
6344 external_string_table_.TearDown(); 6333 external_string_table_.TearDown();
6345 6334
6346 error_object_list_.TearDown();
6347
6348 new_space_.TearDown(); 6335 new_space_.TearDown();
6349 6336
6350 if (old_pointer_space_ != NULL) { 6337 if (old_pointer_space_ != NULL) {
6351 old_pointer_space_->TearDown(); 6338 old_pointer_space_->TearDown();
6352 delete old_pointer_space_; 6339 delete old_pointer_space_;
6353 old_pointer_space_ = NULL; 6340 old_pointer_space_ = NULL;
6354 } 6341 }
6355 6342
6356 if (old_data_space_ != NULL) { 6343 if (old_data_space_ != NULL) {
6357 old_data_space_->TearDown(); 6344 old_data_space_->TearDown();
(...skipping 886 matching lines...) Expand 10 before | Expand all | Expand 10 after
7244 if (new_space_strings_[i] == heap_->the_hole_value()) { 7231 if (new_space_strings_[i] == heap_->the_hole_value()) {
7245 continue; 7232 continue;
7246 } 7233 }
7247 if (heap_->InNewSpace(new_space_strings_[i])) { 7234 if (heap_->InNewSpace(new_space_strings_[i])) {
7248 new_space_strings_[last++] = new_space_strings_[i]; 7235 new_space_strings_[last++] = new_space_strings_[i];
7249 } else { 7236 } else {
7250 old_space_strings_.Add(new_space_strings_[i]); 7237 old_space_strings_.Add(new_space_strings_[i]);
7251 } 7238 }
7252 } 7239 }
7253 new_space_strings_.Rewind(last); 7240 new_space_strings_.Rewind(last);
7254 new_space_strings_.Trim();
7255
7256 last = 0; 7241 last = 0;
7257 for (int i = 0; i < old_space_strings_.length(); ++i) { 7242 for (int i = 0; i < old_space_strings_.length(); ++i) {
7258 if (old_space_strings_[i] == heap_->the_hole_value()) { 7243 if (old_space_strings_[i] == heap_->the_hole_value()) {
7259 continue; 7244 continue;
7260 } 7245 }
7261 ASSERT(!heap_->InNewSpace(old_space_strings_[i])); 7246 ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
7262 old_space_strings_[last++] = old_space_strings_[i]; 7247 old_space_strings_[last++] = old_space_strings_[i];
7263 } 7248 }
7264 old_space_strings_.Rewind(last); 7249 old_space_strings_.Rewind(last);
7265 old_space_strings_.Trim();
7266 #ifdef VERIFY_HEAP 7250 #ifdef VERIFY_HEAP
7267 if (FLAG_verify_heap) { 7251 if (FLAG_verify_heap) {
7268 Verify(); 7252 Verify();
7269 } 7253 }
7270 #endif 7254 #endif
7271 } 7255 }
7272 7256
7273 7257
7274 void ExternalStringTable::TearDown() { 7258 void ExternalStringTable::TearDown() {
7275 new_space_strings_.Free(); 7259 new_space_strings_.Free();
7276 old_space_strings_.Free(); 7260 old_space_strings_.Free();
7277 } 7261 }
7278 7262
7279 7263
7280 // Update all references.
7281 void ErrorObjectList::UpdateReferences() {
7282 for (int i = 0; i < list_.length(); i++) {
7283 HeapObject* object = HeapObject::cast(list_[i]);
7284 MapWord first_word = object->map_word();
7285 if (first_word.IsForwardingAddress()) {
7286 list_[i] = first_word.ToForwardingAddress();
7287 }
7288 }
7289 }
7290
7291
7292 // Unforwarded objects in new space are dead and removed from the list.
7293 void ErrorObjectList::UpdateReferencesInNewSpace(Heap* heap) {
7294 if (!nested_) {
7295 int write_index = 0;
7296 for (int i = 0; i < list_.length(); i++) {
7297 MapWord first_word = HeapObject::cast(list_[i])->map_word();
7298 if (first_word.IsForwardingAddress()) {
7299 list_[write_index++] = first_word.ToForwardingAddress();
7300 }
7301 }
7302 list_.Rewind(write_index);
7303 } else {
7304 // If a GC is triggered during DeferredFormatStackTrace, we do not move
7305 // objects in the list, just remove dead ones, as to not confuse the
7306 // loop in DeferredFormatStackTrace.
7307 for (int i = 0; i < list_.length(); i++) {
7308 MapWord first_word = HeapObject::cast(list_[i])->map_word();
7309 list_[i] = first_word.IsForwardingAddress()
7310 ? first_word.ToForwardingAddress()
7311 : heap->the_hole_value();
7312 }
7313 }
7314 }
7315
7316
// Invoke the deferred 'stack' getter on recorded error objects so their
// stack traces get formatted outside the original throw path.  Invoked
// after garbage collection (see the GC epilogue in this file); at most
// kBudgetPerGC objects are processed per call to bound the added pause.
void ErrorObjectList::DeferredFormatStackTrace(Isolate* isolate) {
  // If formatting the stack trace causes a GC, this method will be
  // recursively called. In that case, skip the recursive call, since
  // the loop modifies the list while iterating over it.
  if (nested_ || isolate->has_pending_exception()) return;
  nested_ = true;
  HandleScope scope(isolate);
  Handle<String> stack_key = isolate->factory()->stack_symbol();
  int write_index = 0;  // Next slot for entries kept for a later pass.
  int budget = kBudgetPerGC;
  for (int i = 0; i < list_.length(); i++) {
    Object* object = list_[i];
    JSFunction* getter_fun;

    // Raw-pointer inspection below must not allocate (allocation could
    // trigger a GC and move the objects we hold raw pointers to).
    { AssertNoAllocation assert;
      // Skip possible holes in the list.
      if (object->IsTheHole()) continue;
      // Keep new-space objects, and everything once the budget is spent,
      // for a later call.
      if (isolate->heap()->InNewSpace(object) || budget == 0) {
        list_[write_index++] = object;
        continue;
      }

      // Check whether the stack property is backed by the original getter.
      LookupResult lookup(isolate);
      JSObject::cast(object)->LocalLookupRealNamedProperty(*stack_key, &lookup);
      if (!lookup.IsFound() || lookup.type() != CALLBACKS) continue;
      Object* callback = lookup.GetCallbackObject();
      if (!callback->IsAccessorPair()) continue;
      Object* getter_obj = AccessorPair::cast(callback)->getter();
      if (!getter_obj->IsJSFunction()) continue;
      getter_fun = JSFunction::cast(getter_obj);
      // The original getter is recognized by carrying the hidden stack
      // trace symbol as a hidden property on itself; a user-replaced
      // getter fails this check and the object is dropped from the list.
      String* key = isolate->heap()->hidden_stack_trace_symbol();
      if (key != getter_fun->GetHiddenProperty(key)) continue;
    }

    budget--;
    // Fresh per-iteration scope so handles created by the getter call do
    // not accumulate in the outer scope across the whole loop.
    HandleScope scope(isolate);
    bool has_exception = false;
#ifdef DEBUG
    Handle<Map> map(HeapObject::cast(object)->map(), isolate);
#endif
    Handle<Object> object_handle(object, isolate);
    Handle<Object> getter_handle(getter_fun, isolate);
    // Calling the getter is what performs the deferred formatting.
    Execution::Call(getter_handle, object_handle, 0, NULL, &has_exception);
    // The call must not have changed the object's map (DEBUG-only check).
    ASSERT(*map == HeapObject::cast(*object_handle)->map());
    if (has_exception) {
      // Hit an exception (most likely a stack overflow).
      // Wrap up this pass and retry after another GC.
      isolate->clear_pending_exception();
      // We use the handle since calling the getter might have caused a GC.
      list_[write_index++] = *object_handle;
      budget = 0;
    }
  }
  list_.Rewind(write_index);
  list_.Trim();
  nested_ = false;
}
7375
7376
7377 void ErrorObjectList::RemoveUnmarked(Heap* heap) {
7378 for (int i = 0; i < list_.length(); i++) {
7379 HeapObject* object = HeapObject::cast(list_[i]);
7380 if (!Marking::MarkBitFrom(object).Get()) {
7381 list_[i] = heap->the_hole_value();
7382 }
7383 }
7384 }
7385
7386
// Release the backing store of the deferred-error list.  Called from
// Heap::TearDown during heap shutdown.
void ErrorObjectList::TearDown() {
  list_.Free();
}
7390
7391
7392 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { 7264 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
7393 chunk->set_next_chunk(chunks_queued_for_free_); 7265 chunk->set_next_chunk(chunks_queued_for_free_);
7394 chunks_queued_for_free_ = chunk; 7266 chunks_queued_for_free_ = chunk;
7395 } 7267 }
7396 7268
7397 7269
7398 void Heap::FreeQueuedChunks() { 7270 void Heap::FreeQueuedChunks() {
7399 if (chunks_queued_for_free_ == NULL) return; 7271 if (chunks_queued_for_free_ == NULL) return;
7400 MemoryChunk* next; 7272 MemoryChunk* next;
7401 MemoryChunk* chunk; 7273 MemoryChunk* chunk;
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
7510 static_cast<int>(object_sizes_last_time_[index])); 7382 static_cast<int>(object_sizes_last_time_[index]));
7511 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) 7383 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
7512 #undef ADJUST_LAST_TIME_OBJECT_COUNT 7384 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7513 7385
7514 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); 7386 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
7515 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); 7387 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
7516 ClearObjectStats(); 7388 ClearObjectStats();
7517 } 7389 }
7518 7390
7519 } } // namespace v8::internal 7391 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698