| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 72 // a multiple of Page::kPageSize. | 72 // a multiple of Page::kPageSize. |
| 73 reserved_semispace_size_(8 * (kPointerSize / 4) * MB), | 73 reserved_semispace_size_(8 * (kPointerSize / 4) * MB), |
| 74 max_semispace_size_(8 * (kPointerSize / 4) * MB), | 74 max_semispace_size_(8 * (kPointerSize / 4) * MB), |
| 75 initial_semispace_size_(Page::kPageSize), | 75 initial_semispace_size_(Page::kPageSize), |
| 76 max_old_generation_size_(700ul * (kPointerSize / 4) * MB), | 76 max_old_generation_size_(700ul * (kPointerSize / 4) * MB), |
| 77 max_executable_size_(256ul * (kPointerSize / 4) * MB), | 77 max_executable_size_(256ul * (kPointerSize / 4) * MB), |
| 78 // Variables set based on semispace_size_ and old_generation_size_ in | 78 // Variables set based on semispace_size_ and old_generation_size_ in |
| 79 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_) | 79 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_) |
| 80 // Will be 4 * reserved_semispace_size_ to ensure that young | 80 // Will be 4 * reserved_semispace_size_ to ensure that young |
| 81 // generation can be aligned to its size. | 81 // generation can be aligned to its size. |
| 82 maximum_committed_(0), |
| 82 survived_since_last_expansion_(0), | 83 survived_since_last_expansion_(0), |
| 83 sweep_generation_(0), | 84 sweep_generation_(0), |
| 84 always_allocate_scope_depth_(0), | 85 always_allocate_scope_depth_(0), |
| 85 linear_allocation_scope_depth_(0), | 86 linear_allocation_scope_depth_(0), |
| 86 contexts_disposed_(0), | 87 contexts_disposed_(0), |
| 87 global_ic_age_(0), | 88 global_ic_age_(0), |
| 88 flush_monomorphic_ics_(false), | 89 flush_monomorphic_ics_(false), |
| 89 allocation_mementos_found_(0), | 90 allocation_mementos_found_(0), |
| 90 scan_on_scavenge_pages_(0), | 91 scan_on_scavenge_pages_(0), |
| 91 new_space_(this), | 92 new_space_(this), |
| (...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 225 } | 226 } |
| 226 | 227 |
| 227 | 228 |
| 228 intptr_t Heap::CommittedMemoryExecutable() { | 229 intptr_t Heap::CommittedMemoryExecutable() { |
| 229 if (!HasBeenSetUp()) return 0; | 230 if (!HasBeenSetUp()) return 0; |
| 230 | 231 |
| 231 return isolate()->memory_allocator()->SizeExecutable(); | 232 return isolate()->memory_allocator()->SizeExecutable(); |
| 232 } | 233 } |
| 233 | 234 |
| 234 | 235 |
| 236 void Heap::UpdateMaximumCommitted() { |
| 237 if (!HasBeenSetUp()) return; |
| 238 |
| 239 intptr_t current_committed_memory = CommittedMemory(); |
| 240 if (current_committed_memory > maximum_committed_) { |
| 241 maximum_committed_ = current_committed_memory; |
| 242 } |
| 243 } |
| 244 |
| 245 |
| 235 intptr_t Heap::Available() { | 246 intptr_t Heap::Available() { |
| 236 if (!HasBeenSetUp()) return 0; | 247 if (!HasBeenSetUp()) return 0; |
| 237 | 248 |
| 238 return new_space_.Available() + | 249 return new_space_.Available() + |
| 239 old_pointer_space_->Available() + | 250 old_pointer_space_->Available() + |
| 240 old_data_space_->Available() + | 251 old_data_space_->Available() + |
| 241 code_space_->Available() + | 252 code_space_->Available() + |
| 242 map_space_->Available() + | 253 map_space_->Available() + |
| 243 cell_space_->Available() + | 254 cell_space_->Available() + |
| 244 property_cell_space_->Available(); | 255 property_cell_space_->Available(); |
| (...skipping 189 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 434 mark_compact_collector()->EnableCodeFlushing(true); | 445 mark_compact_collector()->EnableCodeFlushing(true); |
| 435 } | 446 } |
| 436 | 447 |
| 437 #ifdef VERIFY_HEAP | 448 #ifdef VERIFY_HEAP |
| 438 if (FLAG_verify_heap) { | 449 if (FLAG_verify_heap) { |
| 439 Verify(); | 450 Verify(); |
| 440 } | 451 } |
| 441 #endif | 452 #endif |
| 442 } | 453 } |
| 443 | 454 |
| 455 UpdateMaximumCommitted(); |
| 456 |
| 444 #ifdef DEBUG | 457 #ifdef DEBUG |
| 445 ASSERT(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); | 458 ASSERT(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); |
| 446 | 459 |
| 447 if (FLAG_gc_verbose) Print(); | 460 if (FLAG_gc_verbose) Print(); |
| 448 | 461 |
| 449 ReportStatisticsBeforeGC(); | 462 ReportStatisticsBeforeGC(); |
| 450 #endif // DEBUG | 463 #endif // DEBUG |
| 451 | 464 |
| 452 store_buffer()->GCPrologue(); | 465 store_buffer()->GCPrologue(); |
| 453 | 466 |
| 454 if (FLAG_concurrent_osr) { | 467 if (FLAG_concurrent_osr) { |
| 455 isolate()->optimizing_compiler_thread()->AgeBufferedOsrJobs(); | 468 isolate()->optimizing_compiler_thread()->AgeBufferedOsrJobs(); |
| 456 } | 469 } |
| 457 } | 470 } |
| 458 | 471 |
| 459 | 472 |
| 460 intptr_t Heap::SizeOfObjects() { | 473 intptr_t Heap::SizeOfObjects() { |
| 461 intptr_t total = 0; | 474 intptr_t total = 0; |
| 462 AllSpaces spaces(this); | 475 AllSpaces spaces(this); |
| 463 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) { | 476 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) { |
| 464 total += space->SizeOfObjects(); | 477 total += space->SizeOfObjects(); |
| 465 } | 478 } |
| 466 return total; | 479 return total; |
| 467 } | 480 } |
| 468 | 481 |
| 469 | 482 |
| 483 void Heap::ClearAllICsByKind(Code::Kind kind) { |
| 484 HeapObjectIterator it(code_space()); |
| 485 |
| 486 for (Object* object = it.Next(); object != NULL; object = it.Next()) { |
| 487 Code* code = Code::cast(object); |
| 488 Code::Kind current_kind = code->kind(); |
| 489 if (current_kind == Code::FUNCTION || |
| 490 current_kind == Code::OPTIMIZED_FUNCTION) { |
| 491 code->ClearInlineCaches(kind); |
| 492 } |
| 493 } |
| 494 } |
| 495 |
| 496 |
| 470 void Heap::RepairFreeListsAfterBoot() { | 497 void Heap::RepairFreeListsAfterBoot() { |
| 471 PagedSpaces spaces(this); | 498 PagedSpaces spaces(this); |
| 472 for (PagedSpace* space = spaces.next(); | 499 for (PagedSpace* space = spaces.next(); |
| 473 space != NULL; | 500 space != NULL; |
| 474 space = spaces.next()) { | 501 space = spaces.next()) { |
| 475 space->RepairFreeListsAfterBoot(); | 502 space->RepairFreeListsAfterBoot(); |
| 476 } | 503 } |
| 477 } | 504 } |
| 478 | 505 |
| 479 | 506 |
| (...skipping 19 matching lines...) Expand all Loading... |
| 499 if (FLAG_gc_verbose) Print(); | 526 if (FLAG_gc_verbose) Print(); |
| 500 if (FLAG_code_stats) ReportCodeStatistics("After GC"); | 527 if (FLAG_code_stats) ReportCodeStatistics("After GC"); |
| 501 #endif | 528 #endif |
| 502 if (FLAG_deopt_every_n_garbage_collections > 0) { | 529 if (FLAG_deopt_every_n_garbage_collections > 0) { |
| 503 if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) { | 530 if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) { |
| 504 Deoptimizer::DeoptimizeAll(isolate()); | 531 Deoptimizer::DeoptimizeAll(isolate()); |
| 505 gcs_since_last_deopt_ = 0; | 532 gcs_since_last_deopt_ = 0; |
| 506 } | 533 } |
| 507 } | 534 } |
| 508 | 535 |
| 536 UpdateMaximumCommitted(); |
| 537 |
| 509 isolate_->counters()->alive_after_last_gc()->Set( | 538 isolate_->counters()->alive_after_last_gc()->Set( |
| 510 static_cast<int>(SizeOfObjects())); | 539 static_cast<int>(SizeOfObjects())); |
| 511 | 540 |
| 512 isolate_->counters()->string_table_capacity()->Set( | 541 isolate_->counters()->string_table_capacity()->Set( |
| 513 string_table()->Capacity()); | 542 string_table()->Capacity()); |
| 514 isolate_->counters()->number_of_symbols()->Set( | 543 isolate_->counters()->number_of_symbols()->Set( |
| 515 string_table()->NumberOfElements()); | 544 string_table()->NumberOfElements()); |
| 516 | 545 |
| 517 if (full_codegen_bytes_generated_ + crankshaft_codegen_bytes_generated_ > 0) { | 546 if (full_codegen_bytes_generated_ + crankshaft_codegen_bytes_generated_ > 0) { |
| 518 isolate_->counters()->codegen_fraction_crankshaft()->AddSample( | 547 isolate_->counters()->codegen_fraction_crankshaft()->AddSample( |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 560 isolate_->counters()->heap_sample_map_space_committed()->AddSample( | 589 isolate_->counters()->heap_sample_map_space_committed()->AddSample( |
| 561 static_cast<int>(map_space()->CommittedMemory() / KB)); | 590 static_cast<int>(map_space()->CommittedMemory() / KB)); |
| 562 isolate_->counters()->heap_sample_cell_space_committed()->AddSample( | 591 isolate_->counters()->heap_sample_cell_space_committed()->AddSample( |
| 563 static_cast<int>(cell_space()->CommittedMemory() / KB)); | 592 static_cast<int>(cell_space()->CommittedMemory() / KB)); |
| 564 isolate_->counters()-> | 593 isolate_->counters()-> |
| 565 heap_sample_property_cell_space_committed()-> | 594 heap_sample_property_cell_space_committed()-> |
| 566 AddSample(static_cast<int>( | 595 AddSample(static_cast<int>( |
| 567 property_cell_space()->CommittedMemory() / KB)); | 596 property_cell_space()->CommittedMemory() / KB)); |
| 568 isolate_->counters()->heap_sample_code_space_committed()->AddSample( | 597 isolate_->counters()->heap_sample_code_space_committed()->AddSample( |
| 569 static_cast<int>(code_space()->CommittedMemory() / KB)); | 598 static_cast<int>(code_space()->CommittedMemory() / KB)); |
| 599 |
| 600 isolate_->counters()->heap_sample_maximum_committed()->AddSample( |
| 601 static_cast<int>(MaximumCommittedMemory() / KB)); |
| 570 } | 602 } |
| 571 | 603 |
| 572 #define UPDATE_COUNTERS_FOR_SPACE(space) \ | 604 #define UPDATE_COUNTERS_FOR_SPACE(space) \ |
| 573 isolate_->counters()->space##_bytes_available()->Set( \ | 605 isolate_->counters()->space##_bytes_available()->Set( \ |
| 574 static_cast<int>(space()->Available())); \ | 606 static_cast<int>(space()->Available())); \ |
| 575 isolate_->counters()->space##_bytes_committed()->Set( \ | 607 isolate_->counters()->space##_bytes_committed()->Set( \ |
| 576 static_cast<int>(space()->CommittedMemory())); \ | 608 static_cast<int>(space()->CommittedMemory())); \ |
| 577 isolate_->counters()->space##_bytes_used()->Set( \ | 609 isolate_->counters()->space##_bytes_used()->Set( \ |
| 578 static_cast<int>(space()->SizeOfObjects())); | 610 static_cast<int>(space()->SizeOfObjects())); |
| 579 #define UPDATE_FRAGMENTATION_FOR_SPACE(space) \ | 611 #define UPDATE_FRAGMENTATION_FOR_SPACE(space) \ |
| (...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 728 return next_gc_likely_to_collect_more; | 760 return next_gc_likely_to_collect_more; |
| 729 } | 761 } |
| 730 | 762 |
| 731 | 763 |
| 732 int Heap::NotifyContextDisposed() { | 764 int Heap::NotifyContextDisposed() { |
| 733 if (FLAG_concurrent_recompilation) { | 765 if (FLAG_concurrent_recompilation) { |
| 734 // Flush the queued recompilation tasks. | 766 // Flush the queued recompilation tasks. |
| 735 isolate()->optimizing_compiler_thread()->Flush(); | 767 isolate()->optimizing_compiler_thread()->Flush(); |
| 736 } | 768 } |
| 737 flush_monomorphic_ics_ = true; | 769 flush_monomorphic_ics_ = true; |
| 770 AgeInlineCaches(); |
| 738 return ++contexts_disposed_; | 771 return ++contexts_disposed_; |
| 739 } | 772 } |
| 740 | 773 |
| 741 | 774 |
| 742 void Heap::PerformScavenge() { | 775 void Heap::PerformScavenge() { |
| 743 GCTracer tracer(this, NULL, NULL); | 776 GCTracer tracer(this, NULL, NULL); |
| 744 if (incremental_marking()->IsStopped()) { | 777 if (incremental_marking()->IsStopped()) { |
| 745 PerformGarbageCollection(SCAVENGER, &tracer); | 778 PerformGarbageCollection(SCAVENGER, &tracer); |
| 746 } else { | 779 } else { |
| 747 PerformGarbageCollection(MARK_COMPACTOR, &tracer); | 780 PerformGarbageCollection(MARK_COMPACTOR, &tracer); |
| (...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 800 Heap* heap, | 833 Heap* heap, |
| 801 AllocationSpace space, | 834 AllocationSpace space, |
| 802 const char* gc_reason = NULL) { | 835 const char* gc_reason = NULL) { |
| 803 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); | 836 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); |
| 804 bool result = heap->CollectGarbage(space, gc_reason); | 837 bool result = heap->CollectGarbage(space, gc_reason); |
| 805 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags); | 838 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags); |
| 806 return result; | 839 return result; |
| 807 } | 840 } |
| 808 | 841 |
| 809 | 842 |
| 810 void Heap::ReserveSpace( | 843 void Heap::ReserveSpace(int *sizes, Address *locations_out) { |
| 811 int *sizes, | |
| 812 Address *locations_out) { | |
| 813 bool gc_performed = true; | 844 bool gc_performed = true; |
| 814 int counter = 0; | 845 int counter = 0; |
| 815 static const int kThreshold = 20; | 846 static const int kThreshold = 20; |
| 816 while (gc_performed && counter++ < kThreshold) { | 847 while (gc_performed && counter++ < kThreshold) { |
| 817 gc_performed = false; | 848 gc_performed = false; |
| 818 ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1); | 849 ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1); |
| 819 for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) { | 850 for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) { |
| 820 if (sizes[space] != 0) { | 851 if (sizes[space] != 0) { |
| 821 MaybeObject* allocation; | 852 MaybeObject* allocation; |
| 822 if (space == NEW_SPACE) { | 853 if (space == NEW_SPACE) { |
| (...skipping 302 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1125 MarkCompactPrologue(); | 1156 MarkCompactPrologue(); |
| 1126 | 1157 |
| 1127 mark_compact_collector_.CollectGarbage(); | 1158 mark_compact_collector_.CollectGarbage(); |
| 1128 | 1159 |
| 1129 LOG(isolate_, ResourceEvent("markcompact", "end")); | 1160 LOG(isolate_, ResourceEvent("markcompact", "end")); |
| 1130 | 1161 |
| 1131 gc_state_ = NOT_IN_GC; | 1162 gc_state_ = NOT_IN_GC; |
| 1132 | 1163 |
| 1133 isolate_->counters()->objs_since_last_full()->Set(0); | 1164 isolate_->counters()->objs_since_last_full()->Set(0); |
| 1134 | 1165 |
| 1135 contexts_disposed_ = 0; | |
| 1136 | |
| 1137 flush_monomorphic_ics_ = false; | 1166 flush_monomorphic_ics_ = false; |
| 1138 } | 1167 } |
| 1139 | 1168 |
| 1140 | 1169 |
| 1141 void Heap::MarkCompactPrologue() { | 1170 void Heap::MarkCompactPrologue() { |
| 1142 // At any old GC clear the keyed lookup cache to enable collection of unused | 1171 // At any old GC clear the keyed lookup cache to enable collection of unused |
| 1143 // maps. | 1172 // maps. |
| 1144 isolate_->keyed_lookup_cache()->Clear(); | 1173 isolate_->keyed_lookup_cache()->Clear(); |
| 1145 isolate_->context_slot_cache()->Clear(); | 1174 isolate_->context_slot_cache()->Clear(); |
| 1146 isolate_->descriptor_lookup_cache()->Clear(); | 1175 isolate_->descriptor_lookup_cache()->Clear(); |
| (...skipping 1282 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2429 MapWord first_word = object->map_word(); | 2458 MapWord first_word = object->map_word(); |
| 2430 SLOW_ASSERT(!first_word.IsForwardingAddress()); | 2459 SLOW_ASSERT(!first_word.IsForwardingAddress()); |
| 2431 Map* map = first_word.ToMap(); | 2460 Map* map = first_word.ToMap(); |
| 2432 map->GetHeap()->DoScavengeObject(map, p, object); | 2461 map->GetHeap()->DoScavengeObject(map, p, object); |
| 2433 } | 2462 } |
| 2434 | 2463 |
| 2435 | 2464 |
| 2436 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, | 2465 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, |
| 2437 int instance_size) { | 2466 int instance_size) { |
| 2438 Object* result; | 2467 Object* result; |
| 2439 MaybeObject* maybe_result = AllocateRawMap(); | 2468 MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); |
| 2440 if (!maybe_result->ToObject(&result)) return maybe_result; | 2469 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2441 | 2470 |
| 2442 // Map::cast cannot be used due to uninitialized map field. | 2471 // Map::cast cannot be used due to uninitialized map field. |
| 2443 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); | 2472 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); |
| 2444 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); | 2473 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); |
| 2445 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); | 2474 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); |
| 2446 reinterpret_cast<Map*>(result)->set_visitor_id( | 2475 reinterpret_cast<Map*>(result)->set_visitor_id( |
| 2447 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); | 2476 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); |
| 2448 reinterpret_cast<Map*>(result)->set_inobject_properties(0); | 2477 reinterpret_cast<Map*>(result)->set_inobject_properties(0); |
| 2449 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0); | 2478 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0); |
| 2450 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); | 2479 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); |
| 2451 reinterpret_cast<Map*>(result)->set_bit_field(0); | 2480 reinterpret_cast<Map*>(result)->set_bit_field(0); |
| 2452 reinterpret_cast<Map*>(result)->set_bit_field2(0); | 2481 reinterpret_cast<Map*>(result)->set_bit_field2(0); |
| 2453 int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) | | 2482 int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) | |
| 2454 Map::OwnsDescriptors::encode(true); | 2483 Map::OwnsDescriptors::encode(true); |
| 2455 reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3); | 2484 reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3); |
| 2456 return result; | 2485 return result; |
| 2457 } | 2486 } |
| 2458 | 2487 |
| 2459 | 2488 |
| 2460 MaybeObject* Heap::AllocateMap(InstanceType instance_type, | 2489 MaybeObject* Heap::AllocateMap(InstanceType instance_type, |
| 2461 int instance_size, | 2490 int instance_size, |
| 2462 ElementsKind elements_kind) { | 2491 ElementsKind elements_kind) { |
| 2463 Object* result; | 2492 Object* result; |
| 2464 MaybeObject* maybe_result = AllocateRawMap(); | 2493 MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); |
| 2465 if (!maybe_result->To(&result)) return maybe_result; | 2494 if (!maybe_result->To(&result)) return maybe_result; |
| 2466 | 2495 |
| 2467 Map* map = reinterpret_cast<Map*>(result); | 2496 Map* map = reinterpret_cast<Map*>(result); |
| 2468 map->set_map_no_write_barrier(meta_map()); | 2497 map->set_map_no_write_barrier(meta_map()); |
| 2469 map->set_instance_type(instance_type); | 2498 map->set_instance_type(instance_type); |
| 2470 map->set_visitor_id( | 2499 map->set_visitor_id( |
| 2471 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); | 2500 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); |
| 2472 map->set_prototype(null_value(), SKIP_WRITE_BARRIER); | 2501 map->set_prototype(null_value(), SKIP_WRITE_BARRIER); |
| 2473 map->set_constructor(null_value(), SKIP_WRITE_BARRIER); | 2502 map->set_constructor(null_value(), SKIP_WRITE_BARRIER); |
| 2474 map->set_instance_size(instance_size); | 2503 map->set_instance_size(instance_size); |
| (...skipping 471 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2946 if (!maybe_result->ToObject(&result)) return maybe_result; | 2975 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2947 } | 2976 } |
| 2948 | 2977 |
| 2949 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); | 2978 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); |
| 2950 HeapNumber::cast(result)->set_value(value); | 2979 HeapNumber::cast(result)->set_value(value); |
| 2951 return result; | 2980 return result; |
| 2952 } | 2981 } |
| 2953 | 2982 |
| 2954 | 2983 |
| 2955 MaybeObject* Heap::AllocateCell(Object* value) { | 2984 MaybeObject* Heap::AllocateCell(Object* value) { |
| 2985 int size = Cell::kSize; |
| 2986 STATIC_ASSERT(Cell::kSize <= Page::kNonCodeObjectAreaSize); |
| 2987 |
| 2956 Object* result; | 2988 Object* result; |
| 2957 { MaybeObject* maybe_result = AllocateRawCell(); | 2989 { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE); |
| 2958 if (!maybe_result->ToObject(&result)) return maybe_result; | 2990 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2959 } | 2991 } |
| 2960 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); | 2992 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); |
| 2961 Cell::cast(result)->set_value(value); | 2993 Cell::cast(result)->set_value(value); |
| 2962 return result; | 2994 return result; |
| 2963 } | 2995 } |
| 2964 | 2996 |
| 2965 | 2997 |
| 2966 MaybeObject* Heap::AllocatePropertyCell() { | 2998 MaybeObject* Heap::AllocatePropertyCell() { |
| 2999 int size = PropertyCell::kSize; |
| 3000 STATIC_ASSERT(PropertyCell::kSize <= Page::kNonCodeObjectAreaSize); |
| 3001 |
| 2967 Object* result; | 3002 Object* result; |
| 2968 MaybeObject* maybe_result = AllocateRawPropertyCell(); | 3003 MaybeObject* maybe_result = |
| 3004 AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE); |
| 2969 if (!maybe_result->ToObject(&result)) return maybe_result; | 3005 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2970 | 3006 |
| 2971 HeapObject::cast(result)->set_map_no_write_barrier( | 3007 HeapObject::cast(result)->set_map_no_write_barrier( |
| 2972 global_property_cell_map()); | 3008 global_property_cell_map()); |
| 2973 PropertyCell* cell = PropertyCell::cast(result); | 3009 PropertyCell* cell = PropertyCell::cast(result); |
| 2974 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), | 3010 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), |
| 2975 SKIP_WRITE_BARRIER); | 3011 SKIP_WRITE_BARRIER); |
| 2976 cell->set_value(the_hole_value()); | 3012 cell->set_value(the_hole_value()); |
| 2977 cell->set_type(Type::None()); | 3013 cell->set_type(Type::None()); |
| 2978 return result; | 3014 return result; |
| (...skipping 289 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3268 if (!maybe_obj->ToObject(&obj)) return false; | 3304 if (!maybe_obj->ToObject(&obj)) return false; |
| 3269 } | 3305 } |
| 3270 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj)); | 3306 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj)); |
| 3271 if (!maybe_obj->ToObject(&obj)) return false; | 3307 if (!maybe_obj->ToObject(&obj)) return false; |
| 3272 } | 3308 } |
| 3273 set_observation_state(JSObject::cast(obj)); | 3309 set_observation_state(JSObject::cast(obj)); |
| 3274 | 3310 |
| 3275 { MaybeObject* maybe_obj = AllocateSymbol(); | 3311 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3276 if (!maybe_obj->ToObject(&obj)) return false; | 3312 if (!maybe_obj->ToObject(&obj)) return false; |
| 3277 } | 3313 } |
| 3314 Symbol::cast(obj)->set_is_private(true); |
| 3278 set_frozen_symbol(Symbol::cast(obj)); | 3315 set_frozen_symbol(Symbol::cast(obj)); |
| 3279 | 3316 |
| 3280 { MaybeObject* maybe_obj = AllocateSymbol(); | 3317 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3281 if (!maybe_obj->ToObject(&obj)) return false; | 3318 if (!maybe_obj->ToObject(&obj)) return false; |
| 3282 } | 3319 } |
| 3320 Symbol::cast(obj)->set_is_private(true); |
| 3283 set_elements_transition_symbol(Symbol::cast(obj)); | 3321 set_elements_transition_symbol(Symbol::cast(obj)); |
| 3284 | 3322 |
| 3285 { MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED); | 3323 { MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED); |
| 3286 if (!maybe_obj->ToObject(&obj)) return false; | 3324 if (!maybe_obj->ToObject(&obj)) return false; |
| 3287 } | 3325 } |
| 3288 SeededNumberDictionary::cast(obj)->set_requires_slow_elements(); | 3326 SeededNumberDictionary::cast(obj)->set_requires_slow_elements(); |
| 3289 set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj)); | 3327 set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj)); |
| 3290 | 3328 |
| 3291 { MaybeObject* maybe_obj = AllocateSymbol(); | 3329 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3292 if (!maybe_obj->ToObject(&obj)) return false; | 3330 if (!maybe_obj->ToObject(&obj)) return false; |
| 3293 } | 3331 } |
| 3332 Symbol::cast(obj)->set_is_private(true); |
| 3294 set_observed_symbol(Symbol::cast(obj)); | 3333 set_observed_symbol(Symbol::cast(obj)); |
| 3295 | 3334 |
| 3296 // Handling of script id generation is in Factory::NewScript. | 3335 // Handling of script id generation is in Factory::NewScript. |
| 3297 set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId)); | 3336 set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId)); |
| 3298 | 3337 |
| 3299 // Initialize keyed lookup cache. | 3338 // Initialize keyed lookup cache. |
| 3300 isolate_->keyed_lookup_cache()->Clear(); | 3339 isolate_->keyed_lookup_cache()->Clear(); |
| 3301 | 3340 |
| 3302 // Initialize context slot cache. | 3341 // Initialize context slot cache. |
| 3303 isolate_->context_slot_cache()->Clear(); | 3342 isolate_->context_slot_cache()->Clear(); |
| (...skipping 762 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4066 buffer[0] = static_cast<uint8_t>(code); | 4105 buffer[0] = static_cast<uint8_t>(code); |
| 4067 Object* result; | 4106 Object* result; |
| 4068 MaybeObject* maybe_result = | 4107 MaybeObject* maybe_result = |
| 4069 InternalizeOneByteString(Vector<const uint8_t>(buffer, 1)); | 4108 InternalizeOneByteString(Vector<const uint8_t>(buffer, 1)); |
| 4070 | 4109 |
| 4071 if (!maybe_result->ToObject(&result)) return maybe_result; | 4110 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4072 single_character_string_cache()->set(code, result); | 4111 single_character_string_cache()->set(code, result); |
| 4073 return result; | 4112 return result; |
| 4074 } | 4113 } |
| 4075 | 4114 |
| 4076 Object* result; | 4115 SeqTwoByteString* result; |
| 4077 { MaybeObject* maybe_result = AllocateRawTwoByteString(1); | 4116 { MaybeObject* maybe_result = AllocateRawTwoByteString(1); |
| 4078 if (!maybe_result->ToObject(&result)) return maybe_result; | 4117 if (!maybe_result->To<SeqTwoByteString>(&result)) return maybe_result; |
| 4079 } | 4118 } |
| 4080 String* answer = String::cast(result); | 4119 result->SeqTwoByteStringSet(0, code); |
| 4081 answer->Set(0, code); | 4120 return result; |
| 4082 return answer; | |
| 4083 } | 4121 } |
| 4084 | 4122 |
| 4085 | 4123 |
| 4086 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { | 4124 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { |
| 4087 if (length < 0 || length > ByteArray::kMaxLength) { | 4125 if (length < 0 || length > ByteArray::kMaxLength) { |
| 4088 return Failure::OutOfMemoryException(0x7); | 4126 return Failure::OutOfMemoryException(0x7); |
| 4089 } | 4127 } |
| 4090 int size = ByteArray::SizeFor(length); | 4128 int size = ByteArray::SizeFor(length); |
| 4091 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 4129 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 4092 Object* result; | 4130 Object* result; |
| (...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4153 int obj_size = Code::SizeFor(body_size); | 4191 int obj_size = Code::SizeFor(body_size); |
| 4154 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); | 4192 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); |
| 4155 MaybeObject* maybe_result; | 4193 MaybeObject* maybe_result; |
| 4156 // Large code objects and code objects which should stay at a fixed address | 4194 // Large code objects and code objects which should stay at a fixed address |
| 4157 // are allocated in large object space. | 4195 // are allocated in large object space. |
| 4158 HeapObject* result; | 4196 HeapObject* result; |
| 4159 bool force_lo_space = obj_size > code_space()->AreaSize(); | 4197 bool force_lo_space = obj_size > code_space()->AreaSize(); |
| 4160 if (force_lo_space) { | 4198 if (force_lo_space) { |
| 4161 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); | 4199 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); |
| 4162 } else { | 4200 } else { |
| 4163 maybe_result = code_space_->AllocateRaw(obj_size); | 4201 maybe_result = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); |
| 4164 } | 4202 } |
| 4165 if (!maybe_result->To<HeapObject>(&result)) return maybe_result; | 4203 if (!maybe_result->To<HeapObject>(&result)) return maybe_result; |
| 4166 | 4204 |
| 4167 if (immovable && !force_lo_space && | 4205 if (immovable && !force_lo_space && |
| 4168 // Objects on the first page of each space are never moved. | 4206 // Objects on the first page of each space are never moved. |
| 4169 !code_space_->FirstPage()->Contains(result->address())) { | 4207 !code_space_->FirstPage()->Contains(result->address())) { |
| 4170 // Discard the first code allocation, which was on a page where it could be | 4208 // Discard the first code allocation, which was on a page where it could be |
| 4171 // moved. | 4209 // moved. |
| 4172 CreateFillerObjectAt(result->address(), obj_size); | 4210 CreateFillerObjectAt(result->address(), obj_size); |
| 4173 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); | 4211 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); |
| 4174 if (!maybe_result->To<HeapObject>(&result)) return maybe_result; | 4212 if (!maybe_result->To<HeapObject>(&result)) return maybe_result; |
| 4175 } | 4213 } |
| 4176 | 4214 |
| 4177 // Initialize the object | 4215 // Initialize the object |
| 4178 result->set_map_no_write_barrier(code_map()); | 4216 result->set_map_no_write_barrier(code_map()); |
| 4179 Code* code = Code::cast(result); | 4217 Code* code = Code::cast(result); |
| 4180 ASSERT(!isolate_->code_range()->exists() || | 4218 ASSERT(!isolate_->code_range()->exists() || |
| 4181 isolate_->code_range()->contains(code->address())); | 4219 isolate_->code_range()->contains(code->address())); |
| 4182 code->set_instruction_size(desc.instr_size); | 4220 code->set_instruction_size(desc.instr_size); |
| 4183 code->set_relocation_info(reloc_info); | 4221 code->set_relocation_info(reloc_info); |
| 4184 code->set_flags(flags); | 4222 code->set_flags(flags); |
| 4185 if (code->is_call_stub() || code->is_keyed_call_stub()) { | 4223 if (code->is_call_stub() || code->is_keyed_call_stub()) { |
| 4186 code->set_check_type(RECEIVER_MAP_CHECK); | 4224 code->set_check_type(RECEIVER_MAP_CHECK); |
| 4187 } | 4225 } |
| 4188 code->set_is_crankshafted(crankshafted); | 4226 code->set_is_crankshafted(crankshafted); |
| 4189 code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER); | 4227 code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER); |
| 4190 code->InitializeTypeFeedbackInfoNoWriteBarrier(undefined_value()); | 4228 code->set_raw_type_feedback_info(undefined_value()); |
| 4191 code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER); | 4229 code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER); |
| 4192 code->set_gc_metadata(Smi::FromInt(0)); | 4230 code->set_gc_metadata(Smi::FromInt(0)); |
| 4193 code->set_ic_age(global_ic_age_); | 4231 code->set_ic_age(global_ic_age_); |
| 4194 code->set_prologue_offset(prologue_offset); | 4232 code->set_prologue_offset(prologue_offset); |
| 4195 if (code->kind() == Code::OPTIMIZED_FUNCTION) { | 4233 if (code->kind() == Code::OPTIMIZED_FUNCTION) { |
| 4196 code->set_marked_for_deoptimization(false); | 4234 code->set_marked_for_deoptimization(false); |
| 4197 } | 4235 } |
| 4198 | 4236 |
| 4199 #ifdef ENABLE_DEBUGGER_SUPPORT | 4237 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 4200 if (code->kind() == Code::FUNCTION) { | 4238 if (code->kind() == Code::FUNCTION) { |
| (...skipping 23 matching lines...) Expand all Loading... |
| 4224 } | 4262 } |
| 4225 | 4263 |
| 4226 | 4264 |
| 4227 MaybeObject* Heap::CopyCode(Code* code) { | 4265 MaybeObject* Heap::CopyCode(Code* code) { |
| 4228 // Allocate an object the same size as the code object. | 4266 // Allocate an object the same size as the code object. |
| 4229 int obj_size = code->Size(); | 4267 int obj_size = code->Size(); |
| 4230 MaybeObject* maybe_result; | 4268 MaybeObject* maybe_result; |
| 4231 if (obj_size > code_space()->AreaSize()) { | 4269 if (obj_size > code_space()->AreaSize()) { |
| 4232 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); | 4270 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); |
| 4233 } else { | 4271 } else { |
| 4234 maybe_result = code_space_->AllocateRaw(obj_size); | 4272 maybe_result = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); |
| 4235 } | 4273 } |
| 4236 | 4274 |
| 4237 Object* result; | 4275 Object* result; |
| 4238 if (!maybe_result->ToObject(&result)) return maybe_result; | 4276 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4239 | 4277 |
| 4240 // Copy code object. | 4278 // Copy code object. |
| 4241 Address old_addr = code->address(); | 4279 Address old_addr = code->address(); |
| 4242 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); | 4280 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); |
| 4243 CopyBlock(new_addr, old_addr, obj_size); | 4281 CopyBlock(new_addr, old_addr, obj_size); |
| 4244 // Relocate the copy. | 4282 // Relocate the copy. |
| (...skipping 22 matching lines...) Expand all Loading... |
| 4267 | 4305 |
| 4268 Address old_addr = code->address(); | 4306 Address old_addr = code->address(); |
| 4269 | 4307 |
| 4270 size_t relocation_offset = | 4308 size_t relocation_offset = |
| 4271 static_cast<size_t>(code->instruction_end() - old_addr); | 4309 static_cast<size_t>(code->instruction_end() - old_addr); |
| 4272 | 4310 |
| 4273 MaybeObject* maybe_result; | 4311 MaybeObject* maybe_result; |
| 4274 if (new_obj_size > code_space()->AreaSize()) { | 4312 if (new_obj_size > code_space()->AreaSize()) { |
| 4275 maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); | 4313 maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); |
| 4276 } else { | 4314 } else { |
| 4277 maybe_result = code_space_->AllocateRaw(new_obj_size); | 4315 maybe_result = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE); |
| 4278 } | 4316 } |
| 4279 | 4317 |
| 4280 Object* result; | 4318 Object* result; |
| 4281 if (!maybe_result->ToObject(&result)) return maybe_result; | 4319 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4282 | 4320 |
| 4283 // Copy code object. | 4321 // Copy code object. |
| 4284 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); | 4322 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); |
| 4285 | 4323 |
| 4286 // Copy header and instructions. | 4324 // Copy header and instructions. |
| 4287 CopyBytes(new_addr, old_addr, relocation_offset); | 4325 CopyBytes(new_addr, old_addr, relocation_offset); |
| (...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4356 function->initialize_elements(); | 4394 function->initialize_elements(); |
| 4357 function->set_shared(shared); | 4395 function->set_shared(shared); |
| 4358 function->set_code(shared->code()); | 4396 function->set_code(shared->code()); |
| 4359 function->set_prototype_or_initial_map(prototype); | 4397 function->set_prototype_or_initial_map(prototype); |
| 4360 function->set_context(undefined_value()); | 4398 function->set_context(undefined_value()); |
| 4361 function->set_literals_or_bindings(empty_fixed_array()); | 4399 function->set_literals_or_bindings(empty_fixed_array()); |
| 4362 function->set_next_function_link(undefined_value()); | 4400 function->set_next_function_link(undefined_value()); |
| 4363 } | 4401 } |
| 4364 | 4402 |
| 4365 | 4403 |
| 4366 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) { | |
| 4367 // Make sure to use globals from the function's context, since the function | |
| 4368 // can be from a different context. | |
| 4369 Context* native_context = function->context()->native_context(); | |
| 4370 Map* new_map; | |
| 4371 if (function->shared()->is_generator()) { | |
| 4372 // Generator prototypes can share maps since they don't have "constructor" | |
| 4373 // properties. | |
| 4374 new_map = native_context->generator_object_prototype_map(); | |
| 4375 } else { | |
| 4376 // Each function prototype gets a fresh map to avoid unwanted sharing of | |
| 4377 // maps between prototypes of different constructors. | |
| 4378 JSFunction* object_function = native_context->object_function(); | |
| 4379 ASSERT(object_function->has_initial_map()); | |
| 4380 MaybeObject* maybe_map = object_function->initial_map()->Copy(); | |
| 4381 if (!maybe_map->To(&new_map)) return maybe_map; | |
| 4382 } | |
| 4383 | |
| 4384 Object* prototype; | |
| 4385 MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map); | |
| 4386 if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype; | |
| 4387 | |
| 4388 if (!function->shared()->is_generator()) { | |
| 4389 MaybeObject* maybe_failure = | |
| 4390 JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributesTrampoline( | |
| 4391 constructor_string(), function, DONT_ENUM); | |
| 4392 if (maybe_failure->IsFailure()) return maybe_failure; | |
| 4393 } | |
| 4394 | |
| 4395 return prototype; | |
| 4396 } | |
| 4397 | |
| 4398 | |
| 4399 MaybeObject* Heap::AllocateFunction(Map* function_map, | 4404 MaybeObject* Heap::AllocateFunction(Map* function_map, |
| 4400 SharedFunctionInfo* shared, | 4405 SharedFunctionInfo* shared, |
| 4401 Object* prototype, | 4406 Object* prototype, |
| 4402 PretenureFlag pretenure) { | 4407 PretenureFlag pretenure) { |
| 4403 AllocationSpace space = | 4408 AllocationSpace space = |
| 4404 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; | 4409 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; |
| 4405 Object* result; | 4410 Object* result; |
| 4406 { MaybeObject* maybe_result = Allocate(function_map, space); | 4411 { MaybeObject* maybe_result = Allocate(function_map, space); |
| 4407 if (!maybe_result->ToObject(&result)) return maybe_result; | 4412 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4408 } | 4413 } |
| (...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4460 } | 4465 } |
| 4461 | 4466 |
| 4462 // Check the state of the object | 4467 // Check the state of the object |
| 4463 ASSERT(JSObject::cast(result)->HasFastProperties()); | 4468 ASSERT(JSObject::cast(result)->HasFastProperties()); |
| 4464 ASSERT(JSObject::cast(result)->HasFastObjectElements()); | 4469 ASSERT(JSObject::cast(result)->HasFastObjectElements()); |
| 4465 | 4470 |
| 4466 return result; | 4471 return result; |
| 4467 } | 4472 } |
| 4468 | 4473 |
| 4469 | 4474 |
| 4470 MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) { | |
| 4471 ASSERT(!fun->has_initial_map()); | |
| 4472 | |
| 4473 // First create a new map with the size and number of in-object properties | |
| 4474 // suggested by the function. | |
| 4475 InstanceType instance_type; | |
| 4476 int instance_size; | |
| 4477 int in_object_properties; | |
| 4478 if (fun->shared()->is_generator()) { | |
| 4479 instance_type = JS_GENERATOR_OBJECT_TYPE; | |
| 4480 instance_size = JSGeneratorObject::kSize; | |
| 4481 in_object_properties = 0; | |
| 4482 } else { | |
| 4483 instance_type = JS_OBJECT_TYPE; | |
| 4484 instance_size = fun->shared()->CalculateInstanceSize(); | |
| 4485 in_object_properties = fun->shared()->CalculateInObjectProperties(); | |
| 4486 } | |
| 4487 Map* map; | |
| 4488 MaybeObject* maybe_map = AllocateMap(instance_type, instance_size); | |
| 4489 if (!maybe_map->To(&map)) return maybe_map; | |
| 4490 | |
| 4491 // Fetch or allocate prototype. | |
| 4492 Object* prototype; | |
| 4493 if (fun->has_instance_prototype()) { | |
| 4494 prototype = fun->instance_prototype(); | |
| 4495 } else { | |
| 4496 MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun); | |
| 4497 if (!maybe_prototype->To(&prototype)) return maybe_prototype; | |
| 4498 } | |
| 4499 map->set_inobject_properties(in_object_properties); | |
| 4500 map->set_unused_property_fields(in_object_properties); | |
| 4501 map->set_prototype(prototype); | |
| 4502 ASSERT(map->has_fast_object_elements()); | |
| 4503 | |
| 4504 if (!fun->shared()->is_generator()) { | |
| 4505 fun->shared()->StartInobjectSlackTracking(map); | |
| 4506 } | |
| 4507 | |
| 4508 return map; | |
| 4509 } | |
| 4510 | |
| 4511 | |
| 4512 void Heap::InitializeJSObjectFromMap(JSObject* obj, | 4475 void Heap::InitializeJSObjectFromMap(JSObject* obj, |
| 4513 FixedArray* properties, | 4476 FixedArray* properties, |
| 4514 Map* map) { | 4477 Map* map) { |
| 4515 obj->set_properties(properties); | 4478 obj->set_properties(properties); |
| 4516 obj->initialize_elements(); | 4479 obj->initialize_elements(); |
| 4517 // TODO(1240798): Initialize the object's body using valid initial values | 4480 // TODO(1240798): Initialize the object's body using valid initial values |
| 4518 // according to the object's initial map. For example, if the map's | 4481 // according to the object's initial map. For example, if the map's |
| 4519 // instance type is JS_ARRAY_TYPE, the length field should be initialized | 4482 // instance type is JS_ARRAY_TYPE, the length field should be initialized |
| 4520 // to a number (e.g. Smi::FromInt(0)) and the elements initialized to a | 4483 // to a number (e.g. Smi::FromInt(0)) and the elements initialized to a |
| 4521 // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object | 4484 // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object |
| (...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4608 | 4571 |
| 4609 // Initialize the JSObject. | 4572 // Initialize the JSObject. |
| 4610 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); | 4573 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); |
| 4611 ASSERT(JSObject::cast(obj)->HasFastElements()); | 4574 ASSERT(JSObject::cast(obj)->HasFastElements()); |
| 4612 return obj; | 4575 return obj; |
| 4613 } | 4576 } |
| 4614 | 4577 |
| 4615 | 4578 |
| 4616 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, | 4579 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, |
| 4617 PretenureFlag pretenure) { | 4580 PretenureFlag pretenure) { |
| 4618 // Allocate the initial map if absent. | 4581 ASSERT(constructor->has_initial_map()); |
| 4619 if (!constructor->has_initial_map()) { | |
| 4620 Object* initial_map; | |
| 4621 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor); | |
| 4622 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map; | |
| 4623 } | |
| 4624 constructor->set_initial_map(Map::cast(initial_map)); | |
| 4625 Map::cast(initial_map)->set_constructor(constructor); | |
| 4626 } | |
| 4627 // Allocate the object based on the constructor's initial map. | 4582 // Allocate the object based on the constructor's initial map. |
| 4628 MaybeObject* result = AllocateJSObjectFromMap( | 4583 MaybeObject* result = AllocateJSObjectFromMap( |
| 4629 constructor->initial_map(), pretenure); | 4584 constructor->initial_map(), pretenure); |
| 4630 #ifdef DEBUG | 4585 #ifdef DEBUG |
| 4631 // Make sure result is NOT a global object if valid. | 4586 // Make sure result is NOT a global object if valid. |
| 4632 Object* non_failure; | 4587 Object* non_failure; |
| 4633 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); | 4588 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); |
| 4634 #endif | 4589 #endif |
| 4635 return result; | 4590 return result; |
| 4636 } | 4591 } |
| 4637 | 4592 |
| 4638 | 4593 |
| 4639 MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor, | 4594 MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor, |
| 4640 Handle<AllocationSite> allocation_site) { | 4595 Handle<AllocationSite> allocation_site) { |
| 4641 // Allocate the initial map if absent. | 4596 ASSERT(constructor->has_initial_map()); |
| 4642 if (!constructor->has_initial_map()) { | |
| 4643 Object* initial_map; | |
| 4644 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor); | |
| 4645 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map; | |
| 4646 } | |
| 4647 constructor->set_initial_map(Map::cast(initial_map)); | |
| 4648 Map::cast(initial_map)->set_constructor(constructor); | |
| 4649 } | |
| 4650 // Allocate the object based on the constructor's initial map, or the payload | 4597 // Allocate the object based on the constructor's initial map, or the payload |
| 4651 // advice | 4598 // advice |
| 4652 Map* initial_map = constructor->initial_map(); | 4599 Map* initial_map = constructor->initial_map(); |
| 4653 | 4600 |
| 4654 Smi* smi = Smi::cast(allocation_site->transition_info()); | 4601 Smi* smi = Smi::cast(allocation_site->transition_info()); |
| 4655 ElementsKind to_kind = static_cast<ElementsKind>(smi->value()); | 4602 ElementsKind to_kind = static_cast<ElementsKind>(smi->value()); |
| 4656 AllocationSiteMode mode = TRACK_ALLOCATION_SITE; | 4603 AllocationSiteMode mode = TRACK_ALLOCATION_SITE; |
| 4657 if (to_kind != initial_map->elements_kind()) { | 4604 if (to_kind != initial_map->elements_kind()) { |
| 4658 MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind); | 4605 MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind); |
| 4659 if (!maybe_new_map->To(&initial_map)) return maybe_new_map; | 4606 if (!maybe_new_map->To(&initial_map)) return maybe_new_map; |
| (...skipping 11 matching lines...) Expand all Loading... |
| 4671 } | 4618 } |
| 4672 #ifdef DEBUG | 4619 #ifdef DEBUG |
| 4673 // Make sure result is NOT a global object if valid. | 4620 // Make sure result is NOT a global object if valid. |
| 4674 Object* non_failure; | 4621 Object* non_failure; |
| 4675 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); | 4622 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); |
| 4676 #endif | 4623 #endif |
| 4677 return result; | 4624 return result; |
| 4678 } | 4625 } |
| 4679 | 4626 |
| 4680 | 4627 |
| 4681 MaybeObject* Heap::AllocateJSGeneratorObject(JSFunction *function) { | |
| 4682 ASSERT(function->shared()->is_generator()); | |
| 4683 Map *map; | |
| 4684 if (function->has_initial_map()) { | |
| 4685 map = function->initial_map(); | |
| 4686 } else { | |
| 4687 // Allocate the initial map if absent. | |
| 4688 MaybeObject* maybe_map = AllocateInitialMap(function); | |
| 4689 if (!maybe_map->To(&map)) return maybe_map; | |
| 4690 function->set_initial_map(map); | |
| 4691 map->set_constructor(function); | |
| 4692 } | |
| 4693 ASSERT(map->instance_type() == JS_GENERATOR_OBJECT_TYPE); | |
| 4694 return AllocateJSObjectFromMap(map); | |
| 4695 } | |
| 4696 | |
| 4697 | |
| 4698 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) { | 4628 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) { |
| 4699 // Allocate a fresh map. Modules do not have a prototype. | 4629 // Allocate a fresh map. Modules do not have a prototype. |
| 4700 Map* map; | 4630 Map* map; |
| 4701 MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize); | 4631 MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize); |
| 4702 if (!maybe_map->To(&map)) return maybe_map; | 4632 if (!maybe_map->To(&map)) return maybe_map; |
| 4703 // Allocate the object based on the map. | 4633 // Allocate the object based on the map. |
| 4704 JSModule* module; | 4634 JSModule* module; |
| 4705 MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED); | 4635 MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED); |
| 4706 if (!maybe_module->To(&module)) return maybe_module; | 4636 if (!maybe_module->To(&module)) return maybe_module; |
| 4707 module->set_context(context); | 4637 module->set_context(context); |
| (...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4887 // Update write barrier for all fields that lie beyond the header. | 4817 // Update write barrier for all fields that lie beyond the header. |
| 4888 RecordWrites(clone_address, | 4818 RecordWrites(clone_address, |
| 4889 JSObject::kHeaderSize, | 4819 JSObject::kHeaderSize, |
| 4890 (object_size - JSObject::kHeaderSize) / kPointerSize); | 4820 (object_size - JSObject::kHeaderSize) / kPointerSize); |
| 4891 } else { | 4821 } else { |
| 4892 wb_mode = SKIP_WRITE_BARRIER; | 4822 wb_mode = SKIP_WRITE_BARRIER; |
| 4893 | 4823 |
| 4894 { int adjusted_object_size = site != NULL | 4824 { int adjusted_object_size = site != NULL |
| 4895 ? object_size + AllocationMemento::kSize | 4825 ? object_size + AllocationMemento::kSize |
| 4896 : object_size; | 4826 : object_size; |
| 4897 MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); | 4827 MaybeObject* maybe_clone = |
| 4828 AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE); |
| 4898 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 4829 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
| 4899 } | 4830 } |
| 4900 SLOW_ASSERT(InNewSpace(clone)); | 4831 SLOW_ASSERT(InNewSpace(clone)); |
| 4901 // Since we know the clone is allocated in new space, we can copy | 4832 // Since we know the clone is allocated in new space, we can copy |
| 4902 // the contents without worrying about updating the write barrier. | 4833 // the contents without worrying about updating the write barrier. |
| 4903 CopyBlock(HeapObject::cast(clone)->address(), | 4834 CopyBlock(HeapObject::cast(clone)->address(), |
| 4904 source->address(), | 4835 source->address(), |
| 4905 object_size); | 4836 object_size); |
| 4906 | 4837 |
| 4907 if (site != NULL) { | 4838 if (site != NULL) { |
| (...skipping 673 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5581 int attempts = 0; | 5512 int attempts = 0; |
| 5582 do { | 5513 do { |
| 5583 hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask; | 5514 hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask; |
| 5584 attempts++; | 5515 attempts++; |
| 5585 } while (hash == 0 && attempts < 30); | 5516 } while (hash == 0 && attempts < 30); |
| 5586 if (hash == 0) hash = 1; // never return 0 | 5517 if (hash == 0) hash = 1; // never return 0 |
| 5587 | 5518 |
| 5588 Symbol::cast(result)->set_hash_field( | 5519 Symbol::cast(result)->set_hash_field( |
| 5589 Name::kIsNotArrayIndexMask | (hash << Name::kHashShift)); | 5520 Name::kIsNotArrayIndexMask | (hash << Name::kHashShift)); |
| 5590 Symbol::cast(result)->set_name(undefined_value()); | 5521 Symbol::cast(result)->set_name(undefined_value()); |
| 5522 Symbol::cast(result)->set_flags(Smi::FromInt(0)); |
| 5591 | 5523 |
| 5592 ASSERT(result->IsSymbol()); | 5524 ASSERT(!Symbol::cast(result)->is_private()); |
| 5593 return result; | 5525 return result; |
| 5594 } | 5526 } |
| 5595 | 5527 |
| 5596 | 5528 |
| 5529 MaybeObject* Heap::AllocatePrivateSymbol() { |
| 5530 MaybeObject* maybe = AllocateSymbol(); |
| 5531 Symbol* symbol; |
| 5532 if (!maybe->To(&symbol)) return maybe; |
| 5533 symbol->set_is_private(true); |
| 5534 return symbol; |
| 5535 } |
| 5536 |
| 5537 |
| 5597 MaybeObject* Heap::AllocateNativeContext() { | 5538 MaybeObject* Heap::AllocateNativeContext() { |
| 5598 Object* result; | 5539 Object* result; |
| 5599 { MaybeObject* maybe_result = | 5540 { MaybeObject* maybe_result = |
| 5600 AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS); | 5541 AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS); |
| 5601 if (!maybe_result->ToObject(&result)) return maybe_result; | 5542 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5602 } | 5543 } |
| 5603 Context* context = reinterpret_cast<Context*>(result); | 5544 Context* context = reinterpret_cast<Context*>(result); |
| 5604 context->set_map_no_write_barrier(native_context_map()); | 5545 context->set_map_no_write_barrier(native_context_map()); |
| 5605 context->set_js_array_maps(undefined_value()); | 5546 context->set_js_array_maps(undefined_value()); |
| 5606 ASSERT(context->IsNativeContext()); | 5547 ASSERT(context->IsNativeContext()); |
| (...skipping 197 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5804 // Minimal hint that allows to do full GC. | 5745 // Minimal hint that allows to do full GC. |
| 5805 const int kMinHintForFullGC = 100; | 5746 const int kMinHintForFullGC = 100; |
| 5806 intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4; | 5747 intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4; |
| 5807 // The size factor is in range [5..250]. The numbers here are chosen from | 5748 // The size factor is in range [5..250]. The numbers here are chosen from |
| 5808 // experiments. If you change them, make sure to test with | 5749 // experiments. If you change them, make sure to test with |
| 5809 // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.* | 5750 // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.* |
| 5810 intptr_t step_size = | 5751 intptr_t step_size = |
| 5811 size_factor * IncrementalMarking::kAllocatedThreshold; | 5752 size_factor * IncrementalMarking::kAllocatedThreshold; |
| 5812 | 5753 |
| 5813 if (contexts_disposed_ > 0) { | 5754 if (contexts_disposed_ > 0) { |
| 5814 if (hint >= kMaxHint) { | 5755 contexts_disposed_ = 0; |
| 5815 // The embedder is requesting a lot of GC work after context disposal, | |
| 5816 // we age inline caches so that they don't keep objects from | |
| 5817 // the old context alive. | |
| 5818 AgeInlineCaches(); | |
| 5819 } | |
| 5820 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000); | 5756 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000); |
| 5821 if (hint >= mark_sweep_time && !FLAG_expose_gc && | 5757 if (hint >= mark_sweep_time && !FLAG_expose_gc && |
| 5822 incremental_marking()->IsStopped()) { | 5758 incremental_marking()->IsStopped()) { |
| 5823 HistogramTimerScope scope(isolate_->counters()->gc_context()); | 5759 HistogramTimerScope scope(isolate_->counters()->gc_context()); |
| 5824 CollectAllGarbage(kReduceMemoryFootprintMask, | 5760 CollectAllGarbage(kReduceMemoryFootprintMask, |
| 5825 "idle notification: contexts disposed"); | 5761 "idle notification: contexts disposed"); |
| 5826 } else { | 5762 } else { |
| 5827 AdvanceIdleIncrementalMarking(step_size); | 5763 AdvanceIdleIncrementalMarking(step_size); |
| 5828 contexts_disposed_ = 0; | |
| 5829 } | 5764 } |
| 5765 |
| 5830 // After context disposal there is likely a lot of garbage remaining, reset | 5766 // After context disposal there is likely a lot of garbage remaining, reset |
| 5831 // the idle notification counters in order to trigger more incremental GCs | 5767 // the idle notification counters in order to trigger more incremental GCs |
| 5832 // on subsequent idle notifications. | 5768 // on subsequent idle notifications. |
| 5833 StartIdleRound(); | 5769 StartIdleRound(); |
| 5834 return false; | 5770 return false; |
| 5835 } | 5771 } |
| 5836 | 5772 |
| 5837 if (!FLAG_incremental_marking || FLAG_expose_gc || Serializer::enabled()) { | 5773 if (!FLAG_incremental_marking || FLAG_expose_gc || Serializer::enabled()) { |
| 5838 return IdleGlobalGC(); | 5774 return IdleGlobalGC(); |
| 5839 } | 5775 } |
| (...skipping 958 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6798 } | 6734 } |
| 6799 | 6735 |
| 6800 | 6736 |
| 6801 void Heap::TearDown() { | 6737 void Heap::TearDown() { |
| 6802 #ifdef VERIFY_HEAP | 6738 #ifdef VERIFY_HEAP |
| 6803 if (FLAG_verify_heap) { | 6739 if (FLAG_verify_heap) { |
| 6804 Verify(); | 6740 Verify(); |
| 6805 } | 6741 } |
| 6806 #endif | 6742 #endif |
| 6807 | 6743 |
| 6744 UpdateMaximumCommitted(); |
| 6745 |
| 6808 if (FLAG_print_cumulative_gc_stat) { | 6746 if (FLAG_print_cumulative_gc_stat) { |
| 6809 PrintF("\n"); | 6747 PrintF("\n"); |
| 6810 PrintF("gc_count=%d ", gc_count_); | 6748 PrintF("gc_count=%d ", gc_count_); |
| 6811 PrintF("mark_sweep_count=%d ", ms_count_); | 6749 PrintF("mark_sweep_count=%d ", ms_count_); |
| 6812 PrintF("max_gc_pause=%.1f ", get_max_gc_pause()); | 6750 PrintF("max_gc_pause=%.1f ", get_max_gc_pause()); |
| 6813 PrintF("total_gc_time=%.1f ", total_gc_time_ms_); | 6751 PrintF("total_gc_time=%.1f ", total_gc_time_ms_); |
| 6814 PrintF("min_in_mutator=%.1f ", get_min_in_mutator()); | 6752 PrintF("min_in_mutator=%.1f ", get_min_in_mutator()); |
| 6815 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", | 6753 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", |
| 6816 get_max_alive_after_gc()); | 6754 get_max_alive_after_gc()); |
| 6817 PrintF("total_marking_time=%.1f ", marking_time()); | 6755 PrintF("total_marking_time=%.1f ", marking_time()); |
| 6818 PrintF("total_sweeping_time=%.1f ", sweeping_time()); | 6756 PrintF("total_sweeping_time=%.1f ", sweeping_time()); |
| 6819 PrintF("\n\n"); | 6757 PrintF("\n\n"); |
| 6820 } | 6758 } |
| 6821 | 6759 |
| 6760 if (FLAG_print_max_heap_committed) { |
| 6761 PrintF("\n"); |
| 6762 PrintF("maximum_committed_by_heap=%" V8_PTR_PREFIX "d ", |
| 6763 MaximumCommittedMemory()); |
| 6764 PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ", |
| 6765 new_space_.MaximumCommittedMemory()); |
| 6766 PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ", |
| 6767 old_pointer_space_->MaximumCommittedMemory()); |
| 6768 PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ", |
| 6769 old_data_space_->MaximumCommittedMemory()); |
| 6772 PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ", |
| 6773 code_space_->MaximumCommittedMemory()); |
| 6774 PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ", |
| 6775 map_space_->MaximumCommittedMemory()); |
| 6776 PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ", |
| 6777 cell_space_->MaximumCommittedMemory()); |
| 6778 PrintF("maximum_committed_by_property_space=%" V8_PTR_PREFIX "d ", |
| 6779 property_cell_space_->MaximumCommittedMemory()); |
| 6780 PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ", |
| 6781 lo_space_->MaximumCommittedMemory()); |
| 6782 PrintF("\n\n"); |
| 6783 } |
| 6784 |
| 6822 TearDownArrayBuffers(); | 6785 TearDownArrayBuffers(); |
| 6823 | 6786 |
| 6824 isolate_->global_handles()->TearDown(); | 6787 isolate_->global_handles()->TearDown(); |
| 6825 | 6788 |
| 6826 external_string_table_.TearDown(); | 6789 external_string_table_.TearDown(); |
| 6827 | 6790 |
| 6828 mark_compact_collector()->TearDown(); | 6791 mark_compact_collector()->TearDown(); |
| 6829 | 6792 |
| 6830 new_space_.TearDown(); | 6793 new_space_.TearDown(); |
| 6831 | 6794 |
| (...skipping 1098 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 7930 counters->count_of_FIXED_ARRAY_##name()->Increment( \ | 7893 counters->count_of_FIXED_ARRAY_##name()->Increment( \ |
| 7931 static_cast<int>(object_counts_[index])); \ | 7894 static_cast<int>(object_counts_[index])); \ |
| 7932 counters->count_of_FIXED_ARRAY_##name()->Decrement( \ | 7895 counters->count_of_FIXED_ARRAY_##name()->Decrement( \ |
| 7933 static_cast<int>(object_counts_last_time_[index])); \ | 7896 static_cast<int>(object_counts_last_time_[index])); \ |
| 7934 counters->size_of_FIXED_ARRAY_##name()->Increment( \ | 7897 counters->size_of_FIXED_ARRAY_##name()->Increment( \ |
| 7935 static_cast<int>(object_sizes_[index])); \ | 7898 static_cast<int>(object_sizes_[index])); \ |
| 7936 counters->size_of_FIXED_ARRAY_##name()->Decrement( \ | 7899 counters->size_of_FIXED_ARRAY_##name()->Decrement( \ |
| 7937 static_cast<int>(object_sizes_last_time_[index])); | 7900 static_cast<int>(object_sizes_last_time_[index])); |
| 7938 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7901 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7939 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7902 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7940 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \ | 7903 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \ |
| 7941 index = FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge; \ | 7904 index = \ |
| 7942 counters->count_of_CODE_AGE_##name()->Increment( \ | 7905 FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge; \ |
| 7943 static_cast<int>(object_counts_[index])); \ | 7906 counters->count_of_CODE_AGE_##name()->Increment( \ |
| 7944 counters->count_of_CODE_AGE_##name()->Decrement( \ | 7907 static_cast<int>(object_counts_[index])); \ |
| 7945 static_cast<int>(object_counts_last_time_[index])); \ | 7908 counters->count_of_CODE_AGE_##name()->Decrement( \ |
| 7946 counters->size_of_CODE_AGE_##name()->Increment( \ | 7909 static_cast<int>(object_counts_last_time_[index])); \ |
| 7947 static_cast<int>(object_sizes_[index])); \ | 7910 counters->size_of_CODE_AGE_##name()->Increment( \ |
| 7948 counters->size_of_CODE_AGE_##name()->Decrement( \ | 7911 static_cast<int>(object_sizes_[index])); \ |
| 7912 counters->size_of_CODE_AGE_##name()->Decrement( \ |
| 7949 static_cast<int>(object_sizes_last_time_[index])); | 7913 static_cast<int>(object_sizes_last_time_[index])); |
| 7950 CODE_AGE_LIST_WITH_NO_AGE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7914 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7951 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7915 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7952 | 7916 |
| 7953 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7917 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7954 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7918 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7955 ClearObjectStats(); | 7919 ClearObjectStats(); |
| 7956 } | 7920 } |
| 7957 | 7921 |
| 7958 | 7922 |
| 7959 Heap::RelocationLock::RelocationLock(Heap* heap) : heap_(heap) { | 7923 Heap::RelocationLock::RelocationLock(Heap* heap) : heap_(heap) { |
| 7960 if (FLAG_concurrent_recompilation) { | 7924 if (FLAG_concurrent_recompilation) { |
| 7961 heap_->relocation_mutex_->Lock(); | 7925 heap_->relocation_mutex_->Lock(); |
| 7962 #ifdef DEBUG | 7926 #ifdef DEBUG |
| 7963 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7927 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 7964 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7928 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 7965 #endif // DEBUG | 7929 #endif // DEBUG |
| 7966 } | 7930 } |
| 7967 } | 7931 } |
| 7968 | 7932 |
| 7969 } } // namespace v8::internal | 7933 } } // namespace v8::internal |
| OLD | NEW |