OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 74 matching lines...)
85 | 85 |
86 // Variables set based on semispace_size_ and old_generation_size_ in | 86 // Variables set based on semispace_size_ and old_generation_size_ in |
87 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_) | 87 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_) |
88 // Will be 4 * reserved_semispace_size_ to ensure that young | 88 // Will be 4 * reserved_semispace_size_ to ensure that young |
89 // generation can be aligned to its size. | 89 // generation can be aligned to its size. |
90 survived_since_last_expansion_(0), | 90 survived_since_last_expansion_(0), |
91 sweep_generation_(0), | 91 sweep_generation_(0), |
92 always_allocate_scope_depth_(0), | 92 always_allocate_scope_depth_(0), |
93 linear_allocation_scope_depth_(0), | 93 linear_allocation_scope_depth_(0), |
94 contexts_disposed_(0), | 94 contexts_disposed_(0), |
| 95 global_ic_age_(0), |
95 scan_on_scavenge_pages_(0), | 96 scan_on_scavenge_pages_(0), |
96 new_space_(this), | 97 new_space_(this), |
97 old_pointer_space_(NULL), | 98 old_pointer_space_(NULL), |
98 old_data_space_(NULL), | 99 old_data_space_(NULL), |
99 code_space_(NULL), | 100 code_space_(NULL), |
100 map_space_(NULL), | 101 map_space_(NULL), |
101 cell_space_(NULL), | 102 cell_space_(NULL), |
102 lo_space_(NULL), | 103 lo_space_(NULL), |
103 gc_state_(NOT_IN_GC), | 104 gc_state_(NOT_IN_GC), |
104 gc_post_processing_depth_(0), | 105 gc_post_processing_depth_(0), |
(...skipping 3281 matching lines...)
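Note on the new global_ic_age_ field added above: it gives the heap a global notion of inline-cache age. Code objects are stamped with the current age when they are created (see the set_ic_age() call in the next hunk), and bumping the age lets the collector recognize ICs minted for contexts that are now gone. A minimal standalone sketch of the mechanism, using simplified stand-in types (Code and Heap here are illustrations, not the real V8 classes):

    // Simplified model of the heap/code relationship: every code object
    // is stamped with the heap's IC age at creation time; bumping the
    // global age marks all previously created inline caches as stale.
    struct Code {
      int ic_age;
    };

    class Heap {
     public:
      Code CreateCode() { return Code{global_ic_age_}; }
      void AgeInlineCaches() { ++global_ic_age_; }
      bool IsStale(const Code& code) const {
        return code.ic_age != global_ic_age_;
      }
     private:
      int global_ic_age_ = 0;
    };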
3386 code->set_instruction_size(desc.instr_size); | 3387 code->set_instruction_size(desc.instr_size); |
3387 code->set_relocation_info(reloc_info); | 3388 code->set_relocation_info(reloc_info); |
3388 code->set_flags(flags); | 3389 code->set_flags(flags); |
3389 if (code->is_call_stub() || code->is_keyed_call_stub()) { | 3390 if (code->is_call_stub() || code->is_keyed_call_stub()) { |
3390 code->set_check_type(RECEIVER_MAP_CHECK); | 3391 code->set_check_type(RECEIVER_MAP_CHECK); |
3391 } | 3392 } |
3392 code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER); | 3393 code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER); |
3393 code->set_type_feedback_info(undefined_value(), SKIP_WRITE_BARRIER); | 3394 code->set_type_feedback_info(undefined_value(), SKIP_WRITE_BARRIER); |
3394 code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER); | 3395 code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER); |
3395 code->set_gc_metadata(Smi::FromInt(0)); | 3396 code->set_gc_metadata(Smi::FromInt(0)); |
| 3397 code->set_ic_age(global_ic_age_); |
3396 // Allow self references to created code object by patching the handle to | 3398 // Allow self references to created code object by patching the handle to |
3397 // point to the newly allocated Code object. | 3399 // point to the newly allocated Code object. |
3398 if (!self_reference.is_null()) { | 3400 if (!self_reference.is_null()) { |
3399 *(self_reference.location()) = code; | 3401 *(self_reference.location()) = code; |
3400 } | 3402 } |
3401 // Migrate generated code. | 3403 // Migrate generated code. |
3402 // The generated code can contain Object** values (typically from handles) | 3404 // The generated code can contain Object** values (typically from handles) |
3403 // that are dereferenced during the copy to point directly to the actual heap | 3405 // that are dereferenced during the copy to point directly to the actual heap |
3404 // objects. These pointers can include references to the code object itself, | 3406 // objects. These pointers can include references to the code object itself, |
3405 // through the self_reference parameter. | 3407 // through the self_reference parameter. |
(...skipping 1425 matching lines...)
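Note on the self_reference patching above: code under assembly can embed pointers to the very Code object being created, so the assembler hands out a placeholder handle whose slot is filled in only after the final object is allocated; the subsequent copy then resolves those embedded references, as the comment explains. A simplified sketch of the pattern, assuming toy CodeObject/Handle types rather than V8's real handle machinery:

    // A handle is just a slot that will eventually hold the object's
    // final address; embedded self references go through this slot.
    struct CodeObject {
      int instruction_size;
    };

    struct Handle {
      CodeObject** location = nullptr;  // slot to patch after allocation
      bool is_null() const { return location == nullptr; }
    };

    CodeObject* CreateCode(Handle self_reference) {
      CodeObject* code = new CodeObject{0};
      // Patch the handle so references recorded during assembly now
      // point at the freshly allocated object.
      if (!self_reference.is_null()) {
        *self_reference.location = code;
      }
      return code;
    }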
4831 gc_count_at_last_idle_gc_ = gc_count_; | 4833 gc_count_at_last_idle_gc_ = gc_count_; |
4832 if (uncommit) { | 4834 if (uncommit) { |
4833 new_space_.Shrink(); | 4835 new_space_.Shrink(); |
4834 UncommitFromSpace(); | 4836 UncommitFromSpace(); |
4835 } | 4837 } |
4836 } | 4838 } |
4837 } | 4839 } |
4838 | 4840 |
4839 | 4841 |
4840 bool Heap::IdleNotification(int hint) { | 4842 bool Heap::IdleNotification(int hint) { |
4841 intptr_t size_factor = Min(Max(hint, 30), 1000) / 10; | 4843 const int kMaxHint = 1000; |
| 4844 intptr_t size_factor = Min(Max(hint, 30), kMaxHint) / 10; |
4842 // The size factor is in range [3..100]. | 4845 // The size factor is in range [3..100]. |
4843 intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold; | 4846 intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold; |
4844 | 4847 |
4845 if (contexts_disposed_ > 0) { | 4848 if (contexts_disposed_ > 0) { |
| 4849 if (hint >= kMaxHint) { |
| 4850 // The embedder is requesting a lot of GC work after context disposal; |
| 4851 // we age inline caches so that they don't keep objects from |
| 4852 // the old context alive. |
| 4853 AgeInlineCaches(); |
| 4854 } |
4846 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000); | 4855 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000); |
4847 if (hint >= mark_sweep_time && !FLAG_expose_gc) { | 4856 if (hint >= mark_sweep_time && !FLAG_expose_gc && |
| 4857 incremental_marking()->IsStopped()) { |
4848 HistogramTimerScope scope(isolate_->counters()->gc_context()); | 4858 HistogramTimerScope scope(isolate_->counters()->gc_context()); |
4849 CollectAllGarbage(kReduceMemoryFootprintMask, | 4859 CollectAllGarbage(kReduceMemoryFootprintMask, |
4850 "idle notification: contexts disposed"); | 4860 "idle notification: contexts disposed"); |
4851 } else { | 4861 } else { |
4852 AdvanceIdleIncrementalMarking(step_size); | 4862 AdvanceIdleIncrementalMarking(step_size); |
4853 contexts_disposed_ = 0; | 4863 contexts_disposed_ = 0; |
4854 } | 4864 } |
4855 // Make sure that we have no pending context disposals. | 4865 // Make sure that we have no pending context disposals. |
4856 // Take into account that we might have decided to delay full collection | 4866 // Take into account that we might have decided to delay full collection |
4857 // because incremental marking is in progress. | 4867 // because incremental marking is in progress. |
4858 ASSERT((contexts_disposed_ == 0) || !incremental_marking()->IsStopped()); | 4868 ASSERT((contexts_disposed_ == 0) || !incremental_marking()->IsStopped()); |
4859 return false; | 4869 return false; |
4860 } | 4870 } |
4861 | 4871 |
4862 if (hint >= 1000 || !FLAG_incremental_marking || | 4872 if (hint >= kMaxHint || !FLAG_incremental_marking || |
4863 FLAG_expose_gc || Serializer::enabled()) { | 4873 FLAG_expose_gc || Serializer::enabled()) { |
4864 return IdleGlobalGC(); | 4874 return IdleGlobalGC(); |
4865 } | 4875 } |
4866 | 4876 |
4867 // By doing small chunks of GC work in each IdleNotification, | 4877 // By doing small chunks of GC work in each IdleNotification, |
4868 // perform a round of incremental GCs and after that wait until | 4878 // perform a round of incremental GCs and after that wait until |
4869 // the mutator creates enough garbage to justify a new round. | 4879 // the mutator creates enough garbage to justify a new round. |
4870 // An incremental GC progresses as follows: | 4880 // An incremental GC progresses as follows: |
4871 // 1. many incremental marking steps, | 4881 // 1. many incremental marking steps, |
4872 // 2. one old space mark-sweep-compact, | 4882 // 2. one old space mark-sweep-compact, |
(...skipping 2094 matching lines...)
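On the step sizing at the top of IdleNotification(): the hint is clamped to [30, kMaxHint] and divided by 10, so size_factor does land in [3..100] as the comment says; hint = 250, for example, yields size_factor 25 and a step of 25 * IncrementalMarking::kAllocatedThreshold bytes. A compilable sketch of just that computation, with kAllocatedThreshold as an assumed stand-in value for the real constant:

    #include <algorithm>
    #include <cstdint>

    // Assumed stand-ins for illustration; the real values live in V8.
    constexpr intptr_t kAllocatedThreshold = 65536;
    constexpr int kMaxHint = 1000;

    intptr_t IdleStepSize(int hint) {
      // Clamp to [30, 1000], then scale down: size_factor is in [3..100].
      intptr_t size_factor = std::min(std::max(hint, 30), kMaxHint) / 10;
      return size_factor * kAllocatedThreshold;
    }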
6967 } else { | 6977 } else { |
6968 p ^= 0x1d1ed & (Page::kPageSize - 1); // I died. | 6978 p ^= 0x1d1ed & (Page::kPageSize - 1); // I died. |
6969 } | 6979 } |
6970 remembered_unmapped_pages_[remembered_unmapped_pages_index_] = | 6980 remembered_unmapped_pages_[remembered_unmapped_pages_index_] = |
6971 reinterpret_cast<Address>(p); | 6981 reinterpret_cast<Address>(p); |
6972 remembered_unmapped_pages_index_++; | 6982 remembered_unmapped_pages_index_++; |
6973 remembered_unmapped_pages_index_ %= kRememberedUnmappedPages; | 6983 remembered_unmapped_pages_index_ %= kRememberedUnmappedPages; |
6974 } | 6984 } |
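On RememberUnmappedPage() above: the heap keeps a small ring buffer of recently unmapped page addresses, XOR-tagged with a recognizable in-page offset (0x1d1ed, read "I died") so that a value found in a crash dump is obviously a dead-page marker rather than a live pointer. A standalone sketch with assumed constants (the real kRememberedUnmappedPages and Page::kPageSize are defined elsewhere in the heap sources):

    #include <cstdint>

    // Assumed stand-in constants for illustration.
    constexpr int kRememberedUnmappedPages = 128;
    constexpr uintptr_t kPageSize = 1 << 20;

    static uintptr_t remembered_unmapped_pages[kRememberedUnmappedPages];
    static int remembered_unmapped_pages_index = 0;

    void RememberUnmappedPage(uintptr_t page) {
      // Tag the address so it reads as a "dead page" marker in a dump.
      uintptr_t p = page ^ (0x1d1ed & (kPageSize - 1));  // "I died."
      remembered_unmapped_pages[remembered_unmapped_pages_index] = p;
      remembered_unmapped_pages_index =
          (remembered_unmapped_pages_index + 1) % kRememberedUnmappedPages;
    }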
6975 | 6985 |
6976 } } // namespace v8::internal | 6986 } } // namespace v8::internal |