OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 125 matching lines...)
136 marking_(this), | 136 marking_(this), |
137 incremental_marking_(this), | 137 incremental_marking_(this), |
138 number_idle_notifications_(0), | 138 number_idle_notifications_(0), |
139 last_idle_notification_gc_count_(0), | 139 last_idle_notification_gc_count_(0), |
140 last_idle_notification_gc_count_init_(false), | 140 last_idle_notification_gc_count_init_(false), |
141 mark_sweeps_since_idle_round_started_(0), | 141 mark_sweeps_since_idle_round_started_(0), |
142 gc_count_at_last_idle_gc_(0), | 142 gc_count_at_last_idle_gc_(0), |
143 scavenges_since_last_idle_round_(kIdleScavengeThreshold), | 143 scavenges_since_last_idle_round_(kIdleScavengeThreshold), |
144 gcs_since_last_deopt_(0), | 144 gcs_since_last_deopt_(0), |
145 #ifdef VERIFY_HEAP | 145 #ifdef VERIFY_HEAP |
146 no_weak_embedded_maps_verification_scope_depth_(0), | 146 no_weak_object_verification_scope_depth_(0), |
147 #endif | 147 #endif |
148 promotion_queue_(this), | 148 promotion_queue_(this), |
149 configured_(false), | 149 configured_(false), |
150 chunks_queued_for_free_(NULL), | 150 chunks_queued_for_free_(NULL), |
151 relocation_mutex_(NULL) { | 151 relocation_mutex_(NULL) { |
152 // Allow build-time customization of the max semispace size. Building | 152 // Allow build-time customization of the max semispace size. Building |
153 // V8 with snapshots and a non-default max semispace size is much | 153 // V8 with snapshots and a non-default max semispace size is much |
154 // easier if you can define it as part of the build environment. | 154 // easier if you can define it as part of the build environment. |
155 #if defined(V8_MAX_SEMISPACE_SIZE) | 155 #if defined(V8_MAX_SEMISPACE_SIZE) |
156 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 156 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
(...skipping 6566 matching lines...)
6723 // Create initial maps. | 6723 // Create initial maps. |
6724 if (!CreateInitialMaps()) return false; | 6724 if (!CreateInitialMaps()) return false; |
6725 if (!CreateApiObjects()) return false; | 6725 if (!CreateApiObjects()) return false; |
6726 | 6726 |
6727 // Create initial objects | 6727 // Create initial objects |
6728 if (!CreateInitialObjects()) return false; | 6728 if (!CreateInitialObjects()) return false; |
6729 | 6729 |
6730 native_contexts_list_ = undefined_value(); | 6730 native_contexts_list_ = undefined_value(); |
6731 array_buffers_list_ = undefined_value(); | 6731 array_buffers_list_ = undefined_value(); |
6732 allocation_sites_list_ = undefined_value(); | 6732 allocation_sites_list_ = undefined_value(); |
| 6733 weak_object_to_code_table_ = undefined_value(); |
6733 return true; | 6734 return true; |
6734 } | 6735 } |
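Note on the new root above: weak_object_to_code_table_ is seeded with undefined_value() here and only becomes a real WeakHashTable on demand, via EnsureWeakObjectToCodeTable() added later in this patch. A minimal sketch of that lazy-initialization check, mirroring the patch (`heap` is a hypothetical Heap* in scope, not a name from the change):

    // The root stays plain undefined until the first dependency is recorded.
    if (!heap->weak_object_to_code_table()->IsHashTable()) {
      heap->set_weak_object_to_code_table(
          *heap->isolate()->factory()->NewWeakHashTable(16));  // 16 = initial capacity used by the patch
    }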
6735 | 6736 |
6736 | 6737 |
6737 void Heap::SetStackLimits() { | 6738 void Heap::SetStackLimits() { |
6738 ASSERT(isolate_ != NULL); | 6739 ASSERT(isolate_ != NULL); |
6739 ASSERT(isolate_ == isolate()); | 6740 ASSERT(isolate_ == isolate()); |
6740 // On 64 bit machines, pointers are generally out of range of Smis. We write | 6741 // On 64 bit machines, pointers are generally out of range of Smis. We write |
6741 // something that looks like an out of range Smi to the GC. | 6742 // something that looks like an out of range Smi to the GC. |
6742 | 6743 |
(...skipping 127 matching lines...)
6870 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { | 6871 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { |
6871 if (gc_epilogue_callbacks_[i].callback == callback) { | 6872 if (gc_epilogue_callbacks_[i].callback == callback) { |
6872 gc_epilogue_callbacks_.Remove(i); | 6873 gc_epilogue_callbacks_.Remove(i); |
6873 return; | 6874 return; |
6874 } | 6875 } |
6875 } | 6876 } |
6876 UNREACHABLE(); | 6877 UNREACHABLE(); |
6877 } | 6878 } |
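Aside on the unchanged hunk above: RemoveGCEpilogueCallback() returns only when it finds the callback in gc_epilogue_callbacks_; otherwise it falls through to UNREACHABLE(). Removal is therefore only valid for a callback previously registered through the matching Add call (not shown in this hunk). A hedged sketch, where `heap` and `callback` are hypothetical names for a Heap* and a previously registered function pointer:

    // `callback` must be the exact pointer registered earlier; removing an
    // unregistered callback trips UNREACHABLE() in the loop above.
    heap->RemoveGCEpilogueCallback(callback);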
6878 | 6879 |
6879 | 6880 |
| 6881 MaybeObject* Heap::AddWeakObjectToCodeDependency(Object* obj, |
| 6882 DependentCode* dep) { |
| 6883 ASSERT(!InNewSpace(obj)); |
| 6884 ASSERT(!InNewSpace(dep)); |
| 6885 MaybeObject* maybe_obj = |
| 6886 WeakHashTable::cast(weak_object_to_code_table_)->Put(obj, dep); |
| 6887 WeakHashTable* table; |
| 6888 if (!maybe_obj->To(&table)) return maybe_obj; |
| 6889 set_weak_object_to_code_table(table); |
| 6890 ASSERT_EQ(dep, WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj)); |
| 6891 return weak_object_to_code_table_; |
| 6892 } |
| 6893 |
| 6894 |
| 6895 DependentCode* Heap::LookupWeakObjectToCodeDependency(Object* obj) { |
| 6896 Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj); |
| 6897 if (dep->IsDependentCode()) return DependentCode::cast(dep); |
| 6898 return DependentCode::cast(empty_fixed_array()); |
| 6899 } |
| 6900 |
| 6901 |
| 6902 void Heap::EnsureWeakObjectToCodeTable() { |
| 6903 if (!weak_object_to_code_table()->IsHashTable()) { |
| 6904 set_weak_object_to_code_table(*isolate()->factory()->NewWeakHashTable(16)); |
| 6905 } |
| 6906 } |
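The three functions added above form the surface of the new weak object-to-code dependency table. A hedged usage sketch (not part of the patch): `heap`, `object`, and `codes` are hypothetical names for a Heap*, a non-new-space Object* key, and the DependentCode* to associate with it, and the failure check assumes the usual MaybeObject protocol of this V8 version:

    heap->EnsureWeakObjectToCodeTable();          // lazily materializes the WeakHashTable
    MaybeObject* maybe = heap->AddWeakObjectToCodeDependency(object, codes);
    if (maybe->IsFailure()) return maybe;         // growing the table can fail to allocate

    // Later: a missing entry comes back as the empty fixed array cast to
    // DependentCode rather than as NULL.
    DependentCode* deps = heap->LookupWeakObjectToCodeDependency(object);

Returning DependentCode::cast(empty_fixed_array()) as the "no dependencies" sentinel keeps Lookup callers free of NULL checks, at the cost of relying on the empty fixed array being a valid, immutable placeholder.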
| 6907 |
| 6908 |
6880 #ifdef DEBUG | 6909 #ifdef DEBUG |
6881 | 6910 |
6882 class PrintHandleVisitor: public ObjectVisitor { | 6911 class PrintHandleVisitor: public ObjectVisitor { |
6883 public: | 6912 public: |
6884 void VisitPointers(Object** start, Object** end) { | 6913 void VisitPointers(Object** start, Object** end) { |
6885 for (Object** p = start; p < end; p++) | 6914 for (Object** p = start; p < end; p++) |
6886 PrintF(" handle %p to %p\n", | 6915 PrintF(" handle %p to %p\n", |
6887 reinterpret_cast<void*>(p), | 6916 reinterpret_cast<void*>(p), |
6888 reinterpret_cast<void*>(*p)); | 6917 reinterpret_cast<void*>(*p)); |
6889 } | 6918 } |
(...skipping 980 matching lines...)
7870 if (FLAG_concurrent_recompilation) { | 7899 if (FLAG_concurrent_recompilation) { |
7871 heap_->relocation_mutex_->Lock(); | 7900 heap_->relocation_mutex_->Lock(); |
7872 #ifdef DEBUG | 7901 #ifdef DEBUG |
7873 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7902 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
7874 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7903 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
7875 #endif // DEBUG | 7904 #endif // DEBUG |
7876 } | 7905 } |
7877 } | 7906 } |
7878 | 7907 |
7879 } } // namespace v8::internal | 7908 } } // namespace v8::internal |