| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 99 matching lines...) |
| 110 gc_state_(NOT_IN_GC), | 110 gc_state_(NOT_IN_GC), |
| 111 gc_post_processing_depth_(0), | 111 gc_post_processing_depth_(0), |
| 112 ms_count_(0), | 112 ms_count_(0), |
| 113 gc_count_(0), | 113 gc_count_(0), |
| 114 remembered_unmapped_pages_index_(0), | 114 remembered_unmapped_pages_index_(0), |
| 115 unflattened_strings_length_(0), | 115 unflattened_strings_length_(0), |
| 116 #ifdef DEBUG | 116 #ifdef DEBUG |
| 117 allocation_allowed_(true), | 117 allocation_allowed_(true), |
| 118 allocation_timeout_(0), | 118 allocation_timeout_(0), |
| 119 disallow_allocation_failure_(false), | 119 disallow_allocation_failure_(false), |
| 120 debug_utils_(NULL), | |
| 121 #endif // DEBUG | 120 #endif // DEBUG |
| 122 new_space_high_promotion_mode_active_(false), | 121 new_space_high_promotion_mode_active_(false), |
| 123 old_gen_promotion_limit_(kMinimumPromotionLimit), | 122 old_gen_promotion_limit_(kMinimumPromotionLimit), |
| 124 old_gen_allocation_limit_(kMinimumAllocationLimit), | 123 old_gen_allocation_limit_(kMinimumAllocationLimit), |
| 125 old_gen_limit_factor_(1), | 124 old_gen_limit_factor_(1), |
| 126 size_of_old_gen_at_last_old_space_gc_(0), | 125 size_of_old_gen_at_last_old_space_gc_(0), |
| 127 external_allocation_limit_(0), | 126 external_allocation_limit_(0), |
| 128 amount_of_external_allocated_memory_(0), | 127 amount_of_external_allocated_memory_(0), |
| 129 amount_of_external_allocated_memory_at_last_global_gc_(0), | 128 amount_of_external_allocated_memory_at_last_global_gc_(0), |
| 130 old_gen_exhausted_(false), | 129 old_gen_exhausted_(false), |
| (...skipping 5986 matching lines...) |
| 6117 } | 6116 } |
| 6118 | 6117 |
| 6119 | 6118 |
| 6120 intptr_t Heap::PromotedExternalMemorySize() { | 6119 intptr_t Heap::PromotedExternalMemorySize() { |
| 6121 if (amount_of_external_allocated_memory_ | 6120 if (amount_of_external_allocated_memory_ |
| 6122 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; | 6121 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; |
| 6123 return amount_of_external_allocated_memory_ | 6122 return amount_of_external_allocated_memory_ |
| 6124 - amount_of_external_allocated_memory_at_last_global_gc_; | 6123 - amount_of_external_allocated_memory_at_last_global_gc_; |
| 6125 } | 6124 } |
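(In other words, PromotedExternalMemorySize() reports only the growth in externally allocated memory since the last global GC, clamped at zero: 14 MB now against 10 MB at the last GC yields 4 MB, while any decrease yields 0 rather than a negative size.)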
| 6126 | 6125 |
| 6127 #ifdef DEBUG | |
| 6128 | |
| 6129 // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject. | |
| 6130 static const int kMarkTag = 2; | |
| 6131 | |
| 6132 | |
| 6133 class HeapDebugUtils { | |
| 6134 public: | |
| 6135 explicit HeapDebugUtils(Heap* heap) | |
| 6136 : search_for_any_global_(false), | |
| 6137 search_target_(NULL), | |
| 6138 found_target_(false), | |
| 6139 object_stack_(20), | |
| 6140 heap_(heap) { | |
| 6141 } | |
| 6142 | |
| 6143 class MarkObjectVisitor : public ObjectVisitor { | |
| 6144 public: | |
| 6145 explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { } | |
| 6146 | |
| 6147 void VisitPointers(Object** start, Object** end) { | |
| 6148 // Mark all HeapObject pointers in [start, end) | |
| 6149 for (Object** p = start; p < end; p++) { | |
| 6150 if ((*p)->IsHeapObject()) | |
| 6151 utils_->MarkObjectRecursively(p); | |
| 6152 } | |
| 6153 } | |
| 6154 | |
| 6155 HeapDebugUtils* utils_; | |
| 6156 }; | |
| 6157 | |
| 6158 void MarkObjectRecursively(Object** p) { | |
| 6159 if (!(*p)->IsHeapObject()) return; | |
| 6160 | |
| 6161 HeapObject* obj = HeapObject::cast(*p); | |
| 6162 | |
| 6163 Object* map = obj->map(); | |
| 6164 | |
| 6165 if (!map->IsHeapObject()) return; // visited before | |
| 6166 | |
| 6167 if (found_target_) return; // stop if target found | |
| 6168 object_stack_.Add(obj); | |
| 6169 if ((search_for_any_global_ && obj->IsJSGlobalObject()) || | |
| 6170 (!search_for_any_global_ && (obj == search_target_))) { | |
| 6171 found_target_ = true; | |
| 6172 return; | |
| 6173 } | |
| 6174 | |
| 6175 // not visited yet | |
| 6176 Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map)); | |
| 6177 | |
| 6178 Address map_addr = map_p->address(); | |
| 6179 | |
| 6180 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag)); | |
| 6181 | |
| 6182 MarkObjectRecursively(&map); | |
| 6183 | |
| 6184 MarkObjectVisitor mark_visitor(this); | |
| 6185 | |
| 6186 obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p), | |
| 6187 &mark_visitor); | |
| 6188 | |
| 6189 if (!found_target_) // don't pop if found the target | |
| 6190 object_stack_.RemoveLast(); | |
| 6191 } | |
| 6192 | |
| 6193 | |
| 6194 class UnmarkObjectVisitor : public ObjectVisitor { | |
| 6195 public: | |
| 6196 explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { } | |
| 6197 | |
| 6198 void VisitPointers(Object** start, Object** end) { | |
| 6200 // Unmark all HeapObject pointers in [start, end) | |
| 6200 for (Object** p = start; p < end; p++) { | |
| 6201 if ((*p)->IsHeapObject()) | |
| 6202 utils_->UnmarkObjectRecursively(p); | |
| 6203 } | |
| 6204 } | |
| 6205 | |
| 6206 HeapDebugUtils* utils_; | |
| 6207 }; | |
| 6208 | |
| 6209 | |
| 6210 void UnmarkObjectRecursively(Object** p) { | |
| 6211 if (!(*p)->IsHeapObject()) return; | |
| 6212 | |
| 6213 HeapObject* obj = HeapObject::cast(*p); | |
| 6214 | |
| 6215 Object* map = obj->map(); | |
| 6216 | |
| 6217 if (map->IsHeapObject()) return; // unmarked already | |
| 6218 | |
| 6219 Address map_addr = reinterpret_cast<Address>(map); | |
| 6220 | |
| 6221 map_addr -= kMarkTag; | |
| 6222 | |
| 6223 ASSERT_TAG_ALIGNED(map_addr); | |
| 6224 | |
| 6225 HeapObject* map_p = HeapObject::FromAddress(map_addr); | |
| 6226 | |
| 6227 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p)); | |
| 6228 | |
| 6229 UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p)); | |
| 6230 | |
| 6231 UnmarkObjectVisitor unmark_visitor(this); | |
| 6232 | |
| 6233 obj->IterateBody(Map::cast(map_p)->instance_type(), | |
| 6234 obj->SizeFromMap(Map::cast(map_p)), | |
| 6235 &unmark_visitor); | |
| 6236 } | |
| 6237 | |
| 6238 | |
| 6239 void MarkRootObjectRecursively(Object** root) { | |
| 6240 if (search_for_any_global_) { | |
| 6241 ASSERT(search_target_ == NULL); | |
| 6242 } else { | |
| 6243 ASSERT(search_target_->IsHeapObject()); | |
| 6244 } | |
| 6245 found_target_ = false; | |
| 6246 object_stack_.Clear(); | |
| 6247 | |
| 6248 MarkObjectRecursively(root); | |
| 6249 UnmarkObjectRecursively(root); | |
| 6250 | |
| 6251 if (found_target_) { | |
| 6252 PrintF("=====================================\n"); | |
| 6253 PrintF("==== Path to object ====\n"); | |
| 6254 PrintF("=====================================\n\n"); | |
| 6255 | |
| 6256 ASSERT(!object_stack_.is_empty()); | |
| 6257 for (int i = 0; i < object_stack_.length(); i++) { | |
| 6258 if (i > 0) PrintF("\n |\n |\n V\n\n"); | |
| 6259 Object* obj = object_stack_[i]; | |
| 6260 obj->Print(); | |
| 6261 } | |
| 6262 PrintF("=====================================\n"); | |
| 6263 } | |
| 6264 } | |
| 6265 | |
| 6266 // Helper class for visiting HeapObjects recursively. | |
| 6267 class MarkRootVisitor: public ObjectVisitor { | |
| 6268 public: | |
| 6269 explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { } | |
| 6270 | |
| 6271 void VisitPointers(Object** start, Object** end) { | |
| 6272 // Visit all HeapObject pointers in [start, end) | |
| 6273 for (Object** p = start; p < end; p++) { | |
| 6274 if ((*p)->IsHeapObject()) | |
| 6275 utils_->MarkRootObjectRecursively(p); | |
| 6276 } | |
| 6277 } | |
| 6278 | |
| 6279 HeapDebugUtils* utils_; | |
| 6280 }; | |
| 6281 | |
| 6282 bool search_for_any_global_; | |
| 6283 Object* search_target_; | |
| 6284 bool found_target_; | |
| 6285 List<Object*> object_stack_; | |
| 6286 Heap* heap_; | |
| 6287 | |
| 6288 friend class Heap; | |
| 6289 }; | |
| 6290 | |
| 6291 #endif | |
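Note on the deleted block above: HeapDebugUtils finds a retaining path to a target object by depth-first marking, and instead of keeping a side table it marks an object as visited by tagging the object's own map pointer in place (adding kMarkTag to the map address); the unmark pass subtracts the tag to restore the map. Below is a minimal, self-contained sketch of that map-tagging trick; Node, child, and the single-edge graph are illustrative stand-ins, not V8 types.

    // Sketch of in-place mark/unmark via pointer tagging (illustrative).
    #include <cstdint>

    struct Node {
      Node* map;    // stand-in for the object's map word
      Node* child;  // a single outgoing edge keeps the sketch short
    };

    const uintptr_t kMarkTag = 2;  // same tag value as the deleted code

    // Assumes Node is at least 4-byte aligned, so bit 1 of a genuine
    // pointer is always clear and can safely carry the mark.
    bool IsMarked(const Node* n) {
      return reinterpret_cast<uintptr_t>(n->map) & kMarkTag;
    }

    void MarkRecursively(Node* n) {
      if (n == nullptr || IsMarked(n)) return;  // visited before
      n->map = reinterpret_cast<Node*>(
          reinterpret_cast<uintptr_t>(n->map) + kMarkTag);
      MarkRecursively(n->child);  // mark is set first, so cycles terminate
    }

    void UnmarkRecursively(Node* n) {
      if (n == nullptr || !IsMarked(n)) return;  // already restored
      n->map = reinterpret_cast<Node*>(
          reinterpret_cast<uintptr_t>(n->map) - kMarkTag);
      UnmarkRecursively(n->child);
    }

The tag value 2 works in V8 for the same reason: the comment at old line 6129 notes that tags 0, 1, and 3 are already taken, so bit pattern 2 is free to distinguish a marked map word from every legitimate one.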
| 6292 | |
| 6293 | 6126 |
| 6294 V8_DECLARE_ONCE(initialize_gc_once); | 6127 V8_DECLARE_ONCE(initialize_gc_once); |
| 6295 | 6128 |
| 6296 static void InitializeGCOnce() { | 6129 static void InitializeGCOnce() { |
| 6297 InitializeScavengingVisitorsTables(); | 6130 InitializeScavengingVisitorsTables(); |
| 6298 NewSpaceScavenger::Initialize(); | 6131 NewSpaceScavenger::Initialize(); |
| 6299 MarkCompactCollector::Initialize(); | 6132 MarkCompactCollector::Initialize(); |
| 6300 } | 6133 } |
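V8_DECLARE_ONCE(initialize_gc_once) declares the guard that is paired (in the elided lines of Heap::SetUp) with a CallOnce call, so the scavenging visitor tables and collector setup run exactly once per process no matter how many isolates set up heaps. A hedged sketch of the same run-exactly-once idiom in standard C++, with std::once_flag standing in for V8's macros (names illustrative):

    #include <mutex>
    #include <cstdio>

    std::once_flag initialize_gc_once;  // plays the role of V8_DECLARE_ONCE

    void InitializeGCOnce() {
      std::puts("one-time, process-wide GC table setup");
    }

    bool SetUp() {
      // Every heap funnels through the same flag; only the first caller
      // actually runs InitializeGCOnce, the rest wait and then proceed.
      std::call_once(initialize_gc_once, InitializeGCOnce);
      return true;
    }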
| 6301 | 6134 |
| 6302 bool Heap::SetUp(bool create_heap_objects) { | 6135 bool Heap::SetUp(bool create_heap_objects) { |
| 6303 #ifdef DEBUG | 6136 #ifdef DEBUG |
| 6304 allocation_timeout_ = FLAG_gc_interval; | 6137 allocation_timeout_ = FLAG_gc_interval; |
| 6305 debug_utils_ = new HeapDebugUtils(this); | |
| 6306 #endif | 6138 #endif |
| 6307 | 6139 |
| 6308 // Initialize heap spaces and initial maps and objects. Whenever something | 6140 // Initialize heap spaces and initial maps and objects. Whenever something |
| 6309 // goes wrong, just return false. The caller should check the results and | 6141 // goes wrong, just return false. The caller should check the results and |
| 6310 // call Heap::TearDown() to release allocated memory. | 6142 // call Heap::TearDown() to release allocated memory. |
| 6311 // | 6143 // |
| 6312 // If the heap is not yet configured (e.g. through the API), configure it. | 6144 // If the heap is not yet configured (e.g. through the API), configure it. |
| 6313 // Configuration is based on the flags new-space-size (really the semispace | 6145 // Configuration is based on the flags new-space-size (really the semispace |
| 6314 // size) and old-space-size if set or the initial values of semispace_size_ | 6146 // size) and old-space-size if set or the initial values of semispace_size_ |
| 6315 // and old_generation_size_ otherwise. | 6147 // and old_generation_size_ otherwise. |
| (...skipping 174 matching lines...) |
| 6490 delete lo_space_; | 6322 delete lo_space_; |
| 6491 lo_space_ = NULL; | 6323 lo_space_ = NULL; |
| 6492 } | 6324 } |
| 6493 | 6325 |
| 6494 store_buffer()->TearDown(); | 6326 store_buffer()->TearDown(); |
| 6495 incremental_marking()->TearDown(); | 6327 incremental_marking()->TearDown(); |
| 6496 | 6328 |
| 6497 isolate_->memory_allocator()->TearDown(); | 6329 isolate_->memory_allocator()->TearDown(); |
| 6498 | 6330 |
| 6499 delete relocation_mutex_; | 6331 delete relocation_mutex_; |
| 6500 | |
| 6501 #ifdef DEBUG | |
| 6502 delete debug_utils_; | |
| 6503 debug_utils_ = NULL; | |
| 6504 #endif | |
| 6505 } | 6332 } |
| 6506 | 6333 |
| 6507 | 6334 |
| 6508 void Heap::Shrink() { | 6335 void Heap::Shrink() { |
| 6509 // Try to shrink all paged spaces. | 6336 // Try to shrink all paged spaces. |
| 6510 PagedSpaces spaces; | 6337 PagedSpaces spaces; |
| 6511 for (PagedSpace* space = spaces.next(); | 6338 for (PagedSpace* space = spaces.next(); |
| 6512 space != NULL; | 6339 space != NULL; |
| 6513 space = spaces.next()) { | 6340 space = spaces.next()) { |
| 6514 space->ReleaseAllUnusedPages(); | 6341 space->ReleaseAllUnusedPages(); |
| (...skipping 987 matching lines...) |
| 7502 static_cast<int>(object_sizes_last_time_[index])); | 7329 static_cast<int>(object_sizes_last_time_[index])); |
| 7503 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7330 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7504 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7331 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7505 | 7332 |
| 7506 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7333 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7507 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7334 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7508 ClearObjectStats(); | 7335 ClearObjectStats(); |
| 7509 } | 7336 } |
| 7510 | 7337 |
| 7511 } } // namespace v8::internal | 7338 } } // namespace v8::internal |