| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 11 matching lines...) |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #include "heap-snapshot-generator-inl.h" | 30 #include "heap-snapshot-generator-inl.h" |
| 31 | 31 |
| 32 #include "allocation-tracker.h" | 32 #include "code-stubs.h" |
| 33 #include "heap-profiler.h" | 33 #include "heap-profiler.h" |
| 34 #include "debug.h" | 34 #include "debug.h" |
| 35 #include "types.h" | 35 #include "types.h" |
| 36 | 36 |
| 37 namespace v8 { | 37 namespace v8 { |
| 38 namespace internal { | 38 namespace internal { |
| 39 | 39 |
| 40 | 40 |
| 41 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to) | 41 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to) |
| 42 : type_(type), | 42 : type_(type), |
| (...skipping 698 matching lines...) |
| 741 sizeof(*this) + | 741 sizeof(*this) + |
| 742 sizeof(HashMap::Entry) * entries_map_.capacity() + | 742 sizeof(HashMap::Entry) * entries_map_.capacity() + |
| 743 GetMemoryUsedByList(entries_) + | 743 GetMemoryUsedByList(entries_) + |
| 744 GetMemoryUsedByList(time_intervals_); | 744 GetMemoryUsedByList(time_intervals_); |
| 745 } | 745 } |
| 746 | 746 |
| 747 | 747 |
| 748 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap) | 748 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap) |
| 749 : is_tracking_objects_(false), | 749 : is_tracking_objects_(false), |
| 750 names_(heap), | 750 names_(heap), |
| 751 ids_(heap), | 751 ids_(heap) { |
| 752 allocation_tracker_(NULL) { | |
| 753 } | 752 } |
| 754 | 753 |
| 755 | 754 |
| 756 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) { | 755 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) { |
| 757 delete *snapshot_ptr; | 756 delete *snapshot_ptr; |
| 758 } | 757 } |
| 759 | 758 |
| 760 | 759 |
| 761 HeapSnapshotsCollection::~HeapSnapshotsCollection() { | 760 HeapSnapshotsCollection::~HeapSnapshotsCollection() { |
| 762 delete allocation_tracker_; | |
| 763 snapshots_.Iterate(DeleteHeapSnapshot); | 761 snapshots_.Iterate(DeleteHeapSnapshot); |
| 764 } | 762 } |
| 765 | 763 |
| 766 | 764 |
| 767 void HeapSnapshotsCollection::StartHeapObjectsTracking() { | |
| 768 ids_.UpdateHeapObjectsMap(); | |
| 769 if (allocation_tracker_ == NULL) { | |
| 770 allocation_tracker_ = new AllocationTracker(&ids_, names()); | |
| 771 } | |
| 772 is_tracking_objects_ = true; | |
| 773 } | |
| 774 | |
| 775 | |
| 776 void HeapSnapshotsCollection::StopHeapObjectsTracking() { | |
| 777 ids_.StopHeapObjectsTracking(); | |
| 778 if (allocation_tracker_ != NULL) { | |
| 779 delete allocation_tracker_; | |
| 780 allocation_tracker_ = NULL; | |
| 781 } | |
| 782 } | |
| 783 | |
| 784 | |
| 785 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name, | 765 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name, |
| 786 unsigned uid) { | 766 unsigned uid) { |
| 787 is_tracking_objects_ = true; // Start watching for heap objects moves. | 767 is_tracking_objects_ = true; // Start watching for heap objects moves. |
| 788 return new HeapSnapshot(this, name, uid); | 768 return new HeapSnapshot(this, name, uid); |
| 789 } | 769 } |
| 790 | 770 |
| 791 | 771 |
| 792 void HeapSnapshotsCollection::SnapshotGenerationFinished( | 772 void HeapSnapshotsCollection::SnapshotGenerationFinished( |
| 793 HeapSnapshot* snapshot) { | 773 HeapSnapshot* snapshot) { |
| 794 ids_.SnapshotGenerationFinished(); | 774 ids_.SnapshotGenerationFinished(); |
| (...skipping 23 matching lines...) |
| 818 if (ids_.FindEntry(obj->address()) == id) { | 798 if (ids_.FindEntry(obj->address()) == id) { |
| 819 ASSERT(object == NULL); | 799 ASSERT(object == NULL); |
| 820 object = obj; | 800 object = obj; |
| 821 // Can't break -- kFilterUnreachable requires full heap traversal. | 801 // Can't break -- kFilterUnreachable requires full heap traversal. |
| 822 } | 802 } |
| 823 } | 803 } |
| 824 return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>(); | 804 return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>(); |
| 825 } | 805 } |
| 826 | 806 |
| 827 | 807 |
| 828 void HeapSnapshotsCollection::NewObjectEvent(Address addr, int size) { | |
| 829 DisallowHeapAllocation no_allocation; | |
| 830 ids_.NewObject(addr, size); | |
| 831 if (allocation_tracker_ != NULL) { | |
| 832 allocation_tracker_->NewObjectEvent(addr, size); | |
| 833 } | |
| 834 } | |
| 835 | |
| 836 | |
| 837 size_t HeapSnapshotsCollection::GetUsedMemorySize() const { | 808 size_t HeapSnapshotsCollection::GetUsedMemorySize() const { |
| 838 size_t size = sizeof(*this); | 809 size_t size = sizeof(*this); |
| 839 size += names_.GetUsedMemorySize(); | 810 size += names_.GetUsedMemorySize(); |
| 840 size += ids_.GetUsedMemorySize(); | 811 size += ids_.GetUsedMemorySize(); |
| 841 size += GetMemoryUsedByList(snapshots_); | 812 size += GetMemoryUsedByList(snapshots_); |
| 842 for (int i = 0; i < snapshots_.length(); ++i) { | 813 for (int i = 0; i < snapshots_.length(); ++i) { |
| 843 size += snapshots_[i]->RawSnapshotSize(); | 814 size += snapshots_[i]->RawSnapshotSize(); |
| 844 } | 815 } |
| 845 return size; | 816 return size; |
| 846 } | 817 } |
| (...skipping 256 matching lines...) |
| 1103 HeapObject* parent_obj, | 1074 HeapObject* parent_obj, |
| 1104 int parent) | 1075 int parent) |
| 1105 : generator_(generator), | 1076 : generator_(generator), |
| 1106 parent_obj_(parent_obj), | 1077 parent_obj_(parent_obj), |
| 1107 parent_(parent), | 1078 parent_(parent), |
| 1108 next_index_(0) { | 1079 next_index_(0) { |
| 1109 } | 1080 } |
| 1110 void VisitCodeEntry(Address entry_address) { | 1081 void VisitCodeEntry(Address entry_address) { |
| 1111 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address)); | 1082 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address)); |
| 1112 generator_->SetInternalReference(parent_obj_, parent_, "code", code); | 1083 generator_->SetInternalReference(parent_obj_, parent_, "code", code); |
| 1113 generator_->TagObject(code, "(code)"); | 1084 generator_->TagCodeObject(code); |
| 1114 } | 1085 } |
| 1115 void VisitPointers(Object** start, Object** end) { | 1086 void VisitPointers(Object** start, Object** end) { |
| 1116 for (Object** p = start; p < end; p++) { | 1087 for (Object** p = start; p < end; p++) { |
| 1117 if (CheckVisitedAndUnmark(p)) continue; | 1088 if (CheckVisitedAndUnmark(p)) continue; |
| 1118 generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p); | 1089 generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p); |
| 1119 } | 1090 } |
| 1120 } | 1091 } |
| 1121 static void MarkVisitedField(HeapObject* obj, int offset) { | 1092 static void MarkVisitedField(HeapObject* obj, int offset) { |
| 1122 if (offset < 0) return; | 1093 if (offset < 0) return; |
| 1123 Address field = obj->address() + offset; | 1094 Address field = obj->address() + offset; |
| (...skipping 239 matching lines...) |
| 1363 TagObject(map->dependent_code(), "(dependent code)"); | 1334 TagObject(map->dependent_code(), "(dependent code)"); |
| 1364 SetInternalReference(map, entry, | 1335 SetInternalReference(map, entry, |
| 1365 "dependent_code", map->dependent_code(), | 1336 "dependent_code", map->dependent_code(), |
| 1366 Map::kDependentCodeOffset); | 1337 Map::kDependentCodeOffset); |
| 1367 } | 1338 } |
| 1368 | 1339 |
| 1369 | 1340 |
| 1370 void V8HeapExplorer::ExtractSharedFunctionInfoReferences( | 1341 void V8HeapExplorer::ExtractSharedFunctionInfoReferences( |
| 1371 int entry, SharedFunctionInfo* shared) { | 1342 int entry, SharedFunctionInfo* shared) { |
| 1372 HeapObject* obj = shared; | 1343 HeapObject* obj = shared; |
| 1344 StringsStorage* names = collection_->names(); |
| 1345 String* shared_name = shared->DebugName(); |
| 1346 const char* name = NULL; |
| 1347 if (shared_name != *heap_->isolate()->factory()->empty_string()) { |
| 1348 name = names->GetName(shared_name); |
| 1349 TagObject(shared->code(), names->GetFormatted("(code for %s)", name)); |
| 1350 } else { |
| 1351 TagObject(shared->code(), names->GetFormatted("(%s code)", |
| 1352 Code::Kind2String(shared->code()->kind()))); |
| 1353 } |
| 1354 |
| 1373 SetInternalReference(obj, entry, | 1355 SetInternalReference(obj, entry, |
| 1374 "name", shared->name(), | 1356 "name", shared->name(), |
| 1375 SharedFunctionInfo::kNameOffset); | 1357 SharedFunctionInfo::kNameOffset); |
| 1376 TagObject(shared->code(), "(code)"); | |
| 1377 SetInternalReference(obj, entry, | 1358 SetInternalReference(obj, entry, |
| 1378 "code", shared->code(), | 1359 "code", shared->code(), |
| 1379 SharedFunctionInfo::kCodeOffset); | 1360 SharedFunctionInfo::kCodeOffset); |
| 1380 TagObject(shared->scope_info(), "(function scope info)"); | 1361 TagObject(shared->scope_info(), "(function scope info)"); |
| 1381 SetInternalReference(obj, entry, | 1362 SetInternalReference(obj, entry, |
| 1382 "scope_info", shared->scope_info(), | 1363 "scope_info", shared->scope_info(), |
| 1383 SharedFunctionInfo::kScopeInfoOffset); | 1364 SharedFunctionInfo::kScopeInfoOffset); |
| 1384 SetInternalReference(obj, entry, | 1365 SetInternalReference(obj, entry, |
| 1385 "instance_class_name", shared->instance_class_name(), | 1366 "instance_class_name", shared->instance_class_name(), |
| 1386 SharedFunctionInfo::kInstanceClassNameOffset); | 1367 SharedFunctionInfo::kInstanceClassNameOffset); |
| 1387 SetInternalReference(obj, entry, | 1368 SetInternalReference(obj, entry, |
| 1388 "script", shared->script(), | 1369 "script", shared->script(), |
| 1389 SharedFunctionInfo::kScriptOffset); | 1370 SharedFunctionInfo::kScriptOffset); |
| 1390 TagObject(shared->construct_stub(), "(code)"); | 1371 const char* construct_stub_name = name ? |
| 1372 names->GetFormatted("(construct stub code for %s)", name) : |
| 1373 "(construct stub code)"; |
| 1374 TagObject(shared->construct_stub(), construct_stub_name); |
| 1391 SetInternalReference(obj, entry, | 1375 SetInternalReference(obj, entry, |
| 1392 "construct_stub", shared->construct_stub(), | 1376 "construct_stub", shared->construct_stub(), |
| 1393 SharedFunctionInfo::kConstructStubOffset); | 1377 SharedFunctionInfo::kConstructStubOffset); |
| 1394 SetInternalReference(obj, entry, | 1378 SetInternalReference(obj, entry, |
| 1395 "function_data", shared->function_data(), | 1379 "function_data", shared->function_data(), |
| 1396 SharedFunctionInfo::kFunctionDataOffset); | 1380 SharedFunctionInfo::kFunctionDataOffset); |
| 1397 SetInternalReference(obj, entry, | 1381 SetInternalReference(obj, entry, |
| 1398 "debug_info", shared->debug_info(), | 1382 "debug_info", shared->debug_info(), |
| 1399 SharedFunctionInfo::kDebugInfoOffset); | 1383 SharedFunctionInfo::kDebugInfoOffset); |
| 1400 SetInternalReference(obj, entry, | 1384 SetInternalReference(obj, entry, |
| 1401 "inferred_name", shared->inferred_name(), | 1385 "inferred_name", shared->inferred_name(), |
| 1402 SharedFunctionInfo::kInferredNameOffset); | 1386 SharedFunctionInfo::kInferredNameOffset); |
| 1387 SetInternalReference(obj, entry, |
| 1388 "optimized_code_map", shared->optimized_code_map(), |
| 1389 SharedFunctionInfo::kOptimizedCodeMapOffset); |
| 1403 SetWeakReference(obj, entry, | 1390 SetWeakReference(obj, entry, |
| 1404 1, shared->initial_map(), | 1391 1, shared->initial_map(), |
| 1405 SharedFunctionInfo::kInitialMapOffset); | 1392 SharedFunctionInfo::kInitialMapOffset); |
| 1406 } | 1393 } |
| 1407 | 1394 |
| 1408 | 1395 |
| 1409 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) { | 1396 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) { |
| 1410 HeapObject* obj = script; | 1397 HeapObject* obj = script; |
| 1411 SetInternalReference(obj, entry, | 1398 SetInternalReference(obj, entry, |
| 1412 "source", script->source(), | 1399 "source", script->source(), |
| (...skipping 29 matching lines...) |
| 1442 SetInternalReference(code_cache, entry, | 1429 SetInternalReference(code_cache, entry, |
| 1443 "default_cache", code_cache->default_cache(), | 1430 "default_cache", code_cache->default_cache(), |
| 1444 CodeCache::kDefaultCacheOffset); | 1431 CodeCache::kDefaultCacheOffset); |
| 1445 TagObject(code_cache->normal_type_cache(), "(code type cache)"); | 1432 TagObject(code_cache->normal_type_cache(), "(code type cache)"); |
| 1446 SetInternalReference(code_cache, entry, | 1433 SetInternalReference(code_cache, entry, |
| 1447 "type_cache", code_cache->normal_type_cache(), | 1434 "type_cache", code_cache->normal_type_cache(), |
| 1448 CodeCache::kNormalTypeCacheOffset); | 1435 CodeCache::kNormalTypeCacheOffset); |
| 1449 } | 1436 } |
| 1450 | 1437 |
| 1451 | 1438 |
| 1439 void V8HeapExplorer::TagCodeObject(Code* code, const char* external_name) { |
| 1440 TagObject(code, collection_->names()->GetFormatted("(%s code)", |
| 1441 external_name)); |
| 1442 } |
| 1443 |
| 1444 |
| 1445 void V8HeapExplorer::TagCodeObject(Code* code) { |
| 1446 if (code->kind() == Code::STUB) { |
| 1447 TagObject(code, collection_->names()->GetFormatted( |
| 1448 "(%s code)", CodeStub::MajorName( |
| 1449 static_cast<CodeStub::Major>(code->major_key()), true))); |
| 1450 } |
| 1451 } |
| 1452 |
| 1453 |
| 1452 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) { | 1454 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) { |
| 1455 TagCodeObject(code); |
| 1453 TagObject(code->relocation_info(), "(code relocation info)"); | 1456 TagObject(code->relocation_info(), "(code relocation info)"); |
| 1454 SetInternalReference(code, entry, | 1457 SetInternalReference(code, entry, |
| 1455 "relocation_info", code->relocation_info(), | 1458 "relocation_info", code->relocation_info(), |
| 1456 Code::kRelocationInfoOffset); | 1459 Code::kRelocationInfoOffset); |
| 1457 SetInternalReference(code, entry, | 1460 SetInternalReference(code, entry, |
| 1458 "handler_table", code->handler_table(), | 1461 "handler_table", code->handler_table(), |
| 1459 Code::kHandlerTableOffset); | 1462 Code::kHandlerTableOffset); |
| 1460 TagObject(code->deoptimization_data(), "(code deopt data)"); | 1463 TagObject(code->deoptimization_data(), "(code deopt data)"); |
| 1461 SetInternalReference(code, entry, | 1464 SetInternalReference(code, entry, |
| 1462 "deoptimization_data", code->deoptimization_data(), | 1465 "deoptimization_data", code->deoptimization_data(), |
| (...skipping 225 matching lines...) |
| 1688 class RootsReferencesExtractor : public ObjectVisitor { | 1691 class RootsReferencesExtractor : public ObjectVisitor { |
| 1689 private: | 1692 private: |
| 1690 struct IndexTag { | 1693 struct IndexTag { |
| 1691 IndexTag(int index, VisitorSynchronization::SyncTag tag) | 1694 IndexTag(int index, VisitorSynchronization::SyncTag tag) |
| 1692 : index(index), tag(tag) { } | 1695 : index(index), tag(tag) { } |
| 1693 int index; | 1696 int index; |
| 1694 VisitorSynchronization::SyncTag tag; | 1697 VisitorSynchronization::SyncTag tag; |
| 1695 }; | 1698 }; |
| 1696 | 1699 |
| 1697 public: | 1700 public: |
| 1698 RootsReferencesExtractor() | 1701 explicit RootsReferencesExtractor(Heap* heap) |
| 1699 : collecting_all_references_(false), | 1702 : collecting_all_references_(false), |
| 1700 previous_reference_count_(0) { | 1703 previous_reference_count_(0), |
| 1704 heap_(heap) { |
| 1701 } | 1705 } |
| 1702 | 1706 |
| 1703 void VisitPointers(Object** start, Object** end) { | 1707 void VisitPointers(Object** start, Object** end) { |
| 1704 if (collecting_all_references_) { | 1708 if (collecting_all_references_) { |
| 1705 for (Object** p = start; p < end; p++) all_references_.Add(*p); | 1709 for (Object** p = start; p < end; p++) all_references_.Add(*p); |
| 1706 } else { | 1710 } else { |
| 1707 for (Object** p = start; p < end; p++) strong_references_.Add(*p); | 1711 for (Object** p = start; p < end; p++) strong_references_.Add(*p); |
| 1708 } | 1712 } |
| 1709 } | 1713 } |
| 1710 | 1714 |
| 1711 void SetCollectingAllReferences() { collecting_all_references_ = true; } | 1715 void SetCollectingAllReferences() { collecting_all_references_ = true; } |
| 1712 | 1716 |
| 1713 void FillReferences(V8HeapExplorer* explorer) { | 1717 void FillReferences(V8HeapExplorer* explorer) { |
| 1714 ASSERT(strong_references_.length() <= all_references_.length()); | 1718 ASSERT(strong_references_.length() <= all_references_.length()); |
| 1719 Builtins* builtins = heap_->isolate()->builtins(); |
| 1715 for (int i = 0; i < reference_tags_.length(); ++i) { | 1720 for (int i = 0; i < reference_tags_.length(); ++i) { |
| 1716 explorer->SetGcRootsReference(reference_tags_[i].tag); | 1721 explorer->SetGcRootsReference(reference_tags_[i].tag); |
| 1717 } | 1722 } |
| 1718 int strong_index = 0, all_index = 0, tags_index = 0; | 1723 int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0; |
| 1719 while (all_index < all_references_.length()) { | 1724 while (all_index < all_references_.length()) { |
| 1720 if (strong_index < strong_references_.length() && | 1725 if (strong_index < strong_references_.length() && |
| 1721 strong_references_[strong_index] == all_references_[all_index]) { | 1726 strong_references_[strong_index] == all_references_[all_index]) { |
| 1722 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag, | 1727 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag, |
| 1723 false, | 1728 false, |
| 1724 all_references_[all_index++]); | 1729 all_references_[all_index]); |
| 1725 ++strong_index; | 1730 ++strong_index; |
| 1726 } else { | 1731 } else { |
| 1727 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag, | 1732 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag, |
| 1728 true, | 1733 true, |
| 1729 all_references_[all_index++]); | 1734 all_references_[all_index]); |
| 1730 } | 1735 } |
| 1736 if (reference_tags_[tags_index].tag == |
| 1737 VisitorSynchronization::kBuiltins) { |
| 1738 ASSERT(all_references_[all_index]->IsCode()); |
| 1739 explorer->TagCodeObject(Code::cast(all_references_[all_index]), |
| 1740 builtins->name(builtin_index++)); |
| 1741 } |
| 1742 ++all_index; |
| 1731 if (reference_tags_[tags_index].index == all_index) ++tags_index; | 1743 if (reference_tags_[tags_index].index == all_index) ++tags_index; |
| 1732 } | 1744 } |
| 1733 } | 1745 } |
| 1734 | 1746 |
| 1735 void Synchronize(VisitorSynchronization::SyncTag tag) { | 1747 void Synchronize(VisitorSynchronization::SyncTag tag) { |
| 1736 if (collecting_all_references_ && | 1748 if (collecting_all_references_ && |
| 1737 previous_reference_count_ != all_references_.length()) { | 1749 previous_reference_count_ != all_references_.length()) { |
| 1738 previous_reference_count_ = all_references_.length(); | 1750 previous_reference_count_ = all_references_.length(); |
| 1739 reference_tags_.Add(IndexTag(previous_reference_count_, tag)); | 1751 reference_tags_.Add(IndexTag(previous_reference_count_, tag)); |
| 1740 } | 1752 } |
| 1741 } | 1753 } |
| 1742 | 1754 |
| 1743 private: | 1755 private: |
| 1744 bool collecting_all_references_; | 1756 bool collecting_all_references_; |
| 1745 List<Object*> strong_references_; | 1757 List<Object*> strong_references_; |
| 1746 List<Object*> all_references_; | 1758 List<Object*> all_references_; |
| 1747 int previous_reference_count_; | 1759 int previous_reference_count_; |
| 1748 List<IndexTag> reference_tags_; | 1760 List<IndexTag> reference_tags_; |
| 1761 Heap* heap_; |
| 1749 }; | 1762 }; |
| 1750 | 1763 |
| 1751 | 1764 |
| 1752 bool V8HeapExplorer::IterateAndExtractReferences( | 1765 bool V8HeapExplorer::IterateAndExtractReferences( |
| 1753 SnapshotFillerInterface* filler) { | 1766 SnapshotFillerInterface* filler) { |
| 1754 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); | 1767 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); |
| 1755 | 1768 |
| 1756 filler_ = filler; | 1769 filler_ = filler; |
| 1757 bool interrupted = false; | 1770 bool interrupted = false; |
| 1758 | 1771 |
| 1759 // Heap iteration with filtering must be finished in any case. | 1772 // Heap iteration with filtering must be finished in any case. |
| 1760 for (HeapObject* obj = iterator.next(); | 1773 for (HeapObject* obj = iterator.next(); |
| 1761 obj != NULL; | 1774 obj != NULL; |
| 1762 obj = iterator.next(), progress_->ProgressStep()) { | 1775 obj = iterator.next(), progress_->ProgressStep()) { |
| 1763 if (!interrupted) { | 1776 if (!interrupted) { |
| 1764 ExtractReferences(obj); | 1777 ExtractReferences(obj); |
| 1765 if (!progress_->ProgressReport(false)) interrupted = true; | 1778 if (!progress_->ProgressReport(false)) interrupted = true; |
| 1766 } | 1779 } |
| 1767 } | 1780 } |
| 1768 if (interrupted) { | 1781 if (interrupted) { |
| 1769 filler_ = NULL; | 1782 filler_ = NULL; |
| 1770 return false; | 1783 return false; |
| 1771 } | 1784 } |
| 1772 | 1785 |
| 1773 SetRootGcRootsReference(); | 1786 SetRootGcRootsReference(); |
| 1774 RootsReferencesExtractor extractor; | 1787 RootsReferencesExtractor extractor(heap_); |
| 1775 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG); | 1788 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG); |
| 1776 extractor.SetCollectingAllReferences(); | 1789 extractor.SetCollectingAllReferences(); |
| 1777 heap_->IterateRoots(&extractor, VISIT_ALL); | 1790 heap_->IterateRoots(&extractor, VISIT_ALL); |
| 1778 extractor.FillReferences(this); | 1791 extractor.FillReferences(this); |
| 1779 filler_ = NULL; | 1792 filler_ = NULL; |
| 1780 return progress_->ProgressReport(true); | 1793 return progress_->ProgressReport(true); |
| 1781 } | 1794 } |
| 1782 | 1795 |
| 1783 | 1796 |
| 1784 bool V8HeapExplorer::IsEssentialObject(Object* object) { | 1797 bool V8HeapExplorer::IsEssentialObject(Object* object) { |
| (...skipping 840 matching lines...) |
| 2625 bool aborted_; | 2638 bool aborted_; |
| 2626 }; | 2639 }; |
| 2627 | 2640 |
| 2628 | 2641 |
| 2629 // type, name|index, to_node. | 2642 // type, name|index, to_node. |
| 2630 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3; | 2643 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3; |
| 2631 // type, name, id, self_size, children_index. | 2644 // type, name, id, self_size, children_index. |
| 2632 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5; | 2645 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5; |
| 2633 | 2646 |
| 2634 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) { | 2647 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) { |
| 2635 if (AllocationTracker* allocation_tracker = | |
| 2636 snapshot_->collection()->allocation_tracker()) { | |
| 2637 allocation_tracker->PrepareForSerialization(); | |
| 2638 } | |
| 2639 ASSERT(writer_ == NULL); | 2648 ASSERT(writer_ == NULL); |
| 2640 writer_ = new OutputStreamWriter(stream); | 2649 writer_ = new OutputStreamWriter(stream); |
| 2641 SerializeImpl(); | 2650 SerializeImpl(); |
| 2642 delete writer_; | 2651 delete writer_; |
| 2643 writer_ = NULL; | 2652 writer_ = NULL; |
| 2644 } | 2653 } |
| 2645 | 2654 |
| 2646 | 2655 |
| 2647 void HeapSnapshotJSONSerializer::SerializeImpl() { | 2656 void HeapSnapshotJSONSerializer::SerializeImpl() { |
| 2648 ASSERT(0 == snapshot_->root()->index()); | 2657 ASSERT(0 == snapshot_->root()->index()); |
| 2649 writer_->AddCharacter('{'); | 2658 writer_->AddCharacter('{'); |
| 2650 writer_->AddString("\"snapshot\":{"); | 2659 writer_->AddString("\"snapshot\":{"); |
| 2651 SerializeSnapshot(); | 2660 SerializeSnapshot(); |
| 2652 if (writer_->aborted()) return; | 2661 if (writer_->aborted()) return; |
| 2653 writer_->AddString("},\n"); | 2662 writer_->AddString("},\n"); |
| 2654 writer_->AddString("\"nodes\":["); | 2663 writer_->AddString("\"nodes\":["); |
| 2655 SerializeNodes(); | 2664 SerializeNodes(); |
| 2656 if (writer_->aborted()) return; | 2665 if (writer_->aborted()) return; |
| 2657 writer_->AddString("],\n"); | 2666 writer_->AddString("],\n"); |
| 2658 writer_->AddString("\"edges\":["); | 2667 writer_->AddString("\"edges\":["); |
| 2659 SerializeEdges(); | 2668 SerializeEdges(); |
| 2660 if (writer_->aborted()) return; | 2669 if (writer_->aborted()) return; |
| 2661 writer_->AddString("],\n"); | 2670 writer_->AddString("],\n"); |
| 2662 | |
| 2663 writer_->AddString("\"trace_function_infos\":["); | |
| 2664 SerializeTraceNodeInfos(); | |
| 2665 if (writer_->aborted()) return; | |
| 2666 writer_->AddString("],\n"); | |
| 2667 writer_->AddString("\"trace_tree\":["); | |
| 2668 SerializeTraceTree(); | |
| 2669 if (writer_->aborted()) return; | |
| 2670 writer_->AddString("],\n"); | |
| 2671 | |
| 2672 writer_->AddString("\"strings\":["); | 2671 writer_->AddString("\"strings\":["); |
| 2673 SerializeStrings(); | 2672 SerializeStrings(); |
| 2674 if (writer_->aborted()) return; | 2673 if (writer_->aborted()) return; |
| 2675 writer_->AddCharacter(']'); | 2674 writer_->AddCharacter(']'); |
| 2676 writer_->AddCharacter('}'); | 2675 writer_->AddCharacter('}'); |
| 2677 writer_->Finalize(); | 2676 writer_->Finalize(); |
| 2678 } | 2677 } |
| 2679 | 2678 |
| 2680 | 2679 |
| 2681 int HeapSnapshotJSONSerializer::GetStringId(const char* s) { | 2680 int HeapSnapshotJSONSerializer::GetStringId(const char* s) { |
| (...skipping 140 matching lines...) |
| 2822 JSON_S("edge_types") ":" JSON_A( | 2821 JSON_S("edge_types") ":" JSON_A( |
| 2823 JSON_A( | 2822 JSON_A( |
| 2824 JSON_S("context") "," | 2823 JSON_S("context") "," |
| 2825 JSON_S("element") "," | 2824 JSON_S("element") "," |
| 2826 JSON_S("property") "," | 2825 JSON_S("property") "," |
| 2827 JSON_S("internal") "," | 2826 JSON_S("internal") "," |
| 2828 JSON_S("hidden") "," | 2827 JSON_S("hidden") "," |
| 2829 JSON_S("shortcut") "," | 2828 JSON_S("shortcut") "," |
| 2830 JSON_S("weak")) "," | 2829 JSON_S("weak")) "," |
| 2831 JSON_S("string_or_number") "," | 2830 JSON_S("string_or_number") "," |
| 2832 JSON_S("node")) "," | 2831 JSON_S("node")))); |
| 2833 JSON_S("trace_function_info_fields") ":" JSON_A( | |
| 2834 JSON_S("function_id") "," | |
| 2835 JSON_S("name") "," | |
| 2836 JSON_S("script_name") "," | |
| 2837 JSON_S("script_id") "," | |
| 2838 JSON_S("line") "," | |
| 2839 JSON_S("column")) "," | |
| 2840 JSON_S("trace_node_fields") ":" JSON_A( | |
| 2841 JSON_S("id") "," | |
| 2842 JSON_S("function_id") "," | |
| 2843 JSON_S("count") "," | |
| 2844 JSON_S("size") "," | |
| 2845 JSON_S("children")))); | |
| 2846 #undef JSON_S | 2832 #undef JSON_S |
| 2847 #undef JSON_O | 2833 #undef JSON_O |
| 2848 #undef JSON_A | 2834 #undef JSON_A |
| 2849 writer_->AddString(",\"node_count\":"); | 2835 writer_->AddString(",\"node_count\":"); |
| 2850 writer_->AddNumber(snapshot_->entries().length()); | 2836 writer_->AddNumber(snapshot_->entries().length()); |
| 2851 writer_->AddString(",\"edge_count\":"); | 2837 writer_->AddString(",\"edge_count\":"); |
| 2852 writer_->AddNumber(snapshot_->edges().length()); | 2838 writer_->AddNumber(snapshot_->edges().length()); |
| 2853 writer_->AddString(",\"trace_function_count\":"); | |
| 2854 uint32_t count = 0; | |
| 2855 AllocationTracker* tracker = snapshot_->collection()->allocation_tracker(); | |
| 2856 if (tracker) { | |
| 2857 count = tracker->id_to_function_info()->occupancy(); | |
| 2858 } | |
| 2859 writer_->AddNumber(count); | |
| 2860 } | 2839 } |
| 2861 | 2840 |
| 2862 | 2841 |
| 2863 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) { | 2842 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) { |
| 2864 static const char hex_chars[] = "0123456789ABCDEF"; | 2843 static const char hex_chars[] = "0123456789ABCDEF"; |
| 2865 w->AddString("\\u"); | 2844 w->AddString("\\u"); |
| 2866 w->AddCharacter(hex_chars[(u >> 12) & 0xf]); | 2845 w->AddCharacter(hex_chars[(u >> 12) & 0xf]); |
| 2867 w->AddCharacter(hex_chars[(u >> 8) & 0xf]); | 2846 w->AddCharacter(hex_chars[(u >> 8) & 0xf]); |
| 2868 w->AddCharacter(hex_chars[(u >> 4) & 0xf]); | 2847 w->AddCharacter(hex_chars[(u >> 4) & 0xf]); |
| 2869 w->AddCharacter(hex_chars[u & 0xf]); | 2848 w->AddCharacter(hex_chars[u & 0xf]); |
| 2870 } | 2849 } |
| 2871 | 2850 |
| 2872 | 2851 |
| 2873 void HeapSnapshotJSONSerializer::SerializeTraceTree() { | |
| 2874 AllocationTracker* tracker = snapshot_->collection()->allocation_tracker(); | |
| 2875 if (!tracker) return; | |
| 2876 AllocationTraceTree* traces = tracker->trace_tree(); | |
| 2877 SerializeTraceNode(traces->root()); | |
| 2878 } | |
| 2879 | |
| 2880 | |
| 2881 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) { | |
| 2882 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0 | |
| 2883 const int kBufferSize = | |
| 2884 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT | |
| 2885 + 4 + 1 + 1; | |
| 2886 EmbeddedVector<char, kBufferSize> buffer; | |
| 2887 int buffer_pos = 0; | |
| 2888 buffer_pos = utoa(node->id(), buffer, buffer_pos); | |
| 2889 buffer[buffer_pos++] = ','; | |
| 2890 buffer_pos = utoa(node->function_id(), buffer, buffer_pos); | |
| 2891 buffer[buffer_pos++] = ','; | |
| 2892 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos); | |
| 2893 buffer[buffer_pos++] = ','; | |
| 2894 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos); | |
| 2895 buffer[buffer_pos++] = ','; | |
| 2896 buffer[buffer_pos++] = '['; | |
| 2897 buffer[buffer_pos++] = '\0'; | |
| 2898 writer_->AddString(buffer.start()); | |
| 2899 | |
| 2900 Vector<AllocationTraceNode*> children = node->children(); | |
| 2901 for (int i = 0; i < children.length(); i++) { | |
| 2902 if (i > 0) { | |
| 2903 writer_->AddCharacter(','); | |
| 2904 } | |
| 2905 SerializeTraceNode(children[i]); | |
| 2906 } | |
| 2907 writer_->AddCharacter(']'); | |
| 2908 } | |
| 2909 | |
| 2910 | |
| 2911 // 0-based position is converted to 1-based during the serialization. | |
| 2912 static int SerializePosition(int position, const Vector<char>& buffer, | |
| 2913 int buffer_pos) { | |
| 2914 if (position == -1) { | |
| 2915 buffer[buffer_pos++] = '0'; | |
| 2916 } else { | |
| 2917 ASSERT(position >= 0); | |
| 2918 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos); | |
| 2919 } | |
| 2920 return buffer_pos; | |
| 2921 } | |
| 2922 | |
| 2923 | |
| 2924 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() { | |
| 2925 AllocationTracker* tracker = snapshot_->collection()->allocation_tracker(); | |
| 2926 if (!tracker) return; | |
| 2927 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0 | |
| 2928 const int kBufferSize = | |
| 2929 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT | |
| 2930 + 6 + 1 + 1; | |
| 2931 EmbeddedVector<char, kBufferSize> buffer; | |
| 2932 HashMap* id_to_function_info = tracker->id_to_function_info(); | |
| 2933 bool first_entry = true; | |
| 2934 for (HashMap::Entry* p = id_to_function_info->Start(); | |
| 2935 p != NULL; | |
| 2936 p = id_to_function_info->Next(p)) { | |
| 2937 SnapshotObjectId id = | |
| 2938 static_cast<SnapshotObjectId>(reinterpret_cast<intptr_t>(p->key)); | |
| 2939 AllocationTracker::FunctionInfo* info = | |
| 2940 reinterpret_cast<AllocationTracker::FunctionInfo* >(p->value); | |
| 2941 int buffer_pos = 0; | |
| 2942 if (first_entry) { | |
| 2943 first_entry = false; | |
| 2944 } else { | |
| 2945 buffer[buffer_pos++] = ','; | |
| 2946 } | |
| 2947 buffer_pos = utoa(id, buffer, buffer_pos); | |
| 2948 buffer[buffer_pos++] = ','; | |
| 2949 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos); | |
| 2950 buffer[buffer_pos++] = ','; | |
| 2951 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos); | |
| 2952 buffer[buffer_pos++] = ','; | |
| 2953 // The cast is safe because script id is a non-negative Smi. | |
| 2954 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer, | |
| 2955 buffer_pos); | |
| 2956 buffer[buffer_pos++] = ','; | |
| 2957 buffer_pos = SerializePosition(info->line, buffer, buffer_pos); | |
| 2958 buffer[buffer_pos++] = ','; | |
| 2959 buffer_pos = SerializePosition(info->column, buffer, buffer_pos); | |
| 2960 buffer[buffer_pos++] = '\n'; | |
| 2961 buffer[buffer_pos++] = '\0'; | |
| 2962 writer_->AddString(buffer.start()); | |
| 2963 } | |
| 2964 } | |
| 2965 | |
| 2966 | |
| 2967 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) { | 2852 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) { |
| 2968 writer_->AddCharacter('\n'); | 2853 writer_->AddCharacter('\n'); |
| 2969 writer_->AddCharacter('\"'); | 2854 writer_->AddCharacter('\"'); |
| 2970 for ( ; *s != '\0'; ++s) { | 2855 for ( ; *s != '\0'; ++s) { |
| 2971 switch (*s) { | 2856 switch (*s) { |
| 2972 case '\b': | 2857 case '\b': |
| 2973 writer_->AddString("\\b"); | 2858 writer_->AddString("\\b"); |
| 2974 continue; | 2859 continue; |
| 2975 case '\f': | 2860 case '\f': |
| 2976 writer_->AddString("\\f"); | 2861 writer_->AddString("\\f"); |
| (...skipping 49 matching lines...) |
| 3026 writer_->AddString("\"<dummy>\""); | 2911 writer_->AddString("\"<dummy>\""); |
| 3027 for (int i = 1; i < sorted_strings.length(); ++i) { | 2912 for (int i = 1; i < sorted_strings.length(); ++i) { |
| 3028 writer_->AddCharacter(','); | 2913 writer_->AddCharacter(','); |
| 3029 SerializeString(sorted_strings[i]); | 2914 SerializeString(sorted_strings[i]); |
| 3030 if (writer_->aborted()) return; | 2915 if (writer_->aborted()) return; |
| 3031 } | 2916 } |
| 3032 } | 2917 } |
| 3033 | 2918 |
| 3034 | 2919 |
| 3035 } } // namespace v8::internal | 2920 } } // namespace v8::internal |
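
For readers skimming the diff, the net effect of the tagging changes (the new `TagCodeObject` overloads and the rewritten `ExtractSharedFunctionInfoReferences`) is that `Code` entries in a heap snapshot get descriptive names instead of the generic `"(code)"`. The standalone sketch below mirrors that naming rule for illustration only; it is not V8 code, and `TagForCode` and its parameters are hypothetical names chosen for this example.

```cpp
#include <cstdio>
#include <string>

// Mirrors the tagging logic in the diff above: code owned by a named
// function is tagged "(code for <name>)", anonymous code falls back to
// "(<kind> code)", and construct stubs get their own variants.
std::string TagForCode(const std::string& function_name,
                       const std::string& code_kind,
                       bool is_construct_stub) {
  char buffer[128];
  if (is_construct_stub) {
    if (function_name.empty()) return "(construct stub code)";
    std::snprintf(buffer, sizeof(buffer),
                  "(construct stub code for %s)", function_name.c_str());
  } else if (!function_name.empty()) {
    std::snprintf(buffer, sizeof(buffer),
                  "(code for %s)", function_name.c_str());
  } else {
    std::snprintf(buffer, sizeof(buffer),
                  "(%s code)", code_kind.c_str());
  }
  return buffer;
}

int main() {
  // Prints "(code for foo)", "(FUNCTION code)", "(construct stub code for foo)".
  std::printf("%s\n", TagForCode("foo", "FUNCTION", false).c_str());
  std::printf("%s\n", TagForCode("", "FUNCTION", false).c_str());
  std::printf("%s\n", TagForCode("foo", "", true).c_str());
  return 0;
}
```

These formats match the strings passed to `names->GetFormatted()` in the patch; stubs and builtins additionally get names via `CodeStub::MajorName()` and `Builtins::name()`, as shown in `TagCodeObject` and `RootsReferencesExtractor::FillReferences`.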