Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(154)

Side by Side Diff: src/profile-generator.cc

Issue 12314027: Split profile-generator (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 12 matching lines...) Expand all
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #include "profile-generator-inl.h" 30 #include "profile-generator-inl.h"
31 31
32 #include "global-handles.h" 32 #include "global-handles.h"
33 #include "heap-profiler.h"
34 #include "scopeinfo.h" 33 #include "scopeinfo.h"
35 #include "unicode.h" 34 #include "unicode.h"
36 #include "zone-inl.h" 35 #include "zone-inl.h"
37 #include "debug.h" 36 #include "debug.h"
38 37
39 namespace v8 { 38 namespace v8 {
40 namespace internal { 39 namespace internal {
41 40
42 41
43 TokenEnumerator::TokenEnumerator() 42 TokenEnumerator::TokenEnumerator()
(...skipping 892 matching lines...) Expand 10 before | Expand all | Expand 10 after
936 // If no frames were symbolized, put the VM state entry in. 935 // If no frames were symbolized, put the VM state entry in.
937 if (no_symbolized_entries) { 936 if (no_symbolized_entries) {
938 *entry++ = EntryForVMState(sample.state); 937 *entry++ = EntryForVMState(sample.state);
939 } 938 }
940 } 939 }
941 940
942 profiles_->AddPathToCurrentProfiles(entries); 941 profiles_->AddPathToCurrentProfiles(entries);
943 } 942 }
944 943
945 944
// Constructs a named edge (context variable, property, internal or
// shortcut). |from| and |to| are entry indices into the snapshot's
// entries list; they are resolved to pointers later by
// ReplaceToIndexWithEntry().
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut);
}
956
957
// Constructs an indexed edge (array element, hidden or weak reference).
// |index| is stored in the union slot shared with name_.
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  ASSERT(type == kElement || type == kHidden || type == kWeak);
}
965
966
// Converts the stored target index into a direct HeapEntry pointer once
// the snapshot's entries list is fully populated (and stable).
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
970
971
// Sentinel index meaning "no entry assigned yet".
const int HeapEntry::kNoEntry = -1;

// A node of the heap graph. children_index_ is -1 until FillChildren()
// assigns each entry its slice of the snapshot's children vector.
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     int self_size)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      id_(id),
      snapshot_(snapshot),
      name_(name) { }
986
987
// Records a named edge from this entry to |entry|. Edges are appended to
// the snapshot-wide list; children_count_ only counts them here — the
// per-entry child slices are wired up later in HeapSnapshot::FillChildren.
void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}
995
996
// Records an indexed edge from this entry to |entry|; see
// SetNamedReference for the deferred child wiring.
void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}
1004
1005
// Resolves this entry back to the live heap object with the same
// snapshot id; may return an empty handle if the object died.
Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
}
1009
1010
// Debug-dumps this entry and, recursively up to |max_depth|, its
// children to stdout. |prefix| and |edge_name| describe the edge that
// led here; |indent| is the current indentation width.
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
  OS::Print("%6d @%6u %*c %s%s: ",
            self_size(), id(), indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    OS::Print("\"");
    // Print at most the first 40-odd characters, escaping newlines.
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    // Scratch buffer for numeric edge labels; edge_name points either
    // here or at the edge's own name.
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      default:
        OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
1070
1071
1072 const char* HeapEntry::TypeAsString() {
1073 switch (type()) {
1074 case kHidden: return "/hidden/";
1075 case kObject: return "/object/";
1076 case kClosure: return "/closure/";
1077 case kString: return "/string/";
1078 case kCode: return "/code/";
1079 case kArray: return "/array/";
1080 case kRegExp: return "/regexp/";
1081 case kHeapNumber: return "/number/";
1082 case kNative: return "/native/";
1083 case kSynthetic: return "/synthetic/";
1084 default: return "???";
1085 }
1086 }
1087
1088
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

// Expected sizes of the core snapshot structures, per pointer width.
// The STATIC_CHECKs in HeapSnapshot's constructor and the
// GetUsedMemorySize/RawSnapshotSize methods enforce these, so any
// growth of the structures is caught at compile time.
template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 24;
  static const int kExpectedHeapSnapshotsCollectionSize = 100;
  static const int kExpectedHeapSnapshotSize = 136;
  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 32;
  static const int kExpectedHeapSnapshotsCollectionSize = 152;
  static const int kExpectedHeapSnapshotSize = 168;
  static const uint64_t kMaxSerializableSnapshotRawSize =
      static_cast<uint64_t>(6000) * MB;
};

}  // namespace
1113
// A single heap snapshot owned by |collection|. The root and GC-root
// entry indices start out as kNoEntry and are filled in by the
// AddRootEntry/AddGcRootsEntry/AddGcSubrootEntry calls below.
HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                           HeapSnapshot::Type type,
                           const char* title,
                           unsigned uid)
    : collection_(collection),
      type_(type),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  // Keep the snapshot structures from silently growing; see
  // SnapshotSizeConstants above.
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}
1136
1137
// Unregisters this snapshot from its collection and destroys it.
// NOTE(review): self-deletion — callers must not use the object after
// this call.
void HeapSnapshot::Delete() {
  collection_->RemoveSnapshot(this);
  delete this;
}
1142
1143
// Records the highest object id assigned so far, marking the id
// boundary for objects that existed when this snapshot was taken.
void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = collection_->last_assigned_id();
}
1147
1148
// Creates the synthetic root entry. Must be called before any other
// entry is added so that the root ends up at index 0.
HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0);
  root_index_ = entry->index();
  ASSERT(root_index_ == 0);
  return entry;
}
1160
1161
// Creates the single synthetic "(GC roots)" entry; may only be done once
// per snapshot.
HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}
1171
1172
1173 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
1174 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
1175 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
1176 HeapEntry* entry = AddEntry(
1177 HeapEntry::kObject,
1178 VisitorSynchronization::kTagNames[tag],
1179 HeapObjectsMap::GetNthGcSubrootId(tag),
1180 0);
1181 gc_subroot_indexes_[tag] = entry->index();
1182 return entry;
1183 }
1184
1185
// Appends a new entry to the snapshot and returns a pointer to it.
// NOTE(review): the pointer refers into entries_ and presumably stays
// valid only until the next Add causes the list to grow — confirm with
// List's reallocation behavior before caching it.
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  int size) {
  HeapEntry entry(this, type, name, id, size);
  entries_.Add(entry);
  return &entries_.last();
}
1194
1195
// Second pass after all entries and edges are recorded: carves the flat
// children_ vector into per-entry slices (based on each entry's
// children_count_) and converts every edge's target index into a
// pointer. Must run exactly once, after entry/edge addition is done.
void HeapSnapshot::FillChildren() {
  ASSERT(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  // Assign each entry its [start, start + count) slice of children_.
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  ASSERT(edges().length() == children_index);
  // Resolve edge targets and distribute edges into the slices.
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}
1211
1212
// Three-way comparison functor for SortedListBSearch: locates the entry
// whose id equals the one given at construction.
class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  // Returns 0 on match, -1/1 to direct the binary search.
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};
1223
1224
1225 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
1226 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
1227 // Perform a binary search by id.
1228 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
1229 if (index == -1)
1230 return NULL;
1231 return entries_by_id->at(index);
1232 }
1233
1234
// strcmp-style comparator ordering pointer-to-entry values by ascending
// id(); used to sort the snapshot's entry list.
template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() < (*entry2_ptr)->id()) return -1;
  return (*entry1_ptr)->id() == (*entry2_ptr)->id() ? 0 : 1;
}
1241
1242
// Returns the list of entry pointers sorted by id, building and caching
// it on first use.
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}
1253
1254
// Debug-dumps the whole snapshot, starting at the root entry.
void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}
1258
1259
// Approximates the heap memory held by |list|: its element storage plus
// the list header itself. Counts length, not capacity — a lower bound.
template<typename T, class P>
static size_t GetMemoryUsedByList(const List<T, P>& list) {
  return list.length() * sizeof(T) + sizeof(list);
}
1264
1265
// Approximate total memory consumed by this snapshot's in-memory
// representation (used e.g. for serializability limits).
size_t HeapSnapshot::RawSnapshotSize() const {
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
               sizeof(HeapSnapshot));  // NOLINT
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}
1276
1277
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects. The
// well-known synthetic ids below are spaced kObjectIdStep apart, with
// one slot per GC sub-root tag reserved before the first regular id.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
1288
// Tracks address -> stable snapshot-object-id assignments for |heap|.
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in HashMap we see no difference between two
  // cases: it has an entry with NULL as the value, or it has created
  // a new entry on the fly with NULL as the default value.
  // With such a dummy element we have a guarantee that all entries_map_
  // entries will have a value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
1302
1303
// Called after a snapshot completes: drop entries for objects that were
// not seen (i.e. died) during the traversal.
void HeapObjectsMap::SnapshotGenerationFinished() {
  RemoveDeadEntries();
}
1307
1308
// GC move callback: transfers the id assigned to the object at |from|
// to its new address |to|, keeping ids stable across moves.
void HeapObjectsMap::MoveObject(Address from, Address to) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, AddressHash(from));
  // Unknown address — nothing to move (NULL cannot be a real value,
  // thanks to the dummy element added in the constructor).
  if (from_value == NULL) return;
  int from_entry_info_index =
      static_cast<int>(reinterpret_cast<intptr_t>(from_value));
  entries_.at(from_entry_info_index).addr = to;
  HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
  if (to_entry->value != NULL) {
    int to_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
    // Without this operation we would have two EntryInfos with the same
    // value in the addr field. That is bad, because later in
    // RemoveDeadEntries one of these entries would be removed together
    // with the corresponding entries_map_ entry.
    entries_.at(to_entry_info_index).addr = NULL;
  }
  to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
}
1330
1331
// Returns the id previously assigned to the object at |addr|, or 0 if
// the address is unknown (0 is never a valid id).
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  // The dummy element keeps entries_ strictly larger than the map.
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}
1340
1341
// Returns the id for the object at |addr|, assigning a fresh one (and
// recording |size|) if the address has not been seen. Marks existing
// entries as accessed so RemoveDeadEntries keeps them.
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = true;
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  // Step by kObjectIdStep so odd/even id parity is preserved (see the
  // id constants above).
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
1361
1362
// Ends heap-stats tracking by discarding the accumulated time intervals.
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}
1366
1367 void HeapObjectsMap::UpdateHeapObjectsMap() {
1368 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1369 "HeapSnapshotsCollection::UpdateHeapObjectsMap");
1370 HeapIterator iterator(heap_);
1371 for (HeapObject* obj = iterator.next();
1372 obj != NULL;
1373 obj = iterator.next()) {
1374 FindOrAddEntry(obj->address(), obj->Size());
1375 }
1376 RemoveDeadEntries();
1377 }
1378
1379
// Streams per-time-interval object count/size statistics to |stream|.
// Entries are grouped into intervals by id (ids grow monotonically, so
// one forward pass over entries_ covers all intervals); only intervals
// whose stats changed since the last push are emitted. Returns the last
// assigned id, also used as the early-out value if the stream aborts.
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    // Accumulate all entries whose id falls inside this interval.
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      // Update the cached interval stats while building the record.
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  // Flush whatever did not fill a whole chunk.
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
1424
1425
// Compacts entries_ in place, keeping only entries whose objects were
// accessed since the last sweep, and re-points the hash-map values at
// the entries' new indices. Entry 0 is the dummy and is always kept.
void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      // Reset the flag so the next sweep starts clean.
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, AddressHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      // addr may be NULL if the slot was invalidated by MoveObject.
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
1453
1454
// Derives a synthetic id for a native (embedder) object from its hash,
// label and element count. The final left shift makes the id even,
// keeping native ids disjoint from the odd ids handed out by
// FindOrAddEntry (see the comment at the id constants above).
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           HEAP->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
1467
1468
// Approximate memory footprint of the map itself (hash table, entry
// list, and tracked time intervals).
size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}
1476
1477
// Owns all snapshots taken for |heap| plus the shared id map (ids_).
// Object-move tracking stays off until the first snapshot is created.
HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap)
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()),
      ids_(heap) {
}
1484
1485
// List<>::Iterate helper used by the destructor to free each snapshot.
static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}
1489
1490
// Releases the token enumerator and all owned snapshots.
HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}
1495
1496
// Creates a fresh (caller-completed) snapshot. It is only registered in
// this collection once SnapshotGenerationFinished is called with it.
HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true;  // Start watching for heap objects moves.
  return new HeapSnapshot(this, type, name, uid);
}
1503
1504
// Finalizes a snapshot run: prunes dead ids, and (if generation
// succeeded, i.e. |snapshot| is non-NULL) registers the snapshot in the
// list and in the uid lookup table.
void HeapSnapshotsCollection::SnapshotGenerationFinished(
    HeapSnapshot* snapshot) {
  ids_.SnapshotGenerationFinished();
  if (snapshot != NULL) {
    snapshots_.Add(snapshot);
    HashMap::Entry* entry =
        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                               static_cast<uint32_t>(snapshot->uid()),
                               true);
    ASSERT(entry->value == NULL);  // uids must be unique.
    entry->value = snapshot;
  }
}
1518
1519
1520 HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
1521 HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
1522 static_cast<uint32_t>(uid),
1523 false);
1524 return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
1525 }
1526
1527
// Unregisters |snapshot| from both the list and the uid table; does not
// delete it (HeapSnapshot::Delete drives this and then frees itself).
void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
  snapshots_.RemoveElement(snapshot);
  unsigned uid = snapshot->uid();
  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
                         static_cast<uint32_t>(uid));
}
1534
1535
1536 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
1537 SnapshotObjectId id) {
1538 // First perform a full GC in order to avoid dead objects.
1539 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1540 "HeapSnapshotsCollection::FindHeapObjectById");
1541 AssertNoAllocation no_allocation;
1542 HeapObject* object = NULL;
1543 HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
1544 // Make sure that object with the given id is still reachable.
1545 for (HeapObject* obj = iterator.next();
1546 obj != NULL;
1547 obj = iterator.next()) {
1548 if (ids_.FindEntry(obj->address()) == id) {
1549 ASSERT(object == NULL);
1550 object = obj;
1551 // Can't break -- kFilterUnreachable requires full heap traversal.
1552 }
1553 }
1554 return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
1555 }
1556
1557
// Approximate memory footprint of the collection: its own structures
// plus each owned snapshot's raw size.
size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
      kExpectedHeapSnapshotsCollectionSize ==
      sizeof(HeapSnapshotsCollection));  // NOLINT
  size_t size = sizeof(*this);
  size += names_.GetUsedMemorySize();
  size += ids_.GetUsedMemorySize();
  size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
  size += GetMemoryUsedByList(snapshots_);
  for (int i = 0; i < snapshots_.length(); ++i) {
    size += snapshots_[i]->RawSnapshotSize();
  }
  return size;
}
1572
1573
// Maps arbitrary HeapThing pointers to snapshot entry indices.
HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}
1577
1578
// Returns the entry index paired with |thing|, or HeapEntry::kNoEntry
// if it has not been paired yet.
int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}
1584
1585
// Associates |thing| with |entry| (an entry index); each thing may be
// paired at most once.
void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
1591
1592
// A set of heap objects, optionally carrying a string tag per object.
HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}
1596
1597
// Removes all objects (and their tags) from the set.
void HeapObjectsSet::Clear() {
  entries_.Clear();
}
1601
1602
// Membership test; non-heap objects (e.g. smis) are never members.
bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}
1608
1609
// Inserts |obj| into the set; non-heap objects are silently ignored.
void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}
1615
1616
1617 const char* HeapObjectsSet::GetTag(Object* obj) {
1618 HeapObject* object = HeapObject::cast(obj);
1619 HashMap::Entry* cache_entry =
1620 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
1621 return cache_entry != NULL
1622 ? reinterpret_cast<const char*>(cache_entry->value)
1623 : NULL;
1624 }
1625
1626
// Inserts |obj| (if needed) and attaches |tag| to it; non-heap objects
// are silently ignored.
void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}
1634
1635
// Synthetic HeapObject pointers standing in for the root and GC-root
// pseudo-entries. They encode well-known snapshot object ids as fake
// addresses, are only compared (never dereferenced), and the GC-subroot
// sentinels form the half-open range
// [kFirstGcSubrootObject, kLastGcSubrootObject).
HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
1648
1649
// Walks the V8 heap and fills |snapshot| via a SnapshotFillerInterface
// (filler_ is set later). NOTE(review): heap_ is taken from the current
// isolate rather than from the snapshot's collection — presumably they
// always match here; confirm before reusing across isolates.
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(Isolate::Current()->heap()),
      snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}
1661
1662
// Nothing to release: the explorer owns none of its referenced objects.
V8HeapExplorer::~V8HeapExplorer() {
}
1665
1666
// HeapEntriesAllocator hook: HeapThing is an opaque pointer that, for
// this explorer, is always a HeapObject* (possibly a sentinel).
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}
1670
1671
// Creates the snapshot entry for |object|, dispatching on its kind to
// pick the entry type and display name. The sentinel pseudo-objects
// (root, GC roots, GC sub-roots) are handled first; the order of the
// remaining checks matters because the predicates overlap (e.g. a
// NativeContext is also a Context, a JSGlobalObject is a JSObject).
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        collection_->names()->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = collection_->names()->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      // Global objects get their embedder-provided tag appended.
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = collection_->names()->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? collection_->names()->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  // Everything else is an internal VM structure.
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
1735
1736
// Adds a snapshot entry of the given type/name for a real heap object,
// obtaining (or minting) its stable snapshot id from the collection.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  int object_size = object->Size();
  SnapshotObjectId object_id =
      collection_->GetObjectId(object->address(), object_size);
  return snapshot_->AddEntry(type, name, object_id, object_size);
}
1745
1746
// Root visitor that adds one pseudo-entry per non-empty GC sub-root
// category. It counts pointers between Synchronize() calls; a category
// whose count did not advance contributed no roots and is skipped.
class GcSubrootsEnumerator : public ObjectVisitor {
 public:
  GcSubrootsEnumerator(
      SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
      : filler_(filler),
        explorer_(explorer),
        previous_object_count_(0),
        object_count_(0) {
  }
  void VisitPointers(Object** start, Object** end) {
    object_count_ += end - start;
  }
  // Called at each sub-root category boundary with its tag.
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Skip empty subroots.
    if (previous_object_count_ != object_count_) {
      previous_object_count_ = object_count_;
      filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
    }
  }
 private:
  SnapshotFillerInterface* filler_;
  V8HeapExplorer* explorer_;
  intptr_t previous_object_count_;
  intptr_t object_count_;
};
1772
1773
// Registers the synthetic root entries (root, GC roots, and each
// non-empty GC sub-root) with the filler before the heap walk starts.
void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}
1780
1781
// Display name for internal VM structures ("system / ..."), dispatched
// on the object's instance type. Maps of strings get their own labels.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
1802
1803
1804 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
1805 int objects_count = 0;
1806 for (HeapObject* obj = iterator->next();
1807 obj != NULL;
1808 obj = iterator->next()) {
1809 objects_count++;
1810 }
1811 return objects_count;
1812 }
1813
1814
// Visitor that emits "hidden" (indexed) edges for every pointer field
// of an object that was not already reported as a named reference.
// The named-reference setters mark handled fields in place by or-ing
// kFailureTag into the stored pointer (MarkVisitedField); this visitor
// detects, unmarks, and skips such fields.
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(1) {
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
    }
  }
  // Temporarily failure-tags the pointer stored at obj+offset so the
  // indexed pass knows the field was already reported.  A negative
  // offset means "no backing field" and is a no-op.
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(!Memory::Object_at(field)->IsFailure());
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    *field |= kFailureTag;
  }

 private:
  // Returns true and restores the heap-object tag if the field was
  // previously marked by MarkVisitedField.
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;  // Hidden edges are numbered starting at 1.
};
1854
1855
// Extracts typed (named) references from |obj| according to its kind,
// then — for all kinds except JSGlobalPropertyCell — emits the map
// reference and sweeps the remaining unmarked pointer fields as hidden
// indexed references.
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;  // No interest in this object.
  int entry = heap_entry->index();

  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsJSGlobalPropertyCell()) {
    ExtractJSGlobalPropertyCellReferences(
        entry, JSGlobalPropertyCell::cast(obj));
    // Property cells expose only their value; skip the indexed sweep.
    extract_indexed_refs = false;
  }
  if (extract_indexed_refs) {
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}
1891
1892
// Links the proxy's underlying global object to the snapshot root.
void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
  // We need to reference JS global objects from snapshot's root.
  // We use JSGlobalProxy because this is what embedder (e.g. browser)
  // uses for the global object.
  Object* object = proxy->map()->prototype();
  bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
  // The debugger's own global object is excluded from user snapshots.
  is_debug_object = object->IsGlobalObject() &&
      Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
#endif
  if (!is_debug_object) {
    SetUserGlobalReference(object);
  }
}
1907
1908
// Extracts all references of a JSObject: closure/property/element/
// internal-field references, the prototype link, and — for functions
// and global objects — their type-specific internal fields.
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    // prototype_or_initial_map holds either the explicit prototype
    // object or, once the function has been used as a constructor,
    // the initial map.  The hole means neither is set yet.
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), js_fun->prototype());
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->unchecked_context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->unchecked_context(),
                         JSFunction::kContextOffset);
    // Fields past kNonWeakFieldsEndOffset are treated as weak; report
    // them as weak edges keyed by their byte offset.
    for (int i = JSFunction::kNonWeakFieldsEndOffset;
         i < JSFunction::kSize;
         i += kPointerSize) {
      SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
    }
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
1977
1978
1979 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1980 if (string->IsConsString()) {
1981 ConsString* cs = ConsString::cast(string);
1982 SetInternalReference(cs, entry, "first", cs->first(),
1983 ConsString::kFirstOffset);
1984 SetInternalReference(cs, entry, "second", cs->second(),
1985 ConsString::kSecondOffset);
1986 } else if (string->IsSlicedString()) {
1987 SlicedString* ss = SlicedString::cast(string);
1988 SetInternalReference(ss, entry, "parent", ss->parent(),
1989 SlicedString::kParentOffset);
1990 }
1991 }
1992
1993
// Extracts the fixed context slots as named internal references.  For
// native contexts the full NATIVE_CONTEXT_FIELDS set is emitted and
// the trailing weak slots are reported as weak edges.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
// Emits one named internal reference for a context slot.
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  SetInternalReference(context, entry, #name, context->get(Context::index), \
      FixedArray::OffsetOfElementAt(Context::index));
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
#undef EXTRACT_CONTEXT_FIELD
    // Slots from FIRST_WEAK_SLOT onward hold weakly-referenced data.
    for (int i = Context::FIRST_WEAK_SLOT;
         i < Context::NATIVE_CONTEXT_SLOTS;
         ++i) {
      SetWeakReference(context, entry, i, context->get(i),
          FixedArray::OffsetOfElementAt(i));
    }
  }
}
2018
2019
// Extracts a Map's references: prototype, constructor, transitions or
// back pointer (they share one field), descriptors, and code cache.
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();

    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(transitions->back_pointer_storage(), "(back pointer)");
    // Note: parent object is the transition array (so its back-pointer
    // field gets marked as visited) while the edge is attached to the
    // map's entry.
    SetInternalReference(transitions, entry,
                         "backpointer", back_pointer,
                         TransitionArray::kBackPointerStorageOffset);
    // Sweep the transition array's remaining fields as hidden edges of
    // the map's entry.
    IndexedReferencesExtractor transitions_refs(this, transitions, entry);
    transitions->Iterate(&transitions_refs);

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "backpointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);

  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
}
2058
2059
// Extracts the named internal references of a SharedFunctionInfo.
// The initial map is reported as a weak edge (index 1).
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  TagObject(shared->code(), "(code)");
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  TagObject(shared->construct_stub(), "(code)");
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "this_property_assignments",
                       shared->this_property_assignments(),
                       SharedFunctionInfo::kThisPropertyAssignmentsOffset);
  SetWeakReference(obj, entry,
                   1, shared->initial_map(),
                   SharedFunctionInfo::kInitialMapOffset);
}
2101
2102
// Extracts the named internal references of a Script object.
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "data", script->data(),
                       Script::kDataOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}
2122
2123
// Extracts the two internal caches of a CodeCache, tagging them so
// they show up with meaningful names in the snapshot.
void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}
2135
2136
// Extracts the named internal references of a Code object.  The type
// feedback info field only exists for FUNCTION-kind code.
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
}
2158
2159
// A property cell exposes only its value.  The indexed-reference sweep
// is skipped for cells (see ExtractReferences), so no field offset is
// passed here.
void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
    int entry, JSGlobalPropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value());
}
2164
2165
// For a JSFunction, extracts either its bound-function data (receiver,
// target, bound arguments) or — for ordinary closures — the context-
// allocated locals and the function variable of its declaration scope.
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    // Remaining slots are the arguments pre-bound by Function.bind.
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = collection_->names()->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  } else {
    Context* context = func->context()->declaration_context();
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetClosureReference(js_obj, entry, local_name, context->get(idx));
    }

    // Add function variable.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetClosureReference(js_obj, entry, name, context->get(idx));
      }
    }
  }
}
2206
2207
// Extracts property references of a JSObject.  Fast-mode objects are
// walked via their descriptor array (fields, constant functions, and
// accessor pairs); slow-mode objects via the property dictionary.
// Hidden properties are reported as internal rather than property
// edges so they do not masquerade as user-visible properties.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      // Descriptors beyond the map's own count belong to other maps
      // sharing this array; skip them.
      if (descs->GetDetails(i).descriptor_index() > real_size) continue;
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);

          String* k = descs->GetKey(i);
          // Fields may live in-object or in the properties backing
          // store; only in-object fields have a byte offset to mark.
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_symbol()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            Object* value = js_obj->FastPropertyAt(index);
            if (k != heap_->hidden_symbol()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT_FUNCTION:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstantFunction(i));
          break;
        case CALLBACKS: {
          Object* callback_obj = descs->GetValue(i);
          if (callback_obj->IsAccessorPair()) {
            AccessorPair* accessors = AccessorPair::cast(callback_obj);
            // Getter/setter edges are named "get-<name>"/"set-<name>".
            if (Object* getter = accessors->getter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   getter, "get-%s");
            }
            if (Object* setter = accessors->setter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   setter, "set-%s");
            }
          }
          break;
        }
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
          break;
        case TRANSITION:
        case NONEXISTENT:
          UNREACHABLE();
          break;
      }
    }
  } else {
    StringDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsJSGlobalPropertyCell()
            ? JSGlobalPropertyCell::cast(target)->value()
            : target;
        if (k != heap_->hidden_symbol()) {
          SetPropertyReference(js_obj, entry, String::cast(k), value);
        } else {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
        }
      }
    }
  }
}
2296
2297
// Extracts element (indexed) references for fast-object and dictionary
// elements.  For fast JSArrays only elements below the array length
// are reported; holes are skipped.
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}
2322
2323
2324 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
2325 int length = js_obj->GetInternalFieldCount();
2326 for (int i = 0; i < length; ++i) {
2327 Object* o = js_obj->GetInternalField(i);
2328 SetInternalReference(
2329 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
2330 }
2331 }
2332
2333
// Returns the best available constructor name for |object|.  Functions
// are uniformly labelled with the closure symbol.  When the map-level
// constructor name is just "Object", an own "constructor" property is
// consulted, since binding-style objects often carry a more specific
// constructor there.
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_symbol();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_symbol()) {
    // Look up an immediate "constructor" property, if it is a function,
    // return its name. This is for instances of binding objects, which
    // have prototype constructor type "Object".
    Object* constructor_prop = NULL;
    LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
    if (!result.IsFound()) return object->constructor_name();

    constructor_prop = result.GetLazyValue();
    if (constructor_prop->IsJSFunction()) {
      Object* maybe_name =
          JSFunction::cast(constructor_prop)->shared()->name();
      if (maybe_name->IsString()) {
        String* name = String::cast(maybe_name);
        // Only a non-empty shared name beats the default.
        if (name->length() > 0) return name;
      }
    }
  }
  return object->constructor_name();
}
2359
2360
2361 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
2362 if (!obj->IsHeapObject()) return NULL;
2363 return filler_->FindOrAddEntry(obj, this);
2364 }
2365
2366
// Collects root pointers in two passes — strong-only first, then all —
// and later replays them as GC subroot references, marking a root as
// weak when it appears in the "all" list but not the "strong" list.
// The strong list is assumed to be an ordered sublist of the all list,
// which holds because both passes visit roots in the same order.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Records that the subroot |tag| ends at position |index| of the
  // all-references list.
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  RootsReferencesExtractor()
      : collecting_all_references_(false),
        previous_reference_count_(0) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  // Switches from the strong-only pass to the all-references pass.
  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  // Replays the collected roots into the explorer, classifying each as
  // strong or weak by merging the two ordered lists.
  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    int strong_index = 0, all_index = 0, tags_index = 0;
    while (all_index < all_references_.length()) {
      if (strong_index < strong_references_.length() &&
          strong_references_[strong_index] == all_references_[all_index]) {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        false,
                                        all_references_[all_index++]);
        ++strong_index;
      } else {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        true,
                                        all_references_[all_index++]);
      }
      // Advance to the next subroot tag once its slice is exhausted.
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  // Records subroot boundaries; only non-empty subroots (during the
  // all-references pass) produce a tag.
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
};
2429
2430
// Walks all reachable heap objects extracting their references, then
// extracts root references.  Returns false if the embedder aborted via
// the progress callback.  Note that even after an interruption the
// heap iteration loop runs to completion, because a filtering
// HeapIterator must be fully unwound.
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  // Two root passes: strong-only, then all; the extractor diffs them
  // to classify weak root references.
  SetRootGcRootsReference();
  RootsReferencesExtractor extractor;
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);
  filler_ = NULL;
  return progress_->ProgressReport(true);
}
2461
2462
2463 bool V8HeapExplorer::IsEssentialObject(Object* object) {
2464 return object->IsHeapObject()
2465 && !object->IsOddball()
2466 && object != heap_->empty_byte_array()
2467 && object != heap_->empty_fixed_array()
2468 && object != heap_->empty_descriptor_array()
2469 && object != heap_->fixed_array_map()
2470 && object != heap_->global_property_cell_map()
2471 && object != heap_->shared_function_info_map()
2472 && object != heap_->free_space_map()
2473 && object != heap_->one_pointer_filler_map()
2474 && object != heap_->two_pointer_filler_map();
2475 }
2476
2477
2478 void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
2479 int parent_entry,
2480 String* reference_name,
2481 Object* child_obj) {
2482 HeapEntry* child_entry = GetEntry(child_obj);
2483 if (child_entry != NULL) {
2484 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
2485 parent_entry,
2486 collection_->names()->GetName(reference_name),
2487 child_entry);
2488 }
2489 }
2490
2491
2492 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
2493 int parent_entry,
2494 const char* reference_name,
2495 Object* child_obj) {
2496 HeapEntry* child_entry = GetEntry(child_obj);
2497 if (child_entry != NULL) {
2498 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
2499 parent_entry,
2500 reference_name,
2501 child_entry);
2502 }
2503 }
2504
2505
2506 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
2507 int parent_entry,
2508 int index,
2509 Object* child_obj) {
2510 HeapEntry* child_entry = GetEntry(child_obj);
2511 if (child_entry != NULL) {
2512 filler_->SetIndexedReference(HeapGraphEdge::kElement,
2513 parent_entry,
2514 index,
2515 child_entry);
2516 }
2517 }
2518
2519
2520 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2521 int parent_entry,
2522 const char* reference_name,
2523 Object* child_obj,
2524 int field_offset) {
2525 HeapEntry* child_entry = GetEntry(child_obj);
2526 if (child_entry == NULL) return;
2527 if (IsEssentialObject(child_obj)) {
2528 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2529 parent_entry,
2530 reference_name,
2531 child_entry);
2532 }
2533 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2534 }
2535
2536
2537 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2538 int parent_entry,
2539 int index,
2540 Object* child_obj,
2541 int field_offset) {
2542 HeapEntry* child_entry = GetEntry(child_obj);
2543 if (child_entry == NULL) return;
2544 if (IsEssentialObject(child_obj)) {
2545 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2546 parent_entry,
2547 collection_->names()->GetName(index),
2548 child_entry);
2549 }
2550 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2551 }
2552
2553
2554 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
2555 int parent_entry,
2556 int index,
2557 Object* child_obj) {
2558 HeapEntry* child_entry = GetEntry(child_obj);
2559 if (child_entry != NULL && IsEssentialObject(child_obj)) {
2560 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
2561 parent_entry,
2562 index,
2563 child_entry);
2564 }
2565 }
2566
2567
2568 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2569 int parent_entry,
2570 int index,
2571 Object* child_obj,
2572 int field_offset) {
2573 HeapEntry* child_entry = GetEntry(child_obj);
2574 if (child_entry != NULL) {
2575 filler_->SetIndexedReference(HeapGraphEdge::kWeak,
2576 parent_entry,
2577 index,
2578 child_entry);
2579 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2580 }
2581 }
2582
2583
2584 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2585 int parent_entry,
2586 String* reference_name,
2587 Object* child_obj,
2588 const char* name_format_string,
2589 int field_offset) {
2590 HeapEntry* child_entry = GetEntry(child_obj);
2591 if (child_entry != NULL) {
2592 HeapGraphEdge::Type type = reference_name->length() > 0 ?
2593 HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2594 const char* name = name_format_string != NULL ?
2595 collection_->names()->GetFormatted(
2596 name_format_string,
2597 *reference_name->ToCString(DISALLOW_NULLS,
2598 ROBUST_STRING_TRAVERSAL)) :
2599 collection_->names()->GetName(reference_name);
2600
2601 filler_->SetNamedReference(type,
2602 parent_entry,
2603 name,
2604 child_entry);
2605 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2606 }
2607 }
2608
2609
// Links the snapshot root to the synthetic "GC roots" entry.
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}
2616
2617
// Links a user-visible global object to the snapshot root via a
// shortcut edge.  The child must be a heap object.
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}
2626
2627
// Links the "GC roots" entry to the subroot entry for |tag|.
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}
2634
2635
// Links a GC subroot entry to one of its root objects.  Known strong
// roots get a named internal edge; everything else gets an
// auto-indexed edge, weak or element depending on |is_weak|.
void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(),
          name,
          child_entry);
    } else {
      filler_->SetIndexedAutoIndexReference(
          is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
          snapshot_->gc_subroot(tag)->index(),
          child_entry);
    }
  }
}
2655
2656
// Returns the root-list name of |object| if it is a known strong GC
// root (root-list entries, struct maps, symbols), or NULL otherwise.
// The name table is built lazily on first use from the macro lists.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define SYMBOL_NAME(name, str) NAME_ENTRY(name)
    SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
2674
2675
2676 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2677 if (IsEssentialObject(obj)) {
2678 HeapEntry* entry = GetEntry(obj);
2679 if (entry->name()[0] == '\0') {
2680 entry->set_name(tag);
2681 }
2682 }
2683 }
2684
2685
// Visitor over global handles that collects every JS global object,
// found by following native context -> global proxy -> prototype.
class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsNativeContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  // Number of globals collected so far.
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }

 private:
  List<Handle<JSGlobalObject> > objects_;
};
2708
2709
// Modifies heap. Must not be run during heap traversal.
// Tags each JS global object with the URL supplied by the embedder's
// name resolver.  Resolver callbacks may allocate, so all names are
// gathered first; only then, under AssertNoAllocation, are the raw
// object pointers tagged (handles stay valid across the callbacks).
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = Isolate::Current();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      // Inner scope bounds the handles created per resolver call.
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  AssertNoAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
2735
2736
// Relays embedder references that carry a wrapper class id to the
// NativeObjectsExplorer; this visitor is only used with
// IterateAllRootsWithClassIds, so plain pointer visits must never happen.
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;  // Not owned.
};
2751
2752
// Allocates snapshot entries of one fixed type (e.g. kNative or kSynthetic)
// for HeapThings that are actually v8::RetainedObjectInfo pointers supplied
// by the embedder.
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;            // Not owned.
  HeapSnapshotsCollection* collection_;  // Used for interning entry names.
  HeapEntry::Type entries_type_;      // Type stamped on every entry.
};
2768
2769
2770 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2771 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2772 intptr_t elements = info->GetElementCount();
2773 intptr_t size = info->GetSizeInBytes();
2774 const char* name = elements != -1
2775 ? collection_->names()->GetFormatted(
2776 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2777 : collection_->names()->GetCopy(info->GetLabel());
2778 return snapshot_->AddEntry(
2779 entries_type_,
2780 name,
2781 HeapObjectsMap::GenerateId(info),
2782 size != -1 ? static_cast<int>(size) : 0);
2783 }
2784
2785
// Sets up the native-objects explorer for one snapshot. Both allocators
// created here are owned by this object and freed in the destructor.
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
2800
2801
2802 NativeObjectsExplorer::~NativeObjectsExplorer() {
2803 for (HashMap::Entry* p = objects_by_info_.Start();
2804 p != NULL;
2805 p = objects_by_info_.Next(p)) {
2806 v8::RetainedObjectInfo* info =
2807 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2808 info->Dispose();
2809 List<HeapObject*>* objects =
2810 reinterpret_cast<List<HeapObject*>* >(p->value);
2811 delete objects;
2812 }
2813 for (HashMap::Entry* p = native_groups_.Start();
2814 p != NULL;
2815 p = native_groups_.Next(p)) {
2816 v8::RetainedObjectInfo* info =
2817 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2818 info->Dispose();
2819 }
2820 delete synthetic_entries_allocator_;
2821 delete native_entries_allocator_;
2822 }
2823
2824
// Returns the number of distinct RetainedObjectInfos known so far.
// Triggers the (one-shot) embedder query as a side effect.
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
2829
2830
// Queries the embedder (at most once per snapshot) for objects retained by
// native code. ObjectGroups are harvested from GlobalHandles after firing
// the GC prologue callbacks — the point at which embedders report them.
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = Isolate::Current();
  const GCType major_gc_type = kGCTypeMarkSweepCompact;
  // Record objects that are joined into ObjectGroups.
  isolate->heap()->CallGCPrologueCallbacks(major_gc_type);
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info_ == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
    for (size_t j = 0; j < group->length_; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects_[j]);
      list->Add(obj);
      // Remember group members so VisitSubtreeWrapper skips them later.
      in_groups_.Insert(obj);
    }
    group->info_ = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type);
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}
2856
2857 void NativeObjectsExplorer::FillImplicitReferences() {
2858 Isolate* isolate = Isolate::Current();
2859 List<ImplicitRefGroup*>* groups =
2860 isolate->global_handles()->implicit_ref_groups();
2861 for (int i = 0; i < groups->length(); ++i) {
2862 ImplicitRefGroup* group = groups->at(i);
2863 HeapObject* parent = *group->parent_;
2864 int parent_entry =
2865 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2866 ASSERT(parent_entry != HeapEntry::kNoEntry);
2867 Object*** children = group->children_;
2868 for (size_t j = 0; j < group->length_; ++j) {
2869 Object* child = *children[j];
2870 HeapEntry* child_entry =
2871 filler_->FindOrAddEntry(child, native_entries_allocator_);
2872 filler_->SetNamedReference(
2873 HeapGraphEdge::kInternal,
2874 parent_entry,
2875 "native",
2876 child_entry);
2877 }
2878 }
2879 isolate->global_handles()->RemoveImplicitRefGroups();
2880 }
2881
2882 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2883 v8::RetainedObjectInfo* info) {
2884 HashMap::Entry* entry =
2885 objects_by_info_.Lookup(info, InfoHash(info), true);
2886 if (entry->value != NULL) {
2887 info->Dispose();
2888 } else {
2889 entry->value = new List<HeapObject*>(4);
2890 }
2891 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2892 }
2893
2894
2895 bool NativeObjectsExplorer::IterateAndExtractReferences(
2896 SnapshotFillerInterface* filler) {
2897 filler_ = filler;
2898 FillRetainedObjects();
2899 FillImplicitReferences();
2900 if (EstimateObjectsCount() > 0) {
2901 for (HashMap::Entry* p = objects_by_info_.Start();
2902 p != NULL;
2903 p = objects_by_info_.Next(p)) {
2904 v8::RetainedObjectInfo* info =
2905 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2906 SetNativeRootReference(info);
2907 List<HeapObject*>* objects =
2908 reinterpret_cast<List<HeapObject*>* >(p->value);
2909 for (int i = 0; i < objects->length(); ++i) {
2910 SetWrapperNativeReferences(objects->at(i), info);
2911 }
2912 }
2913 SetRootNativeRootsReference();
2914 }
2915 filler_ = NULL;
2916 return true;
2917 }
2918
2919
// Synthetic RetainedObjectInfo representing a whole native group; used as
// the backing info for the per-group synthetic snapshot nodes.
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        // The label pointer doubles as the hash; labels are interned copies
        // (see FindOrAddGroupInfo), so pointer identity is stable.
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  // Self-deleting: callers must not touch the object after Dispose().
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;  // Not owned.
};
2945
2946
2947 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2948 const char* label) {
2949 const char* label_copy = collection_->names()->GetCopy(label);
2950 uint32_t hash = StringHasher::HashSequentialString(
2951 label_copy,
2952 static_cast<int>(strlen(label_copy)),
2953 HEAP->HashSeed());
2954 HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2955 hash, true);
2956 if (entry->value == NULL) {
2957 entry->value = new NativeGroupRetainedObjectInfo(label);
2958 }
2959 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2960 }
2961
2962
2963 void NativeObjectsExplorer::SetNativeRootReference(
2964 v8::RetainedObjectInfo* info) {
2965 HeapEntry* child_entry =
2966 filler_->FindOrAddEntry(info, native_entries_allocator_);
2967 ASSERT(child_entry != NULL);
2968 NativeGroupRetainedObjectInfo* group_info =
2969 FindOrAddGroupInfo(info->GetGroupLabel());
2970 HeapEntry* group_entry =
2971 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2972 filler_->SetNamedAutoIndexReference(
2973 HeapGraphEdge::kInternal,
2974 group_entry->index(),
2975 child_entry);
2976 }
2977
2978
2979 void NativeObjectsExplorer::SetWrapperNativeReferences(
2980 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2981 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2982 ASSERT(wrapper_entry != NULL);
2983 HeapEntry* info_entry =
2984 filler_->FindOrAddEntry(info, native_entries_allocator_);
2985 ASSERT(info_entry != NULL);
2986 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2987 wrapper_entry->index(),
2988 "native",
2989 info_entry);
2990 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2991 info_entry->index(),
2992 wrapper_entry);
2993 }
2994
2995
2996 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2997 for (HashMap::Entry* entry = native_groups_.Start();
2998 entry;
2999 entry = native_groups_.Next(entry)) {
3000 NativeGroupRetainedObjectInfo* group_info =
3001 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
3002 HeapEntry* group_entry =
3003 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
3004 ASSERT(group_entry != NULL);
3005 filler_->SetIndexedAutoIndexReference(
3006 HeapGraphEdge::kElement,
3007 snapshot_->root()->index(),
3008 group_entry);
3009 }
3010 }
3011
3012
3013 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
3014 if (in_groups_.Contains(*p)) return;
3015 Isolate* isolate = Isolate::Current();
3016 v8::RetainedObjectInfo* info =
3017 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
3018 if (info == NULL) return;
3019 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
3020 }
3021
3022
3023 class SnapshotFiller : public SnapshotFillerInterface {
3024 public:
3025 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
3026 : snapshot_(snapshot),
3027 collection_(snapshot->collection()),
3028 entries_(entries) { }
3029 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
3030 HeapEntry* entry = allocator->AllocateEntry(ptr);
3031 entries_->Pair(ptr, entry->index());
3032 return entry;
3033 }
3034 HeapEntry* FindEntry(HeapThing ptr) {
3035 int index = entries_->Map(ptr);
3036 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
3037 }
3038 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
3039 HeapEntry* entry = FindEntry(ptr);
3040 return entry != NULL ? entry : AddEntry(ptr, allocator);
3041 }
3042 void SetIndexedReference(HeapGraphEdge::Type type,
3043 int parent,
3044 int index,
3045 HeapEntry* child_entry) {
3046 HeapEntry* parent_entry = &snapshot_->entries()[parent];
3047 parent_entry->SetIndexedReference(type, index, child_entry);
3048 }
3049 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
3050 int parent,
3051 HeapEntry* child_entry) {
3052 HeapEntry* parent_entry = &snapshot_->entries()[parent];
3053 int index = parent_entry->children_count() + 1;
3054 parent_entry->SetIndexedReference(type, index, child_entry);
3055 }
3056 void SetNamedReference(HeapGraphEdge::Type type,
3057 int parent,
3058 const char* reference_name,
3059 HeapEntry* child_entry) {
3060 HeapEntry* parent_entry = &snapshot_->entries()[parent];
3061 parent_entry->SetNamedReference(type, reference_name, child_entry);
3062 }
3063 void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
3064 int parent,
3065 HeapEntry* child_entry) {
3066 HeapEntry* parent_entry = &snapshot_->entries()[parent];
3067 int index = parent_entry->children_count() + 1;
3068 parent_entry->SetNamedReference(
3069 type,
3070 collection_->names()->GetName(index),
3071 child_entry);
3072 }
3073
3074 private:
3075 HeapSnapshot* snapshot_;
3076 HeapSnapshotsCollection* collection_;
3077 HeapEntriesMap* entries_;
3078 };
3079
3080
// Wires the generator to its two explorers. |control| may be NULL, in which
// case progress is not tracked (see SetProgressTotal/ProgressReport).
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}
3092
3093
// Builds the snapshot: tags global objects, forces two full GCs so that
// weakly reachable garbage disappears, then walks the (now stable) heap to
// fill entries, references and children. Returns false if the embedder
// aborted through the activity control.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = Isolate::Current()->heap();
  // Conservatively swept spaces would break the heap iterators used below.
  CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
  CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
  CHECK(!debug_heap->code_space()->was_swept_conservatively());
  CHECK(!debug_heap->cell_space()->was_swept_conservatively());
  CHECK(!debug_heap->map_space()->was_swept_conservatively());
#endif

  // The following code uses heap iterators, so we want the heap to be
  // stable. It should follow TagGlobalObjects as that can allocate.
  AssertNoAllocation no_alloc;

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  SetProgressTotal(1);  // 1 pass.

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  // Report 100% completion exactly once at the end.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
3140
3141
// Advances progress by one unit; actual reporting to the embedder happens
// in ProgressReport().
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}
3145
3146
3147 bool HeapSnapshotGenerator::ProgressReport(bool force) {
3148 const int kProgressReportGranularity = 10000;
3149 if (control_ != NULL
3150 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
3151 return
3152 control_->ReportProgressValue(progress_counter_, progress_total_) ==
3153 v8::ActivityControl::kContinue;
3154 }
3155 return true;
3156 }
3157
3158
3159 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
3160 if (control_ == NULL) return;
3161 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
3162 progress_total_ = iterations_count * (
3163 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
3164 dom_explorer_.EstimateObjectsCount());
3165 progress_counter_ = 0;
3166 }
3167
3168
3169 bool HeapSnapshotGenerator::FillReferences() {
3170 SnapshotFiller filler(snapshot_, &entries_);
3171 v8_heap_explorer_.AddRootEntries(&filler);
3172 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
3173 && dom_explorer_.IterateAndExtractReferences(&filler);
3174 }
3175
3176
// Maximum number of decimal characters needed to print an integer of the
// given byte size: a signed 4-byte value needs up to 11 ("-2147483648"),
// an unsigned one up to 10 ("4294967295"); 8-byte values need up to 20.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};
3186
3187
3188 class OutputStreamWriter {
3189 public:
3190 explicit OutputStreamWriter(v8::OutputStream* stream)
3191 : stream_(stream),
3192 chunk_size_(stream->GetChunkSize()),
3193 chunk_(chunk_size_),
3194 chunk_pos_(0),
3195 aborted_(false) {
3196 ASSERT(chunk_size_ > 0);
3197 }
3198 bool aborted() { return aborted_; }
3199 void AddCharacter(char c) {
3200 ASSERT(c != '\0');
3201 ASSERT(chunk_pos_ < chunk_size_);
3202 chunk_[chunk_pos_++] = c;
3203 MaybeWriteChunk();
3204 }
3205 void AddString(const char* s) {
3206 AddSubstring(s, StrLength(s));
3207 }
3208 void AddSubstring(const char* s, int n) {
3209 if (n <= 0) return;
3210 ASSERT(static_cast<size_t>(n) <= strlen(s));
3211 const char* s_end = s + n;
3212 while (s < s_end) {
3213 int s_chunk_size = Min(
3214 chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
3215 ASSERT(s_chunk_size > 0);
3216 memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
3217 s += s_chunk_size;
3218 chunk_pos_ += s_chunk_size;
3219 MaybeWriteChunk();
3220 }
3221 }
3222 void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
3223 void Finalize() {
3224 if (aborted_) return;
3225 ASSERT(chunk_pos_ < chunk_size_);
3226 if (chunk_pos_ != 0) {
3227 WriteChunk();
3228 }
3229 stream_->EndOfStream();
3230 }
3231
3232 private:
3233 template<typename T>
3234 void AddNumberImpl(T n, const char* format) {
3235 // Buffer for the longest value plus trailing \0
3236 static const int kMaxNumberSize =
3237 MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
3238 if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
3239 int result = OS::SNPrintF(
3240 chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
3241 ASSERT(result != -1);
3242 chunk_pos_ += result;
3243 MaybeWriteChunk();
3244 } else {
3245 EmbeddedVector<char, kMaxNumberSize> buffer;
3246 int result = OS::SNPrintF(buffer, format, n);
3247 USE(result);
3248 ASSERT(result != -1);
3249 AddString(buffer.start());
3250 }
3251 }
3252 void MaybeWriteChunk() {
3253 ASSERT(chunk_pos_ <= chunk_size_);
3254 if (chunk_pos_ == chunk_size_) {
3255 WriteChunk();
3256 }
3257 }
3258 void WriteChunk() {
3259 if (aborted_) return;
3260 if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
3261 v8::OutputStream::kAbort) aborted_ = true;
3262 chunk_pos_ = 0;
3263 }
3264
3265 v8::OutputStream* stream_;
3266 int chunk_size_;
3267 ScopedVector<char> chunk_;
3268 int chunk_pos_;
3269 bool aborted_;
3270 };
3271
3272
// Number of array slots each serialized edge occupies in the JSON output:
// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Number of slots each serialized node occupies:
// type, name, id, self_size, children_index.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
3277
3278 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
3279 ASSERT(writer_ == NULL);
3280 writer_ = new OutputStreamWriter(stream);
3281
3282 HeapSnapshot* original_snapshot = NULL;
3283 if (snapshot_->RawSnapshotSize() >=
3284 SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
3285 // The snapshot is too big. Serialize a fake snapshot.
3286 original_snapshot = snapshot_;
3287 snapshot_ = CreateFakeSnapshot();
3288 }
3289
3290 SerializeImpl();
3291
3292 delete writer_;
3293 writer_ = NULL;
3294
3295 if (original_snapshot != NULL) {
3296 delete snapshot_;
3297 snapshot_ = original_snapshot;
3298 }
3299 }
3300
3301
// Builds a minimal replacement snapshot containing a single string entry
// that tells the user the real snapshot exceeded the serializable limit.
// The caller owns (and must delete) the returned snapshot.
HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
  HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
                                          HeapSnapshot::kFull,
                                          snapshot_->title(),
                                          snapshot_->uid());
  result->AddRootEntry();
  const char* text = snapshot_->collection()->names()->GetFormatted(
      "The snapshot is too big. "
      "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
      "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
      (snapshot_->RawSnapshotSize() + MB - 1) / MB);
  // Hang the message string off the root as element 1.
  HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
  result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
  result->FillChildren();
  return result;
}
3319
3320
// Writes the whole snapshot as one JSON object with four sections:
// "snapshot" (meta), "nodes", "edges" and "strings". Bails out as soon as
// the writer reports an abort from the embedder stream.
void HeapSnapshotJSONSerializer::SerializeImpl() {
  // Node references in edges are encoded relative to the root at index 0.
  ASSERT(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}
3343
3344
3345 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
3346 HashMap::Entry* cache_entry = strings_.Lookup(
3347 const_cast<char*>(s), ObjectHash(s), true);
3348 if (cache_entry->value == NULL) {
3349 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
3350 }
3351 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
3352 }
3353
3354
3355 static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
3356 int number_of_digits = 0;
3357 unsigned t = value;
3358 do {
3359 ++number_of_digits;
3360 } while (t /= 10);
3361
3362 buffer_pos += number_of_digits;
3363 int result = buffer_pos;
3364 do {
3365 int last_digit = value % 10;
3366 buffer[--buffer_pos] = '0' + last_digit;
3367 value /= 10;
3368 } while (value);
3369 return result;
3370 }
3371
3372
3373 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
3374 bool first_edge) {
3375 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
3376 static const int kBufferSize =
3377 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
3378 EmbeddedVector<char, kBufferSize> buffer;
3379 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
3380 || edge->type() == HeapGraphEdge::kHidden
3381 || edge->type() == HeapGraphEdge::kWeak
3382 ? edge->index() : GetStringId(edge->name());
3383 int buffer_pos = 0;
3384 if (!first_edge) {
3385 buffer[buffer_pos++] = ',';
3386 }
3387 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
3388 buffer[buffer_pos++] = ',';
3389 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
3390 buffer[buffer_pos++] = ',';
3391 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
3392 buffer[buffer_pos++] = '\n';
3393 buffer[buffer_pos++] = '\0';
3394 writer_->AddString(buffer.start());
3395 }
3396
3397
3398 void HeapSnapshotJSONSerializer::SerializeEdges() {
3399 List<HeapGraphEdge*>& edges = snapshot_->children();
3400 for (int i = 0; i < edges.length(); ++i) {
3401 ASSERT(i == 0 ||
3402 edges[i - 1]->from()->index() <= edges[i]->from()->index());
3403 SerializeEdge(edges[i], i == 0);
3404 if (writer_->aborted()) return;
3405 }
3406 }
3407
3408
3409 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
3410 // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
3411 static const int kBufferSize =
3412 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3413 + 5 + 1 + 1;
3414 EmbeddedVector<char, kBufferSize> buffer;
3415 int buffer_pos = 0;
3416 if (entry_index(entry) != 0) {
3417 buffer[buffer_pos++] = ',';
3418 }
3419 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
3420 buffer[buffer_pos++] = ',';
3421 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
3422 buffer[buffer_pos++] = ',';
3423 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
3424 buffer[buffer_pos++] = ',';
3425 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
3426 buffer[buffer_pos++] = ',';
3427 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
3428 buffer[buffer_pos++] = '\n';
3429 buffer[buffer_pos++] = '\0';
3430 writer_->AddString(buffer.start());
3431 }
3432
3433
3434 void HeapSnapshotJSONSerializer::SerializeNodes() {
3435 List<HeapEntry>& entries = snapshot_->entries();
3436 for (int i = 0; i < entries.length(); ++i) {
3437 SerializeNode(&entries[i]);
3438 if (writer_->aborted()) return;
3439 }
3440 }
3441
3442
// Emits the "snapshot" meta section: title, uid, the self-describing field
// layouts for nodes/edges, and the total node/edge counts. The meta object
// must stay in sync with SerializeNode/SerializeEdge and with the
// HeapEntry::Type / HeapGraphEdge::Type enum orders.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name") ","
        JSON_S("id") ","
        JSON_S("self_size") ","
        JSON_S("edge_count")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") ","
            JSON_S("array") ","
            JSON_S("string") ","
            JSON_S("object") ","
            JSON_S("code") ","
            JSON_S("closure") ","
            JSON_S("regexp") ","
            JSON_S("number") ","
            JSON_S("native") ","
            JSON_S("synthetic")) ","
        JSON_S("string") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") ","
            JSON_S("element") ","
            JSON_S("property") ","
            JSON_S("internal") ","
            JSON_S("hidden") ","
            JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
}
3503
3504
3505 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
3506 static const char hex_chars[] = "0123456789ABCDEF";
3507 w->AddString("\\u");
3508 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
3509 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
3510 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
3511 w->AddCharacter(hex_chars[u & 0xf]);
3512 }
3513
// Emits |s| as a quoted JSON string (preceded by a newline for readability
// of the output). Well-known escapes get their two-character forms; other
// control characters become \uXXXX; printable ASCII is copied verbatim;
// everything else is decoded as UTF-8 and re-emitted as \uXXXX, with '?'
// substituted for invalid sequences.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        // Quote and backslash share the same backslash-prefix escape.
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            ASSERT(cursor != 0);
            // Skip the extra continuation bytes consumed by the decoder.
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
3562
3563
3564 void HeapSnapshotJSONSerializer::SerializeStrings() {
3565 List<HashMap::Entry*> sorted_strings;
3566 SortHashMap(&strings_, &sorted_strings);
3567 writer_->AddString("\"<dummy>\"");
3568 for (int i = 0; i < sorted_strings.length(); ++i) {
3569 writer_->AddCharacter(',');
3570 SerializeString(
3571 reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
3572 if (writer_->aborted()) return;
3573 }
3574 }
3575
3576
// Three-way comparator ordering hash map entries by their value pointer
// interpreted as an unsigned integer (the assigned string id).
template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  const uintptr_t lhs = reinterpret_cast<uintptr_t>((*x)->value);
  const uintptr_t rhs = reinterpret_cast<uintptr_t>((*y)->value);
  if (lhs < rhs) return -1;
  return lhs == rhs ? 0 : 1;
}
3589
3590
3591 void HeapSnapshotJSONSerializer::SortHashMap(
3592 HashMap* map, List<HashMap::Entry*>* sorted_entries) {
3593 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
3594 sorted_entries->Add(p);
3595 sorted_entries->Sort(SortUsingEntryValue);
3596 }
3597
3598 } } // namespace v8::internal 945 } } // namespace v8::internal
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698