| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 831 matching lines...) |
| 842 } \ | 842 } \ |
| 843 } \ | 843 } \ |
| 844 if (within == kFirstInstruction) { \ | 844 if (within == kFirstInstruction) { \ |
| 845 Code* new_code_object = reinterpret_cast<Code*>(new_object); \ | 845 Code* new_code_object = reinterpret_cast<Code*>(new_object); \ |
| 846 new_object = reinterpret_cast<Object*>( \ | 846 new_object = reinterpret_cast<Object*>( \ |
| 847 new_code_object->instruction_start()); \ | 847 new_code_object->instruction_start()); \ |
| 848 } \ | 848 } \ |
| 849 if (how == kFromCode) { \ | 849 if (how == kFromCode) { \ |
| 850 Address location_of_branch_data = \ | 850 Address location_of_branch_data = \ |
| 851 reinterpret_cast<Address>(current); \ | 851 reinterpret_cast<Address>(current); \ |
| 852 Assembler::set_target_at(location_of_branch_data, \ | 852 Assembler::deserialization_set_special_target_at( \ |
| 853 reinterpret_cast<Address>(new_object)); \ | 853 location_of_branch_data, \ |
| 854 if (within == kFirstInstruction) { \ | 854 reinterpret_cast<Address>(new_object)); \ |
| 855 location_of_branch_data += Assembler::kCallTargetSize; \ | 855 location_of_branch_data += Assembler::kSpecialTargetSize; \ |
| 856 current = reinterpret_cast<Object**>(location_of_branch_data); \ | 856 current = reinterpret_cast<Object**>(location_of_branch_data); \ |
| 857 current_was_incremented = true; \ | 857 current_was_incremented = true; \ |
| 858 } \ | |
| 859 } else { \ | 858 } else { \ |
| 860 *current = new_object; \ | 859 *current = new_object; \ |
| 861 } \ | 860 } \ |
| 862 } \ | 861 } \ |
| 863 if (emit_write_barrier && write_barrier_needed) { \ | 862 if (emit_write_barrier && write_barrier_needed) { \ |
| 864 Address current_address = reinterpret_cast<Address>(current); \ | 863 Address current_address = reinterpret_cast<Address>(current); \ |
| 865 isolate->heap()->RecordWrite( \ | 864 isolate->heap()->RecordWrite( \ |
| 866 current_object_address, \ | 865 current_object_address, \ |
| 867 static_cast<int>(current_address - current_object_address)); \ | 866 static_cast<int>(current_address - current_object_address)); \ |
| 868 } \ | 867 } \ |
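The change above swaps Assembler::set_target_at for the new Assembler::deserialization_set_special_target_at (and kCallTargetSize for kSpecialTargetSize), so each port can define how a "special" target embedded in code gets patched during deserialization. On MIPS a 32-bit constant is materialized by a lui/ori instruction pair, so the patch rewrites two 16-bit immediates instead of storing a pointer-width literal. Below is a minimal self-contained sketch of that style of patch; the function name and register choice are illustrative only, and the real V8 routine additionally flushes the instruction cache:

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative sketch only: patch a MIPS lui/ori pair so it loads a new
// 32-bit target. Not V8's actual implementation.
void SetLuiOriTarget(uint32_t* pc, uint32_t target) {
  // lui rt, upper16 -- keep the opcode/register bits, replace the immediate.
  pc[0] = (pc[0] & 0xFFFF0000u) | (target >> 16);
  // ori rt, rt, lower16
  pc[1] = (pc[1] & 0xFFFF0000u) | (target & 0xFFFFu);
}

int main() {
  // lui $t9, 0x0000 ; ori $t9, $t9, 0x0000 (immediates still zero).
  uint32_t code[2] = {0x3C190000u, 0x37390000u};
  SetLuiOriTarget(code, 0x12345678u);
  std::printf("%08x %08x\n", code[0], code[1]);  // prints 3c191234 37395678
  return 0;
}
```

After the patch the pair loads 0x12345678 into $t9; the deserializer then uses the per-architecture kSpecialTargetSize to decide how far to advance past the patched slot.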
| (...skipping 115 matching lines...) |
| 984 // object. | 983 // object. |
| 985 ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject) | 984 ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject) |
| 986 // Support for direct instruction pointers in functions | 985 // Support for direct instruction pointers in functions |
| 987 ONE_PER_CODE_SPACE(kNewObject, kPlain, kFirstInstruction) | 986 ONE_PER_CODE_SPACE(kNewObject, kPlain, kFirstInstruction) |
| 988 // Deserialize a new code object and write a pointer to its first | 987 // Deserialize a new code object and write a pointer to its first |
| 989 // instruction to the current code object. | 988 // instruction to the current code object. |
| 990 ONE_PER_SPACE(kNewObject, kFromCode, kFirstInstruction) | 989 ONE_PER_SPACE(kNewObject, kFromCode, kFirstInstruction) |
| 991 // Find a recently deserialized object using its offset from the current | 990 // Find a recently deserialized object using its offset from the current |
| 992 // allocation point and write a pointer to it to the current object. | 991 // allocation point and write a pointer to it to the current object. |
| 993 ALL_SPACES(kBackref, kPlain, kStartOfObject) | 992 ALL_SPACES(kBackref, kPlain, kStartOfObject) |
| 993 #if V8_TARGET_ARCH_MIPS |
| 994 // Deserialize a new object from a pointer found in code and write |
| 995 // a pointer to it to the current object. Required only for MIPS, and |
| 996 // omitted on the other architectures because it is fully unrolled and |
| 997 // would cause bloat. |
| 998 ONE_PER_SPACE(kNewObject, kFromCode, kStartOfObject) |
| 999 // Find a recently deserialized code object using its offset from the |
| 1000 // current allocation point and write a pointer to it to the current |
| 1001 // object. Required only for MIPS. |
| 1002 ALL_SPACES(kBackref, kFromCode, kStartOfObject) |
| 1003 // Find an already deserialized code object using its offset from |
| 1004 // the start and write a pointer to it to the current object. |
| 1005 // Required only for MIPS. |
| 1006 ALL_SPACES(kFromStart, kFromCode, kStartOfObject) |
| 1007 #endif |
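These MIPS-only cases exist because on MIPS an object pointer referenced from code lives in instruction immediates rather than as an inline pointer-width literal, so kFromCode references can point at the start of an object and need their own deserializer cases; the case macros are fully unrolled per space, which is the bloat the comments mention avoiding on other architectures. A sketch of the read-back half of that patching, using the same assumed lui/ori encodings as the sketch above (helper name hypothetical):

```cpp
#include <cassert>
#include <cstdint>

// Sketch: recover the 32-bit pointer a MIPS lui/ori pair currently loads.
// A kFromCode/kStartOfObject case must read the old pointer out of the
// instructions before writing the relocated one back in.
uint32_t GetLuiOriTarget(const uint32_t* pc) {
  uint32_t upper = pc[0] & 0xFFFFu;  // 16-bit immediate of the lui
  uint32_t lower = pc[1] & 0xFFFFu;  // 16-bit immediate of the ori
  return (upper << 16) | lower;
}

int main() {
  const uint32_t code[2] = {0x3C191234u, 0x37395678u};  // lui/ori of $t9
  assert(GetLuiOriTarget(code) == 0x12345678u);
  return 0;
}
```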
| 994 // Find a recently deserialized code object using its offset from the | 1008 // Find a recently deserialized code object using its offset from the |
| 995 // current allocation point and write a pointer to its first instruction | 1009 // current allocation point and write a pointer to its first instruction |
| 996 // to the current code object or the instruction pointer in a function | 1010 // to the current code object or the instruction pointer in a function |
| 997 // object. | 1011 // object. |
| 998 ALL_SPACES(kBackref, kFromCode, kFirstInstruction) | 1012 ALL_SPACES(kBackref, kFromCode, kFirstInstruction) |
| 999 ALL_SPACES(kBackref, kPlain, kFirstInstruction) | 1013 ALL_SPACES(kBackref, kPlain, kFirstInstruction) |
| 1000 // Find an already deserialized object using its offset from the start | 1014 // Find an already deserialized object using its offset from the start |
| 1001 // and write a pointer to it to the current object. | 1015 // and write a pointer to it to the current object. |
| 1002 ALL_SPACES(kFromStart, kPlain, kStartOfObject) | 1016 ALL_SPACES(kFromStart, kPlain, kStartOfObject) |
| 1003 ALL_SPACES(kFromStart, kPlain, kFirstInstruction) | 1017 ALL_SPACES(kFromStart, kPlain, kFirstInstruction) |
| (...skipping 218 matching lines...) |
| 1222 startup_serializer_->VisitPointer( | 1236 startup_serializer_->VisitPointer( |
| 1223 &isolate->serialize_partial_snapshot_cache()[length]); | 1237 &isolate->serialize_partial_snapshot_cache()[length]); |
| 1224 // We don't recurse from the startup snapshot generator into the partial | 1238 // We don't recurse from the startup snapshot generator into the partial |
| 1225 // snapshot generator. | 1239 // snapshot generator. |
| 1226 ASSERT(length == isolate->serialize_partial_snapshot_cache_length()); | 1240 ASSERT(length == isolate->serialize_partial_snapshot_cache_length()); |
| 1227 isolate->set_serialize_partial_snapshot_cache_length(length + 1); | 1241 isolate->set_serialize_partial_snapshot_cache_length(length + 1); |
| 1228 return length; | 1242 return length; |
| 1229 } | 1243 } |
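The tail of PartialSerializer::PartialSnapshotCacheIndex shown above appends a newly seen object to the partial snapshot cache, lets the startup serializer visit the new slot exactly once (the ASSERT checks that visit did not grow the cache recursively), and returns the slot index. Stripped of V8 types, this is a plain intern-and-return-index pattern; a hedged sketch with placeholder types, assuming the skipped lines perform the usual scan-before-append:

```cpp
#include <vector>

// Minimal sketch of the intern-and-return-index pattern: return the slot of
// an already-cached value, otherwise append it and return the new slot.
template <typename T>
int InternIndex(std::vector<T>* cache, const T& value) {
  for (int i = 0; i < static_cast<int>(cache->size()); i++) {
    if ((*cache)[i] == value) return i;  // already interned
  }
  cache->push_back(value);  // new entry gets serialized via its cache slot
  return static_cast<int>(cache->size()) - 1;
}
```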
| 1230 | 1244 |
| 1231 | 1245 |
| 1232 int Serializer::RootIndex(HeapObject* heap_object) { | 1246 int Serializer::RootIndex(HeapObject* heap_object, HowToCode from) { |
| 1233 Heap* heap = HEAP; | 1247 Heap* heap = HEAP; |
| 1234 if (heap->InNewSpace(heap_object)) return kInvalidRootIndex; | 1248 if (heap->InNewSpace(heap_object)) return kInvalidRootIndex; |
| 1235 for (int i = 0; i < root_index_wave_front_; i++) { | 1249 for (int i = 0; i < root_index_wave_front_; i++) { |
| 1236 Object* root = heap->roots_array_start()[i]; | 1250 Object* root = heap->roots_array_start()[i]; |
| 1237 if (!root->IsSmi() && root == heap_object) return i; | 1251 if (!root->IsSmi() && root == heap_object) { |
| 1252 #if V8_TARGET_ARCH_MIPS |
| 1253 if (from == kFromCode) { |
| 1254 // In order to avoid code bloat in the deserializer we don't have |
| 1255 // support for the encoding that specifies a particular root should |
| 1256 // be written into the lui/ori instructions on MIPS. Therefore we |
| 1257 // should not generate such serialization data for MIPS. |
| 1258 return kInvalidRootIndex; |
| 1259 } |
| 1260 #endif |
| 1261 return i; |
| 1262 } |
| 1238 } | 1263 } |
| 1239 return kInvalidRootIndex; | 1264 return kInvalidRootIndex; |
| 1240 } | 1265 } |
| 1241 | 1266 |
| 1242 | 1267 |
| 1243 // Encode the location of an already deserialized object in order to write its | 1268 // Encode the location of an already deserialized object in order to write its |
| 1244 // location into a later object. We can encode the location as an offset from | 1269 // location into a later object. We can encode the location as an offset from |
| 1245 // the start of the deserialized objects or as an offset backwards from the | 1270 // the start of the deserialized objects or as an offset backwards from the |
| 1246 // current allocation pointer. | 1271 // current allocation pointer. |
| 1247 void Serializer::SerializeReferenceToPreviousObject( | 1272 void Serializer::SerializeReferenceToPreviousObject( |
| (...skipping 32 matching lines...) |
| 1280 | 1305 |
| 1281 | 1306 |
| 1282 void StartupSerializer::SerializeObject( | 1307 void StartupSerializer::SerializeObject( |
| 1283 Object* o, | 1308 Object* o, |
| 1284 HowToCode how_to_code, | 1309 HowToCode how_to_code, |
| 1285 WhereToPoint where_to_point) { | 1310 WhereToPoint where_to_point) { |
| 1286 CHECK(o->IsHeapObject()); | 1311 CHECK(o->IsHeapObject()); |
| 1287 HeapObject* heap_object = HeapObject::cast(o); | 1312 HeapObject* heap_object = HeapObject::cast(o); |
| 1288 | 1313 |
| 1289 int root_index; | 1314 int root_index; |
| 1290 if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) { | 1315 if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) { |
| 1291 PutRoot(root_index, heap_object, how_to_code, where_to_point); | 1316 PutRoot(root_index, heap_object, how_to_code, where_to_point); |
| 1292 return; | 1317 return; |
| 1293 } | 1318 } |
| 1294 | 1319 |
| 1295 if (address_mapper_.IsMapped(heap_object)) { | 1320 if (address_mapper_.IsMapped(heap_object)) { |
| 1296 int space = SpaceOfAlreadySerializedObject(heap_object); | 1321 int space = SpaceOfAlreadySerializedObject(heap_object); |
| 1297 int address = address_mapper_.MappedTo(heap_object); | 1322 int address = address_mapper_.MappedTo(heap_object); |
| 1298 SerializeReferenceToPreviousObject(space, | 1323 SerializeReferenceToPreviousObject(space, |
| 1299 address, | 1324 address, |
| 1300 how_to_code, | 1325 how_to_code, |
| (...skipping 51 matching lines...) |
| 1352 HeapObject* heap_object = HeapObject::cast(o); | 1377 HeapObject* heap_object = HeapObject::cast(o); |
| 1353 | 1378 |
| 1354 if (heap_object->IsMap()) { | 1379 if (heap_object->IsMap()) { |
| 1355 // The code-caches link to context-specific code objects, which | 1380 // The code-caches link to context-specific code objects, which |
| 1356 // the startup and context serializers cannot currently handle. | 1381 // the startup and context serializers cannot currently handle. |
| 1357 ASSERT(Map::cast(heap_object)->code_cache() == | 1382 ASSERT(Map::cast(heap_object)->code_cache() == |
| 1358 heap_object->GetHeap()->raw_unchecked_empty_fixed_array()); | 1383 heap_object->GetHeap()->raw_unchecked_empty_fixed_array()); |
| 1359 } | 1384 } |
| 1360 | 1385 |
| 1361 int root_index; | 1386 int root_index; |
| 1362 if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) { | 1387 if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) { |
| 1363 PutRoot(root_index, heap_object, how_to_code, where_to_point); | 1388 PutRoot(root_index, heap_object, how_to_code, where_to_point); |
| 1364 return; | 1389 return; |
| 1365 } | 1390 } |
| 1366 | 1391 |
| 1367 if (ShouldBeInThePartialSnapshotCache(heap_object)) { | 1392 if (ShouldBeInThePartialSnapshotCache(heap_object)) { |
| 1368 int cache_index = PartialSnapshotCacheIndex(heap_object); | 1393 int cache_index = PartialSnapshotCacheIndex(heap_object); |
| 1369 sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point, | 1394 sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point, |
| 1370 "PartialSnapshotCache"); | 1395 "PartialSnapshotCache"); |
| 1371 sink_->PutInt(cache_index, "partial_snapshot_cache_index"); | 1396 sink_->PutInt(cache_index, "partial_snapshot_cache_index"); |
| 1372 return; | 1397 return; |
| (...skipping 59 matching lines...) |
| 1432 | 1457 |
| 1433 void Serializer::ObjectSerializer::VisitPointers(Object** start, | 1458 void Serializer::ObjectSerializer::VisitPointers(Object** start, |
| 1434 Object** end) { | 1459 Object** end) { |
| 1435 Object** current = start; | 1460 Object** current = start; |
| 1436 while (current < end) { | 1461 while (current < end) { |
| 1437 while (current < end && (*current)->IsSmi()) current++; | 1462 while (current < end && (*current)->IsSmi()) current++; |
| 1438 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); | 1463 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); |
| 1439 | 1464 |
| 1440 while (current < end && !(*current)->IsSmi()) { | 1465 while (current < end && !(*current)->IsSmi()) { |
| 1441 HeapObject* current_contents = HeapObject::cast(*current); | 1466 HeapObject* current_contents = HeapObject::cast(*current); |
| 1442 int root_index = serializer_->RootIndex(current_contents); | 1467 int root_index = serializer_->RootIndex(current_contents, kPlain); |
| 1443 // Repeats are not subject to the write barrier so there are only some | 1468 // Repeats are not subject to the write barrier so there are only some |
| 1444 // objects that can be used in a repeat encoding. These are the early | 1469 // objects that can be used in a repeat encoding. These are the early |
| 1445 // ones in the root array that are never in new space. | 1470 // ones in the root array that are never in new space. |
| 1446 if (current != start && | 1471 if (current != start && |
| 1447 root_index != kInvalidRootIndex && | 1472 root_index != kInvalidRootIndex && |
| 1448 root_index < kRootArrayNumberOfConstantEncodings && | 1473 root_index < kRootArrayNumberOfConstantEncodings && |
| 1449 current_contents == current[-1]) { | 1474 current_contents == current[-1]) { |
| 1450 ASSERT(!HEAP->InNewSpace(current_contents)); | 1475 ASSERT(!HEAP->InNewSpace(current_contents)); |
| 1451 int repeat_count = 1; | 1476 int repeat_count = 1; |
| 1452 while (current < end - 1 && current[repeat_count] == current_contents) { | 1477 while (current < end - 1 && current[repeat_count] == current_contents) { |
| (...skipping 221 matching lines...) |
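The VisitPointers hunk above implements the repeat optimization: a run of consecutive identical pointers collapses into a single repeat opcode, which is only legal for values that never need a write barrier (the early, never-in-new-space roots, hence the kRootArrayNumberOfConstantEncodings check). The counting step as a self-contained sketch; note the sketch bounds the scan with current + repeat_count < end explicitly, whereas the loop above writes its bound as current < end - 1:

```cpp
#include <cassert>

// Sketch of repeat detection: count how many consecutive slots starting at
// current hold the same pointer, so one repeat opcode can replace the run.
// Eligibility (constant root, not in new space) is the caller's job.
int CountRepeats(void* const* current, void* const* end) {
  int repeat_count = 1;
  while (current + repeat_count < end &&
         current[repeat_count] == current[0]) {
    repeat_count++;
  }
  return repeat_count;
}

int main() {
  int x = 0, y = 0;
  void* slots[5] = {&x, &x, &x, &y, &x};
  assert(CountRepeats(slots, slots + 5) == 3);  // leading run of three &x
  return 0;
}
```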
| 1674 int Serializer::SpaceAreaSize(int space) { | 1699 int Serializer::SpaceAreaSize(int space) { |
| 1675 if (space == CODE_SPACE) { | 1700 if (space == CODE_SPACE) { |
| 1676 return isolate_->memory_allocator()->CodePageAreaSize(); | 1701 return isolate_->memory_allocator()->CodePageAreaSize(); |
| 1677 } else { | 1702 } else { |
| 1678 return Page::kPageSize - Page::kObjectStartOffset; | 1703 return Page::kPageSize - Page::kObjectStartOffset; |
| 1679 } | 1704 } |
| 1680 } | 1705 } |
| 1681 | 1706 |
| 1682 | 1707 |
| 1683 } } // namespace v8::internal | 1708 } } // namespace v8::internal |