| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1324 matching lines...) |
| 1335 // Returns the adjusted value. | 1335 // Returns the adjusted value. |
| 1336 inline intptr_t AdjustAmountOfExternalAllocatedMemory( | 1336 inline intptr_t AdjustAmountOfExternalAllocatedMemory( |
| 1337 intptr_t change_in_bytes); | 1337 intptr_t change_in_bytes); |
| 1338 | 1338 |
| 1339 // Allocate uninitialized fixed array. | 1339 // Allocate uninitialized fixed array. |
| 1340 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length); | 1340 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length); |
| 1341 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length, | 1341 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length, |
| 1342 PretenureFlag pretenure); | 1342 PretenureFlag pretenure); |
| 1343 | 1343 |
| 1344 inline intptr_t PromotedTotalSize() { | 1344 inline intptr_t PromotedTotalSize() { |
| 1345 return PromotedSpaceSize() + PromotedExternalMemorySize(); | 1345 return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); |
| 1346 } | 1346 } |
| 1347 | 1347 |
| 1348 // True if we have reached the allocation limit in the old generation that | 1348 // True if we have reached the allocation limit in the old generation that |
| 1349 // should force the next GC (caused normally) to be a full one. | 1349 // should force the next GC (caused normally) to be a full one. |
| 1350 inline bool OldGenerationPromotionLimitReached() { | 1350 inline bool OldGenerationPromotionLimitReached() { |
| 1351 return PromotedTotalSize() > old_gen_promotion_limit_; | 1351 return PromotedTotalSize() > old_gen_promotion_limit_; |
| 1352 } | 1352 } |
| 1353 | 1353 |
| 1354 inline intptr_t OldGenerationSpaceAvailable() { | 1354 inline intptr_t OldGenerationSpaceAvailable() { |
| 1355 return old_gen_allocation_limit_ - PromotedTotalSize(); | 1355 return old_gen_allocation_limit_ - PromotedTotalSize(); |
| 1356 } | 1356 } |
| 1357 | 1357 |
| 1358 inline intptr_t OldGenerationCapacityAvailable() { | 1358 inline intptr_t OldGenerationCapacityAvailable() { |
| 1359 return max_old_generation_size_ - PromotedTotalSize(); | 1359 return max_old_generation_size_ - PromotedTotalSize(); |
| 1360 } | 1360 } |
| 1361 | 1361 |
| 1362 static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize; | 1362 static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize; |
| 1363 static const intptr_t kMinimumAllocationLimit = | 1363 static const intptr_t kMinimumAllocationLimit = |
| 1364 8 * (Page::kPageSize > MB ? Page::kPageSize : MB); | 1364 8 * (Page::kPageSize > MB ? Page::kPageSize : MB); |
| 1365 | 1365 |
| 1366 // When we sweep lazily we initially guess that there is no garbage on the | |
| 1367 // heap and set the limits for the next GC accordingly. As we sweep we find | |
| 1368 // out that some of the pages contained garbage and we have to adjust | |
| 1369 // downwards the size of the heap. This means the limits that control the | |
| 1370 // timing of the next GC also need to be adjusted downwards. | |
| 1371 void LowerOldGenLimits(intptr_t adjustment) { | |
| 1372 size_of_old_gen_at_last_old_space_gc_ -= adjustment; | |
| 1373 old_gen_promotion_limit_ = | |
| 1374 OldGenPromotionLimit(size_of_old_gen_at_last_old_space_gc_); | |
| 1375 old_gen_allocation_limit_ = | |
| 1376 OldGenAllocationLimit(size_of_old_gen_at_last_old_space_gc_); | |
| 1377 } | |
| 1378 | |
| 1379 intptr_t OldGenPromotionLimit(intptr_t old_gen_size) { | 1366 intptr_t OldGenPromotionLimit(intptr_t old_gen_size) { |
| 1380 const int divisor = FLAG_stress_compaction ? 10 : 3; | 1367 const int divisor = FLAG_stress_compaction ? 10 : 3; |
| 1381 intptr_t limit = | 1368 intptr_t limit = |
| 1382 Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit); | 1369 Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit); |
| 1383 limit += new_space_.Capacity(); | 1370 limit += new_space_.Capacity(); |
| 1384 limit *= old_gen_limit_factor_; | 1371 limit *= old_gen_limit_factor_; |
| 1385 intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2; | 1372 intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2; |
| 1386 return Min(limit, halfway_to_the_max); | 1373 return Min(limit, halfway_to_the_max); |
| 1387 } | 1374 } |
| 1388 | 1375 |
| (...skipping 72 matching lines...) |
| 1461 intptr_t total_promoted = PromotedTotalSize(); | 1448 intptr_t total_promoted = PromotedTotalSize(); |
| 1462 | 1449 |
| 1463 intptr_t adjusted_promotion_limit = | 1450 intptr_t adjusted_promotion_limit = |
| 1464 old_gen_promotion_limit_ - new_space_.Capacity(); | 1451 old_gen_promotion_limit_ - new_space_.Capacity(); |
| 1465 | 1452 |
| 1466 if (total_promoted >= adjusted_promotion_limit) return true; | 1453 if (total_promoted >= adjusted_promotion_limit) return true; |
| 1467 | 1454 |
| 1468 intptr_t adjusted_allocation_limit = | 1455 intptr_t adjusted_allocation_limit = |
| 1469 old_gen_allocation_limit_ - new_space_.Capacity() / 5; | 1456 old_gen_allocation_limit_ - new_space_.Capacity() / 5; |
| 1470 | 1457 |
| 1471 if (PromotedSpaceSize() >= adjusted_allocation_limit) return true; | 1458 if (PromotedSpaceSizeOfObjects() >= adjusted_allocation_limit) return true; |
| 1472 | 1459 |
| 1473 return false; | 1460 return false; |
| 1474 } | 1461 } |
| 1475 | 1462 |
| 1476 | 1463 |
| 1477 void UpdateNewSpaceReferencesInExternalStringTable( | 1464 void UpdateNewSpaceReferencesInExternalStringTable( |
| 1478 ExternalStringTableUpdaterCallback updater_func); | 1465 ExternalStringTableUpdaterCallback updater_func); |
| 1479 | 1466 |
| 1480 void UpdateReferencesInExternalStringTable( | 1467 void UpdateReferencesInExternalStringTable( |
| 1481 ExternalStringTableUpdaterCallback updater_func); | 1468 ExternalStringTableUpdaterCallback updater_func); |
| (...skipping 17 matching lines...) |
| 1499 // Clears the cache of ICs related to this map. | 1486 // Clears the cache of ICs related to this map. |
| 1500 void ClearCacheOnMap(Map* map) { | 1487 void ClearCacheOnMap(Map* map) { |
| 1501 if (FLAG_cleanup_code_caches_at_gc) { | 1488 if (FLAG_cleanup_code_caches_at_gc) { |
| 1502 map->ClearCodeCache(this); | 1489 map->ClearCodeCache(this); |
| 1503 } | 1490 } |
| 1504 } | 1491 } |
| 1505 | 1492 |
| 1506 GCTracer* tracer() { return tracer_; } | 1493 GCTracer* tracer() { return tracer_; } |
| 1507 | 1494 |
| 1508 // Returns the size of objects residing in non new spaces. | 1495 // Returns the size of objects residing in non new spaces. |
| 1509 intptr_t PromotedSpaceSize(); | |
| 1510 intptr_t PromotedSpaceSizeOfObjects(); | 1496 intptr_t PromotedSpaceSizeOfObjects(); |
| 1511 | 1497 |
| 1512 double total_regexp_code_generated() { return total_regexp_code_generated_; } | 1498 double total_regexp_code_generated() { return total_regexp_code_generated_; } |
| 1513 void IncreaseTotalRegexpCodeGenerated(int size) { | 1499 void IncreaseTotalRegexpCodeGenerated(int size) { |
| 1514 total_regexp_code_generated_ += size; | 1500 total_regexp_code_generated_ += size; |
| 1515 } | 1501 } |
| 1516 | 1502 |
| 1517 // Returns maximum GC pause. | 1503 // Returns maximum GC pause. |
| 1518 int get_max_gc_pause() { return max_gc_pause_; } | 1504 int get_max_gc_pause() { return max_gc_pause_; } |
| 1519 | 1505 |
| (...skipping 1195 matching lines...) |
| 2715 AssertNoAllocation no_alloc; // i.e. no gc allowed. | 2701 AssertNoAllocation no_alloc; // i.e. no gc allowed. |
| 2716 | 2702 |
| 2717 private: | 2703 private: |
| 2718 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2704 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2719 }; | 2705 }; |
| 2720 #endif // DEBUG || LIVE_OBJECT_LIST | 2706 #endif // DEBUG || LIVE_OBJECT_LIST |
| 2721 | 2707 |
| 2722 } } // namespace v8::internal | 2708 } } // namespace v8::internal |
| 2723 | 2709 |
| 2724 #endif // V8_HEAP_H_ | 2710 #endif // V8_HEAP_H_ |
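
Reviewer note: for anyone skimming the limit arithmetic in this CL, below is a minimal standalone sketch of how OldGenPromotionLimit behaves after the change. The constants (1 MB pages, a 700 MB max old generation, a limit factor of 1, an 8 MB new space) are hypothetical stand-ins for Page::kPageSize, max_old_generation_size_, old_gen_limit_factor_ and new_space_.Capacity(); they are not values taken from this CL, and the snippet is not V8 code.

```cpp
// Standalone sketch (not V8 code): mirrors the promotion-limit math from the
// diff above, with made-up constants standing in for the heap's fields.
#include <algorithm>
#include <cstdint>
#include <cstdio>

namespace {

const int64_t kMB = 1024 * 1024;
const int64_t kPageSize = 1 * kMB;                     // stand-in for Page::kPageSize
const int64_t kMinimumPromotionLimit = 5 * kPageSize;  // same shape as the constant in the diff
const int64_t kMaxOldGenerationSize = 700 * kMB;       // hypothetical max_old_generation_size_
const int64_t kNewSpaceCapacity = 8 * kMB;             // hypothetical new_space_.Capacity()
const int64_t kOldGenLimitFactor = 1;                  // hypothetical old_gen_limit_factor_
const bool kStressCompaction = false;                  // stand-in for FLAG_stress_compaction

// Mirrors Heap::OldGenPromotionLimit as it reads in the new version of heap.h.
int64_t OldGenPromotionLimit(int64_t old_gen_size) {
  const int divisor = kStressCompaction ? 10 : 3;
  int64_t limit =
      std::max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit);
  limit += kNewSpaceCapacity;
  limit *= kOldGenLimitFactor;
  int64_t halfway_to_the_max = (old_gen_size + kMaxOldGenerationSize) / 2;
  return std::min(limit, halfway_to_the_max);
}

}  // namespace

int main() {
  // With 60 MB of promoted objects after the last old-space GC, the next full
  // GC is requested once PromotedTotalSize() exceeds 60 + 20 + 8 = 88 MB
  // (capped at halfway to the 700 MB maximum, i.e. 380 MB, which is not hit).
  int64_t old_gen_size = 60 * kMB;
  std::printf("promotion limit: %lld MB\n",
              static_cast<long long>(OldGenPromotionLimit(old_gen_size) / kMB));
  return 0;
}
```

The point of the sketch is only to show that, with PromotedSpaceSize() gone, the same limit is now compared against PromotedSpaceSizeOfObjects() plus external memory, so the trigger tracks live object size rather than page-granular space size.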