OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 564 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
575 int map_space_size, | 575 int map_space_size, |
576 int cell_space_size, | 576 int cell_space_size, |
577 int large_object_size) { | 577 int large_object_size) { |
578 NewSpace* new_space = Heap::new_space(); | 578 NewSpace* new_space = Heap::new_space(); |
579 PagedSpace* old_pointer_space = Heap::old_pointer_space(); | 579 PagedSpace* old_pointer_space = Heap::old_pointer_space(); |
580 PagedSpace* old_data_space = Heap::old_data_space(); | 580 PagedSpace* old_data_space = Heap::old_data_space(); |
581 PagedSpace* code_space = Heap::code_space(); | 581 PagedSpace* code_space = Heap::code_space(); |
582 PagedSpace* map_space = Heap::map_space(); | 582 PagedSpace* map_space = Heap::map_space(); |
583 PagedSpace* cell_space = Heap::cell_space(); | 583 PagedSpace* cell_space = Heap::cell_space(); |
584 LargeObjectSpace* lo_space = Heap::lo_space(); | 584 LargeObjectSpace* lo_space = Heap::lo_space(); |
585 bool one_old_space_gc_has_been_performed = false; | |
585 bool gc_performed = true; | 586 bool gc_performed = true; |
586 int counter = 0; | 587 int counter = 0; |
587 static const int kThreshold = 20; | 588 static const int kThreshold = 20; |
589 bool old_space_gc_performed; | |
590 | |
588 while (gc_performed && counter++ < kThreshold) { | 591 while (gc_performed && counter++ < kThreshold) { |
Erik Corry
2012/01/31 10:44:44
This function was changed in response to the last
| |
592 old_space_gc_performed = false; | |
589 gc_performed = false; | 593 gc_performed = false; |
590 if (!new_space->ReserveSpace(new_space_size)) { | 594 if (!new_space->ReserveSpace(new_space_size)) { |
591 Heap::CollectGarbage(NEW_SPACE); | 595 Heap::CollectGarbage(NEW_SPACE); |
592 gc_performed = true; | 596 gc_performed = true; |
593 } | 597 } |
594 if (!old_pointer_space->ReserveSpace(pointer_space_size)) { | 598 if (!old_pointer_space->ReserveSpace(pointer_space_size)) { |
595 Heap::CollectGarbage(OLD_POINTER_SPACE); | 599 Heap::CollectGarbage(OLD_POINTER_SPACE); |
596 gc_performed = true; | 600 gc_performed = true; |
601 old_space_gc_performed = true; | |
597 } | 602 } |
598 if (!(old_data_space->ReserveSpace(data_space_size))) { | 603 if (!(old_data_space->ReserveSpace(data_space_size))) { |
599 Heap::CollectGarbage(OLD_DATA_SPACE); | 604 Heap::CollectGarbage(OLD_DATA_SPACE); |
600 gc_performed = true; | 605 gc_performed = true; |
606 old_space_gc_performed = true; | |
601 } | 607 } |
602 if (!(code_space->ReserveSpace(code_space_size))) { | 608 if (!(code_space->ReserveSpace(code_space_size))) { |
603 Heap::CollectGarbage(CODE_SPACE); | 609 Heap::CollectGarbage(CODE_SPACE); |
604 gc_performed = true; | 610 gc_performed = true; |
611 old_space_gc_performed = true; | |
605 } | 612 } |
606 if (!(map_space->ReserveSpace(map_space_size))) { | 613 if (!(map_space->ReserveSpace(map_space_size))) { |
607 Heap::CollectGarbage(MAP_SPACE); | 614 Heap::CollectGarbage(MAP_SPACE); |
608 gc_performed = true; | 615 gc_performed = true; |
616 old_space_gc_performed = true; | |
609 } | 617 } |
610 if (!(cell_space->ReserveSpace(cell_space_size))) { | 618 if (!(cell_space->ReserveSpace(cell_space_size))) { |
611 Heap::CollectGarbage(CELL_SPACE); | 619 Heap::CollectGarbage(CELL_SPACE); |
612 gc_performed = true; | 620 gc_performed = true; |
621 old_space_gc_performed = true; | |
613 } | 622 } |
614 // We add a slack-factor of 2 in order to have space for a series of | 623 // We add a slack-factor of 2 in order to have space for a series of |
615 // large-object allocations that are only just larger than the page size. | 624 // large-object allocations that are only just larger than the page size. |
616 large_object_size *= 2; | 625 large_object_size *= 2; |
617 // The ReserveSpace method on the large object space checks how much | 626 // The ReserveSpace method on the large object space checks how much |
618 // we can expand the old generation. This includes expansion caused by | 627 // we can expand the old generation. This includes expansion caused by |
619 // allocation in the other spaces. | 628 // allocation in the other spaces. |
620 large_object_size += cell_space_size + map_space_size + code_space_size + | 629 large_object_size += cell_space_size + map_space_size + code_space_size + |
621 data_space_size + pointer_space_size; | 630 data_space_size + pointer_space_size; |
622 if (!(lo_space->ReserveSpace(large_object_size))) { | 631 |
632 // If we already did one GC in order to make space in old space, there is | |
633 // no sense in doing another one. We will attempt to force through the | |
634 // large object space allocation, which comes directly from the OS, | |
635 // regardless of any soft limit. | |
636 if (!one_old_space_gc_has_been_performed && | |
637 !(lo_space->ReserveSpace(large_object_size))) { | |
623 Heap::CollectGarbage(LO_SPACE); | 638 Heap::CollectGarbage(LO_SPACE); |
624 gc_performed = true; | 639 gc_performed = true; |
625 } | 640 } |
641 if (old_space_gc_performed) one_old_space_gc_has_been_performed = true; | |
626 } | 642 } |
627 | 643 |
628 if (gc_performed) { | 644 if (gc_performed) { |
629 // Failed to reserve the space after several attempts. | 645 // Failed to reserve the space after several attempts. |
630 V8::FatalProcessOutOfMemory("Heap::ReserveSpace"); | 646 V8::FatalProcessOutOfMemory("Heap::ReserveSpace"); |
Vyacheslav Egorov (Chromium)
2012/01/31 11:21:22
accidental edit?
| |
631 } | 647 } |
632 } | 648 } |
633 | 649 |
634 | 650 |
635 void Heap::EnsureFromSpaceIsCommitted() { | 651 void Heap::EnsureFromSpaceIsCommitted() { |
636 if (new_space_.CommitFromSpaceIfNeeded()) return; | 652 if (new_space_.CommitFromSpaceIfNeeded()) return; |
637 | 653 |
638 // Committing memory to from space failed. | 654 // Committing memory to from space failed. |
639 // Try shrinking and try again. | 655 // Try shrinking and try again. |
640 Shrink(); | 656 Shrink(); |
(...skipping 6197 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6838 isolate_->heap()->store_buffer()->Compact(); | 6854 isolate_->heap()->store_buffer()->Compact(); |
6839 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6855 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6840 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6856 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6841 next = chunk->next_chunk(); | 6857 next = chunk->next_chunk(); |
6842 isolate_->memory_allocator()->Free(chunk); | 6858 isolate_->memory_allocator()->Free(chunk); |
6843 } | 6859 } |
6844 chunks_queued_for_free_ = NULL; | 6860 chunks_queued_for_free_ = NULL; |
6845 } | 6861 } |
6846 | 6862 |
6847 } } // namespace v8::internal | 6863 } } // namespace v8::internal |
OLD | NEW |