OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 560 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
571 | 571 |
572 | 572 |
// Debug-only consistency pass over the symbol table: iterates every element
// with a SymbolTableVerifier. Compiles to an empty function in release
// builds, so callers may invoke it unconditionally.
static void VerifySymbolTable() {
#ifdef DEBUG
  SymbolTableVerifier verifier;
  // NOTE(review): HEAP presumably expands to the current isolate's heap --
  // confirm against the macro definition in isolate.h.
  HEAP->symbol_table()->IterateElements(&verifier);
#endif  // DEBUG
}
579 | 579 |
580 | 580 |
| 581 static bool AbortIncrementalMarkingAndCollectGarbage( |
| 582 Heap* heap, |
| 583 AllocationSpace space, |
| 584 const char* gc_reason = NULL) { |
| 585 heap->mark_compact_collector()->SetFlags(Heap::kMakeHeapIterableMask); |
| 586 bool result = heap->CollectGarbage(space, gc_reason); |
| 587 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags); |
| 588 return result; |
| 589 } |
| 590 |
| 591 |
581 void Heap::ReserveSpace( | 592 void Heap::ReserveSpace( |
582 int new_space_size, | 593 int new_space_size, |
583 int pointer_space_size, | 594 int pointer_space_size, |
584 int data_space_size, | 595 int data_space_size, |
585 int code_space_size, | 596 int code_space_size, |
586 int map_space_size, | 597 int map_space_size, |
587 int cell_space_size, | 598 int cell_space_size, |
588 int large_object_size) { | 599 int large_object_size) { |
589 NewSpace* new_space = Heap::new_space(); | 600 NewSpace* new_space = Heap::new_space(); |
590 PagedSpace* old_pointer_space = Heap::old_pointer_space(); | 601 PagedSpace* old_pointer_space = Heap::old_pointer_space(); |
591 PagedSpace* old_data_space = Heap::old_data_space(); | 602 PagedSpace* old_data_space = Heap::old_data_space(); |
592 PagedSpace* code_space = Heap::code_space(); | 603 PagedSpace* code_space = Heap::code_space(); |
593 PagedSpace* map_space = Heap::map_space(); | 604 PagedSpace* map_space = Heap::map_space(); |
594 PagedSpace* cell_space = Heap::cell_space(); | 605 PagedSpace* cell_space = Heap::cell_space(); |
595 LargeObjectSpace* lo_space = Heap::lo_space(); | 606 LargeObjectSpace* lo_space = Heap::lo_space(); |
596 bool gc_performed = true; | 607 bool gc_performed = true; |
597 int counter = 0; | 608 int counter = 0; |
598 static const int kThreshold = 20; | 609 static const int kThreshold = 20; |
599 while (gc_performed && counter++ < kThreshold) { | 610 while (gc_performed && counter++ < kThreshold) { |
600 gc_performed = false; | 611 gc_performed = false; |
601 if (!new_space->ReserveSpace(new_space_size)) { | 612 if (!new_space->ReserveSpace(new_space_size)) { |
602 Heap::CollectGarbage(NEW_SPACE, | 613 Heap::CollectGarbage(NEW_SPACE, |
603 "failed to reserve space in the new space"); | 614 "failed to reserve space in the new space"); |
604 gc_performed = true; | 615 gc_performed = true; |
605 } | 616 } |
606 if (!old_pointer_space->ReserveSpace(pointer_space_size)) { | 617 if (!old_pointer_space->ReserveSpace(pointer_space_size)) { |
607 Heap::CollectGarbage(OLD_POINTER_SPACE, | 618 AbortIncrementalMarkingAndCollectGarbage(this, OLD_POINTER_SPACE, |
608 "failed to reserve space in the old pointer space"); | 619 "failed to reserve space in the old pointer space"); |
609 gc_performed = true; | 620 gc_performed = true; |
610 } | 621 } |
611 if (!(old_data_space->ReserveSpace(data_space_size))) { | 622 if (!(old_data_space->ReserveSpace(data_space_size))) { |
612 Heap::CollectGarbage(OLD_DATA_SPACE, | 623 AbortIncrementalMarkingAndCollectGarbage(this, OLD_DATA_SPACE, |
613 "failed to reserve space in the old data space"); | 624 "failed to reserve space in the old data space"); |
614 gc_performed = true; | 625 gc_performed = true; |
615 } | 626 } |
616 if (!(code_space->ReserveSpace(code_space_size))) { | 627 if (!(code_space->ReserveSpace(code_space_size))) { |
617 Heap::CollectGarbage(CODE_SPACE, | 628 AbortIncrementalMarkingAndCollectGarbage(this, CODE_SPACE, |
618 "failed to reserve space in the code space"); | 629 "failed to reserve space in the code space"); |
619 gc_performed = true; | 630 gc_performed = true; |
620 } | 631 } |
621 if (!(map_space->ReserveSpace(map_space_size))) { | 632 if (!(map_space->ReserveSpace(map_space_size))) { |
622 Heap::CollectGarbage(MAP_SPACE, | 633 AbortIncrementalMarkingAndCollectGarbage(this, MAP_SPACE, |
623 "failed to reserve space in the map space"); | 634 "failed to reserve space in the map space"); |
624 gc_performed = true; | 635 gc_performed = true; |
625 } | 636 } |
626 if (!(cell_space->ReserveSpace(cell_space_size))) { | 637 if (!(cell_space->ReserveSpace(cell_space_size))) { |
627 Heap::CollectGarbage(CELL_SPACE, | 638 AbortIncrementalMarkingAndCollectGarbage(this, CELL_SPACE, |
628 "failed to reserve space in the cell space"); | 639 "failed to reserve space in the cell space"); |
629 gc_performed = true; | 640 gc_performed = true; |
630 } | 641 } |
631 // We add a slack-factor of 2 in order to have space for a series of | 642 // We add a slack-factor of 2 in order to have space for a series of |
632 // large-object allocations that are only just larger than the page size. | 643 // large-object allocations that are only just larger than the page size. |
633 large_object_size *= 2; | 644 large_object_size *= 2; |
634 // The ReserveSpace method on the large object space checks how much | 645 // The ReserveSpace method on the large object space checks how much |
635 // we can expand the old generation. This includes expansion caused by | 646 // we can expand the old generation. This includes expansion caused by |
636 // allocation in the other spaces. | 647 // allocation in the other spaces. |
637 large_object_size += cell_space_size + map_space_size + code_space_size + | 648 large_object_size += cell_space_size + map_space_size + code_space_size + |
638 data_space_size + pointer_space_size; | 649 data_space_size + pointer_space_size; |
639 if (!(lo_space->ReserveSpace(large_object_size))) { | 650 if (!(lo_space->ReserveSpace(large_object_size))) { |
640 Heap::CollectGarbage(LO_SPACE, | 651 AbortIncrementalMarkingAndCollectGarbage(this, LO_SPACE, |
641 "failed to reserve space in the large object space"); | 652 "failed to reserve space in the large object space"); |
642 gc_performed = true; | 653 gc_performed = true; |
643 } | 654 } |
644 } | 655 } |
645 | 656 |
646 if (gc_performed) { | 657 if (gc_performed) { |
647 // Failed to reserve the space after several attempts. | 658 // Failed to reserve the space after several attempts. |
648 V8::FatalProcessOutOfMemory("Heap::ReserveSpace"); | 659 V8::FatalProcessOutOfMemory("Heap::ReserveSpace"); |
649 } | 660 } |
650 } | 661 } |
651 | 662 |
(...skipping 6289 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6941 isolate_->heap()->store_buffer()->Compact(); | 6952 isolate_->heap()->store_buffer()->Compact(); |
6942 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6953 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6943 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6954 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6944 next = chunk->next_chunk(); | 6955 next = chunk->next_chunk(); |
6945 isolate_->memory_allocator()->Free(chunk); | 6956 isolate_->memory_allocator()->Free(chunk); |
6946 } | 6957 } |
6947 chunks_queued_for_free_ = NULL; | 6958 chunks_queued_for_free_ = NULL; |
6948 } | 6959 } |
6949 | 6960 |
6950 } } // namespace v8::internal | 6961 } } // namespace v8::internal |
OLD | NEW |