Chromium Code Reviews

Unified Diff: src/lithium-allocator.cc

Issue 10534006: Remove TLS access for current Zone. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Address review. Created 8 years, 6 months ago
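
The pattern throughout this patch: helpers that used to fetch the current Zone from thread-local storage now receive an explicit Zone* argument (or read the allocator's cached zone() accessor). A minimal self-contained sketch of the before/after shape, using toy types rather than V8 code; the TLS accessor is assumed for illustration only:

#include <cstddef>
#include <vector>

// Toy arena in the spirit of V8's Zone: objects are placement-new'd into the
// zone and freed in bulk when the zone dies. (This toy does one heap
// allocation per request; the real Zone bump-allocates from larger segments.)
class Zone {
 public:
  ~Zone() {
    for (char* chunk : chunks_) delete[] chunk;
  }
  void* Allocate(size_t size) {
    chunks_.push_back(new char[size]);
    return chunks_.back();
  }
 private:
  std::vector<char*> chunks_;
};

// Zone placement-new, mirroring the new(zone) expressions in this file.
void* operator new(size_t size, Zone* zone) { return zone->Allocate(size); }

struct Node { int id = 0; };

// Before: every helper reached for a thread-local "current zone"
// (assumes something earlier set current_zone for this thread).
thread_local Zone* current_zone = nullptr;
Node* NewNodeViaTls() { return new(current_zone) Node(); }

// After (this CL's pattern): the zone is an explicit parameter, so the
// TLS read disappears and the dependency shows up in the signature.
Node* NewNode(Zone* zone) { return new(zone) Node(); }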
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 212 matching lines...)
   while (pos != NULL && !pos->HasHint()) pos = pos->next();
   return pos;
 }


 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
   LOperand* op = NULL;
   if (HasRegisterAssigned()) {
     ASSERT(!IsSpilled());
     if (IsDouble()) {
-      op = LDoubleRegister::Create(assigned_register());
+      op = LDoubleRegister::Create(assigned_register(), zone);
     } else {
-      op = LRegister::Create(assigned_register());
+      op = LRegister::Create(assigned_register(), zone);
     }
   } else if (IsSpilled()) {
     ASSERT(!HasRegisterAssigned());
     op = TopLevel()->GetSpillOperand();
     ASSERT(!op->IsUnallocated());
   } else {
     LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
     unalloc->set_virtual_register(id_);
     op = unalloc;
   }
(...skipping 280 matching lines...)
       b = b->next();
     }
   }
   return LifetimePosition::Invalid();
 }


 LAllocator::LAllocator(int num_values, HGraph* graph)
     : zone_(graph->zone()),
       chunk_(NULL),
-      live_in_sets_(graph->blocks()->length()),
-      live_ranges_(num_values * 2),
+      live_in_sets_(graph->blocks()->length(), zone_),
+      live_ranges_(num_values * 2, zone_),
       fixed_live_ranges_(NULL),
       fixed_double_live_ranges_(NULL),
-      unhandled_live_ranges_(num_values * 2),
-      active_live_ranges_(8),
-      inactive_live_ranges_(8),
-      reusable_slots_(8),
+      unhandled_live_ranges_(num_values * 2, zone_),
+      active_live_ranges_(8, zone_),
+      inactive_live_ranges_(8, zone_),
+      reusable_slots_(8, zone_),
       next_virtual_register_(num_values),
       first_artificial_register_(num_values),
       mode_(GENERAL_REGISTERS),
       num_registers_(-1),
       graph_(graph),
       has_osr_entry_(false),
       allocation_ok_(true) { }


 void LAllocator::InitializeLivenessAnalysis() {
   // Initialize the live_in sets for each block to NULL.
   int block_count = graph_->blocks()->length();
-  live_in_sets_.Initialize(block_count);
-  live_in_sets_.AddBlock(NULL, block_count);
+  live_in_sets_.Initialize(block_count, zone());
+  live_in_sets_.AddBlock(NULL, block_count, zone());
 }
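
The initializer list above is where the zone gets pinned down: it is read once from graph->zone() and cached in zone_, and later call sites use the zone() accessor. A sketch of the assumed shape (the real declarations live in src/lithium-allocator.h, which is not part of this file's diff):

class Zone;     // stand-in so the sketch is self-contained

class HGraph {
 public:
  Zone* zone() const { return zone_; }
 private:
  Zone* zone_ = nullptr;
};

class LAllocator {
 public:
  explicit LAllocator(HGraph* graph) : zone_(graph->zone()) {}
  // Cached accessor: call sites say zone() instead of consulting TLS.
  Zone* zone() const { return zone_; }
 private:
  Zone* zone_;  // not owned; assumed to outlive the allocator
};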


 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
   // Compute live out for the given block, except not including backward
   // successor edges.
   BitVector* live_out = new(zone_) BitVector(next_virtual_register_, zone_);

   // Process all successor blocks.
   for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
(...skipping 55 matching lines...)
   } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
     int reg_index = operand->fixed_index();
     operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
   } else {
     UNREACHABLE();
   }
   if (is_tagged) {
     TraceAlloc("Fixed reg is tagged at %d\n", pos);
     LInstruction* instr = InstructionAt(pos);
     if (instr->HasPointerMap()) {
-      instr->pointer_map()->RecordPointer(operand);
+      instr->pointer_map()->RecordPointer(operand, zone());
     }
   }
   return operand;
 }


 LiveRange* LAllocator::FixedLiveRangeFor(int index) {
   ASSERT(index < Register::kNumAllocatableRegisters);
   LiveRange* result = fixed_live_ranges_[index];
   if (result == NULL) {
(...skipping 14 matching lines...)
     ASSERT(result->IsFixed());
     result->set_assigned_register(index, DOUBLE_REGISTERS, zone_);
     fixed_double_live_ranges_[index] = result;
   }
   return result;
 }


 LiveRange* LAllocator::LiveRangeFor(int index) {
   if (index >= live_ranges_.length()) {
-    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
+    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
   }
   LiveRange* result = live_ranges_[index];
   if (result == NULL) {
     result = new(zone_) LiveRange(index, zone_);
     live_ranges_[index] = result;
   }
   return result;
 }
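
LiveRangeFor() above grows live_ranges_ via AddBlock(NULL, n, zone()), and the same zone-threading shows up on every ZoneList Add/InsertAt call later in the file: with no ambient "current zone", any operation that might grow the backing store must be told where to allocate. A compact sketch of that contract (assumed semantics for a trivially copyable T, reusing the toy Zone from the first sketch; not V8's actual ZoneList):

template <typename T>
class ZoneList {
 public:
  ZoneList(int capacity, Zone* zone)
      : data_(NewData(capacity, zone)), capacity_(capacity), length_(0) {}

  // Growth takes the zone explicitly; there is no TLS fallback to consult.
  void Add(const T& element, Zone* zone) {
    if (length_ == capacity_) Grow(zone);
    data_[length_++] = element;
  }

  // Append count copies of value -- the AddBlock(NULL, n, zone()) pattern.
  void AddBlock(const T& value, int count, Zone* zone) {
    for (int i = 0; i < count; ++i) Add(value, zone);
  }

  int length() const { return length_; }
  T& operator[](int i) { return data_[i]; }

 private:
  static T* NewData(int n, Zone* zone) {
    return n == 0 ? nullptr : static_cast<T*>(zone->Allocate(n * sizeof(T)));
  }
  void Grow(Zone* zone) {
    int new_capacity = capacity_ == 0 ? 4 : capacity_ * 2;
    T* new_data = NewData(new_capacity, zone);
    for (int i = 0; i < length_; ++i) new_data[i] = data_[i];
    data_ = new_data;  // the old block is simply abandoned to the zone
    capacity_ = new_capacity;
  }

  T* data_;
  int capacity_;
  int length_;
};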


(...skipping 60 matching lines...)
     range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
   }
   range->AddUseInterval(block_start, position, zone_);
 }


 void LAllocator::AddConstraintsGapMove(int index,
                                        LOperand* from,
                                        LOperand* to) {
   LGap* gap = GapAt(index);
-  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
   if (from->IsUnallocated()) {
     const ZoneList<LMoveOperands>* move_operands = move->move_operands();
     for (int i = 0; i < move_operands->length(); ++i) {
       LMoveOperands cur = move_operands->at(i);
       LOperand* cur_to = cur.destination();
       if (cur_to->IsUnallocated()) {
         if (LUnallocated::cast(cur_to)->virtual_register() ==
             LUnallocated::cast(from)->virtual_register()) {
-          move->AddMove(cur.source(), to);
+          move->AddMove(cur.source(), to, zone());
           return;
         }
       }
     }
   }
-  move->AddMove(from, to);
+  move->AddMove(from, to, zone());
 }


 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
   int start = block->first_instruction_index();
   int end = block->last_instruction_index();
   for (int i = start; i <= end; ++i) {
     if (IsGapAt(i)) {
       LInstruction* instr = NULL;
       LInstruction* prev_instr = NULL;
(...skipping 18 matching lines...)
       }
     }
   }

   // Handle fixed output operand.
   if (first != NULL && first->Output() != NULL) {
     LUnallocated* first_output = LUnallocated::cast(first->Output());
     LiveRange* range = LiveRangeFor(first_output->virtual_register());
     bool assigned = false;
     if (first_output->HasFixedPolicy()) {
-      LUnallocated* output_copy = first_output->CopyUnconstrained();
+      LUnallocated* output_copy = first_output->CopyUnconstrained(zone());
       bool is_tagged = HasTaggedValue(first_output->virtual_register());
       AllocateFixed(first_output, gap_index, is_tagged);

       // This value is produced on the stack, we never need to spill it.
       if (first_output->IsStackSlot()) {
         range->SetSpillOperand(first_output);
         range->SetSpillStartIndex(gap_index - 1);
         assigned = true;
       }
       chunk_->AddGapMove(gap_index, first_output, output_copy);
     }

     if (!assigned) {
       range->SetSpillStartIndex(gap_index);

       // This move to spill operand is not a real use. Liveness analysis
       // and splitting of live ranges do not account for it.
       // Thus it should be inserted to a lifetime position corresponding to
       // the instruction end.
       LGap* gap = GapAt(gap_index);
-      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
-      move->AddMove(first_output, range->GetSpillOperand());
+      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone());
+      move->AddMove(first_output, range->GetSpillOperand(), zone());
     }
   }

   // Handle fixed input operands of second instruction.
   if (second != NULL) {
     for (UseIterator it(second); !it.Done(); it.Advance()) {
       LUnallocated* cur_input = LUnallocated::cast(it.Current());
       if (cur_input->HasFixedPolicy()) {
-        LUnallocated* input_copy = cur_input->CopyUnconstrained();
+        LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
         bool is_tagged = HasTaggedValue(cur_input->virtual_register());
         AllocateFixed(cur_input, gap_index + 1, is_tagged);
         AddConstraintsGapMove(gap_index, input_copy, cur_input);
       } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
         // The live range of writable input registers always goes until the end
         // of the instruction.
         ASSERT(!cur_input->IsUsedAtStart());

-        LUnallocated* input_copy = cur_input->CopyUnconstrained();
+        LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
         cur_input->set_virtual_register(GetVirtualRegister());
         if (!AllocationOk()) return;

         if (RequiredRegisterKind(input_copy->virtual_register()) ==
             DOUBLE_REGISTERS) {
           double_artificial_registers_.Add(
               cur_input->virtual_register() - first_artificial_register_,
               zone_);
         }

         AddConstraintsGapMove(gap_index, input_copy, cur_input);
       }
     }
   }

   // Handle "output same as input" for second instruction.
   if (second != NULL && second->Output() != NULL) {
     LUnallocated* second_output = LUnallocated::cast(second->Output());
     if (second_output->HasSameAsInputPolicy()) {
       LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
       int output_vreg = second_output->virtual_register();
       int input_vreg = cur_input->virtual_register();

-      LUnallocated* input_copy = cur_input->CopyUnconstrained();
+      LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
       cur_input->set_virtual_register(second_output->virtual_register());
       AddConstraintsGapMove(gap_index, input_copy, cur_input);

       if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
         int index = gap_index + 1;
         LInstruction* instr = InstructionAt(index);
         if (instr->HasPointerMap()) {
-          instr->pointer_map()->RecordPointer(input_copy);
+          instr->pointer_map()->RecordPointer(input_copy, zone());
         }
       } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
         // The input is assumed to immediately have a tagged representation,
         // before the pointer map can be used. I.e. the pointer map at the
         // instruction will include the output operand (whose value at the
         // beginning of the instruction is equal to the input operand). If
         // this is not desired, then the pointer map at this instruction needs
         // to be adjusted manually.
       }
     }
   }
 }


 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
   int block_start = block->first_instruction_index();
   int index = block->last_instruction_index();

   LifetimePosition block_start_position =
       LifetimePosition::FromInstructionIndex(block_start);

   while (index >= block_start) {
     LifetimePosition curr_position =
         LifetimePosition::FromInstructionIndex(index);

     if (IsGapAt(index)) {
       // We have a gap at this position.
       LGap* gap = GapAt(index);
-      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
       const ZoneList<LMoveOperands>* move_operands = move->move_operands();
       for (int i = 0; i < move_operands->length(); ++i) {
         LMoveOperands* cur = &move_operands->at(i);
         if (cur->IsIgnored()) continue;
         LOperand* from = cur->source();
         LOperand* to = cur->destination();
         HPhi* phi = LookupPhi(to);
         LOperand* hint = to;
         if (phi != NULL) {
           // This is a phi resolving move.
(...skipping 124 matching lines...)
       // can potentially cause a GC so they have a pointer map.
       // By inserting a move we essentially create a copy of a
       // value which is invisible to PopulatePointerMaps(), because we store
       // it into a location different from the operand of a live range
       // covering a branch instruction.
       // Thus we need to manually record a pointer.
       LInstruction* branch =
           InstructionAt(cur_block->last_instruction_index());
       if (branch->HasPointerMap()) {
         if (phi->representation().IsTagged()) {
-          branch->pointer_map()->RecordPointer(phi_operand);
+          branch->pointer_map()->RecordPointer(phi_operand, zone());
         } else if (!phi->representation().IsDouble()) {
-          branch->pointer_map()->RecordUntagged(phi_operand);
+          branch->pointer_map()->RecordUntagged(phi_operand, zone());
         }
       }
     }

     LiveRange* live_range = LiveRangeFor(phi->id());
     LLabel* label = chunk_->GetLabel(phi->block()->block_id());
-    label->GetOrCreateParallelMove(LGap::START)->
-        AddMove(phi_operand, live_range->GetSpillOperand());
+    label->GetOrCreateParallelMove(LGap::START, zone())->
+        AddMove(phi_operand, live_range->GetSpillOperand(), zone());
     live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
   }
 }


 bool LAllocator::Allocate(LChunk* chunk) {
   ASSERT(chunk_ == NULL);
   chunk_ = chunk;
   MeetRegisterConstraints();
   if (!AllocationOk()) return false;
(...skipping 74 matching lines...)
         // Some branch instructions (e.g. loops' back edges)
         // can potentially cause a GC so they have a pointer map.
         // By inserting a move we essentially create a copy of a
         // value which is invisible to PopulatePointerMaps(), because we store
         // it into a location different from the operand of a live range
         // covering a branch instruction.
         // Thus we need to manually record a pointer.
         LInstruction* branch = InstructionAt(pred->last_instruction_index());
         if (branch->HasPointerMap()) {
           if (HasTaggedValue(range->id())) {
-            branch->pointer_map()->RecordPointer(cur_op);
+            branch->pointer_map()->RecordPointer(cur_op, zone());
           } else if (!cur_op->IsDoubleStackSlot() &&
                      !cur_op->IsDoubleRegister()) {
             branch->pointer_map()->RemovePointer(cur_op);
           }
         }
       }
-      gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
+      gap->GetOrCreateParallelMove(
+          LGap::START, zone())->AddMove(pred_op, cur_op, zone());
     }
   }
 }


 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
   int index = pos.InstructionIndex();
   if (IsGapAt(index)) {
     LGap* gap = GapAt(index);
     return gap->GetOrCreateParallelMove(
-        pos.IsInstructionStart() ? LGap::START : LGap::END);
+        pos.IsInstructionStart() ? LGap::START : LGap::END, zone());
   }
   int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
   return GapAt(gap_pos)->GetOrCreateParallelMove(
-      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
+      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, zone());
 }


 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
   LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
   return gap->block();
 }


 void LAllocator::ConnectRanges() {
(...skipping 11 matching lines...)
         // boundary.
         if (first_range->End().Value() == pos.Value()) {
           bool should_insert = true;
           if (IsBlockBoundary(pos)) {
             should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
           }
           if (should_insert) {
             LParallelMove* move = GetConnectingParallelMove(pos);
             LOperand* prev_operand = first_range->CreateAssignedOperand(zone_);
             LOperand* cur_operand = second_range->CreateAssignedOperand(zone_);
-            move->AddMove(prev_operand, cur_operand);
+            move->AddMove(prev_operand, cur_operand, zone());
           }
         }
       }

       first_range = second_range;
       second_range = second_range->next();
     }
   }
 }

(...skipping 44 matching lines...)
   const ZoneList<HPhi*>* phis = block->phis();
   for (int i = 0; i < phis->length(); ++i) {
     // The live range interval already ends at the first instruction of the
     // block.
     HPhi* phi = phis->at(i);
     live->Remove(phi->id());

     LOperand* hint = NULL;
     LOperand* phi_operand = NULL;
     LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
-    LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+    LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
     for (int j = 0; j < move->move_operands()->length(); ++j) {
       LOperand* to = move->move_operands()->at(j).destination();
       if (to->IsUnallocated() &&
           LUnallocated::cast(to)->virtual_register() == phi->id()) {
         hint = move->move_operands()->at(j).source();
         phi_operand = to;
         break;
       }
     }
     ASSERT(hint != NULL);
(...skipping 130 matching lines...)
         cur = cur->next();
       }
       if (cur == NULL) continue;

       // Check if the live range is spilled and the safe point is after
       // the spill position.
       if (range->HasAllocatedSpillOperand() &&
           safe_point >= range->spill_start_index()) {
         TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                    range->id(), range->spill_start_index(), safe_point);
-        map->RecordPointer(range->GetSpillOperand());
+        map->RecordPointer(range->GetSpillOperand(), zone());
       }

       if (!cur->IsSpilled()) {
         TraceAlloc("Pointer in register for range %d (start at %d) "
                    "at safe point %d\n",
                    cur->id(), cur->Start().Value(), safe_point);
         LOperand* operand = cur->CreateAssignedOperand(zone_);
         ASSERT(!operand->IsStackSlot());
-        map->RecordPointer(operand);
+        map->RecordPointer(operand, zone());
       }
     }
   }
 }


 void LAllocator::ProcessOsrEntry() {
   const ZoneList<LInstruction*>* instrs = chunk_->instructions();

   // Linear search for the OSR entry instruction in the chunk.
(...skipping 181 matching lines...)
           virtual_register - first_artificial_register_)) {
     return DOUBLE_REGISTERS;
   }

   return GENERAL_REGISTERS;
 }


 void LAllocator::AddToActive(LiveRange* range) {
   TraceAlloc("Add live range %d to active\n", range->id());
-  active_live_ranges_.Add(range);
+  active_live_ranges_.Add(range, zone());
 }


 void LAllocator::AddToInactive(LiveRange* range) {
   TraceAlloc("Add live range %d to inactive\n", range->id());
-  inactive_live_ranges_.Add(range);
+  inactive_live_ranges_.Add(range, zone());
 }


 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
   if (range == NULL || range->IsEmpty()) return;
   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
   for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
     LiveRange* cur_range = unhandled_live_ranges_.at(i);
     if (range->ShouldBeAllocatedBefore(cur_range)) {
       TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
-      unhandled_live_ranges_.InsertAt(i + 1, range);
+      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
       ASSERT(UnhandledIsSorted());
       return;
     }
   }
   TraceAlloc("Add live range %d to unhandled at start\n", range->id());
-  unhandled_live_ranges_.InsertAt(0, range);
+  unhandled_live_ranges_.InsertAt(0, range, zone());
   ASSERT(UnhandledIsSorted());
 }


 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
   if (range == NULL || range->IsEmpty()) return;
   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
   TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
-  unhandled_live_ranges_.Add(range);
+  unhandled_live_ranges_.Add(range, zone());
 }


 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
   ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
          !(*b)->ShouldBeAllocatedBefore(*a));
   if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
   if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
   return (*a)->id() - (*b)->id();
 }
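
Note the inverted comparator: ShouldBeAllocatedBefore maps to a positive return value, so an ascending sort places the range that should be handled first at the end of unhandled_live_ranges_; the allocator presumably pops work from the back of the list (the relevant call sites fall in the skipped regions).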
(...skipping 20 matching lines...)


 void LAllocator::FreeSpillSlot(LiveRange* range) {
   // Check that we are the last range.
   if (range->next() != NULL) return;

   if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

   int index = range->TopLevel()->GetSpillOperand()->index();
   if (index >= 0) {
-    reusable_slots_.Add(range);
+    reusable_slots_.Add(range, zone());
   }
 }


 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
   if (reusable_slots_.is_empty()) return NULL;
   if (reusable_slots_.first()->End().Value() >
       range->TopLevel()->Start().Value()) {
     return NULL;
   }
   LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
   reusable_slots_.Remove(0);
   return result;
 }


 void LAllocator::ActiveToHandled(LiveRange* range) {
   ASSERT(active_live_ranges_.Contains(range));
   active_live_ranges_.RemoveElement(range);
   TraceAlloc("Moving live range %d from active to handled\n", range->id());
   FreeSpillSlot(range);
 }


 void LAllocator::ActiveToInactive(LiveRange* range) {
   ASSERT(active_live_ranges_.Contains(range));
   active_live_ranges_.RemoveElement(range);
-  inactive_live_ranges_.Add(range);
+  inactive_live_ranges_.Add(range, zone());
   TraceAlloc("Moving live range %d from active to inactive\n", range->id());
 }


 void LAllocator::InactiveToHandled(LiveRange* range) {
   ASSERT(inactive_live_ranges_.Contains(range));
   inactive_live_ranges_.RemoveElement(range);
   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
   FreeSpillSlot(range);
 }


 void LAllocator::InactiveToActive(LiveRange* range) {
   ASSERT(inactive_live_ranges_.Contains(range));
   inactive_live_ranges_.RemoveElement(range);
-  active_live_ranges_.Add(range);
+  active_live_ranges_.Add(range, zone());
   TraceAlloc("Moving live range %d from inactive to active\n", range->id());
 }


 // TryAllocateFreeReg and AllocateBlockedReg assume this
 // when allocating local arrays.
 STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
               Register::kNumAllocatableRegisters);

(...skipping 346 matching lines...)
     LiveRange* current = live_ranges()->at(i);
     if (current != NULL) current->Verify();
   }
 }


 #endif


 } }  // namespace v8::internal