OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/flow_graph_allocator.h" | 5 #include "vm/flow_graph_allocator.h" |
6 | 6 |
7 #include "vm/bit_vector.h" | 7 #include "vm/bit_vector.h" |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 #include "vm/il_printer.h" | 9 #include "vm/il_printer.h" |
10 #include "vm/flow_graph.h" | 10 #include "vm/flow_graph.h" |
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
59 FlowGraphAllocator::FlowGraphAllocator(const FlowGraph& flow_graph) | 59 FlowGraphAllocator::FlowGraphAllocator(const FlowGraph& flow_graph) |
60 : flow_graph_(flow_graph), | 60 : flow_graph_(flow_graph), |
61 block_order_(flow_graph.reverse_postorder()), | 61 block_order_(flow_graph.reverse_postorder()), |
62 postorder_(flow_graph.postorder()), | 62 postorder_(flow_graph.postorder()), |
63 live_out_(block_order_.length()), | 63 live_out_(block_order_.length()), |
64 kill_(block_order_.length()), | 64 kill_(block_order_.length()), |
65 live_in_(block_order_.length()), | 65 live_in_(block_order_.length()), |
66 vreg_count_(flow_graph.max_virtual_register_number()), | 66 vreg_count_(flow_graph.max_virtual_register_number()), |
67 live_ranges_(flow_graph.max_virtual_register_number()), | 67 live_ranges_(flow_graph.max_virtual_register_number()), |
68 cpu_regs_(), | 68 cpu_regs_(), |
69 blocked_cpu_regs_() { | 69 xmm_regs_(), |
| 70 blocked_cpu_registers_(), |
| 71 blocked_xmm_registers_(), |
| 72 cpu_spill_slot_count_(0) { |
70 for (intptr_t i = 0; i < vreg_count_; i++) live_ranges_.Add(NULL); | 73 for (intptr_t i = 0; i < vreg_count_; i++) live_ranges_.Add(NULL); |
71 | 74 |
72 blocked_cpu_regs_[CTX] = true; | 75 blocked_cpu_registers_[CTX] = true; |
73 if (TMP != kNoRegister) { | 76 if (TMP != kNoRegister) { |
74 blocked_cpu_regs_[TMP] = true; | 77 blocked_cpu_registers_[TMP] = true; |
75 } | 78 } |
76 blocked_cpu_regs_[SPREG] = true; | 79 blocked_cpu_registers_[SPREG] = true; |
77 blocked_cpu_regs_[FPREG] = true; | 80 blocked_cpu_registers_[FPREG] = true; |
| 81 |
| 82 // XMM0 is used as scratch by optimized code and parallel move resolver. |
| 83 blocked_xmm_registers_[XMM0] = true; |
78 } | 84 } |
79 | 85 |
80 | 86 |
81 // Remove environments from the instructions which can't deoptimize. | 87 // Remove environments from the instructions which can't deoptimize. |
82 // Replace dead phi uses with null values in environments. | 88 // Replace dead phi uses with null values in environments. |
83 void FlowGraphAllocator::EliminateEnvironmentUses() { | 89 void FlowGraphAllocator::EliminateEnvironmentUses() { |
84 ConstantVal* null_value = new ConstantVal(Object::ZoneHandle()); | 90 ConstantVal* null_value = new ConstantVal(Object::ZoneHandle()); |
85 | 91 |
86 for (intptr_t i = 0; i < block_order_.length(); ++i) { | 92 for (intptr_t i = 0; i < block_order_.length(); ++i) { |
87 BlockEntryInstr* block = block_order_[i]; | 93 BlockEntryInstr* block = block_order_[i]; |
(...skipping 200 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
288 OS::Print("\n"); | 294 OS::Print("\n"); |
289 | 295 |
290 PrintBitVector(" live out", live_out_[i]); | 296 PrintBitVector(" live out", live_out_[i]); |
291 PrintBitVector(" kill", kill_[i]); | 297 PrintBitVector(" kill", kill_[i]); |
292 PrintBitVector(" live in", live_in_[i]); | 298 PrintBitVector(" live in", live_in_[i]); |
293 } | 299 } |
294 } | 300 } |
295 | 301 |
296 | 302 |
297 void LiveRange::AddUse(intptr_t pos, Location* location_slot) { | 303 void LiveRange::AddUse(intptr_t pos, Location* location_slot) { |
| 304 ASSERT(location_slot != NULL); |
298 ASSERT((first_use_interval_->start_ <= pos) && | 305 ASSERT((first_use_interval_->start_ <= pos) && |
299 (pos <= first_use_interval_->end_)); | 306 (pos <= first_use_interval_->end_)); |
300 if ((uses_ != NULL) && (uses_->pos() == pos)) { | 307 if ((uses_ != NULL) && |
301 if ((location_slot == NULL) || (uses_->location_slot() == location_slot)) { | 308 (uses_->pos() == pos) && |
302 return; | 309 (uses_->location_slot() == location_slot)) { |
303 } else if (uses_->location_slot() == NULL) { | 310 return; |
304 uses_->set_location_slot(location_slot); | |
305 return; | |
306 } | |
307 } | 311 } |
308 uses_ = new UsePosition(pos, uses_, location_slot); | 312 uses_ = new UsePosition(pos, uses_, location_slot); |
309 } | 313 } |
310 | 314 |
311 | 315 |
312 void LiveRange::AddSafepoint(intptr_t pos, LocationSummary* locs) { | 316 void LiveRange::AddSafepoint(intptr_t pos, LocationSummary* locs) { |
313 SafepointPosition* safepoint = new SafepointPosition(pos, locs); | 317 SafepointPosition* safepoint = new SafepointPosition(pos, locs); |
314 | 318 |
315 if (first_safepoint_ == NULL) { | 319 if (first_safepoint_ == NULL) { |
316 ASSERT(last_safepoint_ == NULL); | 320 ASSERT(last_safepoint_ == NULL); |
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
399 | 403 |
400 LiveRange* FlowGraphAllocator::MakeLiveRangeForTemporary() { | 404 LiveRange* FlowGraphAllocator::MakeLiveRangeForTemporary() { |
401 LiveRange* range = new LiveRange(kTempVirtualRegister); | 405 LiveRange* range = new LiveRange(kTempVirtualRegister); |
402 #if defined(DEBUG) | 406 #if defined(DEBUG) |
403 temporaries_.Add(range); | 407 temporaries_.Add(range); |
404 #endif | 408 #endif |
405 return range; | 409 return range; |
406 } | 410 } |
407 | 411 |
408 | 412 |
| 413 void FlowGraphAllocator::BlockRegisterLocation(Location loc, |
| 414 intptr_t from, |
| 415 intptr_t to, |
| 416 bool* blocked_registers, |
| 417 LiveRange** blocking_ranges) { |
| 418 if (blocked_registers[loc.register_code()]) { |
| 419 return; |
| 420 } |
| 421 |
| 422 if (blocking_ranges[loc.register_code()] == NULL) { |
| 423 LiveRange* range = new LiveRange(kNoVirtualRegister); |
| 424 blocking_ranges[loc.register_code()] = range; |
| 425 range->set_assigned_location(loc); |
| 426 #if defined(DEBUG) |
| 427 temporaries_.Add(range); |
| 428 #endif |
| 429 } |
| 430 |
| 431 blocking_ranges[loc.register_code()]->AddUseInterval(from, to); |
| 432 } |
| 433 |
| 434 |
409 // Block location from the start of the instruction to its end. | 435 // Block location from the start of the instruction to its end. |
410 void FlowGraphAllocator::BlockLocation(Location loc, | 436 void FlowGraphAllocator::BlockLocation(Location loc, |
411 intptr_t from, | 437 intptr_t from, |
412 intptr_t to) { | 438 intptr_t to) { |
413 ASSERT(loc.IsRegister()); | 439 if (loc.IsRegister()) { |
414 const Register reg = loc.reg(); | 440 BlockRegisterLocation(loc, from, to, blocked_cpu_registers_, cpu_regs_); |
415 if (blocked_cpu_regs_[reg]) return; | 441 } else if (loc.IsXmmRegister()) { |
416 if (cpu_regs_[reg].length() == 0) { | 442 BlockRegisterLocation(loc, from, to, blocked_xmm_registers_, xmm_regs_); |
417 LiveRange* range = new LiveRange(kNoVirtualRegister); | 443 } else { |
418 cpu_regs_[reg].Add(range); | 444 UNREACHABLE(); |
419 range->set_assigned_location(loc); | |
420 #if defined(DEBUG) | |
421 temporaries_.Add(range); | |
422 #endif | |
423 } | 445 } |
424 cpu_regs_[reg][0]->AddUseInterval(from, to); | |
425 } | 446 } |
426 | 447 |
427 | 448 |
428 void LiveRange::Print() { | 449 void LiveRange::Print() { |
429 if (first_use_interval() == NULL) { | 450 if (first_use_interval() == NULL) { |
430 return; | 451 return; |
431 } | 452 } |
432 | 453 |
433 OS::Print(" live range v%d [%d, %d) in ", vreg(), Start(), End()); | 454 OS::Print(" live range v%d [%d, %d) in ", vreg(), Start(), End()); |
434 assigned_location().Print(); | 455 assigned_location().Print(); |
(...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
533 | 554 |
534 AssignSafepoints(range); | 555 AssignSafepoints(range); |
535 | 556 |
536 range->finger()->Initialize(range); | 557 range->finger()->Initialize(range); |
537 UsePosition* use = range->finger()->FirstRegisterBeneficialUse( | 558 UsePosition* use = range->finger()->FirstRegisterBeneficialUse( |
538 graph_entry->start_pos()); | 559 graph_entry->start_pos()); |
539 if (use != NULL) { | 560 if (use != NULL) { |
540 LiveRange* tail = SplitBetween(range, | 561 LiveRange* tail = SplitBetween(range, |
541 graph_entry->start_pos(), | 562 graph_entry->start_pos(), |
542 use->pos()); | 563 use->pos()); |
543 AddToUnallocated(tail); | 564 |
| 565 // All incoming parameters are tagged. |
| 566 CompleteRange(tail, Location::kRegister); |
544 } | 567 } |
545 ConvertAllUses(range); | 568 ConvertAllUses(range); |
546 if (flow_graph_.copied_parameter_count() > 0) { | 569 if (flow_graph_.copied_parameter_count() > 0) { |
547 MarkAsObjectAtSafepoints(range); | 570 MarkAsObjectAtSafepoints(range); |
548 } | 571 } |
549 } | 572 } |
550 } | 573 } |
551 } | 574 } |
552 | 575 |
553 // | 576 // |
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
668 ASSERT((goto_instr != NULL) && (goto_instr->HasParallelMove())); | 691 ASSERT((goto_instr != NULL) && (goto_instr->HasParallelMove())); |
669 MoveOperands* move = | 692 MoveOperands* move = |
670 goto_instr->parallel_move()->MoveOperandsAt(move_idx); | 693 goto_instr->parallel_move()->MoveOperandsAt(move_idx); |
671 move->set_dest(Location::PrefersRegister()); | 694 move->set_dest(Location::PrefersRegister()); |
672 range->AddUse(pos, move->dest_slot()); | 695 range->AddUse(pos, move->dest_slot()); |
673 } | 696 } |
674 | 697 |
675 // All phi resolution moves are connected. Phi's live range is | 698 // All phi resolution moves are connected. Phi's live range is |
676 // complete. | 699 // complete. |
677 AssignSafepoints(range); | 700 AssignSafepoints(range); |
678 AddToUnallocated(range); | 701 |
| 702 // TODO(vegorov): unboxed double phis. |
| 703 CompleteRange(range, Location::kRegister); |
679 | 704 |
680 move_idx++; | 705 move_idx++; |
681 } | 706 } |
682 } | 707 } |
683 } | 708 } |
684 | 709 |
685 | 710 |
686 void FlowGraphAllocator::ProcessEnvironmentUses(BlockEntryInstr* block, | 711 void FlowGraphAllocator::ProcessEnvironmentUses(BlockEntryInstr* block, |
687 Instruction* current) { | 712 Instruction* current) { |
688 ASSERT(current->env() != NULL); | 713 ASSERT(current->env() != NULL); |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
725 } else { | 750 } else { |
726 ASSERT(value->IsConstant()); | 751 ASSERT(value->IsConstant()); |
727 locations[i] = Location::NoLocation(); | 752 locations[i] = Location::NoLocation(); |
728 } | 753 } |
729 } | 754 } |
730 | 755 |
731 env->set_locations(locations); | 756 env->set_locations(locations); |
732 } | 757 } |
733 | 758 |
734 | 759 |
| 760 static Location::Kind RegisterKindFromPolicy(Location loc) { |
| 761 if (loc.policy() == Location::kRequiresXmmRegister) { |
| 762 return Location::kXmmRegister; |
| 763 } else { |
| 764 return Location::kRegister; |
| 765 } |
| 766 } |
| 767 |
| 768 |
| 769 static Location::Kind RegisterKindForResult(Instruction* instr) { |
| 770 if (instr->representation() == kUnboxedDouble) { |
| 771 return Location::kXmmRegister; |
| 772 } else { |
| 773 return Location::kRegister; |
| 774 } |
| 775 } |
| 776 |
| 777 |
735 // Create and update live ranges corresponding to instruction's inputs, | 778 // Create and update live ranges corresponding to instruction's inputs, |
736 // temporaries and output. | 779 // temporaries and output. |
737 void FlowGraphAllocator::ProcessOneInstruction(BlockEntryInstr* block, | 780 void FlowGraphAllocator::ProcessOneInstruction(BlockEntryInstr* block, |
738 Instruction* current) { | 781 Instruction* current) { |
739 const intptr_t pos = current->lifetime_position(); | 782 const intptr_t pos = current->lifetime_position(); |
740 ASSERT(IsInstructionStartPosition(pos)); | 783 ASSERT(IsInstructionStartPosition(pos)); |
741 | 784 |
742 LocationSummary* locs = current->locs(); | 785 LocationSummary* locs = current->locs(); |
743 | 786 |
744 // Number of input locations and number of input operands have to agree. | 787 // Number of input locations and number of input operands have to agree. |
745 ASSERT(locs->input_count() == current->InputCount()); | 788 ASSERT(locs->input_count() == current->InputCount()); |
746 | 789 |
747 // Normalize same-as-first-input output if input is specified as | 790 // Normalize same-as-first-input output if input is specified as |
748 // fixed register. | 791 // fixed register. |
749 if (locs->out().IsUnallocated() && | 792 if (locs->out().IsUnallocated() && |
750 (locs->out().policy() == Location::kSameAsFirstInput) && | 793 (locs->out().policy() == Location::kSameAsFirstInput) && |
751 (locs->in(0).IsRegister())) { | 794 (locs->in(0).IsMachineRegister())) { |
752 locs->set_out(locs->in(0)); | 795 locs->set_out(locs->in(0)); |
753 } | 796 } |
754 | 797 |
755 const bool output_same_as_first_input = | 798 const bool output_same_as_first_input = |
756 locs->out().IsUnallocated() && | 799 locs->out().IsUnallocated() && |
757 (locs->out().policy() == Location::kSameAsFirstInput); | 800 (locs->out().policy() == Location::kSameAsFirstInput); |
758 | 801 |
759 // Add uses from the deoptimization environment. | 802 // Add uses from the deoptimization environment. |
760 if (current->env() != NULL) ProcessEnvironmentUses(block, current); | 803 if (current->env() != NULL) ProcessEnvironmentUses(block, current); |
761 | 804 |
762 // Process inputs. | 805 // Process inputs. |
763 // Skip the first input if output is specified with kSameAsFirstInput policy, | 806 // Skip the first input if output is specified with kSameAsFirstInput policy, |
764 // they will be processed together at the very end. | 807 // they will be processed together at the very end. |
765 for (intptr_t j = output_same_as_first_input ? 1 : 0; | 808 for (intptr_t j = output_same_as_first_input ? 1 : 0; |
766 j < current->InputCount(); | 809 j < current->InputCount(); |
767 j++) { | 810 j++) { |
768 Value* input = current->InputAt(j); | 811 Value* input = current->InputAt(j); |
769 ASSERT(input->IsUse()); // Can not be a constant currently. | 812 ASSERT(input->IsUse()); // Can not be a constant currently. |
770 const intptr_t vreg = input->AsUse()->definition()->ssa_temp_index(); | 813 const intptr_t vreg = input->AsUse()->definition()->ssa_temp_index(); |
771 LiveRange* range = GetLiveRange(vreg); | 814 LiveRange* range = GetLiveRange(vreg); |
772 | 815 |
773 Location* in_ref = locs->in_slot(j); | 816 Location* in_ref = locs->in_slot(j); |
774 | 817 |
775 if (in_ref->IsRegister()) { | 818 if (in_ref->IsMachineRegister()) { |
776 // Input is expected in a fixed register. Expected shape of | 819 // Input is expected in a fixed register. Expected shape of |
777 // live ranges: | 820 // live ranges: |
778 // | 821 // |
779 // j' i i' | 822 // j' i i' |
780 // value --* | 823 // value --* |
781 // register [-----) | 824 // register [-----) |
782 // | 825 // |
783 MoveOperands* move = | 826 MoveOperands* move = |
784 AddMoveAt(pos - 1, *in_ref, Location::Any()); | 827 AddMoveAt(pos - 1, *in_ref, Location::Any()); |
785 BlockLocation(*in_ref, pos - 1, pos + 1); | 828 BlockLocation(*in_ref, pos - 1, pos + 1); |
(...skipping 14 matching lines...) Expand all Loading... |
800 | 843 |
801 // Process temps. | 844 // Process temps. |
802 for (intptr_t j = 0; j < locs->temp_count(); j++) { | 845 for (intptr_t j = 0; j < locs->temp_count(); j++) { |
803 // Expected shape of live range: | 846 // Expected shape of live range: |
804 // | 847 // |
805 // i i' | 848 // i i' |
806 // [--) | 849 // [--) |
807 // | 850 // |
808 | 851 |
809 Location temp = locs->temp(j); | 852 Location temp = locs->temp(j); |
810 if (temp.IsRegister()) { | 853 if (temp.IsMachineRegister()) { |
811 BlockLocation(temp, pos, pos + 1); | 854 BlockLocation(temp, pos, pos + 1); |
812 } else if (temp.IsUnallocated()) { | 855 } else if (temp.IsUnallocated()) { |
813 LiveRange* range = MakeLiveRangeForTemporary(); | 856 LiveRange* range = MakeLiveRangeForTemporary(); |
814 range->AddUseInterval(pos, pos + 1); | 857 range->AddUseInterval(pos, pos + 1); |
815 range->AddUse(pos, locs->temp_slot(j)); | 858 range->AddUse(pos, locs->temp_slot(j)); |
816 AddToUnallocated(range); | 859 CompleteRange(range, RegisterKindFromPolicy(temp)); |
817 } else { | 860 } else { |
818 UNREACHABLE(); | 861 UNREACHABLE(); |
819 } | 862 } |
820 } | 863 } |
821 | 864 |
822 // Block all allocatable registers for calls and record the stack bitmap. | 865 // Block all allocatable registers for calls and record the stack bitmap. |
823 if (locs->always_calls()) { | 866 if (locs->always_calls()) { |
824 // Expected shape of live range: | 867 // Expected shape of live range: |
825 // | 868 // |
826 // i i' | 869 // i i' |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
865 } | 908 } |
866 | 909 |
867 // We might have a definition without use. We do not assign SSA index to | 910 // We might have a definition without use. We do not assign SSA index to |
868 // such definitions. | 911 // such definitions. |
869 LiveRange* range = (def->ssa_temp_index() >= 0) ? | 912 LiveRange* range = (def->ssa_temp_index() >= 0) ? |
870 GetLiveRange(def->ssa_temp_index()) : | 913 GetLiveRange(def->ssa_temp_index()) : |
871 MakeLiveRangeForTemporary(); | 914 MakeLiveRangeForTemporary(); |
872 Location* out = locs->out_slot(); | 915 Location* out = locs->out_slot(); |
873 | 916 |
874 // Process output and finalize its liverange. | 917 // Process output and finalize its liverange. |
875 if (out->IsRegister()) { | 918 if (out->IsMachineRegister()) { |
876 // Fixed output location. Expected shape of live range: | 919 // Fixed output location. Expected shape of live range: |
877 // | 920 // |
878 // i i' j j' | 921 // i i' j j' |
879 // register [--) | 922 // register [--) |
880 // output [------- | 923 // output [------- |
881 // | 924 // |
882 BlockLocation(*out, pos, pos + 1); | 925 BlockLocation(*out, pos, pos + 1); |
883 | 926 |
884 if (range->vreg() == kTempVirtualRegister) return; | 927 if (range->vreg() == kTempVirtualRegister) return; |
885 | 928 |
(...skipping 22 matching lines...) Expand all Loading... |
908 MoveOperands* move = AddMoveAt(pos + 1, Location::Any(), *out); | 951 MoveOperands* move = AddMoveAt(pos + 1, Location::Any(), *out); |
909 range->AddHintedUse(pos + 1, move->dest_slot(), out); | 952 range->AddHintedUse(pos + 1, move->dest_slot(), out); |
910 } else if (output_same_as_first_input) { | 953 } else if (output_same_as_first_input) { |
911 // Output register will contain a value of the first input at instruction's | 954 // Output register will contain a value of the first input at instruction's |
912 // start. Expected shape of live ranges: | 955 // start. Expected shape of live ranges: |
913 // | 956 // |
914 // i i' | 957 // i i' |
915 // input #0 --* | 958 // input #0 --* |
916 // output [---- | 959 // output [---- |
917 // | 960 // |
918 ASSERT(locs->in_slot(0)->Equals(Location::RequiresRegister())); | 961 ASSERT(locs->in(0).Equals(Location::RequiresRegister()) || |
| 962 locs->in(0).Equals(Location::RequiresXmmRegister())); |
919 | 963 |
920 // Create move that will copy value between input and output. | 964 // Create move that will copy value between input and output. |
921 locs->set_out(Location::RequiresRegister()); | 965 locs->set_out(Location::RequiresRegister()); |
922 MoveOperands* move = AddMoveAt(pos, | 966 MoveOperands* move = AddMoveAt(pos, |
923 Location::RequiresRegister(), | 967 Location::RequiresRegister(), |
924 Location::Any()); | 968 Location::Any()); |
925 | 969 |
926 // Add uses to the live range of the input. | 970 // Add uses to the live range of the input. |
927 Value* input = current->InputAt(0); | 971 Value* input = current->InputAt(0); |
928 ASSERT(input->IsUse()); // Can not be a constant currently. | 972 ASSERT(input->IsUse()); // Can not be a constant currently. |
929 LiveRange* input_range = GetLiveRange( | 973 LiveRange* input_range = GetLiveRange( |
930 input->AsUse()->definition()->ssa_temp_index()); | 974 input->AsUse()->definition()->ssa_temp_index()); |
931 input_range->AddUseInterval(block->start_pos(), pos); | 975 input_range->AddUseInterval(block->start_pos(), pos); |
932 input_range->AddUse(pos, move->src_slot()); | 976 input_range->AddUse(pos, move->src_slot()); |
933 | 977 |
934 // Shorten output live range to the point of definition and add both input | 978 // Shorten output live range to the point of definition and add both input |
935 // and output uses slots to be filled by allocator. | 979 // and output uses slots to be filled by allocator. |
936 range->DefineAt(pos); | 980 range->DefineAt(pos); |
937 range->AddUse(pos, out); | 981 range->AddHintedUse(pos, out, move->src_slot()); |
938 range->AddUse(pos, move->dest_slot()); | 982 range->AddUse(pos, move->dest_slot()); |
939 range->AddUse(pos, locs->in_slot(0)); | 983 range->AddUse(pos, locs->in_slot(0)); |
940 } else { | 984 } else { |
941 // Normal unallocated location that requires a register. Expected shape of | 985 // Normal unallocated location that requires a register. Expected shape of |
942 // live range: | 986 // live range: |
943 // | 987 // |
944 // i i' | 988 // i i' |
945 // output [------- | 989 // output [------- |
946 // | 990 // |
947 ASSERT(out->IsUnallocated() && | 991 ASSERT(locs->out().Equals(Location::RequiresRegister()) || |
948 (out->policy() == Location::kRequiresRegister)); | 992 locs->out().Equals(Location::RequiresXmmRegister())); |
949 | 993 |
950 // Shorten live range to the point of definition and add use to be filled by | 994 // Shorten live range to the point of definition and add use to be filled by |
951 // allocator. | 995 // allocator. |
952 range->DefineAt(pos); | 996 range->DefineAt(pos); |
953 range->AddUse(pos, out); | 997 range->AddUse(pos, out); |
954 } | 998 } |
955 | 999 |
956 AssignSafepoints(range); | 1000 AssignSafepoints(range); |
957 AddToUnallocated(range); | 1001 CompleteRange(range, RegisterKindForResult(current)); |
958 } | 1002 } |
959 | 1003 |
960 | 1004 |
961 static ParallelMoveInstr* CreateParallelMoveBefore(Instruction* instr, | 1005 static ParallelMoveInstr* CreateParallelMoveBefore(Instruction* instr, |
962 intptr_t pos) { | 1006 intptr_t pos) { |
963 ASSERT(pos > 0); | 1007 ASSERT(pos > 0); |
964 Instruction* prev = instr->previous(); | 1008 Instruction* prev = instr->previous(); |
965 ParallelMoveInstr* move = prev->AsParallelMove(); | 1009 ParallelMoveInstr* move = prev->AsParallelMove(); |
966 if ((move == NULL) || (move->lifetime_position() != pos)) { | 1010 if ((move == NULL) || (move->lifetime_position() != pos)) { |
967 move = new ParallelMoveInstr(); | 1011 move = new ParallelMoveInstr(); |
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1142 } | 1186 } |
1143 return use; | 1187 return use; |
1144 } | 1188 } |
1145 | 1189 |
1146 | 1190 |
1147 UsePosition* AllocationFinger::FirstRegisterUse(intptr_t after) { | 1191 UsePosition* AllocationFinger::FirstRegisterUse(intptr_t after) { |
1148 for (UsePosition* use = FirstUseAfter(first_register_use_, after); | 1192 for (UsePosition* use = FirstUseAfter(first_register_use_, after); |
1149 use != NULL; | 1193 use != NULL; |
1150 use = use->next()) { | 1194 use = use->next()) { |
1151 Location* loc = use->location_slot(); | 1195 Location* loc = use->location_slot(); |
1152 if ((loc != NULL) && | 1196 if (loc->IsUnallocated() && |
1153 loc->IsUnallocated() && | |
1154 (loc->policy() == Location::kRequiresRegister)) { | 1197 (loc->policy() == Location::kRequiresRegister)) { |
1155 first_register_use_ = use; | 1198 first_register_use_ = use; |
1156 return use; | 1199 return use; |
1157 } | 1200 } |
1158 } | 1201 } |
1159 return NULL; | 1202 return NULL; |
1160 } | 1203 } |
1161 | 1204 |
1162 | 1205 |
1163 UsePosition* AllocationFinger::FirstRegisterBeneficialUse(intptr_t after) { | 1206 UsePosition* AllocationFinger::FirstRegisterBeneficialUse(intptr_t after) { |
1164 for (UsePosition* use = FirstUseAfter(first_register_beneficial_use_, after); | 1207 for (UsePosition* use = FirstUseAfter(first_register_beneficial_use_, after); |
1165 use != NULL; | 1208 use != NULL; |
1166 use = use->next()) { | 1209 use = use->next()) { |
1167 Location* loc = use->location_slot(); | 1210 Location* loc = use->location_slot(); |
1168 if ((loc != NULL) && | 1211 if (loc->IsUnallocated() && loc->IsRegisterBeneficial()) { |
1169 (loc->IsRegister() || | |
1170 (loc->IsUnallocated() && loc->IsRegisterBeneficial()))) { | |
1171 first_register_beneficial_use_ = use; | 1212 first_register_beneficial_use_ = use; |
1172 return use; | 1213 return use; |
1173 } | 1214 } |
1174 } | 1215 } |
1175 return NULL; | 1216 return NULL; |
1176 } | 1217 } |
1177 | 1218 |
1178 | 1219 |
1179 void AllocationFinger::UpdateAfterSplit(intptr_t first_use_after_split_pos) { | 1220 void AllocationFinger::UpdateAfterSplit(intptr_t first_use_after_split_pos) { |
1180 if ((first_register_use_ != NULL) && | 1221 if ((first_register_use_ != NULL) && |
(...skipping 208 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1389 | 1430 |
1390 if (idx == spill_slots_.length()) spill_slots_.Add(0); | 1431 if (idx == spill_slots_.length()) spill_slots_.Add(0); |
1391 | 1432 |
1392 LiveRange* last_sibling = range; | 1433 LiveRange* last_sibling = range; |
1393 while (last_sibling->next_sibling() != NULL) { | 1434 while (last_sibling->next_sibling() != NULL) { |
1394 last_sibling = last_sibling->next_sibling(); | 1435 last_sibling = last_sibling->next_sibling(); |
1395 } | 1436 } |
1396 | 1437 |
1397 spill_slots_[idx] = last_sibling->End(); | 1438 spill_slots_[idx] = last_sibling->End(); |
1398 | 1439 |
1399 range->set_spill_slot(Location::StackSlot(idx)); | 1440 if (register_kind_ == Location::kRegister) { |
| 1441 range->set_spill_slot(Location::StackSlot(idx)); |
| 1442 } else { |
| 1443 range->set_spill_slot( |
| 1444 Location::DoubleStackSlot(cpu_spill_slot_count_ + idx * 2)); |
| 1445 } |
1400 | 1446 |
1401 spilled_.Add(range); | 1447 spilled_.Add(range); |
1402 } | 1448 } |
1403 | 1449 |
1404 | 1450 |
1405 void FlowGraphAllocator::MarkAsObjectAtSafepoints(LiveRange* range) { | 1451 void FlowGraphAllocator::MarkAsObjectAtSafepoints(LiveRange* range) { |
1406 intptr_t stack_index = range->spill_slot().stack_index(); | 1452 intptr_t stack_index = range->spill_slot().stack_index(); |
1407 ASSERT(stack_index >= 0); | 1453 ASSERT(stack_index >= 0); |
1408 | 1454 |
1409 while (range != NULL) { | 1455 while (range != NULL) { |
1410 for (SafepointPosition* safepoint = range->first_safepoint(); | 1456 for (SafepointPosition* safepoint = range->first_safepoint(); |
1411 safepoint != NULL; | 1457 safepoint != NULL; |
1412 safepoint = safepoint->next()) { | 1458 safepoint = safepoint->next()) { |
1413 safepoint->locs()->stack_bitmap()->Set(stack_index, true); | 1459 safepoint->locs()->stack_bitmap()->Set(stack_index, true); |
1414 } | 1460 } |
1415 range = range->next_sibling(); | 1461 range = range->next_sibling(); |
1416 } | 1462 } |
1417 } | 1463 } |
1418 | 1464 |
1419 | 1465 |
1420 void FlowGraphAllocator::Spill(LiveRange* range) { | 1466 void FlowGraphAllocator::Spill(LiveRange* range) { |
1421 LiveRange* parent = GetLiveRange(range->vreg()); | 1467 LiveRange* parent = GetLiveRange(range->vreg()); |
1422 if (parent->spill_slot().IsInvalid()) { | 1468 if (parent->spill_slot().IsInvalid()) { |
1423 AllocateSpillSlotFor(parent); | 1469 AllocateSpillSlotFor(parent); |
1424 MarkAsObjectAtSafepoints(parent); | 1470 if (register_kind_ == Location::kRegister) { |
| 1471 MarkAsObjectAtSafepoints(parent); |
| 1472 } |
1425 } | 1473 } |
1426 range->set_assigned_location(parent->spill_slot()); | 1474 range->set_assigned_location(parent->spill_slot()); |
1427 ConvertAllUses(range); | 1475 ConvertAllUses(range); |
1428 } | 1476 } |
1429 | 1477 |
1430 | 1478 |
1431 intptr_t FlowGraphAllocator::FirstIntersectionWithAllocated( | 1479 intptr_t FlowGraphAllocator::FirstIntersectionWithAllocated( |
1432 Register reg, LiveRange* unallocated) { | 1480 intptr_t reg, LiveRange* unallocated) { |
1433 intptr_t intersection = kMaxPosition; | 1481 intptr_t intersection = kMaxPosition; |
1434 for (intptr_t i = 0; i < cpu_regs_[reg].length(); i++) { | 1482 for (intptr_t i = 0; i < registers_[reg].length(); i++) { |
1435 LiveRange* allocated = cpu_regs_[reg][i]; | 1483 LiveRange* allocated = registers_[reg][i]; |
1436 if (allocated == NULL) continue; | 1484 if (allocated == NULL) continue; |
1437 | 1485 |
1438 UseInterval* allocated_head = | 1486 UseInterval* allocated_head = |
1439 allocated->finger()->first_pending_use_interval(); | 1487 allocated->finger()->first_pending_use_interval(); |
1440 if (allocated_head->start() >= intersection) continue; | 1488 if (allocated_head->start() >= intersection) continue; |
1441 | 1489 |
1442 const intptr_t pos = FirstIntersection( | 1490 const intptr_t pos = FirstIntersection( |
1443 unallocated->finger()->first_pending_use_interval(), | 1491 unallocated->finger()->first_pending_use_interval(), |
1444 allocated_head); | 1492 allocated_head); |
1445 if (pos < intersection) intersection = pos; | 1493 if (pos < intersection) intersection = pos; |
1446 } | 1494 } |
1447 return intersection; | 1495 return intersection; |
1448 } | 1496 } |
1449 | 1497 |
1450 | 1498 |
1451 | |
// Try to assign a register that is entirely free (or free long enough) to
// |unallocated| without evicting anybody.  Returns false if every register
// is blocked at the range's start; otherwise assigns a register, splitting
// |unallocated| at the first point of interference if needed, and returns
// true.
bool FlowGraphAllocator::AllocateFreeRegister(LiveRange* unallocated) {
  intptr_t candidate = kNoRegister;
  // Position up to which |candidate| stays free; 0 means "no candidate yet".
  intptr_t free_until = 0;

  // If hint is available try hint first.
  // TODO(vegorov): ensure that phis are hinted on the back edge.
  Location hint = unallocated->finger()->FirstHint();
  if (hint.IsMachineRegister()) {
    if (!blocked_registers_[hint.register_code()]) {
      free_until = FirstIntersectionWithAllocated(hint.register_code(),
                                                  unallocated);
      candidate = hint.register_code();
    }

    TRACE_ALLOC(OS::Print("found hint "));
    TRACE_ALLOC(hint.Print());
    TRACE_ALLOC(OS::Print(" for %d: free until %d\n",
                          unallocated->vreg(), free_until));
  } else if (free_until != kMaxPosition) {
    // No hint: prefer a register with no active ranges at all, which is
    // trivially free forever.
    for (intptr_t reg = 0; reg < NumberOfRegisters(); ++reg) {
      if (!blocked_registers_[reg] && (registers_[reg].length() == 0)) {
        candidate = reg;
        free_until = kMaxPosition;
        break;
      }
    }
  }

  // Sanity: positions are non-negative, so kMaxPosition is a usable bound.
  ASSERT(0 <= kMaxPosition);
  if (free_until != kMaxPosition) {
    // Scan remaining registers for the one that stays free the longest
    // (first intersection with its already-allocated ranges is furthest).
    for (intptr_t reg = 0; reg < NumberOfRegisters(); ++reg) {
      if (blocked_registers_[reg] || (reg == candidate)) continue;
      const intptr_t intersection =
          FirstIntersectionWithAllocated(reg, unallocated);
      if (intersection > free_until) {
        candidate = reg;
        free_until = intersection;
        if (free_until == kMaxPosition) break;
      }
    }
  }

  // All registers are blocked by active ranges.
  if (free_until <= unallocated->Start()) return false;

  TRACE_ALLOC(OS::Print("assigning free register "));
  TRACE_ALLOC(MakeRegisterLocation(candidate).Print());
  TRACE_ALLOC(OS::Print(" to %d\n", unallocated->vreg()));

  if (free_until != kMaxPosition) {
    // There was an intersection. Split unallocated.
    TRACE_ALLOC(OS::Print("  splitting at %d\n", free_until));
    LiveRange* tail = unallocated->SplitAt(free_until);
    AddToUnallocated(tail);
  }

  registers_[candidate].Add(unallocated);
  unallocated->set_assigned_location(MakeRegisterLocation(candidate));

  return true;
}
1513 | 1560 |
1514 | 1561 |
// Allocate a register to |unallocated| even if doing so requires evicting
// or splitting ranges that currently occupy registers.  If the range has no
// use that requires a register it is simply spilled.
void FlowGraphAllocator::AllocateAnyRegister(LiveRange* unallocated) {
  UsePosition* register_use =
      unallocated->finger()->FirstRegisterUse(unallocated->Start());
  if (register_use == NULL) {
    // No register uses at all: keep the value in its spill slot.
    Spill(unallocated);
    return;
  }

  intptr_t candidate = kNoRegister;
  intptr_t free_until = 0;
  intptr_t blocked_at = kMaxPosition;

  // Pick the register that (after evicting spillable ranges) stays free the
  // longest.  UpdateFreeUntil returns true when |reg| beats the current
  // best and updates |free_until|/|blocked_at| accordingly.
  for (int reg = 0; reg < NumberOfRegisters(); ++reg) {
    if (blocked_registers_[reg]) continue;
    if (UpdateFreeUntil(reg, unallocated, &free_until, &blocked_at)) {
      candidate = reg;
    }
  }

  if (free_until < register_use->pos()) {
    // Can't acquire free register. Spill until we really need one.
    ASSERT(unallocated->Start() < ToInstructionStart(register_use->pos()));
    SpillBetween(unallocated, unallocated->Start(), register_use->pos());
    return;
  }

  TRACE_ALLOC(OS::Print("assigning blocked register "));
  TRACE_ALLOC(MakeRegisterLocation(candidate).Print());
  TRACE_ALLOC(OS::Print(" to live range %d until %d\n",
                        unallocated->vreg(), blocked_at));

  if (blocked_at < unallocated->End()) {
    // Register is blocked before the end of the live range.  Split the range
    // at latest at blocked_at position.
    LiveRange* tail = SplitBetween(unallocated,
                                   unallocated->Start(),
                                   blocked_at + 1);
    AddToUnallocated(tail);
  }

  AssignNonFreeRegister(unallocated, candidate);
}
1560 | 1604 |
1561 | 1605 |
1562 bool FlowGraphAllocator::UpdateFreeUntil(Register reg, | 1606 bool FlowGraphAllocator::UpdateFreeUntil(intptr_t reg, |
1563 LiveRange* unallocated, | 1607 LiveRange* unallocated, |
1564 intptr_t* cur_free_until, | 1608 intptr_t* cur_free_until, |
1565 intptr_t* cur_blocked_at) { | 1609 intptr_t* cur_blocked_at) { |
1566 intptr_t free_until = kMaxPosition; | 1610 intptr_t free_until = kMaxPosition; |
1567 intptr_t blocked_at = kMaxPosition; | 1611 intptr_t blocked_at = kMaxPosition; |
1568 const intptr_t start = unallocated->Start(); | 1612 const intptr_t start = unallocated->Start(); |
1569 | 1613 |
1570 for (intptr_t i = 0; i < cpu_regs_[reg].length(); i++) { | 1614 for (intptr_t i = 0; i < registers_[reg].length(); i++) { |
1571 LiveRange* allocated = cpu_regs_[reg][i]; | 1615 LiveRange* allocated = registers_[reg][i]; |
1572 | 1616 |
1573 UseInterval* first_pending_use_interval = | 1617 UseInterval* first_pending_use_interval = |
1574 allocated->finger()->first_pending_use_interval(); | 1618 allocated->finger()->first_pending_use_interval(); |
1575 if (first_pending_use_interval->Contains(start)) { | 1619 if (first_pending_use_interval->Contains(start)) { |
1576 // This is an active interval. | 1620 // This is an active interval. |
1577 if (allocated->vreg() < 0) { | 1621 if (allocated->vreg() < 0) { |
1578 // This register blocked by an interval that | 1622 // This register blocked by an interval that |
1579 // can't be spilled. | 1623 // can't be spilled. |
1580 return false; | 1624 return false; |
1581 } | 1625 } |
(...skipping 27 matching lines...) Expand all Loading... |
1609 } | 1653 } |
1610 } | 1654 } |
1611 | 1655 |
1612 ASSERT(free_until > *cur_free_until); | 1656 ASSERT(free_until > *cur_free_until); |
1613 *cur_free_until = free_until; | 1657 *cur_free_until = free_until; |
1614 *cur_blocked_at = blocked_at; | 1658 *cur_blocked_at = blocked_at; |
1615 return true; | 1659 return true; |
1616 } | 1660 } |
1617 | 1661 |
1618 | 1662 |
1619 void FlowGraphAllocator::RemoveEvicted(Register reg, intptr_t first_evicted) { | 1663 void FlowGraphAllocator::RemoveEvicted(intptr_t reg, intptr_t first_evicted) { |
1620 intptr_t to = first_evicted; | 1664 intptr_t to = first_evicted; |
1621 intptr_t from = first_evicted + 1; | 1665 intptr_t from = first_evicted + 1; |
1622 while (from < cpu_regs_[reg].length()) { | 1666 while (from < registers_[reg].length()) { |
1623 LiveRange* allocated = cpu_regs_[reg][from++]; | 1667 LiveRange* allocated = registers_[reg][from++]; |
1624 if (allocated != NULL) cpu_regs_[reg][to++] = allocated; | 1668 if (allocated != NULL) registers_[reg][to++] = allocated; |
1625 } | 1669 } |
1626 cpu_regs_[reg].TruncateTo(to); | 1670 registers_[reg].TruncateTo(to); |
1627 } | 1671 } |
1628 | 1672 |
1629 | 1673 |
1630 void FlowGraphAllocator::AssignNonFreeRegister(LiveRange* unallocated, | 1674 void FlowGraphAllocator::AssignNonFreeRegister(LiveRange* unallocated, |
1631 Register reg) { | 1675 intptr_t reg) { |
1632 intptr_t first_evicted = -1; | 1676 intptr_t first_evicted = -1; |
1633 for (intptr_t i = cpu_regs_[reg].length() - 1; i >= 0; i--) { | 1677 for (intptr_t i = registers_[reg].length() - 1; i >= 0; i--) { |
1634 LiveRange* allocated = cpu_regs_[reg][i]; | 1678 LiveRange* allocated = registers_[reg][i]; |
1635 if (allocated->vreg() < 0) continue; // Can't be evicted. | 1679 if (allocated->vreg() < 0) continue; // Can't be evicted. |
1636 if (EvictIntersection(allocated, unallocated)) { | 1680 if (EvictIntersection(allocated, unallocated)) { |
1637 // If allocated was not spilled convert all pending uses. | 1681 // If allocated was not spilled convert all pending uses. |
1638 if (allocated->assigned_location().IsRegister()) { | 1682 if (allocated->assigned_location().IsRegister()) { |
1639 ASSERT(allocated->End() <= unallocated->Start()); | 1683 ASSERT(allocated->End() <= unallocated->Start()); |
1640 ConvertAllUses(allocated); | 1684 ConvertAllUses(allocated); |
1641 } | 1685 } |
1642 cpu_regs_[reg][i] = NULL; | 1686 registers_[reg][i] = NULL; |
1643 first_evicted = i; | 1687 first_evicted = i; |
1644 } | 1688 } |
1645 } | 1689 } |
1646 | 1690 |
1647 // Remove evicted ranges from the array. | 1691 // Remove evicted ranges from the array. |
1648 if (first_evicted != -1) RemoveEvicted(reg, first_evicted); | 1692 if (first_evicted != -1) RemoveEvicted(reg, first_evicted); |
1649 | 1693 |
1650 cpu_regs_[reg].Add(unallocated); | 1694 registers_[reg].Add(unallocated); |
1651 unallocated->set_assigned_location(Location::RegisterLocation(reg)); | 1695 unallocated->set_assigned_location(MakeRegisterLocation(reg)); |
1652 } | 1696 } |
1653 | 1697 |
1654 | 1698 |
1655 bool FlowGraphAllocator::EvictIntersection(LiveRange* allocated, | 1699 bool FlowGraphAllocator::EvictIntersection(LiveRange* allocated, |
1656 LiveRange* unallocated) { | 1700 LiveRange* unallocated) { |
1657 UseInterval* first_unallocated = | 1701 UseInterval* first_unallocated = |
1658 unallocated->finger()->first_pending_use_interval(); | 1702 unallocated->finger()->first_pending_use_interval(); |
1659 const intptr_t intersection = FirstIntersection( | 1703 const intptr_t intersection = FirstIntersection( |
1660 allocated->finger()->first_pending_use_interval(), | 1704 allocated->finger()->first_pending_use_interval(), |
1661 first_unallocated); | 1705 first_unallocated); |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1693 } | 1737 } |
1694 | 1738 |
1695 return parallel_move->AddMove(to, from); | 1739 return parallel_move->AddMove(to, from); |
1696 } | 1740 } |
1697 | 1741 |
1698 | 1742 |
1699 void FlowGraphAllocator::ConvertUseTo(UsePosition* use, Location loc) { | 1743 void FlowGraphAllocator::ConvertUseTo(UsePosition* use, Location loc) { |
1700 ASSERT(use->location_slot() != NULL); | 1744 ASSERT(use->location_slot() != NULL); |
1701 Location* slot = use->location_slot(); | 1745 Location* slot = use->location_slot(); |
1702 ASSERT(slot->IsUnallocated()); | 1746 ASSERT(slot->IsUnallocated()); |
1703 ASSERT((slot->policy() == Location::kRequiresRegister) || | |
1704 (slot->policy() == Location::kPrefersRegister) || | |
1705 (slot->policy() == Location::kAny)); | |
1706 TRACE_ALLOC(OS::Print(" use at %d converted to ", use->pos())); | 1747 TRACE_ALLOC(OS::Print(" use at %d converted to ", use->pos())); |
1707 TRACE_ALLOC(loc.Print()); | 1748 TRACE_ALLOC(loc.Print()); |
1708 TRACE_ALLOC(OS::Print("\n")); | 1749 TRACE_ALLOC(OS::Print("\n")); |
1709 *slot = loc; | 1750 *slot = loc; |
1710 } | 1751 } |
1711 | 1752 |
1712 | 1753 |
// Finalize |range|: rewrite every recorded use with the range's assigned
// location, and register that location with each safepoint the range is
// live across (machine-register locations only).
void FlowGraphAllocator::ConvertAllUses(LiveRange* range) {
  // Ranges without a virtual register (fixed/blocking ranges) carry no uses
  // to convert.
  if (range->vreg() == kNoVirtualRegister) return;

  const Location loc = range->assigned_location();
  ASSERT(!loc.IsInvalid());

  TRACE_ALLOC(OS::Print("range [%d, %d) for v%d has been allocated to ",
                        range->Start(), range->End(), range->vreg()));
  TRACE_ALLOC(loc.Print());
  TRACE_ALLOC(OS::Print(":\n"));

  for (UsePosition* use = range->first_use(); use != NULL; use = use->next()) {
    ConvertUseTo(use, loc);
  }

  if (loc.IsMachineRegister()) {
    // Record the live register at every safepoint inside the range so the
    // corresponding LocationSummary knows it must be preserved.
    for (SafepointPosition* safepoint = range->first_safepoint();
         safepoint != NULL;
         safepoint = safepoint->next()) {
      safepoint->locs()->live_registers()->Add(loc);
    }
  }
}
1734 | 1777 |
1735 | 1778 |
// Advance the finger of every active range up to position |start|.  Ranges
// whose finger reports completion (Advance() returns true) are finalized
// via ConvertAllUses and removed from the per-register active lists.
void FlowGraphAllocator::AdvanceActiveIntervals(const intptr_t start) {
  for (intptr_t reg = 0; reg < NumberOfRegisters(); reg++) {
    if (registers_[reg].is_empty()) continue;

    intptr_t first_evicted = -1;
    for (intptr_t i = registers_[reg].length() - 1; i >= 0; i--) {
      LiveRange* range = registers_[reg][i];
      if (range->finger()->Advance(start)) {
        // Range is done before |start|: finalize it and clear its slot;
        // the list is compacted below.
        ConvertAllUses(range);
        registers_[reg][i] = NULL;
        first_evicted = i;
      }
    }

    if (first_evicted != -1) RemoveEvicted(reg, first_evicted);
  }
}
1755 | 1796 |
1756 | 1797 |
1757 static inline bool ShouldBeAllocatedBefore(LiveRange* a, LiveRange* b) { | |
1758 return a->Start() <= b->Start(); | |
1759 } | |
1760 | |
1761 | |
1762 bool LiveRange::Contains(intptr_t pos) const { | 1798 bool LiveRange::Contains(intptr_t pos) const { |
1763 if (!CanCover(pos)) return false; | 1799 if (!CanCover(pos)) return false; |
1764 | 1800 |
1765 for (UseInterval* interval = first_use_interval_; | 1801 for (UseInterval* interval = first_use_interval_; |
1766 interval != NULL; | 1802 interval != NULL; |
1767 interval = interval->next()) { | 1803 interval = interval->next()) { |
1768 if (interval->Contains(pos)) { | 1804 if (interval->Contains(pos)) { |
1769 return true; | 1805 return true; |
1770 } | 1806 } |
1771 } | 1807 } |
1772 | 1808 |
1773 return false; | 1809 return false; |
1774 } | 1810 } |
1775 | 1811 |
1776 | 1812 |
1777 void FlowGraphAllocator::AssignSafepoints(LiveRange* range) { | 1813 void FlowGraphAllocator::AssignSafepoints(LiveRange* range) { |
1778 for (intptr_t i = safepoints_.length() - 1; i >= 0; i--) { | 1814 for (intptr_t i = safepoints_.length() - 1; i >= 0; i--) { |
1779 Instruction* instr = safepoints_[i]; | 1815 Instruction* instr = safepoints_[i]; |
1780 | 1816 |
1781 const intptr_t pos = instr->lifetime_position(); | 1817 const intptr_t pos = instr->lifetime_position(); |
1782 if (range->End() <= pos) break; | 1818 if (range->End() <= pos) break; |
1783 | 1819 |
1784 if (range->Contains(pos)) range->AddSafepoint(pos, instr->locs()); | 1820 if (range->Contains(pos)) range->AddSafepoint(pos, instr->locs()); |
1785 } | 1821 } |
1786 } | 1822 } |
1787 | 1823 |
1788 | 1824 |
| 1825 static inline bool ShouldBeAllocatedBefore(LiveRange* a, LiveRange* b) { |
| 1826 // TODO(vegorov): consider first hint position when ordering live ranges. |
| 1827 return a->Start() <= b->Start(); |
| 1828 } |
| 1829 |
| 1830 |
| 1831 static void AddToSortedListOfRanges(GrowableArray<LiveRange*>* list, |
| 1832 LiveRange* range) { |
| 1833 range->finger()->Initialize(range); |
| 1834 |
| 1835 if (list->is_empty()) { |
| 1836 list->Add(range); |
| 1837 return; |
| 1838 } |
| 1839 |
| 1840 for (intptr_t i = list->length() - 1; i >= 0; i--) { |
| 1841 if (ShouldBeAllocatedBefore(range, (*list)[i])) { |
| 1842 list->InsertAt(i + 1, range); |
| 1843 return; |
| 1844 } |
| 1845 } |
| 1846 list->InsertAt(0, range); |
| 1847 } |
| 1848 |
| 1849 |
// Queue |range| on the current pass's unallocated worklist, preserving the
// worklist's sort order.
void FlowGraphAllocator::AddToUnallocated(LiveRange* range) {
  AddToSortedListOfRanges(&unallocated_, range);
}
1805 | 1853 |
1806 | 1854 |
| 1855 void FlowGraphAllocator::CompleteRange(LiveRange* range, Location::Kind kind) { |
| 1856 switch (kind) { |
| 1857 case Location::kRegister: |
| 1858 AddToSortedListOfRanges(&unallocated_cpu_, range); |
| 1859 break; |
| 1860 |
| 1861 case Location::kXmmRegister: |
| 1862 AddToSortedListOfRanges(&unallocated_xmm_, range); |
| 1863 break; |
| 1864 |
| 1865 default: |
| 1866 UNREACHABLE(); |
| 1867 } |
| 1868 } |
| 1869 |
| 1870 |
#if defined(DEBUG)
// Debug-only invariant check: the unallocated worklist must be sorted by
// decreasing start position (the order ShouldBeAllocatedBefore defines).
bool FlowGraphAllocator::UnallocatedIsSorted() {
  for (intptr_t i = 1; i < unallocated_.length(); i++) {
    LiveRange* later = unallocated_[i];
    LiveRange* earlier = unallocated_[i - 1];
    if (!ShouldBeAllocatedBefore(later, earlier)) return false;
  }
  return true;
}
#endif
1817 | 1881 |
1818 | 1882 |
// Set up allocator state for one allocation pass over a register class
// (CPU or XMM): record the register kind and count, load the per-register
// blocked flags, seed each register's active list with its blocking range
// (if any), and fill the worklist with the pass's unallocated ranges.
void FlowGraphAllocator::PrepareForAllocation(
    Location::Kind register_kind,
    intptr_t number_of_registers,
    const GrowableArray<LiveRange*>& unallocated,
    LiveRange** blocking_ranges,
    bool* blocked_registers) {
  register_kind_ = register_kind;
  number_of_registers_ = number_of_registers;

  // The previous pass must have drained the worklist completely.
  ASSERT(unallocated_.is_empty());
  unallocated_.AddArray(unallocated);

  for (intptr_t reg = 0; reg < number_of_registers; reg++) {
    blocked_registers_[reg] = blocked_registers[reg];
    ASSERT(registers_[reg].is_empty());

    LiveRange* range = blocking_ranges[reg];
    if (range != NULL) {
      range->finger()->Initialize(range);
      registers_[reg].Add(range);
    }
  }
}
| 1906 |
| 1907 |
| 1908 void FlowGraphAllocator::AllocateUnallocatedRanges() { |
1820 #if defined(DEBUG) | 1909 #if defined(DEBUG) |
1821 ASSERT(UnallocatedIsSorted()); | 1910 ASSERT(UnallocatedIsSorted()); |
1822 #endif | 1911 #endif |
1823 | 1912 |
1824 for (intptr_t i = 0; i < kNumberOfCpuRegisters; i++) { | |
1825 if (cpu_regs_[i].length() == 1) { | |
1826 LiveRange* range = cpu_regs_[i][0]; | |
1827 range->finger()->Initialize(range); | |
1828 } | |
1829 } | |
1830 | |
1831 while (!unallocated_.is_empty()) { | 1913 while (!unallocated_.is_empty()) { |
1832 LiveRange* range = unallocated_.Last(); | 1914 LiveRange* range = unallocated_.Last(); |
1833 unallocated_.RemoveLast(); | 1915 unallocated_.RemoveLast(); |
1834 const intptr_t start = range->Start(); | 1916 const intptr_t start = range->Start(); |
1835 TRACE_ALLOC(OS::Print("Processing live range for vreg %d starting at %d\n", | 1917 TRACE_ALLOC(OS::Print("Processing live range for vreg %d starting at %d\n", |
1836 range->vreg(), | 1918 range->vreg(), |
1837 start)); | 1919 start)); |
1838 | 1920 |
1839 // TODO(vegorov): eagerly spill liveranges without register uses. | 1921 // TODO(vegorov): eagerly spill liveranges without register uses. |
1840 AdvanceActiveIntervals(start); | 1922 AdvanceActiveIntervals(start); |
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1903 TRACE_ALLOC(source.Print()); | 1985 TRACE_ALLOC(source.Print()); |
1904 TRACE_ALLOC(OS::Print("] to [%d, %d) [", | 1986 TRACE_ALLOC(OS::Print("] to [%d, %d) [", |
1905 target_cover->Start(), target_cover->End())); | 1987 target_cover->Start(), target_cover->End())); |
1906 TRACE_ALLOC(target.Print()); | 1988 TRACE_ALLOC(target.Print()); |
1907 TRACE_ALLOC(OS::Print("]\n")); | 1989 TRACE_ALLOC(OS::Print("]\n")); |
1908 | 1990 |
1909 // Siblings were allocated to the same register. | 1991 // Siblings were allocated to the same register. |
1910 if (source.Equals(target)) return; | 1992 if (source.Equals(target)) return; |
1911 | 1993 |
1912 // Values are eagerly spilled. Spill slot already contains appropriate value. | 1994 // Values are eagerly spilled. Spill slot already contains appropriate value. |
1913 if (target.IsStackSlot()) { | 1995 if (target.IsStackSlot() || target.IsDoubleStackSlot()) { |
1914 ASSERT(parent->spill_slot().Equals(target)); | 1996 ASSERT(parent->spill_slot().Equals(target)); |
1915 return; | 1997 return; |
1916 } | 1998 } |
1917 | 1999 |
1918 Instruction* last = source_block->last_instruction(); | 2000 Instruction* last = source_block->last_instruction(); |
1919 if ((last->SuccessorCount() == 1) && !source_block->IsGraphEntry()) { | 2001 if ((last->SuccessorCount() == 1) && !source_block->IsGraphEntry()) { |
1920 ASSERT(last->IsGoto()); | 2002 ASSERT(last->IsGoto()); |
1921 last->AsGoto()->GetParallelMove()->AddMove(target, source); | 2003 last->AsGoto()->GetParallelMove()->AddMove(target, source); |
1922 } else { | 2004 } else { |
1923 target_block->GetParallelMove()->AddMove(target, source); | 2005 target_block->GetParallelMove()->AddMove(target, source); |
(...skipping 12 matching lines...) Expand all Loading... |
1936 LiveRange* sibling = range->next_sibling(); | 2018 LiveRange* sibling = range->next_sibling(); |
1937 TRACE_ALLOC(OS::Print("connecting [%d, %d) [", | 2019 TRACE_ALLOC(OS::Print("connecting [%d, %d) [", |
1938 range->Start(), range->End())); | 2020 range->Start(), range->End())); |
1939 TRACE_ALLOC(range->assigned_location().Print()); | 2021 TRACE_ALLOC(range->assigned_location().Print()); |
1940 TRACE_ALLOC(OS::Print("] to [%d, %d) [", | 2022 TRACE_ALLOC(OS::Print("] to [%d, %d) [", |
1941 sibling->Start(), sibling->End())); | 2023 sibling->Start(), sibling->End())); |
1942 TRACE_ALLOC(sibling->assigned_location().Print()); | 2024 TRACE_ALLOC(sibling->assigned_location().Print()); |
1943 TRACE_ALLOC(OS::Print("]\n")); | 2025 TRACE_ALLOC(OS::Print("]\n")); |
1944 if ((range->End() == sibling->Start()) && | 2026 if ((range->End() == sibling->Start()) && |
1945 !sibling->assigned_location().IsStackSlot() && | 2027 !sibling->assigned_location().IsStackSlot() && |
| 2028 !sibling->assigned_location().IsDoubleStackSlot() && |
1946 !range->assigned_location().Equals(sibling->assigned_location()) && | 2029 !range->assigned_location().Equals(sibling->assigned_location()) && |
1947 !IsBlockEntry(range->End())) { | 2030 !IsBlockEntry(range->End())) { |
1948 AddMoveAt(sibling->Start(), | 2031 AddMoveAt(sibling->Start(), |
1949 sibling->assigned_location(), | 2032 sibling->assigned_location(), |
1950 range->assigned_location()); | 2033 range->assigned_location()); |
1951 } | 2034 } |
1952 range = sibling; | 2035 range = sibling; |
1953 } | 2036 } |
1954 } | 2037 } |
1955 | 2038 |
1956 // Resolve non-linear control flow across branches. | 2039 // Resolve non-linear control flow across branches. |
1957 for (intptr_t i = 1; i < block_order_.length(); i++) { | 2040 for (intptr_t i = 1; i < block_order_.length(); i++) { |
1958 BlockEntryInstr* block = block_order_[i]; | 2041 BlockEntryInstr* block = block_order_[i]; |
1959 BitVector* live = live_in_[block->postorder_number()]; | 2042 BitVector* live = live_in_[block->postorder_number()]; |
1960 for (BitVector::Iterator it(live); !it.Done(); it.Advance()) { | 2043 for (BitVector::Iterator it(live); !it.Done(); it.Advance()) { |
1961 LiveRange* range = GetLiveRange(it.Current()); | 2044 LiveRange* range = GetLiveRange(it.Current()); |
1962 for (intptr_t j = 0; j < block->PredecessorCount(); j++) { | 2045 for (intptr_t j = 0; j < block->PredecessorCount(); j++) { |
1963 ConnectSplitSiblings(range, block->PredecessorAt(j), block); | 2046 ConnectSplitSiblings(range, block->PredecessorAt(j), block); |
1964 } | 2047 } |
1965 } | 2048 } |
1966 } | 2049 } |
1967 | 2050 |
1968 // Eagerly spill values. | 2051 // Eagerly spill values. |
1969 // TODO(vegorov): if value is spilled on the cold path (e.g. by the call) | 2052 // TODO(vegorov): if value is spilled on the cold path (e.g. by the call) |
1970 // this will cause spilling to occur on the fast path (at the definition). | 2053 // this will cause spilling to occur on the fast path (at the definition). |
1971 for (intptr_t i = 0; i < spilled_.length(); i++) { | 2054 for (intptr_t i = 0; i < spilled_.length(); i++) { |
1972 LiveRange* range = spilled_[i]; | 2055 LiveRange* range = spilled_[i]; |
1973 if (range->assigned_location().IsStackSlot()) { | 2056 if (range->assigned_location().IsStackSlot() || |
| 2057 range->assigned_location().IsDoubleStackSlot()) { |
1974 ASSERT(range->assigned_location().Equals(range->spill_slot())); | 2058 ASSERT(range->assigned_location().Equals(range->spill_slot())); |
1975 } else { | 2059 } else { |
1976 AddMoveAt(range->Start() + 1, | 2060 AddMoveAt(range->Start() + 1, |
1977 range->spill_slot(), | 2061 range->spill_slot(), |
1978 range->assigned_location()); | 2062 range->assigned_location()); |
1979 } | 2063 } |
1980 } | 2064 } |
1981 } | 2065 } |
1982 | 2066 |
1983 | 2067 |
(...skipping 20 matching lines...) Expand all Loading... |
2004 PrintLiveRanges(); | 2088 PrintLiveRanges(); |
2005 OS::Print("----------------------------------------------\n"); | 2089 OS::Print("----------------------------------------------\n"); |
2006 | 2090 |
2007 OS::Print("-- [before ssa allocator] ir [%s] -------------\n", | 2091 OS::Print("-- [before ssa allocator] ir [%s] -------------\n", |
2008 function.ToFullyQualifiedCString()); | 2092 function.ToFullyQualifiedCString()); |
2009 FlowGraphPrinter printer(flow_graph_, true); | 2093 FlowGraphPrinter printer(flow_graph_, true); |
2010 printer.PrintBlocks(); | 2094 printer.PrintBlocks(); |
2011 OS::Print("----------------------------------------------\n"); | 2095 OS::Print("----------------------------------------------\n"); |
2012 } | 2096 } |
2013 | 2097 |
2014 AllocateCPURegisters(); | 2098 PrepareForAllocation(Location::kRegister, |
| 2099 kNumberOfCpuRegisters, |
| 2100 unallocated_cpu_, |
| 2101 cpu_regs_, |
| 2102 blocked_cpu_registers_); |
| 2103 AllocateUnallocatedRanges(); |
| 2104 |
| 2105 cpu_spill_slot_count_ = spill_slots_.length(); |
| 2106 spill_slots_.Clear(); |
| 2107 |
| 2108 PrepareForAllocation(Location::kXmmRegister, |
| 2109 kNumberOfXmmRegisters, |
| 2110 unallocated_xmm_, |
| 2111 xmm_regs_, |
| 2112 blocked_xmm_registers_); |
| 2113 AllocateUnallocatedRanges(); |
2015 | 2114 |
2016 ResolveControlFlow(); | 2115 ResolveControlFlow(); |
2017 | 2116 |
| 2117 // Reserve spill slots for XMM registers alive across slow path code. |
| 2118 // TODO(vegorov): remove this code when safepoints with registers are |
| 2119 // implemented. |
| 2120 intptr_t deferred_xmm_spills = 0; |
| 2121 for (intptr_t i = 0; i < safepoints_.length(); i++) { |
| 2122 if (!safepoints_[i]->locs()->always_calls()) { |
| 2123 const intptr_t count = |
| 2124 safepoints_[i]->locs()->live_registers()->xmm_regs_count(); |
| 2125 if (count > deferred_xmm_spills) deferred_xmm_spills = count; |
| 2126 } |
| 2127 } |
| 2128 |
2018 GraphEntryInstr* entry = block_order_[0]->AsGraphEntry(); | 2129 GraphEntryInstr* entry = block_order_[0]->AsGraphEntry(); |
2019 ASSERT(entry != NULL); | 2130 ASSERT(entry != NULL); |
2020 entry->set_spill_slot_count(spill_slots_.length()); | 2131 entry->set_spill_slot_count( |
| 2132 (deferred_xmm_spills + spill_slots_.length()) * 2 + |
| 2133 cpu_spill_slot_count_); |
2021 | 2134 |
2022 if (FLAG_print_ssa_liveranges) { | 2135 if (FLAG_print_ssa_liveranges) { |
2023 const Function& function = flow_graph_.parsed_function().function(); | 2136 const Function& function = flow_graph_.parsed_function().function(); |
2024 | 2137 |
2025 OS::Print("-- [after ssa allocator] ranges [%s] ---------\n", | 2138 OS::Print("-- [after ssa allocator] ranges [%s] ---------\n", |
2026 function.ToFullyQualifiedCString()); | 2139 function.ToFullyQualifiedCString()); |
2027 PrintLiveRanges(); | 2140 PrintLiveRanges(); |
2028 OS::Print("----------------------------------------------\n"); | 2141 OS::Print("----------------------------------------------\n"); |
2029 | 2142 |
2030 OS::Print("-- [after ssa allocator] ir [%s] -------------\n", | 2143 OS::Print("-- [after ssa allocator] ir [%s] -------------\n", |
2031 function.ToFullyQualifiedCString()); | 2144 function.ToFullyQualifiedCString()); |
2032 FlowGraphPrinter printer(flow_graph_, true); | 2145 FlowGraphPrinter printer(flow_graph_, true); |
2033 printer.PrintBlocks(); | 2146 printer.PrintBlocks(); |
2034 OS::Print("----------------------------------------------\n"); | 2147 OS::Print("----------------------------------------------\n"); |
2035 } | 2148 } |
2036 } | 2149 } |
2037 | 2150 |
2038 | 2151 |
2039 } // namespace dart | 2152 } // namespace dart |
OLD | NEW |