Chromium Code Reviews

| Index: runtime/vm/flow_graph_allocator.h |
| diff --git a/runtime/vm/flow_graph_allocator.h b/runtime/vm/flow_graph_allocator.h |
| index 7848d2b11822e7b5d6a25d69c61d9430cd34ba02..abef00c28554ab67543feb48ae96161e83fad5f1 100644 |
| --- a/runtime/vm/flow_graph_allocator.h |
| +++ b/runtime/vm/flow_graph_allocator.h |
| @@ -10,12 +10,16 @@ |
| namespace dart { |
| +class FlowGraphBuilder; |
| +class LiveRange; |
| +class UseInterval; |
| + |
| class FlowGraphAllocator : public ValueObject { |
| public: |
| - FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& postorder, |
| - intptr_t max_ssa_temp_index); |
| + FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& block_order, |
| + FlowGraphBuilder* builder); |
| - void ResolveConstraints(); |
| + void AllocateRegisters(); |
| // Build live-in and live-out sets for each block. |
| void AnalyzeLiveness(); |
| @@ -30,7 +34,7 @@ class FlowGraphAllocator : public ValueObject { |
| bool UpdateLiveOut(BlockEntryInstr* instr); |
| // Update live-in set for the given block: live-in should contain |
| - // all values taht are live-out from the block and are not defined |
| + // all values that are live-out from the block and are not defined |
| // by this block. |
| // Returns true if live-in set was changed. |
| bool UpdateLiveIn(BlockEntryInstr* instr); |
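For orientation, the two Update methods above implement the standard backward liveness equations. A minimal standalone sketch, using std::set and a toy Block struct in place of the VM's BitVector and BlockEntryInstr, and assuming the usual iterate-until-fixed-point driver (the per-block def/use sets are computed elsewhere and are simply given here):

    #include <set>
    #include <vector>

    struct Block {
      std::vector<int> succ;            // indices of successor blocks
      std::set<int> def;                // SSA values defined in this block
      std::set<int> use;                // SSA values used before being defined here
      std::set<int> live_in, live_out;  // results of the analysis
    };

    // live_out(B) = union of live_in(S) over all successors S
    // live_in(B)  = use(B) | (live_out(B) - def(B))
    void AnalyzeLivenessSketch(std::vector<Block>& blocks) {
      bool changed = true;
      while (changed) {
        changed = false;
        for (Block& b : blocks) {
          std::set<int> out;
          for (int s : b.succ) {
            out.insert(blocks[s].live_in.begin(), blocks[s].live_in.end());
          }
          std::set<int> in = b.use;
          for (int v : out) {
            if (b.def.count(v) == 0) in.insert(v);
          }
          if (out != b.live_out || in != b.live_in) {
            b.live_out = out;
            b.live_in = in;
            changed = true;
          }
        }
      }
    }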
| @@ -42,6 +46,55 @@ class FlowGraphAllocator : public ValueObject { |
| // Print results of liveness analysis. |
| void DumpLiveness(); |
| + // Visit blocks in the code generation order (reverse post order) and |
| + // linearly assign consecutive lifetime positions to every instruction. |
| + // Each instruction gets two positions: |
| + // |
| + // 2 * n - even one corresponding to instruction's start |
| + // |
| + // 2 * n + 1 - odd one corresponding to instruction's end |
| + // |
| + // Having two positions allows us to capture non-trivial register |
| + // constraints in use intervals: for example we can declare that |
| + // an input value is only used at the start of the instruction and |
| + // this might allow the register allocator to allocate both this input |
| + // and output (or temp) to the same register if this is the last |
| + // use of the value. |
| + // Additionally creates parallel moves at the joins' predecessors |
| + // that will be used for phi resolution. |
| + void NumberInstructions(); |
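The numbering scheme itself is easy to sketch. A toy illustration (Instr here is a stand-in for the VM's Instruction; only the position assignment is shown):

    #include <vector>

    struct Instr { int lifetime_position = -1; };

    // Walk instructions in code generation order and give instruction n the
    // even position 2 * n; the matching odd position 2 * n + 1 is its end.
    void NumberInstructionsSketch(std::vector<Instr>& code_order) {
      int pos = 0;
      for (Instr& instr : code_order) {
        instr.lifetime_position = pos;  // even: start of the instruction
        pos += 2;                       // pos + 1 (odd) is the instruction's end
      }
    }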
| + |
| + LiveRange* GetLiveRange(intptr_t vreg); |
| + void BuildLiveRanges(); |
| + void PrintLiveRanges(); |
| + |
| + void UseValue(Instruction* instr, |
| + intptr_t def, |
| + intptr_t use, |
| + intptr_t vreg, |
| + Location* loc, |
| + bool use_at_end); |

srdjan (2012/07/11 17:22:32):
Please describe what def and use are. intptr_t som
| + void Define(Instruction* instr, intptr_t def, intptr_t vreg, Location* loc); |
| + |
| + void AddToUnallocated(UseInterval* chain); |
| + void BlockLocation(Location loc, intptr_t pos); |
| + |
| + bool AllocateFreeRegister(UseInterval* unallocated); |
| + void AssignFreeRegister(UseInterval* unallocated, Register reg); |
| + |
| + void FinalizeInterval(UseInterval* interval, Location loc); |
| + void AdvanceActiveIntervals(const intptr_t start); |
| + |
| + bool UnallocatedIsSorted(); |
| + void AllocateCPURegisters(); |
| + |
| + // TODO(vegorov): this field is used only to call Bailout. remove when |
| + // all bailouts are gone. |

srdjan (2012/07/11 17:22:32):
s/remove/Remove/
| + FlowGraphBuilder* builder_; |
| + |
| + const GrowableArray<BlockEntryInstr*>& block_order_; |
| + const GrowableArray<BlockEntryInstr*>& postorder_; |
| + |
| // Live-out sets for each block. They contain indices of SSA values |
| // that are live out from this block: that is values that were either |
| // defined in this block or live into it and that are used in some |
| @@ -56,15 +109,193 @@ class FlowGraphAllocator : public ValueObject { |
| // that are used by this block or its successors. |
| GrowableArray<BitVector*> live_in_; |
| - const GrowableArray<BlockEntryInstr*>& postorder_; |
| - |
| // Number of virtual registers. Currently equal to the number of |
| // SSA values. |
| const intptr_t vreg_count_; |
| + // LiveRanges corresponding to SSA values. |
| + GrowableArray<LiveRange*> live_ranges_; |
| + |
| + // Worklist for the register allocator. Always kept sorted according |
| + // to the ShouldBeAllocatedBefore predicate. |
| + GrowableArray<UseInterval*> unallocated_; |
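The header does not spell out ShouldBeAllocatedBefore; assuming it orders intervals by ascending start position (the usual choice for linear scan), a sorted insertion in the spirit of AddToUnallocated might look like this sketch on toy types:

    #include <vector>

    struct Interval { int start; int end; };

    // Assumption: "a should be allocated before b" when a starts no later than b.
    static bool ShouldBeAllocatedBeforeSketch(const Interval& a, const Interval& b) {
      return a.start <= b.start;
    }

    // Insert while preserving the sort order of the worklist.
    void AddToUnallocatedSketch(std::vector<Interval>& unallocated, Interval chain) {
      auto it = unallocated.begin();
      while (it != unallocated.end() && ShouldBeAllocatedBeforeSketch(*it, chain)) {
        ++it;
      }
      unallocated.insert(it, chain);
    }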
| + |
| + // Per-register lists of allocated UseIntervals, linked through the |
| + // next_allocated field. They contain only those intervals that |
| + // can be affected by future allocation decisions. Those intervals |
| + // that end before the start of the current UseInterval are removed |
| + // from the list and will not be affected. |
| + UseInterval* cpu_regs_[kNumberOfCpuRegisters]; |
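A rough sketch of the expiry step this comment describes (compare AdvanceActiveIntervals), on a toy interval type; unlinking in place is an assumption about the intent, not a transcription of the VM's code:

    // Toy stand-in for a per-register list of allocated intervals.
    struct ActiveInterval { int start; int end; ActiveInterval* next_allocated; };

    // Unlink every interval that ends at or before `start`: since intervals are
    // half-open, such an interval cannot overlap anything starting at `start`
    // and cannot be affected by later allocation decisions.
    ActiveInterval* AdvanceSketch(ActiveInterval* head, int start) {
      ActiveInterval dummy{0, 0, head};
      for (ActiveInterval* prev = &dummy; prev->next_allocated != nullptr;) {
        ActiveInterval* cur = prev->next_allocated;
        if (cur->end <= start) {
          prev->next_allocated = cur->next_allocated;  // expired: drop from the list
        } else {
          prev = cur;
        }
      }
      return dummy.next_allocated;
    }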
| + |
| DISALLOW_COPY_AND_ASSIGN(FlowGraphAllocator); |
| }; |
| + |
| +// UsePosition represents a single use of an SSA value by some instruction. |
| +// It points to a location slot which either tells the register allocator |
| +// where the instruction expects the value (if the slot contains a fixed |
| +// location) or asks the register allocator to allocate storage (register or |
| +// spill slot) for this use with certain properties (if the slot contains an |
| +// unallocated location). |
| +class UsePosition : public ZoneAllocated { |
| + public: |
| + static const intptr_t kFixedUse = 0x1; |
| + static const intptr_t kSameAsFirstUse = 0x2; |
| + static const intptr_t kInstructionTagMask = 0x3; |
| + static const intptr_t kPositionShift = 2; |
| + |
| + static intptr_t FlagForUse(const Location& loc) { |
| + if (loc.IsRegister()) return kFixedUse; |
| + if (loc.IsUnallocated() && (loc.policy() == Location::kSameAsFirstInput)) { |
| + return kSameAsFirstUse; |
| + } |
| + return kInstructionTagMask; |
| + } |
| + |
| + // TODO(vegorov): we encode either a position or an instruction pointer |
| + // into the pos_ field to generate moves when needed to resolve |
| + // fixed or same-as-first constraints, but this looks ugly. |
| + UsePosition(Instruction* instr, |
| + intptr_t pos, |
| + UsePosition* next, |
| + Location* location_slot) |
| + : pos_(pos << kPositionShift), |
| + location_slot_(location_slot), |
| + next_(next) { |
| + if (instr != NULL) { |
| + ASSERT(location_slot_ != NULL); |
| + pos_ = reinterpret_cast<intptr_t>(instr) | FlagForUse(*location_slot_); |
| + } |
| + ASSERT(this->pos() == pos); |
| + } |
| + |
| + // Tell the use that it should load the value from the given location. |
| + // If the location slot for the use is flexible (unallocated) it will be |
| + // updated with the given location. Otherwise a move will be scheduled from |
| + // the given location to the location already stored in the slot. |
| + void AssignLocation(Location loc); |
| + |
| + Location* location_slot() const { return location_slot_; } |
| + void set_location_slot(Location* location_slot) { |
| + location_slot_ = location_slot; |
| + } |
| + |
| + void set_next(UsePosition* next) { next_ = next; } |
| + UsePosition* next() const { return next_; } |
| + |
| + intptr_t pos() const { |
| + if ((pos_ & kInstructionTagMask) != 0) { |
| + return instr()->lifetime_position(); |
| + } |
| + return pos_ >> kPositionShift; |
| + } |
| + |
| + Instruction* instr() const { |
| + ASSERT((pos_ & kInstructionTagMask) != 0); |
| + return reinterpret_cast<Instruction*>(pos_ & ~kInstructionTagMask); |
| + } |
| + |
| + bool HasHint() const { |
| + return (pos_ & kInstructionTagMask) == kFixedUse; |
| + } |
| + |
| + Location hint() const { |
| + ASSERT(HasHint()); |
| + ASSERT(location_slot()->IsRegister()); |
| + return *location_slot_; |
| + } |
| + |
| + private: |
| + intptr_t pos_; |
| + Location* location_slot_; |
| + UsePosition* next_; |
| +}; |
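The pos_ encoding above works because a plain position is stored shifted left by kPositionShift (so its two tag bits stay zero), while an Instruction pointer is at least 4-byte aligned (so its low two bits are free to hold a non-zero tag). A small standalone illustration of the trick, with Dummy standing in for Instruction and the alignment treated as an assumption:

    #include <cassert>
    #include <cstdint>

    struct Dummy { int payload; };

    int main() {
      // Plain-position encoding: the tag bits stay zero and the value round-trips.
      std::intptr_t pos = 17;
      std::intptr_t encoded = pos << 2;
      assert((encoded & 0x3) == 0);
      assert((encoded >> 2) == pos);

      // Pointer encoding: assuming at least 4-byte alignment, the low two bits
      // can carry a tag such as kFixedUse (0x1) and still be masked off later.
      Dummy d;
      std::intptr_t raw = reinterpret_cast<std::intptr_t>(&d);
      assert(raw % 4 == 0);
      std::intptr_t tagged = raw | 0x1;
      assert((tagged & 0x3) != 0);
      Dummy* back = reinterpret_cast<Dummy*>(tagged & ~std::intptr_t{0x3});
      assert(back == &d);
      return 0;
    }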
| + |
| + |
| +// UseInterval represents a holeless half-open interval of liveness for a |
| +// given SSA value: [start, end) in terms of the lifetime positions that |
| +// NumberInstructions assigns to instructions. The register allocator has to |
| +// keep the value live in a register or in a spill slot from the start |
| +// position until the end position. The interval can cover zero or more uses. |
| +// During register allocation UseIntervals from different live ranges that |
| +// are allocated to the same register are chained together through the |
| +// next_allocated_ field. |
| +// Note: currently all uses of the same SSA value are linked together into a |
| +// single list (and not split between UseIntervals). |
| +class UseInterval : public ZoneAllocated { |
| + public: |
| + UseInterval(intptr_t vreg, intptr_t start, intptr_t end, UseInterval* next) |
| + : vreg_(vreg), |
| + start_(start), |
| + end_(end), |
| + uses_((next == NULL) ? NULL : next->uses_), |
| + next_(next), |
| + next_allocated_(next) { } |
| + |
| + |
| + void AddUse(Instruction* instr, intptr_t pos, Location* loc); |
| + void Print(); |
| + |
| + intptr_t vreg() const { return vreg_; } |
| + intptr_t start() const { return start_; } |
| + intptr_t end() const { return end_; } |
| + UsePosition* first_use() const { return uses_; } |
| + UseInterval* next() const { return next_; } |
| + |
| + bool Contains(intptr_t pos) const { |
| + return (start() <= pos) && (pos < end()); |
| + } |
| + |
| + // Return the smallest position that is covered by both UseIntervals or |
| + // kIllegalPosition if intervals do not intersect. |
| + intptr_t Intersect(UseInterval* other); |
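For half-open intervals the intersection test reduces to comparing the larger start with the smaller end. A standalone sketch (kNoIntersection stands in for kIllegalPosition, which is defined elsewhere in the VM):

    #include <algorithm>

    static const int kNoIntersection = -1;

    // Smallest position covered by both [s1, e1) and [s2, e2), or the sentinel
    // if the intervals do not overlap.
    int IntersectSketch(int s1, int e1, int s2, int e2) {
      const int lo = std::max(s1, s2);
      const int hi = std::min(e1, e2);
      return (lo < hi) ? lo : kNoIntersection;
    }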
| + |
| + UseInterval* Split(intptr_t pos); |
| + |
| + void set_next_allocated(UseInterval* next_allocated) { |
| + next_allocated_ = next_allocated; |
| + } |
| + UseInterval* next_allocated() const { return next_allocated_; } |
| + |
| + private: |
| + friend class LiveRange; |
| + const intptr_t vreg_; |
| + |
| + intptr_t start_; |
| + intptr_t end_; |
| + |
| + UsePosition* uses_; |
| + |
| + UseInterval* next_; |
| + UseInterval* next_allocated_; |
| +}; |
| + |
| + |
| +// LiveRange represents a sequence of UseIntervals for a given SSA value. |
| +// TODO(vegorov): this class is actually redundant currently. |
| +class LiveRange : public ZoneAllocated { |
| + public: |
| + explicit LiveRange(intptr_t vreg) : vreg_(vreg), head_(NULL) { } |
| + |
| + void DefineAt(Instruction* instr, intptr_t pos, Location* loc); |
| + |
| + void UseAt(Instruction* instr, |
| + intptr_t def_pos, |
| + intptr_t use_pos, |
| + bool use_at_end, |
| + Location* loc); |
| + |
| + void AddUseInterval(intptr_t start, intptr_t end); |
| + |
| + void Print(); |
| + |
| + UseInterval* head() const { return head_; } |
| + |
| + private: |
| + const intptr_t vreg_; |
| + UseInterval* head_; |
| +}; |
| + |
| + |
| } // namespace dart |
| #endif // VM_FLOW_GRAPH_ALLOCATOR_H_ |