Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef VM_FLOW_GRAPH_ALLOCATOR_H_ | 5 #ifndef VM_FLOW_GRAPH_ALLOCATOR_H_ |
| 6 #define VM_FLOW_GRAPH_ALLOCATOR_H_ | 6 #define VM_FLOW_GRAPH_ALLOCATOR_H_ |
| 7 | 7 |
| 8 #include "vm/growable_array.h" | 8 #include "vm/growable_array.h" |
| 9 #include "vm/intermediate_language.h" | 9 #include "vm/intermediate_language.h" |
| 10 | 10 |
| 11 namespace dart { | 11 namespace dart { |
| 12 | 12 |
| 13 class FlowGraphBuilder; | |
| 14 class LiveRange; | |
| 15 class UseInterval; | |
| 16 | |
| 13 class FlowGraphAllocator : public ValueObject { | 17 class FlowGraphAllocator : public ValueObject { |
| 14 public: | 18 public: |
| 15 FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& postorder, | 19 FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& block_order, |
| 16 intptr_t max_ssa_temp_index); | 20 FlowGraphBuilder* builder); |
| 17 | 21 |
| 18 void ResolveConstraints(); | 22 void AllocateRegisters(); |
| 19 | 23 |
| 20 // Build live-in and live-out sets for each block. | 24 // Build live-in and live-out sets for each block. |
| 21 void AnalyzeLiveness(); | 25 void AnalyzeLiveness(); |
| 22 | 26 |
| 23 private: | 27 private: |
| 24 // Compute initial values for live-out, kill and live-in sets. | 28 // Compute initial values for live-out, kill and live-in sets. |
| 25 void ComputeInitialSets(); | 29 void ComputeInitialSets(); |
| 26 | 30 |
| 27 // Update live-out set for the given block: live-out should contain | 31 // Update live-out set for the given block: live-out should contain |
| 28 // all values that are live-in for block's successors. | 32 // all values that are live-in for block's successors. |
| 29 // Returns true if live-out set was changed. | 33 // Returns true if live-out set was changed. |
| 30 bool UpdateLiveOut(BlockEntryInstr* instr); | 34 bool UpdateLiveOut(BlockEntryInstr* instr); |
| 31 | 35 |
| 32 // Update live-in set for the given block: live-in should contain | 36 // Update live-in set for the given block: live-in should contain |
| 33 // all values taht are live-out from the block and are not defined | 37 // all values that are live-out from the block and are not defined |
| 34 // by this block. | 38 // by this block. |
| 35 // Returns true if live-in set was changed. | 39 // Returns true if live-in set was changed. |
| 36 bool UpdateLiveIn(BlockEntryInstr* instr); | 40 bool UpdateLiveIn(BlockEntryInstr* instr); |
| 37 | 41 |
| 38 // Perform fix-point iteration updating live-out and live-in sets | 42 // Perform fix-point iteration updating live-out and live-in sets |
| 39 // for blocks until they stop changing. | 43 // for blocks until they stop changing. |
| 40 void ComputeLiveInAndLiveOutSets(); | 44 void ComputeLiveInAndLiveOutSets(); |
| 41 | 45 |
| 42 // Print results of liveness analysis. | 46 // Print results of liveness analysis. |
| 43 void DumpLiveness(); | 47 void DumpLiveness(); |
| 44 | 48 |
| 49 // Visit blocks in the code generation order (reverse post order) and | |
| 50 // linearly assign consecutive lifetime positions to every instruction. | |
| 51 // Each instruction gets two positions: | |
| 52 // | |
| 53 // 2 * n - even one corresponding to instruction's start | |
| 54 // | |
| 55 // 2 * n + 1 - odd one corresponding to instruction's end | |
| 56 // | |
| 57 // Having two positions allows us to capture non-trivial register | |
| 58 // constraints in use intervals: for example we can declare that | |
| 59 // an input value is only used at the start of the instruction and | |
| 60 // this might allow register allocator to allocate both this input | |
| 61 // and output (or temp) to the same register if this is the last | |
| 62 // use of the value. | |
| 63 // Additionally creates parallel moves at the joins' predecessors | |
| 64 // that will be used for phi resolution. | |
| 65 void NumberInstructions(); | |
| 66 | |
| 67 LiveRange* GetLiveRange(intptr_t vreg); | |
| 68 void BuildLiveRanges(); | |
| 69 void PrintLiveRanges(); | |
| 70 | |
| 71 void UseValue(Instruction* instr, | |
| 72 intptr_t def, | |
| 73 intptr_t use, | |
srdjan 2012/07/11 17:22:32: Please describe what are def and use. intptr_t som
| 74 intptr_t vreg, | |
| 75 Location* loc, | |
| 76 bool use_at_end); | |
| 77 void Define(Instruction* instr, intptr_t def, intptr_t vreg, Location* loc); | |
| 78 | |
| 79 void AddToUnallocated(UseInterval* chain); | |
| 80 void BlockLocation(Location loc, intptr_t pos); | |
| 81 | |
| 82 bool AllocateFreeRegister(UseInterval* unallocated); | |
| 83 void AssignFreeRegister(UseInterval* unallocated, Register reg); | |
| 84 | |
| 85 void FinalizeInterval(UseInterval* interval, Location loc); | |
| 86 void AdvanceActiveIntervals(const intptr_t start); | |
| 87 | |
| 88 bool UnallocatedIsSorted(); | |
| 89 void AllocateCPURegisters(); | |
| 90 | |
| 91 // TODO(vegorov): this field is used only to call Bailout. remove when | |
srdjan 2012/07/11 17:22:32: s/remove/Remove/
| 92 // all bailouts are gone. | |
| 93 FlowGraphBuilder* builder_; | |
| 94 | |
| 95 const GrowableArray<BlockEntryInstr*>& block_order_; | |
| 96 const GrowableArray<BlockEntryInstr*>& postorder_; | |
| 97 | |
| 45 // Live-out sets for each block. They contain indices of SSA values | 98 // Live-out sets for each block. They contain indices of SSA values |
| 46 // that are live out from this block: that is values that were either | 99 // that are live out from this block: that is values that were either |
| 47 // defined in this block or live into it and that are used in some | 100 // defined in this block or live into it and that are used in some |
| 48 // successor block. | 101 // successor block. |
| 49 GrowableArray<BitVector*> live_out_; | 102 GrowableArray<BitVector*> live_out_; |
| 50 | 103 |
| 51 // Kill sets for each block. They contain indices of SSA values that | 104 // Kill sets for each block. They contain indices of SSA values that |
| 52 // are defined by this block. | 105 // are defined by this block. |
| 53 GrowableArray<BitVector*> kill_; | 106 GrowableArray<BitVector*> kill_; |
| 54 | 107 |
| 55 // Live-in sets for each block. They contain indices of SSA values | 108 // Live-in sets for each block. They contain indices of SSA values |
| 56 // that are used by this block or its successors. | 109 // that are used by this block or its successors. |
| 57 GrowableArray<BitVector*> live_in_; | 110 GrowableArray<BitVector*> live_in_; |
| 58 | 111 |
| 59 const GrowableArray<BlockEntryInstr*>& postorder_; | |
| 60 | |
| 61 // Number of virtual registers. Currently equal to the number of | 112 // Number of virtual registers. Currently equal to the number of |
| 62 // SSA values. | 113 // SSA values. |
| 63 const intptr_t vreg_count_; | 114 const intptr_t vreg_count_; |
| 64 | 115 |
| 116 // LiveRanges corresponding to SSA values. | |
| 117 GrowableArray<LiveRange*> live_ranges_; | |
| 118 | |
| 119 // Worklist for register allocator. Always maintained sorted according | |
| 120 // to ShouldBeAllocatedBefore predicate. | |
| 121 GrowableArray<UseInterval*> unallocated_; | |
| 122 | |
| 123 // Per register lists of allocated UseIntervals, linked through | |
| 124 // next_allocated field. Contains only those intervals that | |
| 125 // can be affected by future allocation decisions. Those intervals | |
| 126 // that end before the start of the current UseInterval are removed | |
| 127 // from this list and will not be affected. | |
| 128 UseInterval* cpu_regs_[kNumberOfCpuRegisters]; | |
| 129 | |
| 65 DISALLOW_COPY_AND_ASSIGN(FlowGraphAllocator); | 130 DISALLOW_COPY_AND_ASSIGN(FlowGraphAllocator); |
| 66 }; | 131 }; |
| 67 | 132 |
| 133 | |
| 134 // UsePosition represents a single use of an SSA value by some instruction. | |
| 135 // It points to a location slot which either tells register allocator | |
| 136 // where instruction expects the value (if slot contains a fixed location) or | |
| 137 // asks register allocator to allocate storage (register or spill slot) for | |
| 138 // this use with certain properties (if slot contains an unallocated location). | |
| 139 class UsePosition : public ZoneAllocated { | |
| 140 public: | |
| 141 static const intptr_t kFixedUse = 0x1; | |
| 142 static const intptr_t kSameAsFirstUse = 0x2; | |
| 143 static const intptr_t kInstructionTagMask = 0x3; | |
| 144 static const intptr_t kPositionShift = 2; | |
| 145 | |
| 146 static intptr_t FlagForUse(const Location& loc) { | |
| 147 if (loc.IsRegister()) return kFixedUse; | |
| 148 if (loc.IsUnallocated() && (loc.policy() == Location::kSameAsFirstInput)) { | |
| 149 return kSameAsFirstUse; | |
| 150 } | |
| 151 return kInstructionTagMask; | |
| 152 } | |
| 153 | |
| 154 // TODO(vegorov): we encode either position or instruction pointer | |
| 155 // into the pos_ field to generate moves when needed to resolve | |
| 156 // fixed or same-as-first constraints, but this looks ugly. | |
| 157 UsePosition(Instruction* instr, | |
| 158 intptr_t pos, | |
| 159 UsePosition* next, | |
| 160 Location* location_slot) | |
| 161 : pos_(pos << kPositionShift), | |
| 162 location_slot_(location_slot), | |
| 163 next_(next) { | |
| 164 if (instr != NULL) { | |
| 165 ASSERT(location_slot_ != NULL); | |
| 166 pos_ = reinterpret_cast<intptr_t>(instr) | FlagForUse(*location_slot_); | |
| 167 } | |
| 168 ASSERT(this->pos() == pos); | |
| 169 } | |
| 170 | |
| 171 // Tell the use that it should load the value from the given location. | |
| 172 // If location slot for the use is flexible (unallocated) it will be updated | |
| 173 // with the given location. Otherwise a move will be scheduled from the given | |
| 174 // location to the location already stored in the slot. | |
| 175 void AssignLocation(Location loc); | |
| 176 | |
| 177 Location* location_slot() const { return location_slot_; } | |
| 178 void set_location_slot(Location* location_slot) { | |
| 179 location_slot_ = location_slot; | |
| 180 } | |
| 181 | |
| 182 void set_next(UsePosition* next) { next_ = next; } | |
| 183 UsePosition* next() const { return next_; } | |
| 184 | |
| 185 intptr_t pos() const { | |
| 186 if ((pos_ & kInstructionTagMask) != 0) { | |
| 187 return instr()->lifetime_position(); | |
| 188 } | |
| 189 return pos_ >> kPositionShift; | |
| 190 } | |
| 191 | |
| 192 Instruction* instr() const { | |
| 193 ASSERT((pos_ & kInstructionTagMask) != 0); | |
| 194 return reinterpret_cast<Instruction*>(pos_ & ~kInstructionTagMask); | |
| 195 } | |
| 196 | |
| 197 bool HasHint() const { | |
| 198 return (pos_ & kInstructionTagMask) == kFixedUse; | |
| 199 } | |
| 200 | |
| 201 Location hint() const { | |
| 202 ASSERT(HasHint()); | |
| 203 ASSERT(location_slot()->IsRegister()); | |
| 204 return *location_slot_; | |
| 205 } | |
| 206 | |
| 207 private: | |
| 208 intptr_t pos_; | |
| 209 Location* location_slot_; | |
| 210 UsePosition* next_; | |
| 211 }; | |
| 212 | |
| 213 | |
| 214 // UseInterval represents a holeless half open interval of liveness for a given | |
| 215 // SSA value: [start, end) in terms of lifetime positions that | |
| 216 // NumberInstructions assigns to instructions. Register allocator has to keep | |
| 217 // a value live in the register or in a spill slot from the start position until | |
| 218 // the end position. The interval can cover zero or more uses. | |
| 219 // During the register allocation UseIntervals from different live ranges | |
| 220 // allocated to the same register will be chained together through | |
| 221 // next_allocated_ field. | |
| 222 // Note: currently all uses of the same SSA value are linked together into a | |
| 223 // single list (and not split between UseIntervals). | |
| 224 class UseInterval : public ZoneAllocated { | |
| 225 public: | |
| 226 UseInterval(intptr_t vreg, intptr_t start, intptr_t end, UseInterval* next) | |
| 227 : vreg_(vreg), | |
| 228 start_(start), | |
| 229 end_(end), | |
| 230 uses_((next == NULL) ? NULL : next->uses_), | |
| 231 next_(next), | |
| 232 next_allocated_(next) { } | |
| 233 | |
| 234 | |
| 235 void AddUse(Instruction* instr, intptr_t pos, Location* loc); | |
| 236 void Print(); | |
| 237 | |
| 238 intptr_t vreg() const { return vreg_; } | |
| 239 intptr_t start() const { return start_; } | |
| 240 intptr_t end() const { return end_; } | |
| 241 UsePosition* first_use() const { return uses_; } | |
| 242 UseInterval* next() const { return next_; } | |
| 243 | |
| 244 bool Contains(intptr_t pos) const { | |
| 245 return (start() <= pos) && (pos < end()); | |
| 246 } | |
| 247 | |
| 248 // Return the smallest position that is covered by both UseIntervals or | |
| 249 // kIllegalPosition if intervals do not intersect. | |
| 250 intptr_t Intersect(UseInterval* other); | |
| 251 | |
| 252 UseInterval* Split(intptr_t pos); | |
| 253 | |
| 254 void set_next_allocated(UseInterval* next_allocated) { | |
| 255 next_allocated_ = next_allocated; | |
| 256 } | |
| 257 UseInterval* next_allocated() const { return next_allocated_; } | |
| 258 | |
| 259 private: | |
| 260 friend class LiveRange; | |
| 261 const intptr_t vreg_; | |
| 262 | |
| 263 intptr_t start_; | |
| 264 intptr_t end_; | |
| 265 | |
| 266 UsePosition* uses_; | |
| 267 | |
| 268 UseInterval* next_; | |
| 269 UseInterval* next_allocated_; | |
| 270 }; | |
| 271 | |
| 272 | |
| 273 // LiveRange represents a sequence of UseIntervals for a given SSA value. | |
| 274 // TODO(vegorov): this class is actually redundant currently. | |
| 275 class LiveRange : public ZoneAllocated { | |
| 276 public: | |
| 277 explicit LiveRange(intptr_t vreg) : vreg_(vreg), head_(NULL) { } | |
| 278 | |
| 279 void DefineAt(Instruction* instr, intptr_t pos, Location* loc); | |
| 280 | |
| 281 void UseAt(Instruction* instr, | |
| 282 intptr_t def_pos, | |
| 283 intptr_t use_pos, | |
| 284 bool use_at_end, | |
| 285 Location* loc); | |
| 286 | |
| 287 void AddUseInterval(intptr_t start, intptr_t end); | |
| 288 | |
| 289 void Print(); | |
| 290 | |
| 291 UseInterval* head() const { return head_; } | |
| 292 | |
| 293 private: | |
| 294 const intptr_t vreg_; | |
| 295 UseInterval* head_; | |
| 296 }; | |
| 297 | |
| 298 | |
| 68 } // namespace dart | 299 } // namespace dart |
| 69 | 300 |
| 70 #endif // VM_FLOW_GRAPH_ALLOCATOR_H_ | 301 #endif // VM_FLOW_GRAPH_ALLOCATOR_H_ |
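The header above describes several mechanisms only in comments; the standalone sketches below illustrate them with simplified, hypothetical types rather than the VM's own classes, so names like `BlockInfo` and `FakeInstr` are assumptions for illustration only.

The liveness methods (`ComputeInitialSets`, `UpdateLiveOut`, `UpdateLiveIn`, `ComputeLiveInAndLiveOutSets`) describe a standard backward data-flow fix-point. A minimal sketch of that iteration, assuming plain `std::vector<bool>` bit sets in place of `BitVector` and a hypothetical `BlockInfo` in place of `BlockEntryInstr`:

```cpp
#include <cstddef>
#include <vector>

// Hypothetical simplified block; all bit vectors are assumed to be pre-sized
// to the number of virtual registers.
struct BlockInfo {
  std::vector<std::size_t> successors;  // indices into the block array
  std::vector<bool> use, kill;          // upward-exposed uses / definitions
  std::vector<bool> live_in, live_out;  // computed below
};

// Iterate until neither live-in nor live-out changes for any block:
//   live_out(B) = union of live_in(S) over all successors S of B
//   live_in(B)  = use(B) | (live_out(B) & ~kill(B))
void ComputeLivenessSketch(std::vector<BlockInfo>* blocks,
                           std::size_t vreg_count) {
  bool changed = true;
  while (changed) {
    changed = false;
    for (BlockInfo& b : *blocks) {
      for (std::size_t v = 0; v < vreg_count; ++v) {
        bool out = false;
        for (std::size_t s : b.successors) out = out || (*blocks)[s].live_in[v];
        const bool in = b.use[v] || (out && !b.kill[v]);
        if (out != b.live_out[v] || in != b.live_in[v]) changed = true;
        b.live_out[v] = out;
        b.live_in[v] = in;
      }
    }
  }
}
```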
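`NumberInstructions()` gives each instruction an even start position `2 * n` and an odd end position `2 * n + 1`. A sketch of that numbering scheme, with a hypothetical `FakeInstr` standing in for `Instruction` and a flat instruction list instead of the reverse-postorder block walk:

```cpp
#include <cstdint>
#include <vector>

// Hypothetical stand-in for Instruction; only lifetime positions are modeled.
struct FakeInstr {
  std::intptr_t start_pos = -1;  // 2 * n (even): where inputs are read.
  std::intptr_t end_pos = -1;    // 2 * n + 1 (odd): where outputs/temps start to live.
};

// Assign consecutive lifetime positions in code-generation order.  The real
// allocator walks blocks in reverse postorder; a flat list is used here for brevity.
void NumberInstructionsSketch(std::vector<FakeInstr>* code) {
  std::intptr_t n = 0;
  for (FakeInstr& instr : *code) {
    instr.start_pos = 2 * n;
    instr.end_pos = 2 * n + 1;
    ++n;
  }
}
```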
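`UsePosition` packs either a shifted lifetime position or a tagged `Instruction` pointer into its `pos_` field, as the constructor and the `pos()` accessor show. A self-contained sketch of that encoding, again with `FakeInstr` as an assumed stand-in for `Instruction`:

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical stand-in for Instruction.
struct FakeInstr {
  std::intptr_t lifetime_position;
};

constexpr std::intptr_t kInstructionTagMask = 0x3;  // low two bits hold the flag
constexpr std::intptr_t kPositionShift = 2;         // plain positions use the upper bits

// Plain lifetime position: shifted left so the tag bits stay zero.
std::intptr_t EncodePosition(std::intptr_t pos) { return pos << kPositionShift; }

// Instruction pointer: its low bits are zero thanks to alignment, so a
// non-zero flag can be folded into them.
std::intptr_t EncodeInstruction(FakeInstr* instr, std::intptr_t flag) {
  assert((reinterpret_cast<std::intptr_t>(instr) & kInstructionTagMask) == 0);
  assert((flag & kInstructionTagMask) != 0);
  return reinterpret_cast<std::intptr_t>(instr) | flag;
}

// Mirrors UsePosition::pos(): a tagged word defers to the instruction's
// lifetime position, an untagged word is decoded by shifting.
std::intptr_t DecodePosition(std::intptr_t word) {
  if ((word & kInstructionTagMask) != 0) {
    return reinterpret_cast<FakeInstr*>(word & ~kInstructionTagMask)->lifetime_position;
  }
  return word >> kPositionShift;
}
```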
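`UseInterval::Intersect()` is documented to return the smallest position covered by both half-open intervals, or `kIllegalPosition` when they do not overlap. A sketch under that contract, using a simplified `Interval` struct and an assumed sentinel value for `kIllegalPosition`:

```cpp
#include <algorithm>
#include <cstdint>

// Assumed sentinel; the real kIllegalPosition constant is defined elsewhere
// in the VM sources.
constexpr std::intptr_t kIllegalPosition = -1;

// Hypothetical simplified interval: half-open [start, end).
struct Interval {
  std::intptr_t start;
  std::intptr_t end;
};

// Smallest position covered by both intervals, or kIllegalPosition when the
// intervals do not overlap.
std::intptr_t IntersectSketch(const Interval& a, const Interval& b) {
  const std::intptr_t lo = std::max(a.start, b.start);
  const std::intptr_t hi = std::min(a.end, b.end);
  return (lo < hi) ? lo : kIllegalPosition;
}
```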