Chromium Code Reviews

Side by Side Diff: runtime/vm/flow_graph_allocator.h

Issue 10800037: New linear scan allocator. (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: refactored liveness computation Created 8 years, 5 months ago
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #ifndef VM_FLOW_GRAPH_ALLOCATOR_H_ 5 #ifndef VM_FLOW_GRAPH_ALLOCATOR_H_
6 #define VM_FLOW_GRAPH_ALLOCATOR_H_ 6 #define VM_FLOW_GRAPH_ALLOCATOR_H_
7 7
8 #include "vm/growable_array.h" 8 #include "vm/growable_array.h"
9 #include "vm/intermediate_language.h" 9 #include "vm/intermediate_language.h"
10 10
11 namespace dart { 11 namespace dart {
12 12
13 class AllocationFinger;
13 class FlowGraphBuilder; 14 class FlowGraphBuilder;
14 class LiveRange; 15 class LiveRange;
15 class UseInterval; 16 class UseInterval;
17 class UsePosition;
16 18
17 class FlowGraphAllocator : public ValueObject { 19 class FlowGraphAllocator : public ValueObject {
18 public: 20 public:
19 FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& block_order, 21 FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& block_order,
20 FlowGraphBuilder* builder); 22 FlowGraphBuilder* builder);
21 23
22 void AllocateRegisters(); 24 void AllocateRegisters();
23 25
24 // Build live-in and live-out sets for each block. 26 // Build live-in and live-out sets for each block.
25 void AnalyzeLiveness(); 27 void AnalyzeLiveness();
(...skipping 15 matching lines...)
41 43
42 // Perform fix-point iteration updating live-out and live-in sets 44 // Perform fix-point iteration updating live-out and live-in sets
43 // for blocks until they stop changing. 45 // for blocks until they stop changing.
44 void ComputeLiveInAndLiveOutSets(); 46 void ComputeLiveInAndLiveOutSets();
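
The fix-point above is the textbook backward liveness iteration over the equations live_out(B) = union of live_in(S) for each successor S, and live_in(B) = use(B) + (live_out(B) - kill(B)). A minimal sketch of that loop, assuming live_in_ is pre-seeded with the upward-exposed uses and assuming BitVector helpers named AddAll/Contains/Add (illustrative, not necessarily this CL's exact API):

void FlowGraphAllocator::ComputeLiveInAndLiveOutSets() {
  const intptr_t block_count = postorder_.length();
  bool changed;
  do {
    changed = false;
    for (intptr_t i = 0; i < block_count; i++) {
      BlockEntryInstr* block = postorder_[i];
      BitVector* live_out = live_out_[i];
      // live_out(B) is the union of live_in(S) over all successors S.
      for (intptr_t j = 0; j < block->SuccessorCount(); j++) {
        const intptr_t succ = block->SuccessorAt(j)->postorder_number();
        if (live_out->AddAll(live_in_[succ])) changed = true;
      }
      // live_in(B) = use(B) + (live_out(B) - kill(B)).
      BitVector* live_in = live_in_[i];
      for (BitVector::Iterator it(live_out); !it.Done(); it.Advance()) {
        const intptr_t v = it.Current();
        if (!kill_[i]->Contains(v) && !live_in->Contains(v)) {
          live_in->Add(v);
          changed = true;
        }
      }
    }
  } while (changed);  // Iterate until the sets stabilize.
}
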
45 47
46 // Print results of liveness analysis. 48 // Print results of liveness analysis.
47 void DumpLiveness(); 49 void DumpLiveness();
48 50
49 // Visit blocks in the code generation order (reverse post order) and 51 // Visit blocks in the code generation order (reverse post order) and
50 // linearly assign consecutive lifetime positions to every instruction. 52 // linearly assign consecutive lifetime positions to every instruction.
51 // Each instruction gets two positions: 53 // We assign positions as follows:
52 // 54 //
53 // 2 * n - even one corresponding to instruction's start 55 // 2 * n - even position corresponding to an implicit parallel move
56 // preceding the instruction;
54 // 57 //
55 // 2 * n + 1 - odd one corresponding to instruction's end 58 // 2 * n + 1 - odd position corresponding to the instruction itself;
56 // 59 //
57 // Having two positions allows us to capture non-trivial register 60 // Having positions corresponding to parallel moves between every two
58 // constraints in use intervals: for example we can declare that 61 // instructions allows us to capture non-trivial shapes of use intervals.
59 // an input value is only used at the start of the instruction and 62 // For specific examples see comments inside ProcessOneInstruction.
60 // this might allow register allocator to allocate both this input
61 // and output (or temp) to the same register if this is the last
62 // use of the value.
63 // Additionally creates parallel moves at the joins' predecessors 63 // Additionally creates parallel moves at the joins' predecessors
64 // that will be used for phi resolution. 64 // that will be used for phi resolution.
65 void NumberInstructions(); 65 void NumberInstructions();
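
Concretely, the instruction with linear index n lands at odd position 2 * n + 1 and the gap holding its preceding parallel move at even position 2 * n, so parity alone distinguishes gaps from instructions. A small illustration (not part of the CL):

// For a block with three instructions A, B, C the positions are:
//   0: gap/move  1: A  2: gap/move  3: B  4: gap/move  5: C
static bool IsGapPosition(intptr_t pos) { return (pos & 1) == 0; }
static intptr_t InstructionIndexAt(intptr_t pos) { return pos / 2; }
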
66 Instruction* InstructionAt(intptr_t pos);
srdjan 2012/07/22 15:09:24 const
Vyacheslav Egorov (Google) 2012/07/24 12:26:42 Done.
67 bool IsBlockEntry(intptr_t pos);
srdjan 2012/07/22 15:09:24 const
Vyacheslav Egorov (Google) 2012/07/24 12:26:42 Done.
66 68
67 LiveRange* GetLiveRange(intptr_t vreg); 69 LiveRange* GetLiveRange(intptr_t vreg);
70
68 void BuildLiveRanges(); 71 void BuildLiveRanges();
72 Instruction* ConnectOutgoingPhiMoves(BlockEntryInstr* block);
73 void ProcessOneInstruction(BlockEntryInstr* block, Instruction* instr);
74 void ConnectIncommingPhiMoves(BlockEntryInstr* block);
75
76 MoveOperands* AddMoveAt(intptr_t pos, Location to, Location from);
77
69 void PrintLiveRanges(); 78 void PrintLiveRanges();
70 79
71 // Register use of the given virtual register at lifetime position use_pos.
72 // If definition position is unknown then start of the block containing
73 // use_pos will be passed.
74 void UseValue(Instruction* instr,
75 intptr_t def_pos, // Lifetime position for the definition.
76 intptr_t use_pos, // Lifetime position for the use.
77 intptr_t vreg,
78 Location* loc,
79 bool use_at_end);
80
81 // Register definition of the given virtual register at lifetime position 80 // Register definition of the given virtual register at lifetime position
82 // def_pos. Existing use interval will be shortened to start at def_pos. 81 // def_pos. Existing use interval will be shortened to start at def_pos.
83 void Define(Instruction* instr, 82 void Define(intptr_t def_pos,
84 intptr_t def_pos,
85 intptr_t vreg, 83 intptr_t vreg,
86 Location* loc); 84 Location* loc);
87 85
88 void AddToUnallocated(UseInterval* chain); 86 void AddToUnallocated(LiveRange* range);
89 void BlockLocation(Location loc, intptr_t pos); 87 void BlockLocation(Location loc, intptr_t from, intptr_t to);
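
The new BlockLocation signature takes an explicit [from, to) interval, which makes it natural to reserve fixed locations around clobbering instructions. A hedged usage sketch, where call_pos is a hypothetical lifetime position of a call that clobbers all CPU registers:

// Assumed usage: make every CPU register unavailable across a call.
for (intptr_t r = 0; r < kNumberOfCpuRegisters; r++) {
  BlockLocation(Location::RegisterLocation(static_cast<Register>(r)),
                call_pos,        // hypothetical position of the call
                call_pos + 1);
}
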
90 88
91 bool AllocateFreeRegister(UseInterval* unallocated); 89 bool AllocateFreeRegister(LiveRange* unallocated);
92 void AssignFreeRegister(UseInterval* unallocated, Register reg);
93 90
94 void FinalizeInterval(UseInterval* interval, Location loc); 91 void AllocateAnyRegister(LiveRange* unallocated);
92 bool UpdateFreeUntil(Register reg,
93 LiveRange* unallocated,
94 intptr_t* cur_free_until,
95 intptr_t* cur_blocked_at);
96
97 void AssignBlockedRegister(LiveRange* unallocated, Register reg);
98
99 intptr_t FirstIntersectionWithAllocated(Register reg,
100 LiveRange* unallocated);
101 bool EvictIntersection(LiveRange* allocated, LiveRange* unallocated);
102
103 void RemoveEvicted(Register reg, intptr_t first_evicted);
104
95 void AdvanceActiveIntervals(const intptr_t start); 105 void AdvanceActiveIntervals(const intptr_t start);
96 106
107 LiveRange* SplitBetween(LiveRange* range, intptr_t from, intptr_t to);
108
109 intptr_t AllocateSpillSlotFor(LiveRange* range);
110 void Spill(LiveRange* range);
111 void SpillAfter(LiveRange* range, intptr_t from);
112 void SpillBetween(LiveRange* range, intptr_t from, intptr_t to);
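
Given that spill_slots_ (declared below) is just a GrowableArray<intptr_t>, a plausible reading is that each element records the end position of the last range parked in that slot, so AllocateSpillSlotFor can reuse a slot once its occupant has ended. A sketch under that assumption:

// Assumed sketch: reuse the first spill slot whose occupant has ended.
intptr_t FlowGraphAllocator::AllocateSpillSlotFor(LiveRange* range) {
  for (intptr_t i = 0; i < spill_slots_.length(); i++) {
    if (spill_slots_[i] <= range->Start()) {
      spill_slots_[i] = range->End();  // Reoccupy the freed slot.
      return i;
    }
  }
  spill_slots_.Add(range->End());  // No reusable slot: allocate a new one.
  return spill_slots_.length() - 1;
}
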
113
97 bool UnallocatedIsSorted(); 114 bool UnallocatedIsSorted();
98 void AllocateCPURegisters(); 115 void AllocateCPURegisters();
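
AllocateCPURegisters is the linear scan driver. Given the sorted unallocated_ worklist, the expected control flow is: pop the next range, retire everything that ended before it, then try the free-register fast path and fall back to eviction or splitting. A sketch (assumed shape, with the next range kept at the back of the worklist):

void FlowGraphAllocator::AllocateCPURegisters() {
  while (!unallocated_.is_empty()) {
    LiveRange* range = unallocated_.Last();
    unallocated_.RemoveLast();
    const intptr_t start = range->Start();
    // Drop or advance active intervals that end before `start`.
    AdvanceActiveIntervals(start);
    if (!AllocateFreeRegister(range)) {
      // No register is free for the whole range: evict or split.
      AllocateAnyRegister(range);
    }
  }
}
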
99 116
117 // Tell the use that it should load the value from the given location.
118 // If location slot for the use is flexible (unallocated) it will be updated
119 // with the given location. Otherwise a move will be scheduled from the given
120 // location to the location already stored in the slot.
121 void ConvertUseTo(UsePosition* use, Location loc);
122 void ConvertAllUses(LiveRange* range);
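
Per the comment above, ConvertUseTo either pins a flexible slot or reconciles a fixed one with a move. One plausible shape of that logic (Location::Equals is assumed here for brevity):

void FlowGraphAllocator::ConvertUseTo(UsePosition* use, Location loc) {
  Location* slot = use->location_slot();
  if (slot->IsUnallocated()) {
    *slot = loc;  // Flexible slot: record the allocated location.
  } else if (!slot->Equals(loc)) {
    // Fixed slot: satisfy the constraint with a move at the use's position.
    AddMoveAt(use->pos(), *slot, loc);
  }
}
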
123
124
125 // Connect split siblings over non-linear control flow edges.
126 void ResolveControlFlow();
127 void ConnectSplitSiblings(LiveRange* range,
128 BlockEntryInstr* source_block,
129 BlockEntryInstr* target_block);
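
A range that was split may cross a control flow edge with different siblings (and hence possibly different locations) on the two sides; ResolveControlFlow repairs that with moves. A conceptual sketch for one edge, where end_of/start_of stand in for however block boundary positions are queried and SiblingCoveringPos is the lookup sketched near LiveRange below:

// Conceptual sketch for one edge source_block -> target_block.
LiveRange* src = SiblingCoveringPos(range, end_of(source_block));
LiveRange* dst = SiblingCoveringPos(range, start_of(target_block));
if ((src != dst) &&
    !src->assigned_location().Equals(dst->assigned_location())) {
  // The value changes location across the edge: insert a connecting move
  // (placed at the target's start here; a split critical edge is assumed).
  AddMoveAt(start_of(target_block),
            dst->assigned_location(),
            src->assigned_location());
}
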
130
131
100 // TODO(vegorov): this field is used only to call Bailout. Remove when 132 // TODO(vegorov): this field is used only to call Bailout. Remove when
101 // all bailouts are gone. 133 // all bailouts are gone.
102 FlowGraphBuilder* builder_; 134 FlowGraphBuilder* builder_;
103 135
104 const GrowableArray<BlockEntryInstr*>& block_order_; 136 const GrowableArray<BlockEntryInstr*>& block_order_;
105 const GrowableArray<BlockEntryInstr*>& postorder_; 137 const GrowableArray<BlockEntryInstr*>& postorder_;
106 138
139 GrowableArray<Instruction*> instructions_;
140
107 // Live-out sets for each block. They contain indices of SSA values 141 // Live-out sets for each block. They contain indices of SSA values
108 // that are live out from this block: that is values that were either 142 // that are live out from this block: that is values that were either
109 // defined in this block or live into it and that are used in some 143 // defined in this block or live into it and that are used in some
110 // successor block. 144 // successor block.
111 GrowableArray<BitVector*> live_out_; 145 GrowableArray<BitVector*> live_out_;
112 146
113 // Kill sets for each block. They contain indices of SSA values that 147 // Kill sets for each block. They contain indices of SSA values that
114 // are defined by this block. 148 // are defined by this block.
115 GrowableArray<BitVector*> kill_; 149 GrowableArray<BitVector*> kill_;
116 150
117 // Live-in sets for each block. They contain indices of SSA values 151 // Live-in sets for each block. They contain indices of SSA values
118 // that are used by this block or its successors. 152 // that are used by this block or its successors.
119 GrowableArray<BitVector*> live_in_; 153 GrowableArray<BitVector*> live_in_;
120 154
121 // Number of virtual registers. Currently equal to the number of 155 // Number of virtual registers. Currently equal to the number of
122 // SSA values. 156 // SSA values.
123 const intptr_t vreg_count_; 157 const intptr_t vreg_count_;
124 158
125 // LiveRanges corresponding to SSA values. 159 // LiveRanges corresponding to SSA values.
126 GrowableArray<LiveRange*> live_ranges_; 160 GrowableArray<LiveRange*> live_ranges_;
127 161
128 // Worklist for register allocator. Always maintained sorted according 162 // Worklist for register allocator. Always maintained sorted according
129 // to ShouldBeAllocatedBefore predicate. 163 // to ShouldBeAllocatedBefore predicate.
130 GrowableArray<UseInterval*> unallocated_; 164 GrowableArray<LiveRange*> unallocated_;
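
ShouldBeAllocatedBefore itself is defined in flow_graph_allocator.cc; for linear scan it is essentially "starts earlier". One plausible definition, with the worklist kept in descending start order so the next range to allocate can be popped off the back:

// Assumed predicate: the range that starts earlier is allocated first.
static bool ShouldBeAllocatedBefore(LiveRange* a, LiveRange* b) {
  return a->Start() <= b->Start();
}
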
131 165
132 // Per register lists of allocated UseIntervals, linked through 166 // Per register lists of allocated UseIntervals. Contains only those
133 // next_allocated field. Contains only those intervals that 167 // intervals that can be affected by future allocation decisions.
134 // can be affected by future allocation decisions. Those intervals 168 // Those intervals that end before the start of the current UseInterval are
135 // that end before the start of the current UseInterval are removed 169 // removed from this list and will not be affected.
136 // from this list and will not be affected. 170 GrowableArray<LiveRange*> cpu_regs_[kNumberOfCpuRegisters];
137 UseInterval* cpu_regs_[kNumberOfCpuRegisters]; 171
172 GrowableArray<intptr_t> spill_slots_;
173
174 bool blocked_cpu_regs_[kNumberOfCpuRegisters];
138 175
139 DISALLOW_COPY_AND_ASSIGN(FlowGraphAllocator); 176 DISALLOW_COPY_AND_ASSIGN(FlowGraphAllocator);
140 }; 177 };
141 178
142 179
143 // UsePosition represents a single use of an SSA value by some instruction. 180 // UsePosition represents a single use of an SSA value by some instruction.
144 // It points to a location slot which either tells the register allocator 181 // It points to a location slot which either tells the register allocator
145 // where the instruction expects the value (if slot contains a fixed location) or 182 // where the instruction expects the value (if slot contains a fixed location) or
146 // asks the register allocator to allocate storage (register or spill slot) for 183 // asks the register allocator to allocate storage (register or spill slot) for
147 // this use with certain properties (if slot contains an unallocated location). 184 // this use with certain properties (if slot contains an unallocated location).
148 class UsePosition : public ZoneAllocated { 185 class UsePosition : public ZoneAllocated {
149 public: 186 public:
150 enum UseFlag { 187 UsePosition(intptr_t pos,
151 kNoFlag = 0,
152 kFixedUse = 1,
153 kSameAsFirstUse = 2,
154 kOther = 3
155 };
156
157 static const intptr_t kUseFlagMask = 0x3;
158 static const intptr_t kPositionShift = 2;
159
160 static UseFlag FlagForUse(const Location& loc) {
161 if (loc.IsRegister()) return kFixedUse;
162 if (loc.IsUnallocated() && (loc.policy() == Location::kSameAsFirstInput)) {
163 return kSameAsFirstUse;
164 }
165 return kOther;
166 }
167
168 // TODO(vegorov): we encode either position or instruction pointer
169 // into the pos_ field to generate moves when needed to resolve
170 // fixed or same-as-first constraints, but this looks ugly.
171 UsePosition(Instruction* instr,
172 intptr_t pos,
173 UsePosition* next, 188 UsePosition* next,
174 Location* location_slot) 189 Location* location_slot)
175 : pos_(pos << kPositionShift), 190 : pos_(pos),
176 location_slot_(location_slot), 191 location_slot_(location_slot),
177 next_(next) { 192 next_(next) {
178 // Non-NULL instr is considered unlikely so we preinitialize pos_ field
179 // with an encoded position even if instr is not NULL.
180 if (instr != NULL) {
181 ASSERT(location_slot_ != NULL);
182 pos_ = reinterpret_cast<intptr_t>(instr) | FlagForUse(*location_slot_);
183 }
184 ASSERT(this->pos() == pos);
185 } 193 }
186 194
187 // Tell the use that it should load the value from the given location.
188 // If location slot for the use is flexible (unallocated) it will be updated
189 // with the given location. Otherwise a move will be scheduled from the given
190 // location to the location already stored in the slot.
191 void AssignLocation(Location loc);
192
193 Location* location_slot() const { return location_slot_; } 195 Location* location_slot() const { return location_slot_; }
194 void set_location_slot(Location* location_slot) { 196 void set_location_slot(Location* location_slot) {
195 location_slot_ = location_slot; 197 location_slot_ = location_slot;
196 } 198 }
197 199
198 void set_next(UsePosition* next) { next_ = next; } 200 void set_next(UsePosition* next) { next_ = next; }
199 UsePosition* next() const { return next_; } 201 UsePosition* next() const { return next_; }
200 202
201 intptr_t pos() const { 203 intptr_t pos() const { return pos_; }
202 if ((pos_ & kUseFlagMask) != kNoFlag) {
203 return instr()->lifetime_position();
204 }
205 return pos_ >> kPositionShift;
206 }
207
208 Instruction* instr() const {
209 ASSERT((pos_ & kUseFlagMask) != kNoFlag);
210 return reinterpret_cast<Instruction*>(pos_ & ~kUseFlagMask);
211 }
212 204
213 bool HasHint() const { 205 bool HasHint() const {
214 return (pos_ & kUseFlagMask) == kFixedUse; 206 return (location_slot() != NULL) && (location_slot()->IsRegister());
215 } 207 }
216 208
217 Location hint() const { 209 Location hint() const {
218 ASSERT(HasHint()); 210 ASSERT(HasHint());
219 ASSERT(location_slot()->IsRegister()); 211 return *location_slot();
220 return *location_slot_;
221 } 212 }
222 213
223 private: 214 private:
srdjan 2012/07/22 15:09:24 Add DISALLOW_COPY_AND_ASSIGN
Vyacheslav Egorov (Google) 2012/07/24 12:26:42 Done.
224 intptr_t pos_; 215 intptr_t pos_;
srdjan 2012/07/22 15:09:24 const ?
Vyacheslav Egorov (Google) 2012/07/24 12:26:42 Done.
225 Location* location_slot_; 216 Location* location_slot_;
226 UsePosition* next_; 217 UsePosition* next_;
227 }; 218 };
228 219
229 220
230 // UseInterval represents a holeless half open interval of liveness for a given 221 // UseInterval represents a holeless half open interval of liveness for a given
231 // SSA value: [start, end) in terms of lifetime positions that 222 // SSA value: [start, end) in terms of lifetime positions that
232 // NumberInstructions assigns to instructions. Register allocator has to keep 223 // NumberInstructions assigns to instructions. Register allocator has to keep
233 // a value live in the register or in a spill slot from the start position until 224 // a value live in the register or in a spill slot from the start position until
234 // the end position. The interval can cover zero or more uses. 225 // the end position. The interval can cover zero or more uses.
235 // During the register allocation UseIntervals from different live ranges
236 // allocated to the same register will be chained together through
237 // next_allocated_ field.
238 // Note: currently all uses of the same SSA value are linked together into a 226 // Note: currently all uses of the same SSA value are linked together into a
239 // single list (and not split between UseIntervals). 227 // single list (and not split between UseIntervals).
240 class UseInterval : public ZoneAllocated { 228 class UseInterval : public ZoneAllocated {
241 public: 229 public:
242 UseInterval(intptr_t vreg, intptr_t start, intptr_t end, UseInterval* next) 230 UseInterval(intptr_t start, intptr_t end, UseInterval* next)
243 : vreg_(vreg), 231 : start_(start),
244 start_(start),
245 end_(end), 232 end_(end),
246 uses_((next == NULL) ? NULL : next->uses_), 233 next_(next) { }
247 next_(next),
248 next_allocated_(next) { }
249 234
250
251 void AddUse(Instruction* instr, intptr_t pos, Location* loc);
252 void Print(); 235 void Print();
253 236
254 intptr_t vreg() const { return vreg_; }
255 intptr_t start() const { return start_; } 237 intptr_t start() const { return start_; }
256 intptr_t end() const { return end_; } 238 intptr_t end() const { return end_; }
257 UsePosition* first_use() const { return uses_; }
258 UseInterval* next() const { return next_; } 239 UseInterval* next() const { return next_; }
259 240
260 bool Contains(intptr_t pos) const { 241 bool Contains(intptr_t pos) const {
261 return (start() <= pos) && (pos < end()); 242 return (start() <= pos) && (pos < end());
262 } 243 }
263 244
264 // Return the smallest position that is covered by both UseIntervals or 245 // Return the smallest position that is covered by both UseIntervals or
265 // kIllegalPosition if intervals do not intersect. 246 // kIllegalPosition if intervals do not intersect.
266 intptr_t Intersect(UseInterval* other); 247 intptr_t Intersect(UseInterval* other);
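
For two half-open intervals [s1, e1) and [s2, e2) the intersection is non-empty exactly when max(s1, s2) < min(e1, e2), and the smallest common position is max(s1, s2). A direct sketch (Utils::Maximum/Minimum as in the VM's utils.h; kIllegalPosition assumed to be a negative sentinel):

intptr_t UseInterval::Intersect(UseInterval* other) {
  const intptr_t new_start = Utils::Maximum(start(), other->start());
  const intptr_t new_end = Utils::Minimum(end(), other->end());
  // Half-open intervals overlap only if some position precedes both ends.
  return (new_start < new_end) ? new_start : kIllegalPosition;
}
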
267 248
268 UseInterval* Split(intptr_t pos); 249 // UseInterval* Split(intptr_t pos);
srdjan 2012/07/22 15:09:24 Delete
Vyacheslav Egorov (Google) 2012/07/24 12:26:42 Done.
269
270 void set_next_allocated(UseInterval* next_allocated) {
271 next_allocated_ = next_allocated;
272 }
273 UseInterval* next_allocated() const { return next_allocated_; }
274 250
275 private: 251 private:
276 friend class LiveRange; 252 friend class LiveRange;
277 const intptr_t vreg_;
278 253
279 intptr_t start_; 254 intptr_t start_;
280 intptr_t end_; 255 intptr_t end_;
256 UseInterval* next_;
257 };
srdjan 2012/07/22 15:09:24 Add DISALLOW_COPY_AND_ASSIGN
Vyacheslav Egorov (Google) 2012/07/24 12:26:42 Done.
281 258
282 UsePosition* uses_;
283 259
284 UseInterval* next_; 260 class AllocationFinger : public ValueObject {
285 UseInterval* next_allocated_; 261 public:
262 AllocationFinger()
263 : first_pending_use_interval_(NULL),
264 first_register_use_(NULL),
265 first_register_beneficial_use_(NULL),
266 first_hinted_use_(NULL) {
267 }
268
269 void Initialize(LiveRange* range);
270 bool Advance(intptr_t start);
271
272 UseInterval* first_pending_use_interval() const {
273 return first_pending_use_interval_;
274 }
275
276 Location FirstHint();
277 UsePosition* FirstRegisterUse(intptr_t after_pos);
278 UsePosition* FirstRegisterBeneficialUse(intptr_t after_pos);
279
280 private:
281 UseInterval* first_pending_use_interval_;
282 UsePosition* first_register_use_;
283 UsePosition* first_register_beneficial_use_;
284 UsePosition* first_hinted_use_;
srdjan 2012/07/22 15:09:24 Add DISALLOW_COPY_AND_ASSIGN
Vyacheslav Egorov (Google) 2012/07/24 12:26:42 Done.
286 }; 285 };
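
AllocationFinger is a monotone cursor: because linear scan visits positions in increasing order, each range can remember the first still-pending interval and use, so queries like FirstRegisterUse never rescan from the head of the list. Advance then presumably just drops intervals that end at or before the new start:

// Assumed sketch; returns true once the whole range lies behind `start`.
bool AllocationFinger::Advance(intptr_t start) {
  UseInterval* a = first_pending_use_interval_;
  while ((a != NULL) && (a->end() <= start)) a = a->next();
  first_pending_use_interval_ = a;
  return a == NULL;
}
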
287 286
288 287
289 // LiveRange represents a sequence of UseIntervals for a given SSA value. 288 // LiveRange represents a sequence of UseIntervals for a given SSA value.
290 // TODO(vegorov): this class is actually redundant currently.
291 class LiveRange : public ZoneAllocated { 289 class LiveRange : public ZoneAllocated {
292 public: 290 public:
293 explicit LiveRange(intptr_t vreg) : vreg_(vreg), head_(NULL) { } 291 explicit LiveRange(intptr_t vreg)
292 : vreg_(vreg),
293 uses_(NULL),
294 first_use_interval_(NULL),
295 last_use_interval_(NULL),
296 next_sibling_(NULL) {
297 }
294 298
295 void DefineAt(Instruction* instr, intptr_t pos, Location* loc); 299 static LiveRange* MakeTemp(intptr_t pos, Location* location_slot);
296 300
297 void UseAt(Instruction* instr, 301 intptr_t vreg() const { return vreg_; }
298 intptr_t def_pos, 302 LiveRange* next_sibling() const { return next_sibling_; }
299 intptr_t use_pos, 303 UsePosition* first_use() const { return uses_; }
300 bool use_at_end, 304 UseInterval* first_use_interval() const { return first_use_interval_; }
301 Location* loc); 305 UseInterval* last_use_interval() const { return last_use_interval_; }
306 Location assigned_location() const { return assigned_location_; }
307 intptr_t Start() const { return first_use_interval()->start(); }
308 intptr_t End() const { return last_use_interval()->end(); }
302 309
310 AllocationFinger* finger() { return &finger_; }
311
312 void set_assigned_location(Location location) {
313 assigned_location_ = location;
314 }
315
316
317 void DefineAt(intptr_t pos);
318
319 void AddUse(intptr_t pos, Location* location_slot);
303 void AddUseInterval(intptr_t start, intptr_t end); 320 void AddUseInterval(intptr_t start, intptr_t end);
304 321
305 void Print(); 322 void Print();
306 323
307 UseInterval* head() const { return head_; } 324 void AssignLocation(UseInterval* use, Location loc);
325
326 LiveRange* SplitAt(intptr_t pos);
327
328 bool CanCover(intptr_t pos) const {
329 return (Start() <= pos) && (pos < End());
330 }
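
Siblings produced by SplitAt form a chain through next_sibling(), each owning a disjoint slice of the original range, and CanCover is the bounds test for one slice. The lookup used conceptually by the ResolveControlFlow sketch above (a hypothetical helper; note that CanCover checks bounds only and ignores lifetime holes):

// Hypothetical helper: find the split sibling whose bounds cover `pos`.
static LiveRange* SiblingCoveringPos(LiveRange* range, intptr_t pos) {
  for (LiveRange* r = range; r != NULL; r = r->next_sibling()) {
    if (r->CanCover(pos)) return r;
  }
  return NULL;  // `pos` falls outside every sibling.
}
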
308 331
309 private: 332 private:
333 LiveRange(intptr_t vreg,
334 UsePosition* uses,
335 UseInterval* first_use_interval,
336 UseInterval* last_use_interval,
337 LiveRange* next_sibling)
338 : vreg_(vreg),
339 uses_(uses),
340 first_use_interval_(first_use_interval),
341 last_use_interval_(last_use_interval),
342 next_sibling_(next_sibling) {
343 }
344
310 const intptr_t vreg_; 345 const intptr_t vreg_;
311 UseInterval* head_; 346 Location assigned_location_;
347
348 UsePosition* uses_;
349 UseInterval* first_use_interval_;
350 UseInterval* last_use_interval_;
351
352 LiveRange* next_sibling_;
353
354 AllocationFinger finger_;
srdjan 2012/07/22 15:09:24 Add DISALLOW_COPY_AND_ASSIGN
Vyacheslav Egorov (Google) 2012/07/24 12:26:42 Done.
312 }; 355 };
313 356
314 357
315 } // namespace dart 358 } // namespace dart
316 359
317 #endif // VM_FLOW_GRAPH_ALLOCATOR_H_ 360 #endif // VM_FLOW_GRAPH_ALLOCATOR_H_