OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 17 matching lines...) Expand all Loading... |
28 #ifndef V8_LITHIUM_H_ | 28 #ifndef V8_LITHIUM_H_ |
29 #define V8_LITHIUM_H_ | 29 #define V8_LITHIUM_H_ |
30 | 30 |
31 #include "allocation.h" | 31 #include "allocation.h" |
32 #include "hydrogen.h" | 32 #include "hydrogen.h" |
33 #include "safepoint-table.h" | 33 #include "safepoint-table.h" |
34 | 34 |
35 namespace v8 { | 35 namespace v8 { |
36 namespace internal { | 36 namespace internal { |
37 | 37 |
// X-macro list of the concrete (allocated) LOperand kinds.  Each entry is
// V(SubclassNameSuffix, KindEnumValue); LOperand uses it below to generate
// the Is<Name>() predicates.  UNALLOCATED, ARGUMENT and INVALID are handled
// separately because they have no dedicated cached-operand subclass.
#define LITHIUM_OPERAND_LIST(V)               \
  V(ConstantOperand, CONSTANT_OPERAND)        \
  V(StackSlot, STACK_SLOT)                    \
  V(DoubleStackSlot, DOUBLE_STACK_SLOT)       \
  V(Register, REGISTER)                       \
  V(DoubleRegister, DOUBLE_REGISTER)
| 44 |
| 45 |
// Base class for all Lithium operands.  An operand packs a Kind tag and an
// index into one unsigned word: the low kKindFieldWidth bits hold the Kind,
// the remaining high bits hold the (signed, shifted) index.
class LOperand: public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER,
    ARGUMENT
  };

  // Default-constructed operands are INVALID with index 0.
  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  // Arithmetic shift recovers the index stored above the kind bits.
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
// Generate bool IsConstantOperand(), IsStackSlot(), ... from the list above,
// plus the three kinds that are not in LITHIUM_OPERAND_LIST.
#define LITHIUM_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
  LITHIUM_OPERAND_PREDICATE(Argument, ARGUMENT)
  LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  LITHIUM_OPERAND_PREDICATE(Ignored, INVALID)
#undef LITHIUM_OPERAND_PREDICATE
  // Equality is bitwise: same kind AND same index.
  bool Equals(LOperand* other) const { return value_ == other->value_; }

  void PrintTo(StringStream* stream);
  // Re-encode this operand in place as (kind, index).
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    ASSERT(this->index() == index);  // Verify the index survived the shift.
  }

  // Calls SetUpCache()/TearDownCache() for each subclass.
  static void SetUpCaches();
  static void TearDownCaches();

 protected:
  static const int kKindFieldWidth = 3;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  unsigned value_;
};
84 | 91 |
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
258 if (index < kNumCachedOperands) return &cache[index]; | 265 if (index < kNumCachedOperands) return &cache[index]; |
259 return new LConstantOperand(index); | 266 return new LConstantOperand(index); |
260 } | 267 } |
261 | 268 |
262 static LConstantOperand* cast(LOperand* op) { | 269 static LConstantOperand* cast(LOperand* op) { |
263 ASSERT(op->IsConstantOperand()); | 270 ASSERT(op->IsConstantOperand()); |
264 return reinterpret_cast<LConstantOperand*>(op); | 271 return reinterpret_cast<LConstantOperand*>(op); |
265 } | 272 } |
266 | 273 |
267 static void SetUpCache(); | 274 static void SetUpCache(); |
| 275 static void TearDownCache(); |
268 | 276 |
269 private: | 277 private: |
270 static const int kNumCachedOperands = 128; | 278 static const int kNumCachedOperands = 128; |
271 static LConstantOperand* cache; | 279 static LConstantOperand* cache; |
272 | 280 |
273 LConstantOperand() : LOperand() { } | 281 LConstantOperand() : LOperand() { } |
274 explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { } | 282 explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { } |
275 }; | 283 }; |
276 | 284 |
277 | 285 |
(...skipping 15 matching lines...) Expand all Loading... |
293 if (index < kNumCachedOperands) return &cache[index]; | 301 if (index < kNumCachedOperands) return &cache[index]; |
294 return new LStackSlot(index); | 302 return new LStackSlot(index); |
295 } | 303 } |
296 | 304 |
297 static LStackSlot* cast(LOperand* op) { | 305 static LStackSlot* cast(LOperand* op) { |
298 ASSERT(op->IsStackSlot()); | 306 ASSERT(op->IsStackSlot()); |
299 return reinterpret_cast<LStackSlot*>(op); | 307 return reinterpret_cast<LStackSlot*>(op); |
300 } | 308 } |
301 | 309 |
302 static void SetUpCache(); | 310 static void SetUpCache(); |
| 311 static void TearDownCache(); |
303 | 312 |
304 private: | 313 private: |
305 static const int kNumCachedOperands = 128; | 314 static const int kNumCachedOperands = 128; |
306 static LStackSlot* cache; | 315 static LStackSlot* cache; |
307 | 316 |
308 LStackSlot() : LOperand() { } | 317 LStackSlot() : LOperand() { } |
309 explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { } | 318 explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { } |
310 }; | 319 }; |
311 | 320 |
312 | 321 |
313 class LDoubleStackSlot: public LOperand { | 322 class LDoubleStackSlot: public LOperand { |
314 public: | 323 public: |
315 static LDoubleStackSlot* Create(int index) { | 324 static LDoubleStackSlot* Create(int index) { |
316 ASSERT(index >= 0); | 325 ASSERT(index >= 0); |
317 if (index < kNumCachedOperands) return &cache[index]; | 326 if (index < kNumCachedOperands) return &cache[index]; |
318 return new LDoubleStackSlot(index); | 327 return new LDoubleStackSlot(index); |
319 } | 328 } |
320 | 329 |
321 static LDoubleStackSlot* cast(LOperand* op) { | 330 static LDoubleStackSlot* cast(LOperand* op) { |
322 ASSERT(op->IsStackSlot()); | 331 ASSERT(op->IsStackSlot()); |
323 return reinterpret_cast<LDoubleStackSlot*>(op); | 332 return reinterpret_cast<LDoubleStackSlot*>(op); |
324 } | 333 } |
325 | 334 |
326 static void SetUpCache(); | 335 static void SetUpCache(); |
| 336 static void TearDownCache(); |
327 | 337 |
328 private: | 338 private: |
329 static const int kNumCachedOperands = 128; | 339 static const int kNumCachedOperands = 128; |
330 static LDoubleStackSlot* cache; | 340 static LDoubleStackSlot* cache; |
331 | 341 |
332 LDoubleStackSlot() : LOperand() { } | 342 LDoubleStackSlot() : LOperand() { } |
333 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { } | 343 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { } |
334 }; | 344 }; |
335 | 345 |
336 | 346 |
337 class LRegister: public LOperand { | 347 class LRegister: public LOperand { |
338 public: | 348 public: |
339 static LRegister* Create(int index) { | 349 static LRegister* Create(int index) { |
340 ASSERT(index >= 0); | 350 ASSERT(index >= 0); |
341 if (index < kNumCachedOperands) return &cache[index]; | 351 if (index < kNumCachedOperands) return &cache[index]; |
342 return new LRegister(index); | 352 return new LRegister(index); |
343 } | 353 } |
344 | 354 |
345 static LRegister* cast(LOperand* op) { | 355 static LRegister* cast(LOperand* op) { |
346 ASSERT(op->IsRegister()); | 356 ASSERT(op->IsRegister()); |
347 return reinterpret_cast<LRegister*>(op); | 357 return reinterpret_cast<LRegister*>(op); |
348 } | 358 } |
349 | 359 |
350 static void SetUpCache(); | 360 static void SetUpCache(); |
| 361 static void TearDownCache(); |
351 | 362 |
352 private: | 363 private: |
353 static const int kNumCachedOperands = 16; | 364 static const int kNumCachedOperands = 16; |
354 static LRegister* cache; | 365 static LRegister* cache; |
355 | 366 |
356 LRegister() : LOperand() { } | 367 LRegister() : LOperand() { } |
357 explicit LRegister(int index) : LOperand(REGISTER, index) { } | 368 explicit LRegister(int index) : LOperand(REGISTER, index) { } |
358 }; | 369 }; |
359 | 370 |
360 | 371 |
// Operand naming a fixed double-precision register.  Registers with small
// indices are served from a preallocated cache; others are heap-allocated.
class LDoubleRegister: public LOperand {
 public:
  static LDoubleRegister* Create(int index) {
    ASSERT(index >= 0);
    // Reuse the cached singleton for common small indices.
    if (index < kNumCachedOperands) return &cache[index];
    return new LDoubleRegister(index);
  }

  // Checked downcast: debug-asserts the operand really is a double register.
  static LDoubleRegister* cast(LOperand* op) {
    ASSERT(op->IsDoubleRegister());
    return reinterpret_cast<LDoubleRegister*>(op);
  }

  // Cache lifetime hooks, driven by LOperand::SetUpCaches()/TearDownCaches().
  static void SetUpCache();
  static void TearDownCache();

 private:
  static const int kNumCachedOperands = 16;
  static LDoubleRegister* cache;

  LDoubleRegister() : LOperand() { }
  explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
};
383 | 395 |
384 | 396 |
(...skipping 218 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
603 ShallowIterator current_iterator_; | 615 ShallowIterator current_iterator_; |
604 }; | 616 }; |
605 | 617 |
606 | 618 |
607 int ElementsKindToShiftSize(ElementsKind elements_kind); | 619 int ElementsKindToShiftSize(ElementsKind elements_kind); |
608 | 620 |
609 | 621 |
610 } } // namespace v8::internal | 622 } } // namespace v8::internal |
611 | 623 |
612 #endif // V8_LITHIUM_H_ | 624 #endif // V8_LITHIUM_H_ |
OLD | NEW |