OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 304 matching lines...)
315 ASSERT(op->IsRegister()); | 315 ASSERT(op->IsRegister()); |
316 return ToRegister(op->index()); | 316 return ToRegister(op->index()); |
317 } | 317 } |
318 | 318 |
319 | 319 |
320 Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) { | 320 Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) { |
321 if (op->IsRegister()) { | 321 if (op->IsRegister()) { |
322 return ToRegister(op->index()); | 322 return ToRegister(op->index()); |
323 } else if (op->IsConstantOperand()) { | 323 } else if (op->IsConstantOperand()) { |
324 LConstantOperand* const_op = LConstantOperand::cast(op); | 324 LConstantOperand* const_op = LConstantOperand::cast(op); |
325 Handle<Object> literal = chunk_->LookupLiteral(const_op); | 325 HConstant* constant = chunk_->LookupConstant(const_op); |
| 326 Handle<Object> literal = constant->handle(); |
326 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 327 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
327 if (r.IsInteger32()) { | 328 if (r.IsInteger32()) { |
328 ASSERT(literal->IsNumber()); | 329 ASSERT(literal->IsNumber()); |
329 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); | 330 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); |
330 } else if (r.IsDouble()) { | 331 } else if (r.IsDouble()) { |
331 Abort("EmitLoadRegister: Unsupported double immediate."); | 332 Abort("EmitLoadRegister: Unsupported double immediate."); |
332 } else { | 333 } else { |
333 ASSERT(r.IsTagged()); | 334 ASSERT(r.IsTagged()); |
334 if (literal->IsSmi()) { | 335 if (literal->IsSmi()) { |
335 __ mov(scratch, Operand(literal)); | 336 __ mov(scratch, Operand(literal)); |
(...skipping 17 matching lines...)
353 } | 354 } |
354 | 355 |
355 | 356 |
356 DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op, | 357 DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op, |
357 SwVfpRegister flt_scratch, | 358 SwVfpRegister flt_scratch, |
358 DoubleRegister dbl_scratch) { | 359 DoubleRegister dbl_scratch) { |
359 if (op->IsDoubleRegister()) { | 360 if (op->IsDoubleRegister()) { |
360 return ToDoubleRegister(op->index()); | 361 return ToDoubleRegister(op->index()); |
361 } else if (op->IsConstantOperand()) { | 362 } else if (op->IsConstantOperand()) { |
362 LConstantOperand* const_op = LConstantOperand::cast(op); | 363 LConstantOperand* const_op = LConstantOperand::cast(op); |
363 Handle<Object> literal = chunk_->LookupLiteral(const_op); | 364 HConstant* constant = chunk_->LookupConstant(const_op); |
| 365 Handle<Object> literal = constant->handle(); |
364 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 366 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
365 if (r.IsInteger32()) { | 367 if (r.IsInteger32()) { |
366 ASSERT(literal->IsNumber()); | 368 ASSERT(literal->IsNumber()); |
367 __ mov(ip, Operand(static_cast<int32_t>(literal->Number()))); | 369 __ mov(ip, Operand(static_cast<int32_t>(literal->Number()))); |
368 __ vmov(flt_scratch, ip); | 370 __ vmov(flt_scratch, ip); |
369 __ vcvt_f64_s32(dbl_scratch, flt_scratch); | 371 __ vcvt_f64_s32(dbl_scratch, flt_scratch); |
370 return dbl_scratch; | 372 return dbl_scratch; |
371 } else if (r.IsDouble()) { | 373 } else if (r.IsDouble()) { |
372 Abort("unsupported double immediate"); | 374 Abort("unsupported double immediate"); |
373 } else if (r.IsTagged()) { | 375 } else if (r.IsTagged()) { |
374 Abort("unsupported tagged immediate"); | 376 Abort("unsupported tagged immediate"); |
375 } | 377 } |
376 } else if (op->IsStackSlot() || op->IsArgument()) { | 378 } else if (op->IsStackSlot() || op->IsArgument()) { |
377 // TODO(regis): Why is vldr not taking a MemOperand? | 379 // TODO(regis): Why is vldr not taking a MemOperand? |
378 // __ vldr(dbl_scratch, ToMemOperand(op)); | 380 // __ vldr(dbl_scratch, ToMemOperand(op)); |
379 MemOperand mem_op = ToMemOperand(op); | 381 MemOperand mem_op = ToMemOperand(op); |
380 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset()); | 382 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset()); |
381 return dbl_scratch; | 383 return dbl_scratch; |
382 } | 384 } |
383 UNREACHABLE(); | 385 UNREACHABLE(); |
384 return dbl_scratch; | 386 return dbl_scratch; |
385 } | 387 } |
386 | 388 |
387 | 389 |
388 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { | 390 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { |
389 Handle<Object> literal = chunk_->LookupLiteral(op); | 391 HConstant* constant = chunk_->LookupConstant(op); |
390 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged()); | 392 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged()); |
391 return literal; | 393 return constant->handle(); |
392 } | 394 } |
393 | 395 |
394 | 396 |
395 bool LCodeGen::IsInteger32(LConstantOperand* op) const { | 397 bool LCodeGen::IsInteger32(LConstantOperand* op) const { |
396 return chunk_->LookupLiteralRepresentation(op).IsInteger32(); | 398 return chunk_->LookupLiteralRepresentation(op).IsInteger32(); |
397 } | 399 } |
398 | 400 |
399 | 401 |
400 int LCodeGen::ToInteger32(LConstantOperand* op) const { | 402 int LCodeGen::ToInteger32(LConstantOperand* op) const { |
401 Handle<Object> value = chunk_->LookupLiteral(op); | 403 HConstant* constant = chunk_->LookupConstant(op); |
402 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); | 404 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); |
403 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) == | 405 ASSERT(constant->HasInteger32Value()); |
404 value->Number()); | 406 return constant->Integer32Value(); |
405 return static_cast<int32_t>(value->Number()); | |
406 } | 407 } |
407 | 408 |
408 | 409 |
409 double LCodeGen::ToDouble(LConstantOperand* op) const { | 410 double LCodeGen::ToDouble(LConstantOperand* op) const { |
410 Handle<Object> value = chunk_->LookupLiteral(op); | 411 HConstant* constant = chunk_->LookupConstant(op); |
411 return value->Number(); | 412 ASSERT(constant->HasDoubleValue()); |
| 413 return constant->DoubleValue(); |
412 } | 414 } |
413 | 415 |
414 | 416 |
415 Operand LCodeGen::ToOperand(LOperand* op) { | 417 Operand LCodeGen::ToOperand(LOperand* op) { |
416 if (op->IsConstantOperand()) { | 418 if (op->IsConstantOperand()) { |
417 LConstantOperand* const_op = LConstantOperand::cast(op); | 419 LConstantOperand* const_op = LConstantOperand::cast(op); |
418 Handle<Object> literal = chunk_->LookupLiteral(const_op); | 420 HConstant* constant = chunk()->LookupConstant(const_op); |
419 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 421 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
420 if (r.IsInteger32()) { | 422 if (r.IsInteger32()) { |
421 ASSERT(literal->IsNumber()); | 423 ASSERT(constant->HasInteger32Value()); |
422 return Operand(static_cast<int32_t>(literal->Number())); | 424 return Operand(constant->Integer32Value()); |
423 } else if (r.IsDouble()) { | 425 } else if (r.IsDouble()) { |
424 Abort("ToOperand Unsupported double immediate."); | 426 Abort("ToOperand Unsupported double immediate."); |
425 } | 427 } |
426 ASSERT(r.IsTagged()); | 428 ASSERT(r.IsTagged()); |
427 return Operand(literal); | 429 return Operand(constant->handle()); |
428 } else if (op->IsRegister()) { | 430 } else if (op->IsRegister()) { |
429 return Operand(ToRegister(op)); | 431 return Operand(ToRegister(op)); |
430 } else if (op->IsDoubleRegister()) { | 432 } else if (op->IsDoubleRegister()) { |
431 Abort("ToOperand IsDoubleRegister unimplemented"); | 433 Abort("ToOperand IsDoubleRegister unimplemented"); |
432 return Operand(0); | 434 return Operand(0); |
433 } | 435 } |
434 // Stack slots not implemented, use ToMemOperand instead. | 436 // Stack slots not implemented, use ToMemOperand instead. |
435 UNREACHABLE(); | 437 UNREACHABLE(); |
436 return Operand(0); | 438 return Operand(0); |
437 } | 439 } |
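Note on the hunks above: they all apply the same refactoring. The chunk no longer resolves a constant operand straight to a Handle<Object>; callers now fetch the HConstant and read either its handle or its cached typed value. A minimal before/after sketch of the pattern, using only identifiers that already appear in this file (the surrounding LCodeGen/LChunk declarations are assumed, so this is a shape sketch rather than standalone code):

    // Before: resolve the operand to a heap object and re-derive the number.
    Handle<Object> value = chunk_->LookupLiteral(op);
    int32_t x = static_cast<int32_t>(value->Number());

    // After: go through the HConstant, which carries typed accessors directly.
    HConstant* constant = chunk_->LookupConstant(op);
    ASSERT(constant->HasInteger32Value());
    int32_t x = constant->Integer32Value();
    Handle<Object> literal = constant->handle();  // Still available when a handle is needed.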
(...skipping 108 matching lines...)
546 Register reg = ToRegister(op); | 548 Register reg = ToRegister(op); |
547 if (is_tagged) { | 549 if (is_tagged) { |
548 translation->StoreRegister(reg); | 550 translation->StoreRegister(reg); |
549 } else { | 551 } else { |
550 translation->StoreInt32Register(reg); | 552 translation->StoreInt32Register(reg); |
551 } | 553 } |
552 } else if (op->IsDoubleRegister()) { | 554 } else if (op->IsDoubleRegister()) { |
553 DoubleRegister reg = ToDoubleRegister(op); | 555 DoubleRegister reg = ToDoubleRegister(op); |
554 translation->StoreDoubleRegister(reg); | 556 translation->StoreDoubleRegister(reg); |
555 } else if (op->IsConstantOperand()) { | 557 } else if (op->IsConstantOperand()) { |
556 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op)); | 558 HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op)); |
557 int src_index = DefineDeoptimizationLiteral(literal); | 559 int src_index = DefineDeoptimizationLiteral(constant->handle()); |
558 translation->StoreLiteral(src_index); | 560 translation->StoreLiteral(src_index); |
559 } else { | 561 } else { |
560 UNREACHABLE(); | 562 UNREACHABLE(); |
561 } | 563 } |
562 } | 564 } |
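Note on the constant-operand branch above: the translation does not embed the value itself; it stores the index returned by DefineDeoptimizationLiteral(constant->handle()). That helper is not part of this hunk, so the following is only an assumed sketch of its conventional dedup-and-append behavior, not a quote of the actual implementation:

    // Assumed sketch: return the index of an existing identical literal,
    // otherwise append the handle and return its freshly assigned index.
    int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
      int result = deoptimization_literals_.length();
      for (int i = 0; i < deoptimization_literals_.length(); ++i) {
        if (deoptimization_literals_[i].is_identical_to(literal)) return i;
      }
      deoptimization_literals_.Add(literal);
      return result;
    }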
563 | 565 |
564 | 566 |
565 void LCodeGen::CallCode(Handle<Code> code, | 567 void LCodeGen::CallCode(Handle<Code> code, |
566 RelocInfo::Mode mode, | 568 RelocInfo::Mode mode, |
567 LInstruction* instr) { | 569 LInstruction* instr) { |
(...skipping 4831 matching lines...)
5399 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); | 5401 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); |
5400 __ ldr(result, FieldMemOperand(scratch, | 5402 __ ldr(result, FieldMemOperand(scratch, |
5401 FixedArray::kHeaderSize - kPointerSize)); | 5403 FixedArray::kHeaderSize - kPointerSize)); |
5402 __ bind(&done); | 5404 __ bind(&done); |
5403 } | 5405 } |
5404 | 5406 |
5405 | 5407 |
5406 #undef __ | 5408 #undef __ |
5407 | 5409 |
5408 } } // namespace v8::internal | 5410 } } // namespace v8::internal |