Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 304 matching lines...) | |
| 315 ASSERT(op->IsRegister()); | 315 ASSERT(op->IsRegister()); |
| 316 return ToRegister(op->index()); | 316 return ToRegister(op->index()); |
| 317 } | 317 } |
| 318 | 318 |
| 319 | 319 |
| 320 Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) { | 320 Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) { |
| 321 if (op->IsRegister()) { | 321 if (op->IsRegister()) { |
| 322 return ToRegister(op->index()); | 322 return ToRegister(op->index()); |
| 323 } else if (op->IsConstantOperand()) { | 323 } else if (op->IsConstantOperand()) { |
| 324 LConstantOperand* const_op = LConstantOperand::cast(op); | 324 LConstantOperand* const_op = LConstantOperand::cast(op); |
| 325 Handle<Object> literal = chunk_->LookupLiteral(const_op); | 325 HConstant* constant = chunk_->LookupConstant(const_op); |
| | 326 Handle<Object> literal = constant->handle(); |
| 326 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 327 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 327 if (r.IsInteger32()) { | 328 if (r.IsInteger32()) { |
| 328 ASSERT(literal->IsNumber()); | 329 ASSERT(literal->IsNumber()); |
| 329 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); | 330 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); |
| 330 } else if (r.IsDouble()) { | 331 } else if (r.IsDouble()) { |
| 331 Abort("EmitLoadRegister: Unsupported double immediate."); | 332 Abort("EmitLoadRegister: Unsupported double immediate."); |
| 332 } else { | 333 } else { |
| 333 ASSERT(r.IsTagged()); | 334 ASSERT(r.IsTagged()); |
| 334 if (literal->IsSmi()) { | 335 if (literal->IsSmi()) { |
| 335 __ mov(scratch, Operand(literal)); | 336 __ mov(scratch, Operand(literal)); |
| (...skipping 17 matching lines...) | |
| 353 } | 354 } |
| 354 | 355 |
| 355 | 356 |
| 356 DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op, | 357 DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op, |
| 357 SwVfpRegister flt_scratch, | 358 SwVfpRegister flt_scratch, |
| 358 DoubleRegister dbl_scratch) { | 359 DoubleRegister dbl_scratch) { |
| 359 if (op->IsDoubleRegister()) { | 360 if (op->IsDoubleRegister()) { |
| 360 return ToDoubleRegister(op->index()); | 361 return ToDoubleRegister(op->index()); |
| 361 } else if (op->IsConstantOperand()) { | 362 } else if (op->IsConstantOperand()) { |
| 362 LConstantOperand* const_op = LConstantOperand::cast(op); | 363 LConstantOperand* const_op = LConstantOperand::cast(op); |
| 363 Handle<Object> literal = chunk_->LookupLiteral(const_op); | 364 HConstant* constant = chunk_->LookupConstant(const_op); |
| | 365 Handle<Object> literal = constant->handle(); |
| 364 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 366 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 365 if (r.IsInteger32()) { | 367 if (r.IsInteger32()) { |
| 366 ASSERT(literal->IsNumber()); | 368 ASSERT(literal->IsNumber()); |
| 367 __ mov(ip, Operand(static_cast<int32_t>(literal->Number()))); | 369 __ mov(ip, Operand(static_cast<int32_t>(literal->Number()))); |
| 368 __ vmov(flt_scratch, ip); | 370 __ vmov(flt_scratch, ip); |
| 369 __ vcvt_f64_s32(dbl_scratch, flt_scratch); | 371 __ vcvt_f64_s32(dbl_scratch, flt_scratch); |
| 370 return dbl_scratch; | 372 return dbl_scratch; |
| 371 } else if (r.IsDouble()) { | 373 } else if (r.IsDouble()) { |
| 372 Abort("unsupported double immediate"); | 374 Abort("unsupported double immediate"); |
| 373 } else if (r.IsTagged()) { | 375 } else if (r.IsTagged()) { |
| 374 Abort("unsupported tagged immediate"); | 376 Abort("unsupported tagged immediate"); |
| 375 } | 377 } |
| 376 } else if (op->IsStackSlot() || op->IsArgument()) { | 378 } else if (op->IsStackSlot() || op->IsArgument()) { |
| 377 // TODO(regis): Why is vldr not taking a MemOperand? | 379 // TODO(regis): Why is vldr not taking a MemOperand? |
| 378 // __ vldr(dbl_scratch, ToMemOperand(op)); | 380 // __ vldr(dbl_scratch, ToMemOperand(op)); |
| 379 MemOperand mem_op = ToMemOperand(op); | 381 MemOperand mem_op = ToMemOperand(op); |
| 380 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset()); | 382 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset()); |
| 381 return dbl_scratch; | 383 return dbl_scratch; |
| 382 } | 384 } |
| 383 UNREACHABLE(); | 385 UNREACHABLE(); |
| 384 return dbl_scratch; | 386 return dbl_scratch; |
| 385 } | 387 } |
| 386 | 388 |
| 387 | 389 |
| 388 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { | 390 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { |
| 389 Handle<Object> literal = chunk_->LookupLiteral(op); | 391 HConstant* constant = chunk_->LookupConstant(op); |
|
Michael Starzinger (2012/07/11 09:16:10):
We should keep the assertion that the representation is tagged.

sanjoy (2012/07/11 10:54:08):
Done.
| 390 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged()); | 392 return constant->handle(); |
| 391 return literal; | |
| 392 } | 393 } |
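
For reference, a minimal sketch of ToHandle with the assertion restored as requested in the comment above, combining the new LookupConstant call with the representation check from the old code; this is an illustration, not the text of any patchset shown here.

```cpp
// Sketch only: new-style constant lookup plus the retained
// tagged-representation assertion from the old code.
Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
  return constant->handle();
}
```
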
| 393 | 394 |
| 394 | 395 |
| 395 bool LCodeGen::IsInteger32(LConstantOperand* op) const { | 396 bool LCodeGen::IsInteger32(LConstantOperand* op) const { |
| 396 return chunk_->LookupLiteralRepresentation(op).IsInteger32(); | 397 return chunk_->LookupLiteralRepresentation(op).IsInteger32(); |
| 397 } | 398 } |
| 398 | 399 |
| 399 | 400 |
| 400 int LCodeGen::ToInteger32(LConstantOperand* op) const { | 401 int LCodeGen::ToInteger32(LConstantOperand* op) const { |
| 401 Handle<Object> value = chunk_->LookupLiteral(op); | 402 HConstant* constant = chunk_->LookupConstant(op); |
| 402 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); | 403 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); |
| 403 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) == | 404 ASSERT(constant->HasInteger32Value()); |
| 404 value->Number()); | 405 return constant->Integer32Value(); |
| 405 return static_cast<int32_t>(value->Number()); | |
| 406 } | 406 } |
| 407 | 407 |
| 408 | 408 |
| 409 double LCodeGen::ToDouble(LConstantOperand* op) const { | 409 double LCodeGen::ToDouble(LConstantOperand* op) const { |
| 410 Handle<Object> value = chunk_->LookupLiteral(op); | 410 HConstant* constant = chunk_->LookupConstant(op); |
| 411 return value->Number(); | 411 ASSERT(constant->HasDoubleValue()); |
| | 412 return constant->DoubleValue(); |
| 412 } | 413 } |
| 413 | 414 |
| 414 | 415 |
| 415 Operand LCodeGen::ToOperand(LOperand* op) { | 416 Operand LCodeGen::ToOperand(LOperand* op) { |
| 416 if (op->IsConstantOperand()) { | 417 if (op->IsConstantOperand()) { |
| 417 LConstantOperand* const_op = LConstantOperand::cast(op); | 418 LConstantOperand* const_op = LConstantOperand::cast(op); |
| 418 Handle<Object> literal = chunk_->LookupLiteral(const_op); | 419 HConstant* constant = chunk()->LookupConstant(const_op); |
| 419 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 420 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 420 if (r.IsInteger32()) { | 421 if (r.IsInteger32()) { |
| 421 ASSERT(literal->IsNumber()); | 422 ASSERT(constant->HasInteger32Value()); |
| 422 return Operand(static_cast<int32_t>(literal->Number())); | 423 return Operand(constant->Integer32Value()); |
| 423 } else if (r.IsDouble()) { | 424 } else if (r.IsDouble()) { |
| 424 Abort("ToOperand Unsupported double immediate."); | 425 Abort("ToOperand Unsupported double immediate."); |
| 425 } | 426 } |
| 426 ASSERT(r.IsTagged()); | 427 ASSERT(r.IsTagged()); |
| 427 return Operand(literal); | 428 return Operand(constant->handle()); |
| 428 } else if (op->IsRegister()) { | 429 } else if (op->IsRegister()) { |
| 429 return Operand(ToRegister(op)); | 430 return Operand(ToRegister(op)); |
| 430 } else if (op->IsDoubleRegister()) { | 431 } else if (op->IsDoubleRegister()) { |
| 431 Abort("ToOperand IsDoubleRegister unimplemented"); | 432 Abort("ToOperand IsDoubleRegister unimplemented"); |
| 432 return Operand(0); | 433 return Operand(0); |
| 433 } | 434 } |
| 434 // Stack slots not implemented, use ToMemOperand instead. | 435 // Stack slots not implemented, use ToMemOperand instead. |
| 435 UNREACHABLE(); | 436 UNREACHABLE(); |
| 436 return Operand(0); | 437 return Operand(0); |
| 437 } | 438 } |
| (...skipping 108 matching lines...) | |
| 546 Register reg = ToRegister(op); | 547 Register reg = ToRegister(op); |
| 547 if (is_tagged) { | 548 if (is_tagged) { |
| 548 translation->StoreRegister(reg); | 549 translation->StoreRegister(reg); |
| 549 } else { | 550 } else { |
| 550 translation->StoreInt32Register(reg); | 551 translation->StoreInt32Register(reg); |
| 551 } | 552 } |
| 552 } else if (op->IsDoubleRegister()) { | 553 } else if (op->IsDoubleRegister()) { |
| 553 DoubleRegister reg = ToDoubleRegister(op); | 554 DoubleRegister reg = ToDoubleRegister(op); |
| 554 translation->StoreDoubleRegister(reg); | 555 translation->StoreDoubleRegister(reg); |
| 555 } else if (op->IsConstantOperand()) { | 556 } else if (op->IsConstantOperand()) { |
| 556 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op)); | 557 HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op)); |
| 557 int src_index = DefineDeoptimizationLiteral(literal); | 558 int src_index = DefineDeoptimizationLiteral(constant->handle()); |
| 558 translation->StoreLiteral(src_index); | 559 translation->StoreLiteral(src_index); |
| 559 } else { | 560 } else { |
| 560 UNREACHABLE(); | 561 UNREACHABLE(); |
| 561 } | 562 } |
| 562 } | 563 } |
| 563 | 564 |
| 564 | 565 |
| 565 void LCodeGen::CallCode(Handle<Code> code, | 566 void LCodeGen::CallCode(Handle<Code> code, |
| 566 RelocInfo::Mode mode, | 567 RelocInfo::Mode mode, |
| 567 LInstruction* instr) { | 568 LInstruction* instr) { |
| (...skipping 4830 matching lines...) | |
| 5398 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); | 5399 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 5399 __ ldr(result, FieldMemOperand(scratch, | 5400 __ ldr(result, FieldMemOperand(scratch, |
| 5400 FixedArray::kHeaderSize - kPointerSize)); | 5401 FixedArray::kHeaderSize - kPointerSize)); |
| 5401 __ bind(&done); | 5402 __ bind(&done); |
| 5402 } | 5403 } |
| 5403 | 5404 |
| 5404 | 5405 |
| 5405 #undef __ | 5406 #undef __ |
| 5406 | 5407 |
| 5407 } } // namespace v8::internal | 5408 } } // namespace v8::internal |