Chromium Code Reviews
Unified Diff: src/arm/assembler-arm.cc

Issue 11191029: Use VLDR instead of VMOVs from GPR when a 64-bit double can't be encoded as a VMOV immediate. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years, 2 months ago
 // Copyright (c) 1994-2006 Sun Microsystems Inc.
 // All Rights Reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions
 // are met:
 //
 // - Redistributions of source code must retain the above copyright notice,
 // this list of conditions and the following disclaimer.
 //
(...skipping 32 matching lines...)
 
 namespace v8 {
 namespace internal {
 
 #ifdef DEBUG
 bool CpuFeatures::initialized_ = false;
 #endif
 unsigned CpuFeatures::supported_ = 0;
 unsigned CpuFeatures::found_by_runtime_probing_ = 0;
 
-
ulan 2012/10/18 13:36:53: This line seems to be accidentally deleted.
 // Get the CPU features enabled by the build. For cross compilation the
 // preprocessor symbols CAN_USE_ARMV7_INSTRUCTIONS and CAN_USE_VFP3_INSTRUCTIONS
 // can be defined to enable ARMv7 and VFPv3 instructions when building the
 // snapshot.
 static unsigned CpuFeaturesImpliedByCompiler() {
   unsigned answer = 0;
 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
   answer |= 1u << ARMv7;
 #endif  // CAN_USE_ARMV7_INSTRUCTIONS
 #ifdef CAN_USE_VFP3_INSTRUCTIONS
(...skipping 127 matching lines...)
   rm_ = no_reg;
   // Verify all Objects referred by code are NOT in new space.
   Object* obj = *handle;
   ASSERT(!HEAP->InNewSpace(obj));
   if (obj->IsHeapObject()) {
     imm32_ = reinterpret_cast<intptr_t>(handle.location());
     rmode_ = RelocInfo::EMBEDDED_OBJECT;
   } else {
     // no relocation needed
     imm32_ = reinterpret_cast<intptr_t>(obj);
-    rmode_ = RelocInfo::NONE;
+    rmode_ = RelocInfo::NONE32;
   }
 }
 
 
 Operand::Operand(Register rm, ShiftOp shift_op, int shift_imm) {
   ASSERT(is_uint5(shift_imm));
   ASSERT(shift_op != ROR || shift_imm != 0);  // use RRX if you mean it
   rm_ = rm;
   rs_ = no_reg;
   shift_op_ = shift_op;
(...skipping 54 matching lines...)
 // register r is not encoded.
 const Instr kPushRegPattern =
     al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16;
 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r))
 // register r is not encoded.
 const Instr kPopRegPattern =
     al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16;
 // mov lr, pc
 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12;
 // ldr rd, [pc, #offset]
-const Instr kLdrPCMask = kCondMask | 15 * B24 | 7 * B20 | 15 * B16;
-const Instr kLdrPCPattern = al | 5 * B24 | L | kRegister_pc_Code * B16;
+const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16;
+const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16;
+// vldr dd, [pc, #offset]
+const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
+const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8;
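
Editor's note (illustration, not part of the patch): the new mask/pattern pair keys on the fixed fields of a pc-relative VFP double load, i.e. bits 27-24 = 1101 (extension load/store group), bit 20 = L (load), Rn = 15 (pc), and bits 11-8 = 1011 (double precision), while leaving cond, the U add/subtract bit, Vd and imm8 free to vary. A minimal standalone C++ sketch, with the B8..B24 constants redefined locally, that checks a hand-assembled vldr against the pattern:

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t B8 = 1u << 8, B16 = 1u << 16, B20 = 1u << 20, B24 = 1u << 24;
  const uint32_t kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
  const uint32_t kVldrDPCPattern = 13 * B24 | B20 | 15 * B16 | 11 * B8;
  // Hand-assembled "vldr d0, [pc, #8]": cond = al (0xE), U = 1 (add),
  // Rn = 15 (pc), Vd = 0, imm8 = 2 (byte offset 8 scaled down by 4).
  const uint32_t instr = 0xEu << 28 | 0xDu << 24 | 1u << 23 | 1u << 20 |
                         15u << 16 | 0xBu << 8 | 2u;
  assert((instr & kVldrDPCMask) == kVldrDPCPattern);  // Classified as vldr.
  return 0;
}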
 // blxcc rm
 const Instr kBlxRegMask =
     15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4;
 const Instr kBlxRegPattern =
     B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX;
 const Instr kBlxIp = al | kBlxRegPattern | ip.code();
 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16;
 const Instr kMovMvnPattern = 0xd * B21;
 const Instr kMovMvnFlip = B22;
 const Instr kMovLeaveCCMask = 0xdff * B16;
(...skipping 55 matching lines...)
     buffer_ = static_cast<byte*>(buffer);
     buffer_size_ = buffer_size;
     own_buffer_ = false;
   }
 
   // Set up buffer pointers.
   ASSERT(buffer_ != NULL);
   pc_ = buffer_;
   reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
   num_pending_reloc_info_ = 0;
+  num_pending_64_bit_reloc_info_ = 0;
   next_buffer_check_ = 0;
   const_pool_blocked_nesting_ = 0;
   no_const_pool_before_ = 0;
   first_const_pool_use_ = -1;
   last_bound_pos_ = 0;
   ClearRecordedAstId();
 }
 
 
 Assembler::~Assembler() {
   ASSERT(const_pool_blocked_nesting_ == 0);
   if (own_buffer_) {
     if (isolate()->assembler_spare_buffer() == NULL &&
         buffer_size_ == kMinimalBufferSize) {
       isolate()->set_assembler_spare_buffer(buffer_);
     } else {
       DeleteArray(buffer_);
     }
   }
 }
 
 
 void Assembler::GetCode(CodeDesc* desc) {
   // Emit constant pool if necessary.
   CheckConstPool(true, false);
   ASSERT(num_pending_reloc_info_ == 0);
+  ASSERT(num_pending_64_bit_reloc_info_ == 0);
 
   // Set up code descriptor.
   desc->buffer = buffer_;
   desc->buffer_size = buffer_size_;
   desc->instr_size = pc_offset();
   desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
 }
 
 
 void Assembler::Align(int m) {
(...skipping 26 matching lines...)
   // with 4 to get the offset in bytes.
   return ((instr & kImm24Mask) << 8) >> 6;
 }
 
 
 bool Assembler::IsLdrRegisterImmediate(Instr instr) {
   return (instr & (B27 | B26 | B25 | B22 | B20)) == (B26 | B20);
 }
 
 
+bool Assembler::IsVldrDRegisterImmediate(Instr instr) {
+  return (instr & (15 * B24 | 3 * B20 | 15 * B8)) == (13 * B24 | B20 | 11 * B8);
+}
+
+
 int Assembler::GetLdrRegisterImmediateOffset(Instr instr) {
   ASSERT(IsLdrRegisterImmediate(instr));
   bool positive = (instr & B23) == B23;
   int offset = instr & kOff12Mask;  // Zero extended offset.
   return positive ? offset : -offset;
 }
 
 
+int Assembler::GetVldrDRegisterImmediateOffset(Instr instr) {
+  ASSERT(IsVldrDRegisterImmediate(instr));
+  bool positive = (instr & B23) == B23;
+  int offset = instr & kOff8Mask;  // Zero extended offset.
+  offset <<= 2;
+  return positive ? offset : -offset;
+}
+
+
 Instr Assembler::SetLdrRegisterImmediateOffset(Instr instr, int offset) {
   ASSERT(IsLdrRegisterImmediate(instr));
   bool positive = offset >= 0;
   if (!positive) offset = -offset;
   ASSERT(is_uint12(offset));
   // Set bit indicating whether the offset should be added.
   instr = (instr & ~B23) | (positive ? B23 : 0);
   // Set the actual offset.
   return (instr & ~kOff12Mask) | offset;
 }
 
+Instr Assembler::SetVldrDRegisterImmediateOffset(Instr instr, int offset) {
+  ASSERT(IsVldrDRegisterImmediate(instr));
+  ASSERT((offset & ~3) == offset);  // Must be 64-bit aligned.
+  bool positive = offset >= 0;
+  if (!positive) offset = -offset;
+  ASSERT(is_uint10(offset));
+  // Set bit indicating whether the offset should be added.
+  instr = (instr & ~B23) | (positive ? B23 : 0);
+  // Set the actual offset. Its bottom 2 bits are zero.
+  return (instr & ~kOff8Mask) | (offset >> 2);
+}
+
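
Editor's note (illustration, not part of the patch): the two accessors above round-trip vldr's 8-bit scaled immediate, with the sign carried by the U (add/subtract) bit. A standalone sketch of the same encode/decode logic, using raw shifts in place of V8's B23 and kOff8Mask constants:

#include <cassert>
#include <cstdint>

// Encode a byte offset into the imm8 field and the U (add/subtract) bit.
uint32_t SetVldrOffset(uint32_t instr, int offset) {
  assert((offset & 3) == 0);            // vldr offsets are multiples of 4.
  const bool positive = offset >= 0;
  if (!positive) offset = -offset;
  assert(offset < (1 << 10));           // is_uint10: imm8 * 4 < 1024.
  instr = (instr & ~(1u << 23)) | (positive ? (1u << 23) : 0u);
  return (instr & ~0xFFu) | static_cast<uint32_t>(offset >> 2);
}

// Decode: zero-extend imm8, scale by 4, apply the sign from the U bit.
int GetVldrOffset(uint32_t instr) {
  const int offset = static_cast<int>(instr & 0xFFu) << 2;
  return (instr & (1u << 23)) ? offset : -offset;
}

int main() {
  const uint32_t vldr = 0xED9F0B00;     // vldr d0, [pc, #0]
  for (int off = -1020; off <= 1020; off += 4) {
    assert(GetVldrOffset(SetVldrOffset(vldr, off)) == off);
  }
  return 0;
}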
 
 bool Assembler::IsStrRegisterImmediate(Instr instr) {
   return (instr & (B27 | B26 | B25 | B22 | B20)) == B26;
 }
 
 
 Instr Assembler::SetStrRegisterImmediateOffset(Instr instr, int offset) {
   ASSERT(IsStrRegisterImmediate(instr));
   bool positive = offset >= 0;
   if (!positive) offset = -offset;
(...skipping 66 matching lines...)
 
 
 bool Assembler::IsLdrRegFpNegOffset(Instr instr) {
   return ((instr & kLdrStrInstrTypeMask) == kLdrRegFpNegOffsetPattern);
 }
 
 
 bool Assembler::IsLdrPcImmediateOffset(Instr instr) {
   // Check the instruction is indeed a
   // ldr<cond> <Rd>, [pc +/- offset_12].
-  return (instr & (kLdrPCMask & ~kCondMask)) == 0x051f0000;
+  return (instr & kLdrPCMask) == kLdrPCPattern;
 }
 
 
+bool Assembler::IsVldrDPcImmediateOffset(Instr instr) {
+  // Check the instruction is indeed a
+  // vldr<cond> <Dd>, [pc +/- offset_12].
+  return (instr & kVldrDPCMask) == kVldrDPCPattern;
+}
+
+
 bool Assembler::IsTstImmediate(Instr instr) {
   return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) ==
       (I | TST | S);
 }
 
 
 bool Assembler::IsCmpRegister(Instr instr) {
   return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) ==
       (CMP | S);
 }
(...skipping 258 matching lines...)
 // encoded.
 bool Operand::must_use_constant_pool(const Assembler* assembler) const {
   if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) {
 #ifdef DEBUG
     if (!Serializer::enabled()) {
       Serializer::TooLateToEnableNow();
     }
 #endif  // def DEBUG
     if (assembler != NULL && assembler->predictable_code_size()) return true;
     return Serializer::enabled();
-  } else if (rmode_ == RelocInfo::NONE) {
+  } else if (RelocInfo::IsNone(rmode_)) {
     return false;
   }
   return true;
 }
 
 
 bool Operand::is_single_instruction(const Assembler* assembler,
                                     Instr instr) const {
   if (rm_.is_valid()) return true;
   uint32_t dummy1, dummy2;
(...skipping 1187 matching lines...)
                      const Condition cond) {
   // Dd = immediate
   // Instruction details available in ARM DDI 0406B, A8-640.
   ASSERT(CpuFeatures::IsEnabled(VFP2));
 
   uint32_t enc;
   if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
     // The double can be encoded in the instruction.
     emit(cond | 0xE*B24 | 0xB*B20 | dst.code()*B12 | 0xB*B8 | enc);
   } else {
-    // Synthesise the double from ARM immediates. This could be implemented
-    // using vldr from a constant pool.
-    uint32_t lo, hi;
-    DoubleAsTwoUInt32(imm, &lo, &hi);
-    mov(ip, Operand(lo));
-
-    if (scratch.is(no_reg)) {
-      // Move the low part of the double into the lower of the corresponsing S
-      // registers of D register dst.
-      vmov(dst.low(), ip, cond);
-
-      // Move the high part of the double into the higher of the corresponsing S
-      // registers of D register dst.
-      mov(ip, Operand(hi));
-      vmov(dst.high(), ip, cond);
-    } else {
-      // Move the low and high parts of the double to a D register in one
-      // instruction.
-      mov(scratch, Operand(hi));
-      vmov(dst, ip, scratch, cond);
-    }
+    RecordRelocInfo(imm);
+    vldr(dst, MemOperand(pc, 0), cond);
+    // TODO(jfb) Constant blinding, denorm to zero, no NaN.
   }
 }
 
 
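Editor's note (illustration, not part of the patch): the new fallback no longer synthesises the double through ip/GPR moves; it queues the 64-bit constant via RecordRelocInfo(imm) and emits a vldr from pc that CheckConstPool later patches. The pool stores the double's IEEE-754 bit pattern as two 32-bit words, low word first, matching the emit calls in CheckConstPool below. A standalone sketch of that bit-splitting:

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  const double imm = 1.5;               // 0x3FF8000000000000 in IEEE-754.
  uint64_t bits = 0;
  memcpy(&bits, &imm, sizeof(imm));     // Type-pun without aliasing UB.
  const uint32_t lo = static_cast<uint32_t>(bits & 0xFFFFFFFF);
  const uint32_t hi = static_cast<uint32_t>(bits >> 32);
  // Prints "pool words: 0x00000000 0x3FF80000" (low word emitted first).
  printf("pool words: 0x%08X 0x%08X\n", lo, hi);
  return 0;
}
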
 void Assembler::vmov(const SwVfpRegister dst,
                      const SwVfpRegister src,
                      const Condition cond) {
   // Sd = Sm
   // Instruction details available in ARM DDI 0406B, A8-642.
   ASSERT(CpuFeatures::IsEnabled(VFP2));
(...skipping 493 matching lines...)
     }
   }
 }
 
 
 void Assembler::db(uint8_t data) {
   // No relocation info should be pending while using db. db is used
   // to write pure data with no pointers and the constant pool should
   // be emitted before using db.
   ASSERT(num_pending_reloc_info_ == 0);
+  ASSERT(num_pending_64_bit_reloc_info_ == 0);
   CheckBuffer();
   *reinterpret_cast<uint8_t*>(pc_) = data;
   pc_ += sizeof(uint8_t);
 }
 
 
 void Assembler::dd(uint32_t data) {
   // No relocation info should be pending while using dd. dd is used
   // to write pure data with no pointers and the constant pool should
   // be emitted before using dd.
   ASSERT(num_pending_reloc_info_ == 0);
+  ASSERT(num_pending_64_bit_reloc_info_ == 0);
   CheckBuffer();
   *reinterpret_cast<uint32_t*>(pc_) = data;
   pc_ += sizeof(uint32_t);
 }
 
 
 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
   // We do not try to reuse pool constants.
   RelocInfo rinfo(pc_, rmode, data, NULL);
   if (((rmode >= RelocInfo::JS_RETURN) &&
        (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) ||
       (rmode == RelocInfo::CONST_POOL)) {
     // Adjust code for new modes.
     ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
            || RelocInfo::IsJSReturn(rmode)
            || RelocInfo::IsComment(rmode)
            || RelocInfo::IsPosition(rmode)
            || RelocInfo::IsConstPool(rmode));
     // These modes do not need an entry in the constant pool.
   } else {
-    ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
-    if (num_pending_reloc_info_ == 0) {
-      first_const_pool_use_ = pc_offset();
-    }
-    pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
-    // Make sure the constant pool is not emitted in place of the next
-    // instruction for which we just recorded relocation info.
-    BlockConstPoolFor(1);
+    RecordRelocInfoConstantPoolEntryHelper(rinfo);
   }
-  if (rinfo.rmode() != RelocInfo::NONE) {
+  if (!RelocInfo::IsNone(rinfo.rmode())) {
     // Don't record external references unless the heap will be serialized.
     if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
 #ifdef DEBUG
       if (!Serializer::enabled()) {
         Serializer::TooLateToEnableNow();
       }
 #endif
       if (!Serializer::enabled() && !emit_debug_code()) {
         return;
       }
     }
     ASSERT(buffer_space() >= kMaxRelocSize);  // too late to grow buffer here
     if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
       RelocInfo reloc_info_with_ast_id(pc_,
                                        rmode,
                                        RecordedAstId().ToInt(),
                                        NULL);
       ClearRecordedAstId();
       reloc_info_writer.Write(&reloc_info_with_ast_id);
     } else {
       reloc_info_writer.Write(&rinfo);
     }
   }
 }
 
+void Assembler::RecordRelocInfo(double data) {
+  // We do not try to reuse pool constants.
+  RelocInfo rinfo(pc_, data);
+  RecordRelocInfoConstantPoolEntryHelper(rinfo);
+}
+
+
+void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) {
+  ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
+  if (num_pending_reloc_info_ == 0) {
+    first_const_pool_use_ = pc_offset();
+  }
+  pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
+  if (rinfo.rmode() == RelocInfo::NONE64) {
+    ++num_pending_64_bit_reloc_info_;
+  }
+  ASSERT(num_pending_64_bit_reloc_info_ <= num_pending_reloc_info_);
+  // Make sure the constant pool is not emitted in place of the next
+  // instruction for which we just recorded relocation info.
+  BlockConstPoolFor(1);
+}
+
 
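Editor's note (toy model with hypothetical names, not V8 types): the new helper centralizes the queueing that both RecordRelocInfo overloads now share — one pending queue, a separate count of 64-bit entries for pool sizing, and the first-use offset pinned when the queue goes from empty to non-empty:

#include <cassert>
#include <vector>

struct PendingEntry { bool is_64_bit; };   // Stand-in for RelocInfo.

struct PoolState {
  std::vector<PendingEntry> pending;
  int num_64_bit = 0;
  int first_use_pc = -1;

  void Record(PendingEntry e, int pc_offset) {
    if (pending.empty()) first_use_pc = pc_offset;  // Pool reach starts here.
    pending.push_back(e);
    if (e.is_64_bit) ++num_64_bit;
    // Invariant asserted by the helper above.
    assert(num_64_bit <= static_cast<int>(pending.size()));
  }
};

int main() {
  PoolState state;
  state.Record({false}, 0);   // 32-bit entry, e.g. from mov(ip, Operand(...)).
  state.Record({true}, 4);    // 64-bit entry, e.g. from the new vmov path.
  assert(state.first_use_pc == 0 && state.num_64_bit == 1);
  return 0;
}
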
 void Assembler::BlockConstPoolFor(int instructions) {
   int pc_limit = pc_offset() + instructions * kInstrSize;
   if (no_const_pool_before_ < pc_limit) {
     // If there are some pending entries, the constant pool cannot be blocked
-    // further than first_const_pool_use_ + kMaxDistToPool
+    // further than constant pool instruction's reach.
     ASSERT((num_pending_reloc_info_ == 0) ||
-           (pc_limit < (first_const_pool_use_ + kMaxDistToPool)));
+           (pc_limit - first_const_pool_use_ < kMaxDistToIntPool));
     no_const_pool_before_ = pc_limit;
   }
 
   if (next_buffer_check_ < no_const_pool_before_) {
     next_buffer_check_ = no_const_pool_before_;
   }
 }
 
 
 void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
   // Some short sequence of instruction mustn't be broken up by constant pool
   // emission, such sequences are protected by calls to BlockConstPoolFor and
   // BlockConstPoolScope.
   if (is_const_pool_blocked()) {
     // Something is wrong if emission is forced and blocked at the same time.
     ASSERT(!force_emit);
     return;
   }
 
   // There is nothing to do if there are no pending constant pool entries.
   if (num_pending_reloc_info_ == 0) {
+    ASSERT(num_pending_64_bit_reloc_info_ == 0);
     // Calculate the offset of the next check.
     next_buffer_check_ = pc_offset() + kCheckPoolInterval;
     return;
   }
 
-  // We emit a constant pool when:
-  //  * requested to do so by parameter force_emit (e.g. after each function).
-  //  * the distance to the first instruction accessing the constant pool is
-  //    kAvgDistToPool or more.
-  //  * no jump is required and the distance to the first instruction accessing
-  //    the constant pool is at least kMaxDistToPool / 2.
-  ASSERT(first_const_pool_use_ >= 0);
-  int dist = pc_offset() - first_const_pool_use_;
-  if (!force_emit && dist < kAvgDistToPool &&
-      (require_jump || (dist < (kMaxDistToPool / 2)))) {
-    return;
-  }
-
   // Check that the code buffer is large enough before emitting the constant
   // pool (include the jump over the pool and the constant pool marker and
   // the gap to the relocation information).
+  // Note 64-bit values are wider, and the first one needs to be 64-bit aligned.
   int jump_instr = require_jump ? kInstrSize : 0;
-  int size = jump_instr + kInstrSize + num_pending_reloc_info_ * kPointerSize;
+  int size = kInstrSize + jump_instr + num_pending_reloc_info_ * kPointerSize;
+  bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0);
+  // 64-bit values must be 64-bit aligned.
+  bool require_64_bit_align = has_fp_values && (((uintptr_t)pc_ + size) & 0x3);
+  if (require_64_bit_align) {
+    size += kInstrSize;
+  }
+  STATIC_ASSERT(kPointerSize == kDoubleSize / 2);
+  size += num_pending_64_bit_reloc_info_ * (kDoubleSize / 2);
+  int marker_num = (size - kInstrSize - jump_instr) / 4;
+
+  // We emit a constant pool when:
+  //  * requested to do so by parameter force_emit (e.g. after each function).
+  //  * the distance from the first instruction accessing the constant pool to
+  //    any of the constant pool entries will exceed its limit the next
+  //    time the pool is checked. This is overly restrictive, but we don't emit
+  //    constant pool entries in-order so it's conservatively correct.
+  //  * the instruction doesn't require a jump after itself to jump over the
+  //    constant pool, and we're getting close to running out of range.
+  if (!force_emit) {
+    ASSERT((first_const_pool_use_ >= 0) && (num_pending_reloc_info_ > 0));
+    int dist = pc_offset() + size - first_const_pool_use_;
+    if (has_fp_values) {
+      if ((dist < kMaxDistToFPPool - kCheckPoolInterval) &&
+          (require_jump || (dist < kMaxDistToFPPool / 2))) {
+        return;
+      }
+    } else {
+      if ((dist < kMaxDistToIntPool - kCheckPoolInterval) &&
+          (require_jump || (dist < kMaxDistToIntPool / 2))) {
+        return;
+      }
+    }
+  }
+
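Editor's note (illustration, not part of the patch): the pool now costs one marker word, an optional jump, 4 bytes per entry, plus an optional alignment pad word and 4 extra bytes per 64-bit entry. The patch set shown tests ((uintptr_t)pc_ + size) & 0x3 for the pad; this standalone sketch assumes the intent is 8-byte alignment of the first 64-bit entry and tests & 0x7:

#include <cstdint>
#include <cstdio>

int PoolSize(uintptr_t pc, int num_entries, int num_64_bit, bool require_jump) {
  const int kInstrSize = 4, kPointerSize = 4, kDoubleSize = 8;
  const int jump_instr = require_jump ? kInstrSize : 0;
  int size = kInstrSize + jump_instr + num_entries * kPointerSize;
  const bool has_fp_values = num_64_bit > 0;
  // Pad with one extra word if the 64-bit region would start misaligned.
  if (has_fp_values && (((pc + size) & 0x7) != 0)) size += kInstrSize;
  size += num_64_bit * (kDoubleSize / 2);  // Each 64-bit entry adds 4 bytes.
  return size;
}

int main() {
  // Three entries, one of them 64-bit, jump required, pc 8-byte aligned:
  // 4 (marker) + 4 (jump) + 3 * 4 (entries) + 4 (pad) + 4 (upper word) = 28.
  printf("%d\n", PoolSize(/*pc=*/0, /*num_entries=*/3, /*num_64_bit=*/1, true));
  return 0;
}
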
   int needed_space = size + kGap;
   while (buffer_space() <= needed_space) GrowBuffer();
 
   {
     // Block recursive calls to CheckConstPool.
     BlockConstPoolScope block_const_pool(this);
     RecordComment("[ Constant Pool");
     RecordConstPool(size);
 
     // Emit jump over constant pool if necessary.
     Label after_pool;
     if (require_jump) {
       b(&after_pool);
     }
 
     // Put down constant pool marker "Undefined instruction" as specified by
     // A5.6 (ARMv7) Instruction set encoding.
-    emit(kConstantPoolMarker | num_pending_reloc_info_);
+    emit(kConstantPoolMarker | marker_num);
 
-    // Emit constant pool entries.
+    if (require_64_bit_align) {
+      emit(kConstantPoolMarker);
+    }
+
+    // Emit 64-bit constant pool entries first: their range is smaller than
+    // 32-bit entries.
+    for (int i = 0; i < num_pending_reloc_info_; i++) {
+      ASSERT(!((uintptr_t)pc_ & 0x3));  // Check 64-bit alignment.
+      RelocInfo& rinfo = pending_reloc_info_[i];
+
+      if (rinfo.rmode() != RelocInfo::NONE64) {
+        // 32-bit values emitted later.
+        continue;
+      }
+
+      Instr instr = instr_at(rinfo.pc());
+      // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
+      ASSERT((IsVldrDPcImmediateOffset(instr) &&
+              GetVldrDRegisterImmediateOffset(instr) == 0));
+
+      int delta = pc_ - rinfo.pc() - kPcLoadDelta;
+      ASSERT(is_uint10(delta));
+
+      instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta));
+
+      const double double_data = rinfo.data64();
+      uint64_t uint_data = 0;
+      memcpy(&uint_data, &double_data, sizeof(double_data));
+      emit(uint_data & 0xFFFFFFFF);
+      emit(uint_data >> 32);
+    }
+
+    // Emit 32-bit constant pool entries.
     for (int i = 0; i < num_pending_reloc_info_; i++) {
       RelocInfo& rinfo = pending_reloc_info_[i];
       ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
              rinfo.rmode() != RelocInfo::POSITION &&
              rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
              rinfo.rmode() != RelocInfo::CONST_POOL);
 
+      if (rinfo.rmode() == RelocInfo::NONE64) {
+        // 64-bit values emitted earlier.
+        continue;
+      }
+
       Instr instr = instr_at(rinfo.pc());
       // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0.
-      ASSERT(IsLdrPcImmediateOffset(instr) &&
-             GetLdrRegisterImmediateOffset(instr) == 0);
+      ASSERT((IsLdrPcImmediateOffset(instr) &&
+              GetLdrRegisterImmediateOffset(instr) == 0));
 
       int delta = pc_ - rinfo.pc() - kPcLoadDelta;
       // 0 is the smallest delta:
       //   ldr rd, [pc, #0]
       //   constant pool marker
       //   data
       ASSERT(is_uint12(delta));
 
       instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta));
       emit(rinfo.data());
     }
 
     num_pending_reloc_info_ = 0;
+    num_pending_64_bit_reloc_info_ = 0;
     first_const_pool_use_ = -1;
 
     RecordComment("]");
 
     if (after_pool.is_linked()) {
       bind(&after_pool);
     }
   }
 
   // Since a constant pool was just emitted, move the check offset forward by
   // the standard interval.
   next_buffer_check_ = pc_offset() + kCheckPoolInterval;
 }
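
Editor's note (illustration, not part of the patch): on ARM, a load at address A that reads [pc, #imm] observes pc == A + 8 (kPcLoadDelta, the pipeline-ahead pc), so the displacement patched back into the instruction for a pool entry at address P is P - A - 8. For the vldr case it must be a non-negative multiple of 4 below 1024 (is_uint10). A standalone arithmetic check:

#include <cassert>
#include <cstdint>

int main() {
  const int kPcLoadDelta = 8;           // ARM: pc reads as address + 8.
  const uintptr_t load_addr = 0x1000;   // Address of the patched vldr.
  const uintptr_t entry_addr = 0x1040;  // Address of its 64-bit pool entry.
  const int delta = static_cast<int>(entry_addr - load_addr) - kPcLoadDelta;
  assert(delta >= 0 && delta < 1024 && (delta & 3) == 0);  // Fits is_uint10.
  assert((delta >> 2) == 0x0E);         // imm8 field actually written back.
  return 0;
}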
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM