Chromium Code Reviews

Unified Diff: src/arm/assembler-arm.cc

Issue 11191029: Use VLDR instead of VMOVs from GPR when a 64-bit double can't be encoded as a VMOV immediate. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Revert Operand::Zero() change that came with the RelocInfo::NONE32 rename, both to be done later.
Created 8 years, 2 months ago
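Note: for orientation, a minimal standalone sketch of the two materialization strategies (illustrative only, not part of the patch). The old path moved both 32-bit halves of the double through a GPR (mov + vmov, up to four instructions); the new path records the double as a pending 64-bit constant pool entry and emits a single pc-relative vldr whose offset is patched when the pool is emitted.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
      double imm = 1.5;
      uint64_t bits;
      memcpy(&bits, &imm, sizeof(bits));  // same split DoubleAsTwoUInt32 performs
      uint32_t lo = static_cast<uint32_t>(bits);
      uint32_t hi = static_cast<uint32_t>(bits >> 32);
      printf("old: mov ip, #0x%08x ; vmov dst.low(), ip ; "
             "mov ip, #0x%08x ; vmov dst.high(), ip\n", lo, hi);
      printf("new: vldr dst, [pc, #offset]  ; pool data 0x%016llx\n",
             static_cast<unsigned long long>(bits));
      return 0;
    }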
// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
(...skipping 255 matching lines...)
// register r is not encoded.
const Instr kPushRegPattern =
    al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16;
// ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r))
// register r is not encoded.
const Instr kPopRegPattern =
    al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16;
// mov lr, pc
const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12;
// ldr rd, [pc, #offset]
- const Instr kLdrPCMask = kCondMask | 15 * B24 | 7 * B20 | 15 * B16;
- const Instr kLdrPCPattern = al | 5 * B24 | L | kRegister_pc_Code * B16;
+ const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16;
+ const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16;
+ // vldr dd, [pc, #offset]
+ const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
+ const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8;
// blxcc rm
const Instr kBlxRegMask =
    15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4;
const Instr kBlxRegPattern =
    B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX;
const Instr kBlxIp = al | kBlxRegPattern | ip.code();
const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16;
const Instr kMovMvnPattern = 0xd * B21;
const Instr kMovMvnFlip = B22;
const Instr kMovLeaveCCMask = 0xdff * B16;
(...skipping 55 matching lines...)
    buffer_ = static_cast<byte*>(buffer);
    buffer_size_ = buffer_size;
    own_buffer_ = false;
  }

  // Set up buffer pointers.
  ASSERT(buffer_ != NULL);
  pc_ = buffer_;
  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
  num_pending_reloc_info_ = 0;
+ num_pending_64_bit_reloc_info_ = 0;
  next_buffer_check_ = 0;
  const_pool_blocked_nesting_ = 0;
  no_const_pool_before_ = 0;
  first_const_pool_use_ = -1;
  last_bound_pos_ = 0;
  ClearRecordedAstId();
}


Assembler::~Assembler() {
  ASSERT(const_pool_blocked_nesting_ == 0);
  if (own_buffer_) {
    if (isolate()->assembler_spare_buffer() == NULL &&
        buffer_size_ == kMinimalBufferSize) {
      isolate()->set_assembler_spare_buffer(buffer_);
    } else {
      DeleteArray(buffer_);
    }
  }
}


void Assembler::GetCode(CodeDesc* desc) {
  // Emit constant pool if necessary.
  CheckConstPool(true, false);
  ASSERT(num_pending_reloc_info_ == 0);
+ ASSERT(num_pending_64_bit_reloc_info_ == 0);

  // Set up code descriptor.
  desc->buffer = buffer_;
  desc->buffer_size = buffer_size_;
  desc->instr_size = pc_offset();
  desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
}


void Assembler::Align(int m) {
(...skipping 26 matching lines...)
  // with 4 to get the offset in bytes.
  return ((instr & kImm24Mask) << 8) >> 6;
}


bool Assembler::IsLdrRegisterImmediate(Instr instr) {
  return (instr & (B27 | B26 | B25 | B22 | B20)) == (B26 | B20);
}


+ bool Assembler::IsVldrDRegisterImmediate(Instr instr) {
+   return (instr & (15 * B24 | 3 * B20 | 15 * B8)) == (13 * B24 | B20 | 11 * B8);
+ }
+
+
int Assembler::GetLdrRegisterImmediateOffset(Instr instr) {
  ASSERT(IsLdrRegisterImmediate(instr));
  bool positive = (instr & B23) == B23;
  int offset = instr & kOff12Mask;  // Zero extended offset.
  return positive ? offset : -offset;
}


+ int Assembler::GetVldrDRegisterImmediateOffset(Instr instr) {
+   ASSERT(IsVldrDRegisterImmediate(instr));
+   bool positive = (instr & B23) == B23;
+   int offset = instr & kOff8Mask;  // Zero extended offset.
+   offset <<= 2;
+   return positive ? offset : -offset;
+ }


Instr Assembler::SetLdrRegisterImmediateOffset(Instr instr, int offset) {
  ASSERT(IsLdrRegisterImmediate(instr));
  bool positive = offset >= 0;
  if (!positive) offset = -offset;
  ASSERT(is_uint12(offset));
  // Set bit indicating whether the offset should be added.
  instr = (instr & ~B23) | (positive ? B23 : 0);
  // Set the actual offset.
  return (instr & ~kOff12Mask) | offset;
}

ulan 2012/10/22 09:18:25 Functions should be separated by two empty lines.
+ Instr Assembler::SetVldrDRegisterImmediateOffset(Instr instr, int offset) {
+   ASSERT(IsVldrDRegisterImmediate(instr));
+   ASSERT((offset & ~3) == offset);  // Must be 64-bit aligned.
+   bool positive = offset >= 0;
+   if (!positive) offset = -offset;
+   ASSERT(is_uint10(offset));
+   // Set bit indicating whether the offset should be added.
+   instr = (instr & ~B23) | (positive ? B23 : 0);
+   // Set the actual offset. Its bottom 2 bits are zero.
+   return (instr & ~kOff8Mask) | (offset >> 2);
+ }
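Note: a standalone round-trip check of the offset field logic in the two new functions above (illustrative sketch, not part of the patch; assumes kOff8Mask == 0xFF and B23 == 1 << 23, matching their use in this file; the imm8 field stores the byte offset divided by 4).

    #include <cassert>
    #include <cstdint>

    typedef uint32_t Instr;
    const Instr kOff8Mask = 0xFF;
    const Instr B23 = 1 << 23;

    Instr SetOffset(Instr instr, int offset) {
      bool positive = offset >= 0;
      if (!positive) offset = -offset;
      assert((offset & 3) == 0 && offset < 1024);  // word-multiple, is_uint10
      instr = (instr & ~B23) | (positive ? B23 : 0);
      return (instr & ~kOff8Mask) | (offset >> 2);  // imm8 holds offset / 4
    }

    int GetOffset(Instr instr) {
      bool positive = (instr & B23) == B23;
      int offset = (instr & kOff8Mask) << 2;  // scale imm8 back to bytes
      return positive ? offset : -offset;
    }

    int main() {
      Instr vldr = 0;  // offset bits only; the other fields are irrelevant here
      for (int off = -1020; off <= 1020; off += 4) {
        assert(GetOffset(SetOffset(vldr, off)) == off);  // round-trips exactly
      }
      return 0;
    }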


bool Assembler::IsStrRegisterImmediate(Instr instr) {
  return (instr & (B27 | B26 | B25 | B22 | B20)) == B26;
}


Instr Assembler::SetStrRegisterImmediateOffset(Instr instr, int offset) {
  ASSERT(IsStrRegisterImmediate(instr));
  bool positive = offset >= 0;
  if (!positive) offset = -offset;
(...skipping 66 matching lines...)


bool Assembler::IsLdrRegFpNegOffset(Instr instr) {
  return ((instr & kLdrStrInstrTypeMask) == kLdrRegFpNegOffsetPattern);
}


bool Assembler::IsLdrPcImmediateOffset(Instr instr) {
  // Check the instruction is indeed a
  // ldr<cond> <Rd>, [pc +/- offset_12].
- return (instr & (kLdrPCMask & ~kCondMask)) == 0x051f0000;
+ return (instr & kLdrPCMask) == kLdrPCPattern;
}


+ bool Assembler::IsVldrDPcImmediateOffset(Instr instr) {
+   // Check the instruction is indeed a
+   // vldr<cond> <Dd>, [pc +/- offset_12].
+   return (instr & kVldrDPCMask) == kVldrDPCPattern;
+ }
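Note: a standalone check that the new mask/pattern pair really selects vldr<cond> <Dd>, [pc, #+/-imm8*4] (illustrative sketch, not part of the patch). The instruction words are hand-assembled from the ARM DDI 0406 encodings, with cond 0xE being 'al'; the U and D bits are deliberately outside the mask, since the offset sign and the top register bit may vary.

    #include <cassert>
    #include <cstdint>

    typedef uint32_t Instr;

    int main() {
      const Instr kVldrDPCMask = 15u << 24 | 3u << 20 | 15u << 16 | 15u << 8;
      const Instr kVldrDPCPattern = 13u << 24 | 1u << 20 | 15u << 16 | 11u << 8;
      // vldr d0, [pc, #8]: cond=0xE, bits 27-24=1101, U=1, L=1, Rn=pc, imm8=2.
      Instr vldr_d0 = 0xEu << 28 | 0xDu << 24 | 1u << 23 | 1u << 20 |
                      0xFu << 16 | 0xBu << 8 | 2u;
      assert((vldr_d0 & kVldrDPCMask) == kVldrDPCPattern);
      // ldr r0, [pc, #0] must not match: its bits 27-24 are 0101, not 1101.
      Instr ldr_r0 = 0xEu << 28 | 0x5u << 24 | 1u << 23 | 1u << 20 | 0xFu << 16;
      assert((ldr_r0 & kVldrDPCMask) != kVldrDPCPattern);
      return 0;
    }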


bool Assembler::IsTstImmediate(Instr instr) {
  return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) ==
      (I | TST | S);
}


bool Assembler::IsCmpRegister(Instr instr) {
  return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) ==
      (CMP | S);
}
(...skipping 258 matching lines...)
// encoded.
bool Operand::must_use_constant_pool(const Assembler* assembler) const {
  if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) {
#ifdef DEBUG
    if (!Serializer::enabled()) {
      Serializer::TooLateToEnableNow();
    }
#endif  // def DEBUG
    if (assembler != NULL && assembler->predictable_code_size()) return true;
    return Serializer::enabled();
- } else if (rmode_ == RelocInfo::NONE) {
+ } else if (RelocInfo::IsNone(rmode_)) {
    return false;
  }
  return true;
}


bool Operand::is_single_instruction(const Assembler* assembler,
                                    Instr instr) const {
  if (rm_.is_valid()) return true;
  uint32_t dummy1, dummy2;
(...skipping 1187 matching lines...)
                     const Condition cond) {
  // Dd = immediate
  // Instruction details available in ARM DDI 0406B, A8-640.
  ASSERT(CpuFeatures::IsEnabled(VFP2));

  uint32_t enc;
  if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
    // The double can be encoded in the instruction.
    emit(cond | 0xE*B24 | 0xB*B20 | dst.code()*B12 | 0xB*B8 | enc);
  } else {
-   // Synthesise the double from ARM immediates. This could be implemented
-   // using vldr from a constant pool.
-   uint32_t lo, hi;
-   DoubleAsTwoUInt32(imm, &lo, &hi);
-   mov(ip, Operand(lo));
-
-   if (scratch.is(no_reg)) {
-     // Move the low part of the double into the lower of the corresponsing S
-     // registers of D register dst.
-     vmov(dst.low(), ip, cond);
-
-     // Move the high part of the double into the higher of the corresponsing S
-     // registers of D register dst.
-     mov(ip, Operand(hi));
-     vmov(dst.high(), ip, cond);
-   } else {
-     // Move the low and high parts of the double to a D register in one
-     // instruction.
-     mov(scratch, Operand(hi));
-     vmov(dst, ip, scratch, cond);
-   }
+   RecordRelocInfo(imm);
+   vldr(dst, MemOperand(pc, 0), cond);
+   // TODO(jfb) Constant blinding, denorm to zero, no NaN.
  }
}
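Note: a toy model (illustrative only, not V8 code) of the record-then-patch flow this function now relies on. The vldr is emitted with offset 0, and CheckConstPool later writes the two data words and back-patches the real offset; kPcLoadDelta is assumed to be 8, the ARM pc-read bias.

    #include <cassert>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    const int kPcLoadDelta = 8;

    int main() {
      std::vector<uint32_t> code;
      // vmov(d0, 1.5): placeholder load whose offset is patched later.
      size_t fixup_at = code.size();
      code.push_back(0 /* vldr d0, [pc, #0], offset bits only */);
      code.push_back(1 /* some other instruction */);
      // Pool emission: marker word, then the two data words.
      code.push_back(2 /* pool marker */);
      size_t data_at = code.size();
      double imm = 1.5;
      uint64_t bits;
      memcpy(&bits, &imm, sizeof(bits));
      code.push_back(static_cast<uint32_t>(bits & 0xFFFFFFFF));
      code.push_back(static_cast<uint32_t>(bits >> 32));
      // Back-patch: byte distance from the load to its data, minus the pc bias.
      int delta = static_cast<int>((data_at - fixup_at) * 4) - kPcLoadDelta;
      assert(delta >= 0 && (delta & 3) == 0);
      code[fixup_at] |= (delta >> 2);  // store imm8 = delta / 4
      assert(code[fixup_at] == 1);     // (3 - 0) * 4 - 8 = 4 bytes; imm8 = 1
      return 0;
    }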


void Assembler::vmov(const SwVfpRegister dst,
                     const SwVfpRegister src,
                     const Condition cond) {
  // Sd = Sm
  // Instruction details available in ARM DDI 0406B, A8-642.
  ASSERT(CpuFeatures::IsEnabled(VFP2));
(...skipping 493 matching lines...)
    }
  }
}


void Assembler::db(uint8_t data) {
  // No relocation info should be pending while using db. db is used
  // to write pure data with no pointers and the constant pool should
  // be emitted before using db.
  ASSERT(num_pending_reloc_info_ == 0);
+ ASSERT(num_pending_64_bit_reloc_info_ == 0);
  CheckBuffer();
  *reinterpret_cast<uint8_t*>(pc_) = data;
  pc_ += sizeof(uint8_t);
}


void Assembler::dd(uint32_t data) {
  // No relocation info should be pending while using dd. dd is used
  // to write pure data with no pointers and the constant pool should
  // be emitted before using dd.
  ASSERT(num_pending_reloc_info_ == 0);
+ ASSERT(num_pending_64_bit_reloc_info_ == 0);
  CheckBuffer();
  *reinterpret_cast<uint32_t*>(pc_) = data;
  pc_ += sizeof(uint32_t);
}

void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  // We do not try to reuse pool constants.
  RelocInfo rinfo(pc_, rmode, data, NULL);
  if (((rmode >= RelocInfo::JS_RETURN) &&
       (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) ||
      (rmode == RelocInfo::CONST_POOL)) {
    // Adjust code for new modes.
    ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
           || RelocInfo::IsJSReturn(rmode)
           || RelocInfo::IsComment(rmode)
           || RelocInfo::IsPosition(rmode)
           || RelocInfo::IsConstPool(rmode));
    // These modes do not need an entry in the constant pool.
  } else {
-   ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
-   if (num_pending_reloc_info_ == 0) {
-     first_const_pool_use_ = pc_offset();
-   }
-   pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
-   // Make sure the constant pool is not emitted in place of the next
-   // instruction for which we just recorded relocation info.
-   BlockConstPoolFor(1);
+   RecordRelocInfoConstantPoolEntryHelper(rinfo);
  }
- if (rinfo.rmode() != RelocInfo::NONE) {
+ if (!RelocInfo::IsNone(rinfo.rmode())) {
    // Don't record external references unless the heap will be serialized.
    if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
#ifdef DEBUG
      if (!Serializer::enabled()) {
        Serializer::TooLateToEnableNow();
      }
#endif
      if (!Serializer::enabled() && !emit_debug_code()) {
        return;
      }
    }
    ASSERT(buffer_space() >= kMaxRelocSize);  // too late to grow buffer here
    if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
      RelocInfo reloc_info_with_ast_id(pc_,
                                       rmode,
                                       RecordedAstId().ToInt(),
                                       NULL);
      ClearRecordedAstId();
      reloc_info_writer.Write(&reloc_info_with_ast_id);
    } else {
      reloc_info_writer.Write(&rinfo);
    }
  }
}

+ void Assembler::RecordRelocInfo(double data) {
+   // We do not try to reuse pool constants.
+   RelocInfo rinfo(pc_, data);
+   RecordRelocInfoConstantPoolEntryHelper(rinfo);
+ }
+
+
+ void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) {
+   ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
+   if (num_pending_reloc_info_ == 0) {
+     first_const_pool_use_ = pc_offset();
+   }
+   pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
+   if (rinfo.rmode() == RelocInfo::NONE64) {
+     ++num_pending_64_bit_reloc_info_;
+   }
+   ASSERT(num_pending_64_bit_reloc_info_ <= num_pending_reloc_info_);
+   // Make sure the constant pool is not emitted in place of the next
+   // instruction for which we just recorded relocation info.
+   BlockConstPoolFor(1);
+ }
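Note: the helper's bookkeeping in isolation, as a toy model (the enum and limit are illustrative, not V8's types). Every entry lands in the single pending array; NONE64 entries additionally bump the 64-bit counter, preserving the invariant asserted above.

    #include <cassert>

    enum Mode { kNone32, kNone64 };

    int main() {
      const int kMaxNumPendingRelocInfo = 4;
      Mode pending[kMaxNumPendingRelocInfo];
      int num_pending = 0, num_pending_64 = 0;

      Mode incoming[3] = { kNone32, kNone64, kNone32 };
      for (Mode m : incoming) {
        assert(num_pending < kMaxNumPendingRelocInfo);
        pending[num_pending++] = m;
        if (m == kNone64) ++num_pending_64;
        assert(num_pending_64 <= num_pending);  // same invariant as above
      }
      assert(num_pending == 3 && num_pending_64 == 1);
      return 0;
    }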


void Assembler::BlockConstPoolFor(int instructions) {
  int pc_limit = pc_offset() + instructions * kInstrSize;
  if (no_const_pool_before_ < pc_limit) {
    // If there are some pending entries, the constant pool cannot be blocked
-   // further than first_const_pool_use_ + kMaxDistToPool
-   ASSERT((num_pending_reloc_info_ == 0) ||
-          (pc_limit < (first_const_pool_use_ + kMaxDistToPool)));
+   // further than constant pool instruction's reach.
+   ASSERT((num_pending_reloc_info_ == 0) ||
+          (pc_limit - first_const_pool_use_ < kMaxDistToIntPool));
ulan 2012/10/22 09:18:25 Maybe also add assert for kMaxDistToFPPool if ther
    no_const_pool_before_ = pc_limit;
  }

  if (next_buffer_check_ < no_const_pool_before_) {
    next_buffer_check_ = no_const_pool_before_;
  }
}


void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
  // Some short sequence of instruction mustn't be broken up by constant pool
  // emission, such sequences are protected by calls to BlockConstPoolFor and
  // BlockConstPoolScope.
  if (is_const_pool_blocked()) {
    // Something is wrong if emission is forced and blocked at the same time.
    ASSERT(!force_emit);
    return;
  }

  // There is nothing to do if there are no pending constant pool entries.
  if (num_pending_reloc_info_ == 0) {
+   ASSERT(num_pending_64_bit_reloc_info_ == 0);
    // Calculate the offset of the next check.
    next_buffer_check_ = pc_offset() + kCheckPoolInterval;
    return;
  }

- // We emit a constant pool when:
- //  * requested to do so by parameter force_emit (e.g. after each function).
- //  * the distance to the first instruction accessing the constant pool is
- //    kAvgDistToPool or more.
- //  * no jump is required and the distance to the first instruction accessing
- //    the constant pool is at least kMaxDistToPool / 2.
- ASSERT(first_const_pool_use_ >= 0);
- int dist = pc_offset() - first_const_pool_use_;
- if (!force_emit && dist < kAvgDistToPool &&
-     (require_jump || (dist < (kMaxDistToPool / 2)))) {
-   return;
- }
-
  // Check that the code buffer is large enough before emitting the constant
  // pool (include the jump over the pool and the constant pool marker and
  // the gap to the relocation information).
+ // Note 64-bit values are wider, and the first one needs to be 64-bit aligned.
  int jump_instr = require_jump ? kInstrSize : 0;
- int size = jump_instr + kInstrSize + num_pending_reloc_info_ * kPointerSize;
+ int size = kInstrSize + jump_instr + num_pending_reloc_info_ * kPointerSize;
+ bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0);
ulan 2012/10/22 09:18:25 Indentation.
+ // 64-bit values must be 64-bit aligned.
+ bool require_64_bit_align = has_fp_values && (((uintptr_t)pc_ + size) & 0x3);
+ if (require_64_bit_align) {
+   size += kInstrSize;
+ }
+ STATIC_ASSERT(kPointerSize == kDoubleSize / 2);
+ size += num_pending_64_bit_reloc_info_ * (kDoubleSize / 2);
ulan 2012/10/22 09:18:25 Why is it (kDoubleSize / 2)?
+ int marker_num = (size - kInstrSize - jump_instr) / 4;
+
+ // We emit a constant pool when:
+ //  * requested to do so by parameter force_emit (e.g. after each function).
+ //  * the distance from the first instruction accessing the constant pool to
+ //    any of the constant pool entries will exceed its limit the next
+ //    time the pool is checked. This is overly restrictive, but we don't emit
+ //    constant pool entries in-order so it's conservatively correct.
+ //  * the instruction doesn't require a jump after itself to jump over the
+ //    constant pool, and we're getting close to running out of range.
+ if (!force_emit) {
+   ASSERT((first_const_pool_use_ >= 0) && (num_pending_reloc_info_ > 0));
+   int dist = pc_offset() + size - first_const_pool_use_;
+   if (has_fp_values) {
+     if ((dist < kMaxDistToFPPool - kCheckPoolInterval) &&
+         (require_jump || (dist < kMaxDistToFPPool / 2))) {
+       return;
+     }
+   } else {
+     if ((dist < kMaxDistToIntPool - kCheckPoolInterval) &&
+         (require_jump || (dist < kMaxDistToIntPool / 2))) {
+       return;
+     }
+   }
+ }
+
  int needed_space = size + kGap;
  while (buffer_space() <= needed_space) GrowBuffer();

  {
    // Block recursive calls to CheckConstPool.
    BlockConstPoolScope block_const_pool(this);
    RecordComment("[ Constant Pool");
    RecordConstPool(size);

    // Emit jump over constant pool if necessary.
    Label after_pool;
    if (require_jump) {
      b(&after_pool);
    }

    // Put down constant pool marker "Undefined instruction" as specified by
    // A5.6 (ARMv7) Instruction set encoding.
-   emit(kConstantPoolMarker | num_pending_reloc_info_);
+   emit(kConstantPoolMarker | marker_num);

-   // Emit constant pool entries.
+   if (require_64_bit_align) {
+     emit(kConstantPoolMarker);
+   }
+
+   // Emit 64-bit constant pool entries first: their range is smaller than
+   // 32-bit entries.
+   for (int i = 0; i < num_pending_reloc_info_; i++) {
+     ASSERT(!((uintptr_t)pc_ & 0x3));  // Check 64-bit alignment.
ulan 2012/10/22 09:18:25 Shouldn't this assert be after we check for NONE64
+     RelocInfo& rinfo = pending_reloc_info_[i];
+
+     if (rinfo.rmode() != RelocInfo::NONE64) {
+       // 32-bit values emitted later.
+       continue;
+     }
+
+     Instr instr = instr_at(rinfo.pc());
+     // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
+     ASSERT((IsVldrDPcImmediateOffset(instr) &&
+             GetVldrDRegisterImmediateOffset(instr) == 0));
+
+     int delta = pc_ - rinfo.pc() - kPcLoadDelta;
+     ASSERT(is_uint10(delta));
+
+     instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta));
+
+     const double double_data = rinfo.data64();
+     uint64_t uint_data = 0;
+     memcpy(&uint_data, &double_data, sizeof(double_data));
+     emit(uint_data & 0xFFFFFFFF);
+     emit(uint_data >> 32);
+   }
+
+   // Emit 32-bit constant pool entries.
    for (int i = 0; i < num_pending_reloc_info_; i++) {
      RelocInfo& rinfo = pending_reloc_info_[i];
      ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
             rinfo.rmode() != RelocInfo::POSITION &&
             rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
             rinfo.rmode() != RelocInfo::CONST_POOL);

+     if (rinfo.rmode() == RelocInfo::NONE64) {
+       // 64-bit values emitted earlier.
+       continue;
+     }
+
      Instr instr = instr_at(rinfo.pc());
      // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0.
-     ASSERT(IsLdrPcImmediateOffset(instr) &&
-            GetLdrRegisterImmediateOffset(instr) == 0);
+     ASSERT((IsLdrPcImmediateOffset(instr) &&
+             GetLdrRegisterImmediateOffset(instr) == 0));

      int delta = pc_ - rinfo.pc() - kPcLoadDelta;
      // 0 is the smallest delta:
      //   ldr rd, [pc, #0]
      //   constant pool marker
      //   data
      ASSERT(is_uint12(delta));

      instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta));
      emit(rinfo.data());
    }

    num_pending_reloc_info_ = 0;
+   num_pending_64_bit_reloc_info_ = 0;
    first_const_pool_use_ = -1;

    RecordComment("]");

    if (after_pool.is_linked()) {
      bind(&after_pool);
    }
  }

  // Since a constant pool was just emitted, move the check offset forward by
  // the standard interval.
  next_buffer_check_ = pc_offset() + kCheckPoolInterval;
}
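Note on the review question above about kDoubleSize / 2: each pending entry, including a 64-bit one, is first counted as kPointerSize in the num_pending_reloc_info_ term of the size computation, so a 64-bit entry only needs its second half added afterwards. A standalone arithmetic check (illustrative sketch; assumes the ARM values kInstrSize = kPointerSize = 4 and kDoubleSize = 8, and omits the optional alignment pad):

    #include <cassert>

    int main() {
      const int kInstrSize = 4, kPointerSize = 4, kDoubleSize = 8;  // ARM values
      const int num_pending = 3;     // two 32-bit entries and one 64-bit entry...
      const int num_pending_64 = 1;  // ...which is counted in both totals
      const bool require_jump = true;

      int jump_instr = require_jump ? kInstrSize : 0;
      // Every pending entry, 32- or 64-bit, is first counted as one pointer.
      int size = kInstrSize + jump_instr + num_pending * kPointerSize;
      // A 64-bit entry needs kDoubleSize bytes but already contributed
      // kPointerSize above, so only the missing half is added here.
      size += num_pending_64 * (kDoubleSize / 2);
      int marker_num = (size - kInstrSize - jump_instr) / 4;

      assert(size == 24);       // jump + marker + 2*4 + 8
      assert(marker_num == 4);  // pool body in words: 2 + 2
      return 0;
    }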


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM
