Chromium Code Reviews

Side by Side Diff: src/arm/assembler-arm.cc

Issue 11191029: Use VLDR instead of VMOVs from GPR when a 64-bit double can't be encoded as a VMOV immediate. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix comment by ulan: remove badly merged code (redundant). Created 8 years ago
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. 1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved. 2 // All Rights Reserved.
3 // 3 //
4 // Redistribution and use in source and binary forms, with or without 4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions 5 // modification, are permitted provided that the following conditions
6 // are met: 6 // are met:
7 // 7 //
8 // - Redistributions of source code must retain the above copyright notice, 8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer. 9 // this list of conditions and the following disclaimer.
10 // 10 //
(...skipping 264 matching lines...)
275 // register r is not encoded. 275 // register r is not encoded.
276 const Instr kPushRegPattern = 276 const Instr kPushRegPattern =
277 al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16; 277 al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16;
278 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r)) 278 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r))
279 // register r is not encoded. 279 // register r is not encoded.
280 const Instr kPopRegPattern = 280 const Instr kPopRegPattern =
281 al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16; 281 al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16;
282 // mov lr, pc 282 // mov lr, pc
283 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12; 283 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12;
284 // ldr rd, [pc, #offset] 284 // ldr rd, [pc, #offset]
285 const Instr kLdrPCMask = kCondMask | 15 * B24 | 7 * B20 | 15 * B16; 285 const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16;
286 const Instr kLdrPCPattern = al | 5 * B24 | L | kRegister_pc_Code * B16; 286 const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16;
287 // vldr dd, [pc, #offset]
288 const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
289 const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8;
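For readers decoding these constants: assuming V8's usual convention that Bn is the single-bit value 1 << n, that L is the load bit B20, and that pc encodes as register 15, the vldr mask/pattern pair works out to fixed hex words. A minimal derivation sketch (computed values, not quoted from the file):

    // kVldrDPCMask    = 15*B24 | 3*B20 | 15*B16 | 15*B8 = 0x0F3F0F00
    // kVldrDPCPattern = 13*B24 | L     | 15*B16 | 11*B8 = 0x0D1F0B00
    // 15*B16 pins the base register Rn to pc; 11*B8 selects the
    // double-precision form of vldr.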
287 // blxcc rm 290 // blxcc rm
288 const Instr kBlxRegMask = 291 const Instr kBlxRegMask =
289 15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4; 292 15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4;
290 const Instr kBlxRegPattern = 293 const Instr kBlxRegPattern =
291 B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX; 294 B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX;
292 const Instr kBlxIp = al | kBlxRegPattern | ip.code(); 295 const Instr kBlxIp = al | kBlxRegPattern | ip.code();
293 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16; 296 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16;
294 const Instr kMovMvnPattern = 0xd * B21; 297 const Instr kMovMvnPattern = 0xd * B21;
295 const Instr kMovMvnFlip = B22; 298 const Instr kMovMvnFlip = B22;
296 const Instr kMovLeaveCCMask = 0xdff * B16; 299 const Instr kMovLeaveCCMask = 0xdff * B16;
(...skipping 20 matching lines...)
317 const Instr kLdrStrInstrArgumentMask = 0x0000ffff; 320 const Instr kLdrStrInstrArgumentMask = 0x0000ffff;
318 const Instr kLdrStrOffsetMask = 0x00000fff; 321 const Instr kLdrStrOffsetMask = 0x00000fff;
319 322
320 323
321 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) 324 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
322 : AssemblerBase(isolate, buffer, buffer_size), 325 : AssemblerBase(isolate, buffer, buffer_size),
323 recorded_ast_id_(TypeFeedbackId::None()), 326 recorded_ast_id_(TypeFeedbackId::None()),
324 positions_recorder_(this) { 327 positions_recorder_(this) {
325 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); 328 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
326 num_pending_reloc_info_ = 0; 329 num_pending_reloc_info_ = 0;
330 num_pending_64_bit_reloc_info_ = 0;
327 next_buffer_check_ = 0; 331 next_buffer_check_ = 0;
328 const_pool_blocked_nesting_ = 0; 332 const_pool_blocked_nesting_ = 0;
329 no_const_pool_before_ = 0; 333 no_const_pool_before_ = 0;
330 first_const_pool_use_ = -1; 334 first_const_pool_use_ = -1;
331 last_bound_pos_ = 0; 335 last_bound_pos_ = 0;
332 ClearRecordedAstId(); 336 ClearRecordedAstId();
333 } 337 }
334 338
335 339
336 Assembler::~Assembler() { 340 Assembler::~Assembler() {
337 ASSERT(const_pool_blocked_nesting_ == 0); 341 ASSERT(const_pool_blocked_nesting_ == 0);
338 } 342 }
339 343
340 344
341 void Assembler::GetCode(CodeDesc* desc) { 345 void Assembler::GetCode(CodeDesc* desc) {
342 // Emit constant pool if necessary. 346 // Emit constant pool if necessary.
343 CheckConstPool(true, false); 347 CheckConstPool(true, false);
344 ASSERT(num_pending_reloc_info_ == 0); 348 ASSERT(num_pending_reloc_info_ == 0);
349 ASSERT(num_pending_64_bit_reloc_info_ == 0);
345 350
346 // Set up code descriptor. 351 // Set up code descriptor.
347 desc->buffer = buffer_; 352 desc->buffer = buffer_;
348 desc->buffer_size = buffer_size_; 353 desc->buffer_size = buffer_size_;
349 desc->instr_size = pc_offset(); 354 desc->instr_size = pc_offset();
350 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); 355 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
351 } 356 }
352 357
353 358
354 void Assembler::Align(int m) { 359 void Assembler::Align(int m) {
(...skipping 26 matching lines...)
381 // with 4 to get the offset in bytes. 386 // with 4 to get the offset in bytes.
382 return ((instr & kImm24Mask) << 8) >> 6; 387 return ((instr & kImm24Mask) << 8) >> 6;
383 } 388 }
384 389
385 390
386 bool Assembler::IsLdrRegisterImmediate(Instr instr) { 391 bool Assembler::IsLdrRegisterImmediate(Instr instr) {
387 return (instr & (B27 | B26 | B25 | B22 | B20)) == (B26 | B20); 392 return (instr & (B27 | B26 | B25 | B22 | B20)) == (B26 | B20);
388 } 393 }
389 394
390 395
396 bool Assembler::IsVldrDRegisterImmediate(Instr instr) {
397 return (instr & (15 * B24 | 3 * B20 | 15 * B8)) == (13 * B24 | B20 | 11 * B8);
398 }
399
400
391 int Assembler::GetLdrRegisterImmediateOffset(Instr instr) { 401 int Assembler::GetLdrRegisterImmediateOffset(Instr instr) {
392 ASSERT(IsLdrRegisterImmediate(instr)); 402 ASSERT(IsLdrRegisterImmediate(instr));
393 bool positive = (instr & B23) == B23; 403 bool positive = (instr & B23) == B23;
394 int offset = instr & kOff12Mask; // Zero extended offset. 404 int offset = instr & kOff12Mask; // Zero extended offset.
395 return positive ? offset : -offset; 405 return positive ? offset : -offset;
396 } 406 }
397 407
398 408
409 int Assembler::GetVldrDRegisterImmediateOffset(Instr instr) {
410 ASSERT(IsVldrDRegisterImmediate(instr));
411 bool positive = (instr & B23) == B23;
412 int offset = instr & kOff8Mask; // Zero extended offset.
413 offset <<= 2;
414 return positive ? offset : -offset;
415 }
416
417
399 Instr Assembler::SetLdrRegisterImmediateOffset(Instr instr, int offset) { 418 Instr Assembler::SetLdrRegisterImmediateOffset(Instr instr, int offset) {
400 ASSERT(IsLdrRegisterImmediate(instr)); 419 ASSERT(IsLdrRegisterImmediate(instr));
401 bool positive = offset >= 0; 420 bool positive = offset >= 0;
402 if (!positive) offset = -offset; 421 if (!positive) offset = -offset;
403 ASSERT(is_uint12(offset)); 422 ASSERT(is_uint12(offset));
404 // Set bit indicating whether the offset should be added. 423 // Set bit indicating whether the offset should be added.
405 instr = (instr & ~B23) | (positive ? B23 : 0); 424 instr = (instr & ~B23) | (positive ? B23 : 0);
406 // Set the actual offset. 425 // Set the actual offset.
407 return (instr & ~kOff12Mask) | offset; 426 return (instr & ~kOff12Mask) | offset;
408 } 427 }
409 428
410 429
430 Instr Assembler::SetVldrDRegisterImmediateOffset(Instr instr, int offset) {
431 ASSERT(IsVldrDRegisterImmediate(instr));
432 ASSERT((offset & ~3) == offset); // Must be 64-bit aligned.
433 bool positive = offset >= 0;
434 if (!positive) offset = -offset;
435 ASSERT(is_uint10(offset));
436 // Set bit indicating whether the offset should be added.
437 instr = (instr & ~B23) | (positive ? B23 : 0);
438 // Set the actual offset. Its bottom 2 bits are zero.
439 return (instr & ~kOff8Mask) | (offset >> 2);
440 }
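As a sanity check on the word-offset encoding above: the imm8 field stores the byte offset divided by 4, so the reach is at most 255 * 4 = 1020 bytes, and offsets must be multiples of 4. A hedged round-trip sketch (the instruction word and the static-call style are illustrative assumptions, mirroring the ldr helpers):

    // Illustrative only; assumes the definitions shown above.
    Instr vldr_instr = al | kVldrDPCPattern;  // skeleton vldr d0, [pc, #0]
    Instr patched =
        Assembler::SetVldrDRegisterImmediateOffset(vldr_instr, 1016);
    // 1016 >> 2 == 254 fits in imm8; decoding restores the byte offset.
    ASSERT(Assembler::GetVldrDRegisterImmediateOffset(patched) == 1016);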
441
442
411 bool Assembler::IsStrRegisterImmediate(Instr instr) { 443 bool Assembler::IsStrRegisterImmediate(Instr instr) {
412 return (instr & (B27 | B26 | B25 | B22 | B20)) == B26; 444 return (instr & (B27 | B26 | B25 | B22 | B20)) == B26;
413 } 445 }
414 446
415 447
416 Instr Assembler::SetStrRegisterImmediateOffset(Instr instr, int offset) { 448 Instr Assembler::SetStrRegisterImmediateOffset(Instr instr, int offset) {
417 ASSERT(IsStrRegisterImmediate(instr)); 449 ASSERT(IsStrRegisterImmediate(instr));
418 bool positive = offset >= 0; 450 bool positive = offset >= 0;
419 if (!positive) offset = -offset; 451 if (!positive) offset = -offset;
420 ASSERT(is_uint12(offset)); 452 ASSERT(is_uint12(offset));
(...skipping 65 matching lines...)
486 518
487 519
488 bool Assembler::IsLdrRegFpNegOffset(Instr instr) { 520 bool Assembler::IsLdrRegFpNegOffset(Instr instr) {
489 return ((instr & kLdrStrInstrTypeMask) == kLdrRegFpNegOffsetPattern); 521 return ((instr & kLdrStrInstrTypeMask) == kLdrRegFpNegOffsetPattern);
490 } 522 }
491 523
492 524
493 bool Assembler::IsLdrPcImmediateOffset(Instr instr) { 525 bool Assembler::IsLdrPcImmediateOffset(Instr instr) {
494 // Check the instruction is indeed a 526 // Check the instruction is indeed a
495 // ldr<cond> <Rd>, [pc +/- offset_12]. 527 // ldr<cond> <Rd>, [pc +/- offset_12].
496 return (instr & (kLdrPCMask & ~kCondMask)) == 0x051f0000; 528 return (instr & kLdrPCMask) == kLdrPCPattern;
497 } 529 }
498 530
499 531
532 bool Assembler::IsVldrDPcImmediateOffset(Instr instr) {
533 // Check the instruction is indeed a
534 // vldr<cond> <Dd>, [pc +/- offset_10].
535 return (instr & kVldrDPCMask) == kVldrDPCPattern;
536 }
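A concrete example of what this predicate accepts, assuming the standard ARM VFP encoding of the vldr literal form (ARM DDI 0406): vldr d0, [pc, #0] with cond = al and the U (add) bit set assembles to 0xED9F0B00, and

    // 0xED9F0B00 & kVldrDPCMask (0x0F3F0F00) == 0x0D1F0B00
    //                                         == kVldrDPCPattern

so the test matches independently of the condition code, destination register, U bit, and offset.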
537
538
500 bool Assembler::IsTstImmediate(Instr instr) { 539 bool Assembler::IsTstImmediate(Instr instr) {
501 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) == 540 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) ==
502 (I | TST | S); 541 (I | TST | S);
503 } 542 }
504 543
505 544
506 bool Assembler::IsCmpRegister(Instr instr) { 545 bool Assembler::IsCmpRegister(Instr instr) {
507 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) == 546 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) ==
508 (CMP | S); 547 (CMP | S);
509 } 548 }
(...skipping 252 matching lines...)
762 // encoded. 801 // encoded.
763 bool Operand::must_output_reloc_info(const Assembler* assembler) const { 802 bool Operand::must_output_reloc_info(const Assembler* assembler) const {
764 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { 803 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) {
765 #ifdef DEBUG 804 #ifdef DEBUG
766 if (!Serializer::enabled()) { 805 if (!Serializer::enabled()) {
767 Serializer::TooLateToEnableNow(); 806 Serializer::TooLateToEnableNow();
768 } 807 }
769 #endif // def DEBUG 808 #endif // def DEBUG
770 if (assembler != NULL && assembler->predictable_code_size()) return true; 809 if (assembler != NULL && assembler->predictable_code_size()) return true;
771 return Serializer::enabled(); 810 return Serializer::enabled();
772 } else if (rmode_ == RelocInfo::NONE) { 811 } else if (RelocInfo::IsNone(rmode_)) {
773 return false; 812 return false;
774 } 813 }
775 return true; 814 return true;
776 } 815 }
777 816
778 817
779 static bool use_movw_movt(const Operand& x, const Assembler* assembler) { 818 static bool use_movw_movt(const Operand& x, const Assembler* assembler) {
780 if (Assembler::use_immediate_embedded_pointer_loads(assembler)) { 819 if (Assembler::use_immediate_embedded_pointer_loads(assembler)) {
781 return true; 820 return true;
782 } 821 }
(...skipping 1210 matching lines...)
1993 const Register scratch, 2032 const Register scratch,
1994 const Condition cond) { 2033 const Condition cond) {
1995 // Dd = immediate 2034 // Dd = immediate
1996 // Instruction details available in ARM DDI 0406B, A8-640. 2035 // Instruction details available in ARM DDI 0406B, A8-640.
1997 ASSERT(CpuFeatures::IsEnabled(VFP2)); 2036 ASSERT(CpuFeatures::IsEnabled(VFP2));
1998 2037
1999 uint32_t enc; 2038 uint32_t enc;
2000 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) { 2039 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
2001 // The double can be encoded in the instruction. 2040 // The double can be encoded in the instruction.
2002 emit(cond | 0xE*B24 | 0xB*B20 | dst.code()*B12 | 0xB*B8 | enc); 2041 emit(cond | 0xE*B24 | 0xB*B20 | dst.code()*B12 | 0xB*B8 | enc);
2042 } else if (FLAG_enable_vldr_imm) {
2043 // TODO(jfb) Temporarily turned off until we have constant blinding or
2044 // some equivalent mitigation: an attacker can otherwise control
2045 // generated data which also happens to be executable, a Very Bad
2046 // Thing indeed.
2047 // Blinding gets tricky because we don't have xor, we probably
2048 // need to add/subtract without losing precision, which requires a
2049 // cookie value that Lithium is probably better positioned to
2050 // choose.
2051 // We could also add a few peepholes here like detecting 0.0 and
2052 // -0.0 and doing a vmov from the sequestered d14, forcing denorms
2053 // to zero (we set flush-to-zero), and normalizing NaN values.
2054 // We could also detect redundant values.
2055 // The code could also randomize the order of values, though
2056 // that's tricky because vldr has a limited reach. Furthermore
2057 // it breaks load locality.
2058 RecordRelocInfo(imm);
2059 vldr(dst, MemOperand(pc, 0), cond);
2003 } else { 2060 } else {
2004 // Synthesise the double from ARM immediates. This could be implemented 2061 // Synthesise the double from ARM immediates.
2005 // using vldr from a constant pool.
2006 uint32_t lo, hi; 2062 uint32_t lo, hi;
2007 DoubleAsTwoUInt32(imm, &lo, &hi); 2063 DoubleAsTwoUInt32(imm, &lo, &hi);
2008 mov(ip, Operand(lo)); 2064 mov(ip, Operand(lo));
2009 2065
2010 if (scratch.is(no_reg)) { 2066 if (scratch.is(no_reg)) {
2011 // Move the low part of the double into the lower of the corresponding S 2067 // Move the low part of the double into the lower of the corresponding S
2012 // registers of D register dst. 2068 // registers of D register dst.
2013 vmov(dst.low(), ip, cond); 2069 vmov(dst.low(), ip, cond);
2014 2070
2015 // Move the high part of the double into the higher of the corresponding S 2071 // Move the high part of the double into the higher of the corresponding S
(...skipping 542 matching lines...)
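The FLAG_enable_vldr_imm path shown above relies on the patching machinery later in this file: it queues the double and emits a vldr with a zero offset as a placeholder, which CheckConstPool rewrites once the pool's position is known. A simplified restatement of the pairing (condensed from this diff, for illustration):

    RecordRelocInfo(imm);                // pend a NONE64 constant pool entry
    vldr(dst, MemOperand(pc, 0), cond);  // placeholder offset, patched later
    // ... later, in CheckConstPool, for each 64-bit entry:
    // instr_at_put(rinfo.pc(),
    //              SetVldrDRegisterImmediateOffset(instr, delta));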
2558 } 2614 }
2559 } 2615 }
2560 } 2616 }
2561 2617
2562 2618
2563 void Assembler::db(uint8_t data) { 2619 void Assembler::db(uint8_t data) {
2564 // No relocation info should be pending while using db. db is used 2620 // No relocation info should be pending while using db. db is used
2565 // to write pure data with no pointers and the constant pool should 2621 // to write pure data with no pointers and the constant pool should
2566 // be emitted before using db. 2622 // be emitted before using db.
2567 ASSERT(num_pending_reloc_info_ == 0); 2623 ASSERT(num_pending_reloc_info_ == 0);
2624 ASSERT(num_pending_64_bit_reloc_info_ == 0);
2568 CheckBuffer(); 2625 CheckBuffer();
2569 *reinterpret_cast<uint8_t*>(pc_) = data; 2626 *reinterpret_cast<uint8_t*>(pc_) = data;
2570 pc_ += sizeof(uint8_t); 2627 pc_ += sizeof(uint8_t);
2571 } 2628 }
2572 2629
2573 2630
2574 void Assembler::dd(uint32_t data) { 2631 void Assembler::dd(uint32_t data) {
2575 // No relocation info should be pending while using dd. dd is used 2632 // No relocation info should be pending while using dd. dd is used
2576 // to write pure data with no pointers and the constant pool should 2633 // to write pure data with no pointers and the constant pool should
2577 // be emitted before using dd. 2634 // be emitted before using dd.
2578 ASSERT(num_pending_reloc_info_ == 0); 2635 ASSERT(num_pending_reloc_info_ == 0);
2636 ASSERT(num_pending_64_bit_reloc_info_ == 0);
2579 CheckBuffer(); 2637 CheckBuffer();
2580 *reinterpret_cast<uint32_t*>(pc_) = data; 2638 *reinterpret_cast<uint32_t*>(pc_) = data;
2581 pc_ += sizeof(uint32_t); 2639 pc_ += sizeof(uint32_t);
2582 } 2640 }
2583 2641
2584 2642
2585 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data, 2643 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data,
2586 UseConstantPoolMode mode) { 2644 UseConstantPoolMode mode) {
2587 // We do not try to reuse pool constants. 2645 // We do not try to reuse pool constants.
2588 RelocInfo rinfo(pc_, rmode, data, NULL); 2646 RelocInfo rinfo(pc_, rmode, data, NULL);
2589 if (((rmode >= RelocInfo::JS_RETURN) && 2647 if (((rmode >= RelocInfo::JS_RETURN) &&
2590 (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) || 2648 (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) ||
2591 (rmode == RelocInfo::CONST_POOL) || 2649 (rmode == RelocInfo::CONST_POOL) ||
2592 mode == DONT_USE_CONSTANT_POOL) { 2650 mode == DONT_USE_CONSTANT_POOL) {
2593 // Adjust code for new modes. 2651 // Adjust code for new modes.
2594 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) 2652 ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
2595 || RelocInfo::IsJSReturn(rmode) 2653 || RelocInfo::IsJSReturn(rmode)
2596 || RelocInfo::IsComment(rmode) 2654 || RelocInfo::IsComment(rmode)
2597 || RelocInfo::IsPosition(rmode) 2655 || RelocInfo::IsPosition(rmode)
2598 || RelocInfo::IsConstPool(rmode) 2656 || RelocInfo::IsConstPool(rmode)
2599 || mode == DONT_USE_CONSTANT_POOL); 2657 || mode == DONT_USE_CONSTANT_POOL);
2600 // These modes do not need an entry in the constant pool. 2658 // These modes do not need an entry in the constant pool.
2601 } else { 2659 } else {
2602 ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo); 2660 RecordRelocInfoConstantPoolEntryHelper(rinfo);
2603 if (num_pending_reloc_info_ == 0) {
2604 first_const_pool_use_ = pc_offset();
2605 }
2606 pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
2607 // Make sure the constant pool is not emitted in place of the next
2608 // instruction for which we just recorded relocation info.
2609 BlockConstPoolFor(1);
2610 } 2661 }
2611 if (rinfo.rmode() != RelocInfo::NONE) { 2662 if (!RelocInfo::IsNone(rinfo.rmode())) {
2612 // Don't record external references unless the heap will be serialized. 2663 // Don't record external references unless the heap will be serialized.
2613 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { 2664 if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
2614 #ifdef DEBUG 2665 #ifdef DEBUG
2615 if (!Serializer::enabled()) { 2666 if (!Serializer::enabled()) {
2616 Serializer::TooLateToEnableNow(); 2667 Serializer::TooLateToEnableNow();
2617 } 2668 }
2618 #endif 2669 #endif
2619 if (!Serializer::enabled() && !emit_debug_code()) { 2670 if (!Serializer::enabled() && !emit_debug_code()) {
2620 return; 2671 return;
2621 } 2672 }
2622 } 2673 }
2623 ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here 2674 ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here
2624 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { 2675 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
2625 RelocInfo reloc_info_with_ast_id(pc_, 2676 RelocInfo reloc_info_with_ast_id(pc_,
2626 rmode, 2677 rmode,
2627 RecordedAstId().ToInt(), 2678 RecordedAstId().ToInt(),
2628 NULL); 2679 NULL);
2629 ClearRecordedAstId(); 2680 ClearRecordedAstId();
2630 reloc_info_writer.Write(&reloc_info_with_ast_id); 2681 reloc_info_writer.Write(&reloc_info_with_ast_id);
2631 } else { 2682 } else {
2632 reloc_info_writer.Write(&rinfo); 2683 reloc_info_writer.Write(&rinfo);
2633 } 2684 }
2634 } 2685 }
2635 } 2686 }
2636 2687
2688 void Assembler::RecordRelocInfo(double data) {
2689 // We do not try to reuse pool constants.
2690 RelocInfo rinfo(pc_, data);
2691 RecordRelocInfoConstantPoolEntryHelper(rinfo);
2692 }
2693
2694
2695 void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) {
2696 ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
2697 if (num_pending_reloc_info_ == 0) {
2698 first_const_pool_use_ = pc_offset();
2699 }
2700 pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
2701 if (rinfo.rmode() == RelocInfo::NONE64) {
2702 ++num_pending_64_bit_reloc_info_;
2703 }
2704 ASSERT(num_pending_64_bit_reloc_info_ <= num_pending_reloc_info_);
2705 // Make sure the constant pool is not emitted in place of the next
2706 // instruction for which we just recorded relocation info.
2707 BlockConstPoolFor(1);
2708 }
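Note the bookkeeping this helper maintains: 64-bit entries live in the same pending_reloc_info_ array as 32-bit ones, tagged only by the NONE64 mode, and the dedicated counter exists so CheckConstPool can size and align the pool without rescanning. A hedged restatement of the invariant:

    // Illustrative restatement, not code from the patch:
    //   0 <= num_pending_64_bit_reloc_info_ <= num_pending_reloc_info_
    // RecordRelocInfo(double) queues an entry with rmode NONE64 and bumps
    // both counters; every other pending entry bumps only the total.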
2709
2637 2710
2638 void Assembler::BlockConstPoolFor(int instructions) { 2711 void Assembler::BlockConstPoolFor(int instructions) {
2639 int pc_limit = pc_offset() + instructions * kInstrSize; 2712 int pc_limit = pc_offset() + instructions * kInstrSize;
2640 if (no_const_pool_before_ < pc_limit) { 2713 if (no_const_pool_before_ < pc_limit) {
2641 // If there are some pending entries, the constant pool cannot be blocked 2714 // If there are some pending entries, the constant pool cannot be blocked
2642 // further than first_const_pool_use_ + kMaxDistToPool 2715 // further than the constant pool instruction's reach.
2643 ASSERT((num_pending_reloc_info_ == 0) || 2716 ASSERT((num_pending_reloc_info_ == 0) ||
2644 (pc_limit < (first_const_pool_use_ + kMaxDistToPool))); 2717 (pc_limit - first_const_pool_use_ < kMaxDistToIntPool));
2718 // TODO(jfb) Also check 64-bit entries are in range (requires splitting
2719 // them up from 32-bit entries).
2645 no_const_pool_before_ = pc_limit; 2720 no_const_pool_before_ = pc_limit;
2646 } 2721 }
2647 2722
2648 if (next_buffer_check_ < no_const_pool_before_) { 2723 if (next_buffer_check_ < no_const_pool_before_) {
2649 next_buffer_check_ = no_const_pool_before_; 2724 next_buffer_check_ = no_const_pool_before_;
2650 } 2725 }
2651 } 2726 }
2652 2727
2653 2728
2654 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { 2729 void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
2655 // Some short sequences of instructions must not be broken up by constant pool 2730 // Some short sequences of instructions must not be broken up by constant pool
2656 // emission; such sequences are protected by calls to BlockConstPoolFor and 2731 // emission; such sequences are protected by calls to BlockConstPoolFor and
2657 // BlockConstPoolScope. 2732 // BlockConstPoolScope.
2658 if (is_const_pool_blocked()) { 2733 if (is_const_pool_blocked()) {
2659 // Something is wrong if emission is forced and blocked at the same time. 2734 // Something is wrong if emission is forced and blocked at the same time.
2660 ASSERT(!force_emit); 2735 ASSERT(!force_emit);
2661 return; 2736 return;
2662 } 2737 }
2663 2738
2664 // There is nothing to do if there are no pending constant pool entries. 2739 // There is nothing to do if there are no pending constant pool entries.
2665 if (num_pending_reloc_info_ == 0) { 2740 if (num_pending_reloc_info_ == 0) {
2741 ASSERT(num_pending_64_bit_reloc_info_ == 0);
2666 // Calculate the offset of the next check. 2742 // Calculate the offset of the next check.
2667 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 2743 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
2668 return; 2744 return;
2669 } 2745 }
2670 2746
2671 // We emit a constant pool when:
2672 // * requested to do so by parameter force_emit (e.g. after each function).
2673 // * the distance to the first instruction accessing the constant pool is
2674 // kAvgDistToPool or more.
2675 // * no jump is required and the distance to the first instruction accessing
2676 // the constant pool is at least kMaxDistToPool / 2.
2677 ASSERT(first_const_pool_use_ >= 0);
2678 int dist = pc_offset() - first_const_pool_use_;
2679 if (!force_emit && dist < kAvgDistToPool &&
2680 (require_jump || (dist < (kMaxDistToPool / 2)))) {
2681 return;
2682 }
2683
2684 // Check that the code buffer is large enough before emitting the constant 2747 // Check that the code buffer is large enough before emitting the constant
2685 // pool (including the jump over the pool and the constant pool marker and 2748 // pool (including the jump over the pool and the constant pool marker and
2686 // the gap to the relocation information). 2749 // the gap to the relocation information).
2750 // Note 64-bit values are wider, and the first one needs to be 64-bit aligned.
2687 int jump_instr = require_jump ? kInstrSize : 0; 2751 int jump_instr = require_jump ? kInstrSize : 0;
2688 int size = jump_instr + kInstrSize + num_pending_reloc_info_ * kPointerSize; 2752 int size_up_to_marker = jump_instr + kInstrSize;
2753 int size_after_marker = num_pending_reloc_info_ * kPointerSize;
2754 bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0);
2755 // 64-bit values must be 64-bit aligned.
2756 // We'll start emitting at PC: branch+marker, then 32-bit values, then
2757 // 64-bit values which might need to be aligned.
2758 bool require_64_bit_align = has_fp_values &&
2759 (((uintptr_t)pc_ + size_up_to_marker + size_after_marker) & 0x3);
2760 if (require_64_bit_align) {
2761 size_after_marker += kInstrSize;
2762 }
2763 // num_pending_reloc_info_ also contains 64-bit entries, the above code
2764 // therefore already counted half of the size for 64-bit entries. Add the
2765 // remaining size.
2766 STATIC_ASSERT(kPointerSize == kDoubleSize / 2);
2767 size_after_marker += num_pending_64_bit_reloc_info_ * (kDoubleSize / 2);
2768
2769 int size = size_up_to_marker + size_after_marker;
2770
2771 // We emit a constant pool when:
2772 // * requested to do so by parameter force_emit (e.g. after each function).
2773 // * the distance from the first instruction accessing the constant pool to
2774 // any of the constant pool entries will exceed its limit the next
2775 // time the pool is checked. This is overly restrictive, but we don't emit
2776 // constant pool entries in-order so it's conservatively correct.
2777 // * the instruction doesn't require a jump after itself to jump over the
2778 // constant pool, and we're getting close to running out of range.
2779 if (!force_emit) {
2780 ASSERT((first_const_pool_use_ >= 0) && (num_pending_reloc_info_ > 0));
2781 int dist = pc_offset() + size - first_const_pool_use_;
2782 if (has_fp_values) {
2783 if ((dist < kMaxDistToFPPool - kCheckPoolInterval) &&
2784 (require_jump || (dist < kMaxDistToFPPool / 2))) {
2785 return;
2786 }
2787 } else {
2788 if ((dist < kMaxDistToIntPool - kCheckPoolInterval) &&
2789 (require_jump || (dist < kMaxDistToIntPool / 2))) {
2790 return;
2791 }
2792 }
2793 }
2794
2689 int needed_space = size + kGap; 2795 int needed_space = size + kGap;
2690 while (buffer_space() <= needed_space) GrowBuffer(); 2796 while (buffer_space() <= needed_space) GrowBuffer();
2691 2797
2692 { 2798 {
2693 // Block recursive calls to CheckConstPool. 2799 // Block recursive calls to CheckConstPool.
2694 BlockConstPoolScope block_const_pool(this); 2800 BlockConstPoolScope block_const_pool(this);
2695 RecordComment("[ Constant Pool"); 2801 RecordComment("[ Constant Pool");
2696 RecordConstPool(size); 2802 RecordConstPool(size);
2697 2803
2698 // Emit jump over constant pool if necessary. 2804 // Emit jump over constant pool if necessary.
2699 Label after_pool; 2805 Label after_pool;
2700 if (require_jump) { 2806 if (require_jump) {
2701 b(&after_pool); 2807 b(&after_pool);
2702 } 2808 }
2703 2809
2704 // Put down constant pool marker "Undefined instruction". 2810 // Put down constant pool marker "Undefined instruction".
2705 emit(kConstantPoolMarker | 2811 // The data size helps disassembly know what to print.
2706 EncodeConstantPoolLength(num_pending_reloc_info_)); 2812 emit(kConstantPoolMarker | EncodeConstantPoolLength(size_after_marker));
2707 2813
2708 // Emit constant pool entries. 2814 if (require_64_bit_align) {
2815 emit(kConstantPoolMarker);
2816 }
2817
2818 // Emit 64-bit constant pool entries first: their range is smaller than
2819 // 32-bit entries.
2820 for (int i = 0; i < num_pending_reloc_info_; i++) {
2821 RelocInfo& rinfo = pending_reloc_info_[i];
2822
2823 if (rinfo.rmode() != RelocInfo::NONE64) {
2824 // 32-bit values emitted later.
2825 continue;
2826 }
2827
2828 ASSERT(!((uintptr_t)pc_ & 0x3)); // Check 64-bit alignment.
2829
2830 Instr instr = instr_at(rinfo.pc());
2831 // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
2832 ASSERT((IsVldrDPcImmediateOffset(instr) &&
2833 GetVldrDRegisterImmediateOffset(instr) == 0));
2834
2835 int delta = pc_ - rinfo.pc() - kPcLoadDelta;
2836 ASSERT(is_uint10(delta));
2837
2838 instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta));
2839
2840 const double double_data = rinfo.data64();
2841 uint64_t uint_data = 0;
2842 memcpy(&uint_data, &double_data, sizeof(double_data));
2843 emit(uint_data & 0xFFFFFFFF);
2844 emit(uint_data >> 32);
2845 }
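    // Editor's note (hedged): emitting the low word first matches the
    // little-endian layout V8 targets on ARM, so the vldr patched above
    // reads back exactly the bytes memcpy'd from double_data.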
2846
2847 // Emit 32-bit constant pool entries.
2709 for (int i = 0; i < num_pending_reloc_info_; i++) { 2848 for (int i = 0; i < num_pending_reloc_info_; i++) {
2710 RelocInfo& rinfo = pending_reloc_info_[i]; 2849 RelocInfo& rinfo = pending_reloc_info_[i];
2711 ASSERT(rinfo.rmode() != RelocInfo::COMMENT && 2850 ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
2712 rinfo.rmode() != RelocInfo::POSITION && 2851 rinfo.rmode() != RelocInfo::POSITION &&
2713 rinfo.rmode() != RelocInfo::STATEMENT_POSITION && 2852 rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
2714 rinfo.rmode() != RelocInfo::CONST_POOL); 2853 rinfo.rmode() != RelocInfo::CONST_POOL);
2715 2854
2855 if (rinfo.rmode() == RelocInfo::NONE64) {
2856 // 64-bit values emitted earlier.
2857 continue;
2858 }
2859
2860 // 64-bit loads shouldn't get here.
2861 ASSERT(!IsVldrDPcImmediateOffset(instr));
ulan 2012/12/28 13:12:53 I'll move this after the instr initialization, oth
2862
2716 Instr instr = instr_at(rinfo.pc()); 2863 Instr instr = instr_at(rinfo.pc());
2717 // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. 2864 int delta = pc_ - rinfo.pc() - kPcLoadDelta;
2865 // 0 is the smallest delta:
2866 // ldr rd, [pc, #0]
2867 // constant pool marker
2868 // data
2869
2718 if (IsLdrPcImmediateOffset(instr) && 2870 if (IsLdrPcImmediateOffset(instr) &&
2719 GetLdrRegisterImmediateOffset(instr) == 0) { 2871 GetLdrRegisterImmediateOffset(instr) == 0) {
2720 int delta = pc_ - rinfo.pc() - kPcLoadDelta;
2721 // 0 is the smallest delta:
2722 // ldr rd, [pc, #0]
2723 // constant pool marker
2724 // data
2725 ASSERT(is_uint12(delta)); 2872 ASSERT(is_uint12(delta));
2726
2727 instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta)); 2873 instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta));
2874 emit(rinfo.data());
2728 } else { 2875 } else {
2729 ASSERT(IsMovW(instr)); 2876 ASSERT(IsMovW(instr));
2877 emit(rinfo.data());
2730 } 2878 }
2731 emit(rinfo.data());
2732 } 2879 }
2733 2880
2734 num_pending_reloc_info_ = 0; 2881 num_pending_reloc_info_ = 0;
2882 num_pending_64_bit_reloc_info_ = 0;
2735 first_const_pool_use_ = -1; 2883 first_const_pool_use_ = -1;
2736 2884
2737 RecordComment("]"); 2885 RecordComment("]");
2738 2886
2739 if (after_pool.is_linked()) { 2887 if (after_pool.is_linked()) {
2740 bind(&after_pool); 2888 bind(&after_pool);
2741 } 2889 }
2742 } 2890 }
2743 2891
2744 // Since a constant pool was just emitted, move the check offset forward by 2892 // Since a constant pool was just emitted, move the check offset forward by
2745 // the standard interval. 2893 // the standard interval.
2746 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 2894 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
2747 } 2895 }
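To make the sizing arithmetic in CheckConstPool concrete, a hedged worked example with invented numbers (assuming kInstrSize == 4, kPointerSize == 4, kDoubleSize == 8):

    // 3 pending entries, 1 of them 64-bit, jump required, padding needed:
    //   size_up_to_marker = 4 (branch) + 4 (marker)      =  8
    //   size_after_marker = 3 * kPointerSize             = 12
    //                     + 4 (64-bit alignment padding) = 16
    //                     + 1 * (kDoubleSize / 2)        = 20
    //   size              = 8 + 20                       = 28 bytes
    // i.e. branch, marker, pad, one 8-byte double, two 4-byte words.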
2748 2896
2749 2897
2750 } } // namespace v8::internal 2898 } } // namespace v8::internal
2751 2899
2752 #endif // V8_TARGET_ARCH_ARM 2900 #endif // V8_TARGET_ARCH_ARM