Chromium Code Reviews

Diff: src/arm/assembler-arm.cc

Issue 10824235: Fix the full compiler on ARM to always generate the same code (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 8 years, 4 months ago
 // Copyright (c) 1994-2006 Sun Microsystems Inc.
 // All Rights Reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions
 // are met:
 //
 // - Redistributions of source code must retain the above copyright notice,
 // this list of conditions and the following disclaimer.
 //
(...skipping 284 matching lines...)
 
 
 // Spare buffer.
 static const int kMinimalBufferSize = 4*KB;
 
 
 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
     : AssemblerBase(arg_isolate),
       recorded_ast_id_(TypeFeedbackId::None()),
       positions_recorder_(this),
-      emit_debug_code_(FLAG_debug_code) {
+      emit_debug_code_(FLAG_debug_code),
+      predictable_code_size_(false) {
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
       buffer_size = kMinimalBufferSize;
 
       if (isolate()->assembler_spare_buffer() != NULL) {
         buffer = isolate()->assembler_spare_buffer();
         isolate()->set_assembler_spare_buffer(NULL);
       }
     }
(...skipping 461 matching lines...)
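Note on the constructor chunk above: the only functional change is the new predictable_code_size_ member, initialized to false. Its declaration and accessor live in src/arm/assembler-arm.h (the companion file in this change), which is not shown here. A simplified, standalone sketch of the shape it presumably takes follows; the getter name is implied by the assembler->predictable_code_size() call site further down, while the setter name is an assumption for illustration only.

// Standalone sketch, not the real V8 class: the flag added by this patch,
// off by default, queried by code that must choose between size-varying and
// size-stable instruction sequences.
class AssemblerSketch {
 public:
  AssemblerSketch() : predictable_code_size_(false) {}

  bool predictable_code_size() const { return predictable_code_size_; }
  // Assumed setter; the actual patch may expose this differently.
  void set_predictable_code_size(bool value) { predictable_code_size_ = value; }

 private:
  bool predictable_code_size_;
};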
     }
   }
   return false;
 }
 
 
 // We have to use the temporary register for things that can be relocated even
 // if they can be encoded in the ARM's 12 bits of immediate-offset instruction
 // space. There is no guarantee that the relocated location can be similarly
 // encoded.
-bool Operand::must_use_constant_pool() const {
+bool Operand::must_use_constant_pool(const Assembler* assembler) const {
   if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) {
 #ifdef DEBUG
     if (!Serializer::enabled()) {
       Serializer::TooLateToEnableNow();
     }
 #endif  // def DEBUG
+    if (assembler != NULL && assembler->predictable_code_size()) return true;
     return Serializer::enabled();
   } else if (rmode_ == RelocInfo::NONE) {
     return false;
   }
   return true;
 }
 
 
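Note: must_use_constant_pool() now receives the assembler so it can consult predictable_code_size(). When that flag is set, an EXTERNAL_REFERENCE operand is always materialized through the constant pool, the same path taken when the serializer is enabled, so the emitted sequence has the same length whether or not snapshot serialization is on. A minimal standalone model of the new decision (simplified stand-in types, not the real V8 classes):

// Stand-ins for RelocInfo::Mode, Serializer and Assembler.
enum RelocMode { kNone, kExternalReference, kOtherRelocatable };

struct AssemblerModel {
  bool predictable_code_size;
};

// Mirrors the logic of Operand::must_use_constant_pool() above.
bool MustUseConstantPool(RelocMode rmode,
                         bool serializer_enabled,
                         const AssemblerModel* assembler) {
  if (rmode == kExternalReference) {
    // Forcing the pool here keeps the instruction count identical to the
    // serializer-enabled case.
    if (assembler != nullptr && assembler->predictable_code_size) return true;
    return serializer_enabled;
  } else if (rmode == kNone) {
    return false;  // a plain immediate never needs the pool
  }
  return true;  // every other relocatable operand always uses the pool
}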
-bool Operand::is_single_instruction(Instr instr) const {
+bool Operand::is_single_instruction(const Assembler* assembler,
+                                    Instr instr) const {
   if (rm_.is_valid()) return true;
   uint32_t dummy1, dummy2;
-  if (must_use_constant_pool() ||
+  if (must_use_constant_pool(assembler) ||
       !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) {
     // The immediate operand cannot be encoded as a shifter operand, or use of
     // constant pool is required. For a mov instruction not setting the
     // condition code additional instruction conventions can be used.
     if ((instr & ~kCondMask) == 13*B21) {  // mov, S not set
-      if (must_use_constant_pool() ||
+      if (must_use_constant_pool(assembler) ||
           !CpuFeatures::IsSupported(ARMv7)) {
         // mov instruction will be an ldr from constant pool (one instruction).
         return true;
       } else {
         // mov instruction will be a mov or movw followed by movt (two
         // instructions).
         return false;
       }
     } else {
       // If this is not a mov or mvn instruction there will always an additional
(...skipping 13 matching lines...)
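Note: the encodability test delegated to fits_shifter() above is the standard ARM modified-immediate check: a 32-bit value can sit in a data-processing instruction only if it is an 8-bit payload rotated right by an even amount. The standalone sketch below illustrates just that core test; the real fits_shifter() additionally tries flipping the opcode (mov/mvn, add/sub, and/bic) when the complemented or negated value fits, which is omitted here.

#include <cstdint>

// Returns true when imm32 can be encoded as an A32 data-processing immediate,
// i.e. an 8-bit payload rotated right by 2*rotate_imm bit positions.
bool FitsArmImmediate(uint32_t imm32, uint32_t* rotate_imm, uint32_t* immed_8) {
  for (uint32_t rot = 0; rot < 16; rot++) {
    uint32_t shift = 2 * rot;
    // Rotate imm32 left by `shift`; if the result fits in 8 bits, then
    // imm32 == immed_8 ROR shift.  (Guard the shift-by-32 case, which is
    // undefined behaviour for plain C++ shifts.)
    uint32_t rotated = shift == 0
        ? imm32
        : (imm32 << shift) | (imm32 >> (32 - shift));
    if (rotated <= 0xff) {
      *rotate_imm = rot;
      *immed_8 = rotated;
      return true;
    }
  }
  return false;  // needs movw/movt or a constant pool load instead
}

For example, 0x0000ab00 fits (payload 0xab, rotation 24, i.e. rotate_imm 12), while 0x12345678 does not and takes the two-instruction or constant-pool path decided above.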
 void Assembler::addrmod1(Instr instr,
                          Register rn,
                          Register rd,
                          const Operand& x) {
   CheckBuffer();
   ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0);
   if (!x.rm_.is_valid()) {
     // Immediate.
     uint32_t rotate_imm;
     uint32_t immed_8;
-    if (x.must_use_constant_pool() ||
+    if (x.must_use_constant_pool(this) ||
         !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) {
       // The immediate operand cannot be encoded as a shifter operand, so load
       // it first to register ip and change the original instruction to use ip.
       // However, if the original instruction is a 'mov rd, x' (not setting the
       // condition code), then replace it with a 'ldr rd, [pc]'.
       CHECK(!rn.is(ip));  // rn should never be ip, or will be trashed
       Condition cond = Instruction::ConditionField(instr);
       if ((instr & ~kCondMask) == 13*B21) {  // mov, S not set
-        if (x.must_use_constant_pool() ||
+        if (x.must_use_constant_pool(this) ||
             !CpuFeatures::IsSupported(ARMv7)) {
           RecordRelocInfo(x.rmode_, x.imm32_);
           ldr(rd, MemOperand(pc, 0), cond);
         } else {
           // Will probably use movw, will certainly not use constant pool.
           mov(rd, Operand(x.imm32_ & 0xffff), LeaveCC, cond);
           movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond);
         }
       } else {
         // If this is not a mov or mvn instruction we may still be able to avoid
         // a constant pool entry by using mvn or movw.
-        if (!x.must_use_constant_pool() &&
+        if (!x.must_use_constant_pool(this) &&
             (instr & kMovMvnMask) != kMovMvnPattern) {
           mov(ip, x, LeaveCC, cond);
         } else {
           RecordRelocInfo(x.rmode_, x.imm32_);
           ldr(ip, MemOperand(pc, 0), cond);
         }
         addrmod1(instr, rn, rd, Operand(ip));
       }
       return;
     }
(...skipping 504 matching lines...)
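Note: the addrmod1() chunk above is where the size difference actually materializes. A constant that must be relocatable (or cannot be encoded as a shifter operand) is emitted either as a single ldr rd, [pc, #offset] backed by a constant pool entry, or, on ARMv7 when the pool is not forced, as a movw/movt pair. Forcing must_use_constant_pool() to true under predictable_code_size() pins the choice to the first form, so repeated compilations produce sequences of identical length. A small illustrative sketch of that trade-off (names are illustrative, not V8 API):

// Cost of materializing a 32-bit constant in a register on ARM.
struct ConstantLoadCost {
  int instructions;  // instructions emitted inline
  int pool_words;    // words appended to the constant pool
};

ConstantLoadCost CostOfLoadingConstant(bool must_use_constant_pool,
                                       bool have_armv7) {
  if (must_use_constant_pool || !have_armv7) {
    return {1, 1};  // ldr rd, [pc, #offset] plus one constant pool entry
  }
  return {2, 0};    // movw rd, #lo16 followed by movt rd, #hi16
}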
 
 
 void Assembler::msr(SRegisterFieldMask fields, const Operand& src,
                     Condition cond) {
   ASSERT(fields >= B16 && fields < B20);  // at least one field set
   Instr instr;
   if (!src.rm_.is_valid()) {
     // Immediate.
     uint32_t rotate_imm;
     uint32_t immed_8;
-    if (src.must_use_constant_pool() ||
+    if (src.must_use_constant_pool(this) ||
         !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) {
       // Immediate operand cannot be encoded, load it first to register ip.
       RecordRelocInfo(src.rmode_, src.imm32_);
       ldr(ip, MemOperand(pc, 0), cond);
       msr(fields, Operand(ip), cond);
       return;
     }
     instr = I | rotate_imm*B8 | immed_8;
   } else {
     ASSERT(!src.rs_.is_valid() && src.shift_imm_ == 0);  // only rm allowed
(...skipping 1283 matching lines...)
 
   // Since a constant pool was just emitted, move the check offset forward by
   // the standard interval.
   next_buffer_check_ = pc_offset() + kCheckPoolInterval;
 }
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM
