Chromium Code Reviews

Unified Diff: src/arm/macro-assembler-arm.cc

Issue 14188016: ARM: clean up code now that ARMv6 is the baseline. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 8 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 33 matching lines...)
       generating_stub_(false),
       allow_stub_calls_(true),
       has_frame_(false) {
   if (isolate() != NULL) {
     code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                   isolate());
   }
 }


-// We always generate arm code, never thumb code, even if V8 is compiled to
-// thumb, so we require inter-working support
-#if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
-#error "flag -mthumb-interwork missing"
-#endif
-
-
-// We do not support thumb inter-working with an arm architecture not supporting
-// the blx instruction (below v5t). If you know what CPU you are compiling for
-// you can use -march=armv7 or similar.
-#if defined(USE_THUMB_INTERWORK) && !defined(CAN_USE_THUMB_INSTRUCTIONS)
-# error "For thumb inter-working we require an architecture which supports blx"
-#endif
-
-
-// Using bx does not yield better code, so use it only when required
-#if defined(USE_THUMB_INTERWORK)
-#define USE_BX 1
-#endif
-
-
 void MacroAssembler::Jump(Register target, Condition cond) {
-#if USE_BX
   bx(target, cond);
-#else
-  mov(pc, Operand(target), LeaveCC, cond);
-#endif
 }

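Note: keeping only the bx form matters for ARM/Thumb interworking: bx treats bit 0 of the target register as the instruction-set selector, while a plain mov pc, <reg> on older cores branches without switching state. A minimal C++ model of the bx target decoding (illustrative only; BxTarget and DecodeBx are hypothetical names, not V8 code):

    #include <cstdint>
    #include <cstdio>

    // Hypothetical model of the architectural bx rule: bit 0 of the
    // target selects Thumb state and is cleared from the branch address.
    struct BxTarget { uint32_t pc; bool thumb; };

    BxTarget DecodeBx(uint32_t rm) {
      return BxTarget{rm & ~1u, (rm & 1u) != 0};
    }

    int main() {
      BxTarget t = DecodeBx(0x00008001u);  // odd address => Thumb routine
      std::printf("pc=0x%08x thumb=%d\n", static_cast<unsigned>(t.pc),
                  t.thumb ? 1 : 0);
      return 0;
    }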

 void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                           Condition cond) {
-#if USE_BX
   mov(ip, Operand(target, rmode));
   bx(ip, cond);
-#else
-  mov(pc, Operand(target, rmode), LeaveCC, cond);
-#endif
 }


 void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                           Condition cond) {
   ASSERT(!RelocInfo::IsCodeTarget(rmode));
   Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
 }


 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                           Condition cond) {
   ASSERT(RelocInfo::IsCodeTarget(rmode));
   // 'code' is always generated ARM code, never THUMB code
   Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
 }


 int MacroAssembler::CallSize(Register target, Condition cond) {
-#ifdef USE_BLX
   return kInstrSize;
-#else
-  return 2 * kInstrSize;
-#endif
 }


 void MacroAssembler::Call(Register target, Condition cond) {
   // Block constant pool for the call instruction sequence.
   BlockConstPoolScope block_const_pool(this);
   Label start;
   bind(&start);
-#ifdef USE_BLX
   blx(target, cond);
-#else
-  // set lr for return at current pc + 8
-  mov(lr, Operand(pc), LeaveCC, cond);
-  mov(pc, Operand(target), LeaveCC, cond);
-#endif
   ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
 }

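Note: the removed #else branch relied on an ARM pipeline property: in ARM state, reading pc yields the address of the current instruction plus 8, so mov lr, pc followed by mov pc, target leaves lr pointing at the instruction after the branch. A small model of that arithmetic (a sketch under that assumption, not V8 code):

    #include <cstdint>

    // In ARM state a pc read returns the current instruction's address + 8,
    // i.e. two 4-byte instructions ahead (architectural rule, modeled here).
    uint32_t ReturnAddressAfterCall(uint32_t mov_lr_pc_address) {
      const uint32_t kPcReadAhead = 8;
      // lr <- address + 8, which is the instruction right after the
      // following 'mov pc, target'.
      return mov_lr_pc_address + kPcReadAhead;
    }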

 int MacroAssembler::CallSize(
     Address target, RelocInfo::Mode rmode, Condition cond) {
   int size = 2 * kInstrSize;
   Instr mov_instr = cond | MOV | LeaveCC;
   intptr_t immediate = reinterpret_cast<intptr_t>(target);
   if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) {
(...skipping 22 matching lines...)
   // Block constant pool for the call instruction sequence.
   BlockConstPoolScope block_const_pool(this);
   Label start;
   bind(&start);

   bool old_predictable_code_size = predictable_code_size();
   if (mode == NEVER_INLINE_TARGET_ADDRESS) {
     set_predictable_code_size(true);
   }

-#ifdef USE_BLX
   // Call sequence on V7 or later may be :
   //  movw  ip, #... @ call address low 16
   //  movt  ip, #... @ call address high 16
   //  blx   ip
   //                 @ return address
   // Or for pre-V7 or values that may be back-patched
   // to avoid ICache flushes:
   //  ldr   ip, [pc, #...] @ call address
   //  blx   ip
   //                       @ return address

   // Statement positions are expected to be recorded when the target
   // address is loaded. The mov method will automatically record
   // positions when pc is the target, since this is not the case here
   // we have to do it explicitly.
   positions_recorder()->WriteRecordedPositions();

   mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
   blx(ip, cond);

-#else
-  // Set lr for return at current pc + 8.
-  mov(lr, Operand(pc), LeaveCC, cond);
-  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
-  mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond);
-#endif
   ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start));
   if (mode == NEVER_INLINE_TARGET_ADDRESS) {
     set_predictable_code_size(old_predictable_code_size);
   }
 }

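Note: the two sequences in the comment above trade code size against patchability: movw/movt/blx is three 4-byte instructions with the address split across two immediates, while ldr/blx is two instructions plus a 4-byte constant-pool literal that can be back-patched with a plain data write, avoiding an instruction-cache flush. A rough size model mirroring the visible CallSize logic (illustrative; it assumes the non-single-instruction case costs exactly one extra movt):

    // Sketch: byte size of the call sequence, mirroring CallSize above.
    int CallSequenceSizeBytes(bool mov_is_single_instruction) {
      const int kInstrSize = 4;
      int size = 2 * kInstrSize;           // mov/ldr + blx
      if (!mov_is_single_instruction) {
        size += kInstrSize;                // assumed extra movt on ARMv7
      }
      return size;
    }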

 int MacroAssembler::CallSize(Handle<Code> code,
                              RelocInfo::Mode rmode,
                              TypeFeedbackId ast_id,
(...skipping 13 matching lines...)
   if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
     SetRecordedAstId(ast_id);
     rmode = RelocInfo::CODE_TARGET_WITH_ID;
   }
   // 'code' is always generated ARM code, never THUMB code
   Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
 }


 void MacroAssembler::Ret(Condition cond) {
-#if USE_BX
   bx(lr, cond);
-#else
-  mov(pc, Operand(lr), LeaveCC, cond);
-#endif
 }


 void MacroAssembler::Drop(int count, Condition cond) {
   if (count > 0) {
     add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
   }
 }


(...skipping 2971 matching lines...)
   Label loop, entry;
   b(&entry);
   bind(&loop);
   str(filler, MemOperand(start_offset, kPointerSize, PostIndex));
   bind(&entry);
   cmp(start_offset, end_offset);
   b(lt, &loop);
 }

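Note: the loop above fills the half-open range [start_offset, end_offset) one pointer at a time; the PostIndex store writes through start_offset and then bumps it by kPointerSize. A C++ equivalent of the control flow (illustrative only; FillFields is a hypothetical name):

    #include <cstdint>

    // Same shape as the assembly: jump to the test first (b(&entry)),
    // then store with post-increment while start < end.
    void FillFields(uintptr_t* start, uintptr_t* end, uintptr_t filler) {
      while (start < end) {
        *start++ = filler;  // str filler, [start], #kPointerSize
      }
    }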

-void MacroAssembler::CountLeadingZeros(Register zeros,   // Answer.
-                                       Register source,  // Input.
-                                       Register scratch) {
-  ASSERT(!zeros.is(source) || !source.is(scratch));
-  ASSERT(!zeros.is(scratch));
-  ASSERT(!scratch.is(ip));
-  ASSERT(!source.is(ip));
-  ASSERT(!zeros.is(ip));
-#ifdef CAN_USE_ARMV5_INSTRUCTIONS
-  clz(zeros, source);  // This instruction is only supported after ARM5.
-#else
-  // Order of the next two lines is important: zeros register
-  // can be the same as source register.
-  Move(scratch, source);
-  mov(zeros, Operand::Zero());
-  // Top 16.
-  tst(scratch, Operand(0xffff0000));
-  add(zeros, zeros, Operand(16), LeaveCC, eq);
-  mov(scratch, Operand(scratch, LSL, 16), LeaveCC, eq);
-  // Top 8.
-  tst(scratch, Operand(0xff000000));
-  add(zeros, zeros, Operand(8), LeaveCC, eq);
-  mov(scratch, Operand(scratch, LSL, 8), LeaveCC, eq);
-  // Top 4.
-  tst(scratch, Operand(0xf0000000));
-  add(zeros, zeros, Operand(4), LeaveCC, eq);
-  mov(scratch, Operand(scratch, LSL, 4), LeaveCC, eq);
-  // Top 2.
-  tst(scratch, Operand(0xc0000000));
-  add(zeros, zeros, Operand(2), LeaveCC, eq);
-  mov(scratch, Operand(scratch, LSL, 2), LeaveCC, eq);
-  // Top bit.
-  tst(scratch, Operand(0x80000000u));
-  add(zeros, zeros, Operand(1), LeaveCC, eq);
-#endif
-}
-
-
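Note: the deleted fallback is a binary search for the leading-zero count, needed only below ARMv5 where the clz instruction does not exist; with ARMv6 as the baseline it can go. A C++ sketch of the same algorithm (illustrative; like the removed sequence, it returns 31 for a zero input, whereas the clz instruction returns 32):

    #include <cstdint>

    int CountLeadingZeros32(uint32_t x) {
      int zeros = 0;
      if ((x & 0xffff0000u) == 0) { zeros += 16; x <<= 16; }  // top 16
      if ((x & 0xff000000u) == 0) { zeros += 8;  x <<= 8;  }  // top 8
      if ((x & 0xf0000000u) == 0) { zeros += 4;  x <<= 4;  }  // top 4
      if ((x & 0xc0000000u) == 0) { zeros += 2;  x <<= 2;  }  // top 2
      if ((x & 0x80000000u) == 0) { zeros += 1; }             // top bit
      return zeros;
    }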
 void MacroAssembler::CheckFor32DRegs(Register scratch) {
   mov(scratch, Operand(ExternalReference::cpu_features()));
   ldr(scratch, MemOperand(scratch));
   tst(scratch, Operand(1u << VFP32DREGS));
 }


 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
     Register first,
     Register second,
(...skipping 559 matching lines...)
 void CodePatcher::EmitCondition(Condition cond) {
   Instr instr = Assembler::instr_at(masm_.pc_);
   instr = (instr & ~kCondMask) | cond;
   masm_.emit(instr);
 }

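Note: EmitCondition rewrites only the condition field of the instruction at the patcher's current position. On ARM the condition code occupies bits 28..31, and V8's Condition values are encoded pre-shifted into that position, which is why or-ing cond in directly works. A standalone illustration (hypothetical helper, assuming pre-shifted condition values):

    #include <cstdint>

    const uint32_t kCondMask = 0xF0000000u;  // condition code: bits 28..31

    // Returns instr with its condition field replaced; 'cond' must already
    // be shifted into bits 28..31 (as V8's Condition enum values are).
    uint32_t PatchCondition(uint32_t instr, uint32_t cond) {
      return (instr & ~kCondMask) | cond;
    }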

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM
