Chromium Code Reviews

Side by Side Diff: src/arm/macro-assembler-arm.cc

Issue 11037023: Use movw/movt instead of constant pool on ARMv7 (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Don't use movw/movt for patchable target addresses
Created: 8 years, 2 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 90 matching lines...)
 
 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                           Condition cond) {
   ASSERT(RelocInfo::IsCodeTarget(rmode));
   // 'code' is always generated ARM code, never THUMB code
   Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
 }
 
 
 int MacroAssembler::CallSize(Register target, Condition cond) {
-#if USE_BLX
+#ifdef USE_BLX
   return kInstrSize;
 #else
   return 2 * kInstrSize;
 #endif
 }
 
 
 void MacroAssembler::Call(Register target, Condition cond) {
   // Block constant pool for the call instruction sequence.
   BlockConstPoolScope block_const_pool(this);
   Label start;
   bind(&start);
-#if USE_BLX
+#ifdef USE_BLX
   blx(target, cond);
 #else
   // set lr for return at current pc + 8
   mov(lr, Operand(pc), LeaveCC, cond);
   mov(pc, Operand(target), LeaveCC, cond);
 #endif
   ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
 }
 
 
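Note (not part of the patch): the #if USE_BLX to #ifdef USE_BLX change is a preprocessor-semantics fix. "#if" evaluates an undefined macro as 0 and errors on a macro defined empty, while "#ifdef" only tests definedness. A minimal sketch of the difference, with a hypothetical stand-in macro:

// Sketch only, not V8 code: the preprocessor difference behind the
// "#if" -> "#ifdef" change. The two forms agree when the macro is
// undefined or defined as a nonzero value; they diverge when it is
// defined as 0, and "#if" is an error when it is defined but empty.
#include <cstdio>

#define USE_BLX_DEMO 1  // hypothetical stand-in for USE_BLX

int main() {
#ifdef USE_BLX_DEMO
  puts("ifdef branch: macro is defined");
#endif
#if USE_BLX_DEMO
  puts("if branch: macro expands to a nonzero value");
#endif
  return 0;
}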
(...skipping 16 matching lines...)
   intptr_t immediate = reinterpret_cast<intptr_t>(target);
   if (!Operand(immediate, rmode).is_single_instruction(NULL, mov_instr)) {
     size += kInstrSize;
   }
   return size;
 }
 
 
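Note (not part of the patch): the hunk above is only the tail of CallSize(Address, ...); the initialization of size sits in the skipped lines. A minimal model of the accounting, assuming the base is the two-instruction mov-to-ip plus blx sequence that Call(Address, ...) emits below:

// Sketch only, not V8 code: models the size accounting of the
// CallSize(Address, ...) hunk above. The 2 * kInstrSize base is an
// assumption; its actual initialization is in the skipped lines.
#include <cassert>

constexpr int kInstrSize = 4;  // every ARM instruction is 4 bytes

int CallSizeModel(bool operand_is_single_instruction) {
  int size = 2 * kInstrSize;             // mov ip, #target ; blx ip
  if (!operand_is_single_instruction) {
    size += kInstrSize;                  // e.g. a movw also needs a movt
  }
  return size;
}

int main() {
  assert(CallSizeModel(true) == 8);    // single mov + blx
  assert(CallSizeModel(false) == 12);  // movw + movt + blx
  return 0;
}

This is also why every Call flavor ends with ASSERT_EQ(CallSize(...), SizeOfCodeGeneratedSince(&start)): CallSize must predict, instruction for instruction, what Call emits.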
 void MacroAssembler::Call(Address target,
                           RelocInfo::Mode rmode,
-                          Condition cond) {
+                          Condition cond,
+                          TargetAddressStorageMode mode) {
   // Block constant pool for the call instruction sequence.
   BlockConstPoolScope block_const_pool(this);
   Label start;
   bind(&start);
-#if USE_BLX
-  // On ARMv5 and after the recommended call sequence is:
-  //  ldr ip, [pc, #...]
-  //  blx ip
+
+  bool old_predictable_code_size = predictable_code_size();
+  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
+    set_predictable_code_size(true);
+  }
+
+#ifdef USE_BLX
+  // Call sequence on V7 or later may be:
+  //  movw ip, #... @ call address low 16
+  //  movt ip, #... @ call address high 16
+  //  blx ip

   [inline comment] bulach 2012/10/11 17:23:08: (just to keep documented) the minor optimization w

+  //  @ return address
+  // Or for pre-V7 or values that may be back-patched
+  // to avoid ICache flushes:
+  //  ldr ip, [pc, #...] @ call address
+  //  blx ip
+  //  @ return address
 
   // Statement positions are expected to be recorded when the target
   // address is loaded. The mov method will automatically record
   // positions when pc is the target, since this is not the case here
   // we have to do it explicitly.
   positions_recorder()->WriteRecordedPositions();
 
   mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
   blx(ip, cond);
 
-  ASSERT(kCallTargetAddressOffset == 2 * kInstrSize);
 #else
   // Set lr for return at current pc + 8.
   mov(lr, Operand(pc), LeaveCC, cond);
   // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
   mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond);
-  ASSERT(kCallTargetAddressOffset == kInstrSize);
 #endif
   ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start));
+  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
+    set_predictable_code_size(old_predictable_code_size);
+  }
 }
 
 
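Note (not part of the patch): the movw/movt pair in the new comment splits the 32-bit call address into two 16-bit immediates loaded into ip. A minimal sketch of how a target address maps onto that pair, using the standard ARMv7 A1 encodings (the encoder helpers here are hand-written for illustration):

// Sketch only, not V8 code: how a 32-bit target address maps onto the
// movw/movt pair from the comment above (ARMv7 A1 encodings, cond = AL).
#include <cstdint>
#include <cstdio>

// movw rd, #imm16: imm16 splits across bits 19:16 (top nibble) and 11:0.
uint32_t EncodeMovw(int rd, uint16_t imm16) {
  return 0xE3000000u | ((imm16 & 0xF000u) << 4) | (rd << 12) | (imm16 & 0x0FFFu);
}

// movt rd, #imm16: same immediate layout, writes the high half of rd.
uint32_t EncodeMovt(int rd, uint16_t imm16) {
  return 0xE3400000u | ((imm16 & 0xF000u) << 4) | (rd << 12) | (imm16 & 0x0FFFu);
}

int main() {
  const uint32_t target = 0x12345678u;  // example call address
  const int ip = 12;                    // ip is r12 on ARM
  printf("movw ip, #0x%04x -> 0x%08x\n",
         static_cast<unsigned>(target & 0xFFFFu),
         static_cast<unsigned>(EncodeMovw(ip, target & 0xFFFFu)));
  printf("movt ip, #0x%04x -> 0x%08x\n",
         static_cast<unsigned>(target >> 16),
         static_cast<unsigned>(EncodeMovt(ip, target >> 16)));
  return 0;
}

Back-patching that pair means rewriting two instructions and flushing their ICache lines, while the ldr form only needs the constant-pool word rewritten. Per the patch set description, that is why patchable target addresses keep the constant-pool form: NEVER_INLINE_TARGET_ADDRESS forces predictable_code_size() around the mov.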
 int MacroAssembler::CallSize(Handle<Code> code,
                              RelocInfo::Mode rmode,
                              TypeFeedbackId ast_id,
                              Condition cond) {
   return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
 }
 
 
 void MacroAssembler::Call(Handle<Code> code,
                           RelocInfo::Mode rmode,
                           TypeFeedbackId ast_id,
-                          Condition cond) {
+                          Condition cond,
+                          TargetAddressStorageMode mode) {
   Label start;
   bind(&start);
   ASSERT(RelocInfo::IsCodeTarget(rmode));
   if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
     SetRecordedAstId(ast_id);
     rmode = RelocInfo::CODE_TARGET_WITH_ID;
   }
   // 'code' is always generated ARM code, never THUMB code
-  Call(reinterpret_cast<Address>(code.location()), rmode, cond);
-  ASSERT_EQ(CallSize(code, rmode, ast_id, cond),
-            SizeOfCodeGeneratedSince(&start));
+  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
 }
 
 
 void MacroAssembler::Ret(Condition cond) {
 #if USE_BX
   bx(lr, cond);
 #else
   mov(pc, Operand(lr), LeaveCC, cond);
 #endif
 }
(...skipping 56 matching lines...)
   CpuFeatures::Scope scope(VFP2);
   if (!dst.is(src)) {
     vmov(dst, src);
   }
 }
 
 
 void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                          Condition cond) {
   if (!src2.is_reg() &&
-      !src2.must_use_constant_pool(this) &&
+      !src2.must_output_reloc_info(this) &&
       src2.immediate() == 0) {
     mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond);
-
   } else if (!src2.is_single_instruction(this) &&
-             !src2.must_use_constant_pool(this) &&
+             !src2.must_output_reloc_info(this) &&
              CpuFeatures::IsSupported(ARMv7) &&
              IsPowerOf2(src2.immediate() + 1)) {
     ubfx(dst, src1, 0,
          WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
-
   } else {
     and_(dst, src1, src2, LeaveCC, cond);
   }
 }
 
 
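Note (not part of the patch): the middle branch of And() leans on a small identity. AND with a mask of the form 2^n - 1 keeps exactly the low n bits, which is what "ubfx dst, src1, #0, #n" computes on ARMv7; IsPowerOf2(src2.immediate() + 1) tests for such a mask and WhichPowerOf2 recovers n. A self-contained check, not V8 code:

// Sketch only, not V8 code: checks the and/ubfx equivalence that the
// ARMv7 branch of MacroAssembler::And() above relies on.
#include <cassert>
#include <cstdint>

// Software model of "ubfx dst, src, #lsb, #width": extract an unsigned
// bit field of `width` bits starting at bit `lsb` (assumes width < 32).
uint32_t Ubfx(uint32_t src, int lsb, int width) {
  return (src >> lsb) & ((1u << width) - 1u);
}

int main() {
  const uint32_t x = 0xDEADBEEFu;
  // 0xFF == 2^8 - 1, so "and dst, src, #0xFF" == "ubfx dst, src, #0, #8".
  assert((x & 0xFFu) == Ubfx(x, 0, 8));
  // 0x1FFF == 2^13 - 1 maps to width 13 the same way.
  assert((x & 0x1FFFu) == Ubfx(x, 0, 13));
  return 0;
}

The is_single_instruction guard is what makes this a win: if the mask already fits one and instruction the rewrite buys nothing, so ubfx is only used when materializing the mask would otherwise take more than one instruction.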
 void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                           Condition cond) {
   ASSERT(lsb < 32);
   if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
(...skipping 3544 matching lines...)
 void CodePatcher::EmitCondition(Condition cond) {
   Instr instr = Assembler::instr_at(masm_.pc_);
   instr = (instr & ~kCondMask) | cond;
   masm_.emit(instr);
 }
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM