OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 161 matching lines...) |
172 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. | 172 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. |
173 mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond); | 173 mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond); |
174 ASSERT(kCallTargetAddressOffset == kInstrSize); | 174 ASSERT(kCallTargetAddressOffset == kInstrSize); |
175 #endif | 175 #endif |
176 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); | 176 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); |
177 } | 177 } |
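The debug-mode pattern above -- bind a label, emit the call, then ASSERT_EQ(CallSize(...), SizeOfCodeGeneratedSince(&start)) -- exists so that CallSize's prediction of the emitted byte count can never silently drift from what Call actually generates. A minimal sketch of the same idea, using a hypothetical toy Emitter class rather than the real V8 MacroAssembler API:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Hypothetical toy emitter illustrating the "predict size, then verify
    // after emission" pattern; not the V8 MacroAssembler API.
    class Emitter {
     public:
      size_t pc() const { return buffer_.size(); }   // words emitted so far
      static size_t CallSize() { return 1; }         // one word per call here

      void Call(unsigned target) {
        size_t start = pc();                                      // like bind(&start)
        buffer_.push_back(0xEB000000u | (target & 0x00FFFFFFu));  // toy BL encoding
        assert(pc() - start == CallSize());                       // like ASSERT_EQ above
      }

     private:
      std::vector<unsigned> buffer_;
    };

    int main() {
      Emitter e;
      e.Call(0x1234);
      return 0;
    }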
178 | 178 |
179 | 179 |
180 int MacroAssembler::CallSize(Handle<Code> code, | 180 int MacroAssembler::CallSize(Handle<Code> code, |
181 RelocInfo::Mode rmode, | 181 RelocInfo::Mode rmode, |
182 unsigned ast_id, | 182 TypeFeedbackId ast_id, |
183 Condition cond) { | 183 Condition cond) { |
184 return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond); | 184 return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond); |
185 } | 185 } |
186 | 186 |
187 | 187 |
188 void MacroAssembler::Call(Handle<Code> code, | 188 void MacroAssembler::Call(Handle<Code> code, |
189 RelocInfo::Mode rmode, | 189 RelocInfo::Mode rmode, |
190 unsigned ast_id, | 190 TypeFeedbackId ast_id, |
191 Condition cond) { | 191 Condition cond) { |
192 Label start; | 192 Label start; |
193 bind(&start); | 193 bind(&start); |
194 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 194 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
195 if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) { | 195 if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) { |
196 SetRecordedAstId(ast_id); | 196 SetRecordedAstId(ast_id); |
197 rmode = RelocInfo::CODE_TARGET_WITH_ID; | 197 rmode = RelocInfo::CODE_TARGET_WITH_ID; |
198 } | 198 } |
199 // 'code' is always generated ARM code, never THUMB code | 199 // 'code' is always generated ARM code, never THUMB code |
200 Call(reinterpret_cast<Address>(code.location()), rmode, cond); | 200 Call(reinterpret_cast<Address>(code.location()), rmode, cond); |
201 ASSERT_EQ(CallSize(code, rmode, ast_id, cond), | 201 ASSERT_EQ(CallSize(code, rmode, ast_id, cond), |
202 SizeOfCodeGeneratedSince(&start)); | 202 SizeOfCodeGeneratedSince(&start)); |
203 } | 203 } |
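The substantive change in this hunk is the switch from a raw unsigned ast_id with the kNoASTId sentinel to the TypeFeedbackId value type: call sites now pass TypeFeedbackId::None() instead of the magic constant, and the guard ast_id != kNoASTId becomes !ast_id.IsNone(). The actual V8 definition is not part of this diff; the sketch below is only an approximation of what such a typed-id wrapper usually looks like (the names kNoneId and ToInt are assumptions):

    #include <cassert>

    // Approximate sketch of a TypeFeedbackId-style wrapper; not the real
    // V8 definition. It replaces "unsigned id + kNoASTId sentinel" with a
    // small value class.
    class TypeFeedbackIdSketch {
     public:
      explicit TypeFeedbackIdSketch(int id) : id_(id) {}

      // Named constructor for "no id", replacing the old kNoASTId constant.
      static TypeFeedbackIdSketch None() { return TypeFeedbackIdSketch(kNoneId); }

      // Query used by callers such as MacroAssembler::Call above.
      bool IsNone() const { return id_ == kNoneId; }

      int ToInt() const {
        assert(!IsNone());  // callers should not ask for the id of None()
        return id_;
      }

     private:
      static const int kNoneId = -1;  // assumed sentinel value
      int id_;
    };

A wrapper like this lets the compiler reject accidental mixing of feedback ids with other integers, which appears to be the motivation for threading TypeFeedbackId through these signatures.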
204 | 204 |
205 | 205 |
(...skipping 1923 matching lines...) |
2129 bind(&non_instance); | 2129 bind(&non_instance); |
2130 ldr(result, FieldMemOperand(result, Map::kConstructorOffset)); | 2130 ldr(result, FieldMemOperand(result, Map::kConstructorOffset)); |
2131 | 2131 |
2132 // All done. | 2132 // All done. |
2133 bind(&done); | 2133 bind(&done); |
2134 } | 2134 } |
2135 | 2135 |
2136 | 2136 |
2137 void MacroAssembler::CallStub(CodeStub* stub, Condition cond) { | 2137 void MacroAssembler::CallStub(CodeStub* stub, Condition cond) { |
2138 ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs. | 2138 ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs. |
2139 Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond); | 2139 Call(stub->GetCode(), RelocInfo::CODE_TARGET, TypeFeedbackId::None(), cond); |
2140 } | 2140 } |
2141 | 2141 |
2142 | 2142 |
2143 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { | 2143 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { |
2144 ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe()); | 2144 ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe()); |
2145 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | 2145 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); |
2146 } | 2146 } |
2147 | 2147 |
2148 | 2148 |
2149 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 2149 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
(...skipping 1655 matching lines...) |
3805 void CodePatcher::EmitCondition(Condition cond) { | 3805 void CodePatcher::EmitCondition(Condition cond) { |
3806 Instr instr = Assembler::instr_at(masm_.pc_); | 3806 Instr instr = Assembler::instr_at(masm_.pc_); |
3807 instr = (instr & ~kCondMask) | cond; | 3807 instr = (instr & ~kCondMask) | cond; |
3808 masm_.emit(instr); | 3808 masm_.emit(instr); |
3809 } | 3809 } |
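EmitCondition patches only the condition field of the instruction currently at the patch address: (instr & ~kCondMask) clears the old condition bits and | cond writes the new ones, leaving every other field of the encoding untouched. On ARM the condition code occupies bits 31..28, so a standalone sketch of that step looks like the following (the 0xF0000000 mask value is an assumption here, though it matches the architectural encoding):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kCondMask = 0xF0000000u;  // assumed: condition field, bits 31..28
      const uint32_t kCondAL   = 0xE0000000u;  // "always"
      const uint32_t kCondEQ   = 0x00000000u;  // "equal"

      uint32_t instr = kCondAL | 0x03A00001u;  // mov r0, #1  (0xE3A00001)
      // Clear the old condition bits, then OR in the new condition; the
      // rest of the instruction word is preserved.
      instr = (instr & ~kCondMask) | kCondEQ;
      std::printf("patched: 0x%08X\n", instr);  // 0x03A00001 == moveq r0, #1
      return 0;
    }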
3810 | 3810 |
3811 | 3811 |
3812 } } // namespace v8::internal | 3812 } } // namespace v8::internal |
3813 | 3813 |
3814 #endif // V8_TARGET_ARCH_ARM | 3814 #endif // V8_TARGET_ARCH_ARM |