OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 272 matching lines...)
283 vmov(dst, src); | 283 vmov(dst, src); |
284 } | 284 } |
285 } | 285 } |
286 | 286 |
287 | 287 |
288 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, | 288 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
289 Condition cond) { | 289 Condition cond) { |
290 if (!src2.is_reg() && | 290 if (!src2.is_reg() && |
291 !src2.must_use_constant_pool(this) && | 291 !src2.must_use_constant_pool(this) && |
292 src2.immediate() == 0) { | 292 src2.immediate() == 0) { |
293 mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond); | 293 mov(dst, Operand::Zero(), LeaveCC, cond); |
294 | 294 |
295 } else if (!src2.is_single_instruction(this) && | 295 } else if (!src2.is_single_instruction(this) && |
296 !src2.must_use_constant_pool(this) && | 296 !src2.must_use_constant_pool(this) && |
297 CpuFeatures::IsSupported(ARMv7) && | 297 CpuFeatures::IsSupported(ARMv7) && |
298 IsPowerOf2(src2.immediate() + 1)) { | 298 IsPowerOf2(src2.immediate() + 1)) { |
299 ubfx(dst, src1, 0, | 299 ubfx(dst, src1, 0, |
300 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); | 300 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); |
301 | 301 |
302 } else { | 302 } else { |
303 and_(dst, src1, src2, LeaveCC, cond); | 303 and_(dst, src1, src2, LeaveCC, cond); |
(...skipping 87 matching lines...)
391 int satval = (1 << satpos) - 1; | 391 int satval = (1 << satpos) - 1; |
392 | 392 |
393 if (cond != al) { | 393 if (cond != al) { |
394 b(NegateCondition(cond), &done); // Skip saturate if !condition. | 394 b(NegateCondition(cond), &done); // Skip saturate if !condition. |
395 } | 395 } |
396 if (!(src.is_reg() && dst.is(src.rm()))) { | 396 if (!(src.is_reg() && dst.is(src.rm()))) { |
397 mov(dst, src); | 397 mov(dst, src); |
398 } | 398 } |
399 tst(dst, Operand(~satval)); | 399 tst(dst, Operand(~satval)); |
400 b(eq, &done); | 400 b(eq, &done); |
401 mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, mi); // 0 if negative. | 401 mov(dst, Operand::Zero(), LeaveCC, mi); // 0 if negative. |
402 mov(dst, Operand(satval), LeaveCC, pl); // satval if positive. | 402 mov(dst, Operand(satval), LeaveCC, pl); // satval if positive. |
403 bind(&done); | 403 bind(&done); |
404 } else { | 404 } else { |
405 usat(dst, satpos, src, cond); | 405 usat(dst, satpos, src, cond); |
406 } | 406 } |
407 } | 407 } |
408 | 408 |
409 | 409 |
410 void MacroAssembler::LoadRoot(Register destination, | 410 void MacroAssembler::LoadRoot(Register destination, |
411 Heap::RootListIndex index, | 411 Heap::RootListIndex index, |
(...skipping 422 matching lines...)
834 void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { | 834 void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { |
835 // Set up the frame structure on the stack. | 835 // Set up the frame structure on the stack. |
836 ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement); | 836 ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement); |
837 ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset); | 837 ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset); |
838 ASSERT_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset); | 838 ASSERT_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset); |
839 Push(lr, fp); | 839 Push(lr, fp); |
840 mov(fp, Operand(sp)); // Set up new frame pointer. | 840 mov(fp, Operand(sp)); // Set up new frame pointer. |
841 // Reserve room for saved entry sp and code object. | 841 // Reserve room for saved entry sp and code object. |
842 sub(sp, sp, Operand(2 * kPointerSize)); | 842 sub(sp, sp, Operand(2 * kPointerSize)); |
843 if (emit_debug_code()) { | 843 if (emit_debug_code()) { |
844 mov(ip, Operand(0)); | 844 mov(ip, Operand::Zero()); |
845 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); | 845 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
846 } | 846 } |
847 mov(ip, Operand(CodeObject())); | 847 mov(ip, Operand(CodeObject())); |
848 str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); | 848 str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); |
849 | 849 |
850 // Save the frame pointer and the context in top. | 850 // Save the frame pointer and the context in top. |
851 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 851 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); |
852 str(fp, MemOperand(ip)); | 852 str(fp, MemOperand(ip)); |
853 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 853 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
854 str(cp, MemOperand(ip)); | 854 str(cp, MemOperand(ip)); |
(...skipping 63 matching lines...)
918 // Calculate the stack location of the saved doubles and restore them. | 918 // Calculate the stack location of the saved doubles and restore them. |
919 const int offset = 2 * kPointerSize; | 919 const int offset = 2 * kPointerSize; |
920 sub(r3, fp, Operand(offset + DwVfpRegister::kNumRegisters * kDoubleSize)); | 920 sub(r3, fp, Operand(offset + DwVfpRegister::kNumRegisters * kDoubleSize)); |
921 DwVfpRegister first = d0; | 921 DwVfpRegister first = d0; |
922 DwVfpRegister last = | 922 DwVfpRegister last = |
923 DwVfpRegister::from_code(DwVfpRegister::kNumRegisters - 1); | 923 DwVfpRegister::from_code(DwVfpRegister::kNumRegisters - 1); |
924 vldm(ia, r3, first, last); | 924 vldm(ia, r3, first, last); |
925 } | 925 } |
926 | 926 |
927 // Clear top frame. | 927 // Clear top frame. |
928 mov(r3, Operand(0, RelocInfo::NONE)); | 928 mov(r3, Operand::Zero()); |
929 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 929 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); |
930 str(r3, MemOperand(ip)); | 930 str(r3, MemOperand(ip)); |
931 | 931 |
932 // Restore current context from top and clear it in debug mode. | 932 // Restore current context from top and clear it in debug mode. |
933 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 933 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
934 ldr(cp, MemOperand(ip)); | 934 ldr(cp, MemOperand(ip)); |
935 #ifdef DEBUG | 935 #ifdef DEBUG |
936 str(r3, MemOperand(ip)); | 936 str(r3, MemOperand(ip)); |
937 #endif | 937 #endif |
938 | 938 |
(...skipping 249 matching lines...)
1188 | 1188 |
1189 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 1189 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); |
1190 ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 1190 ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
1191 tst(scratch, Operand(kIsNotStringMask)); | 1191 tst(scratch, Operand(kIsNotStringMask)); |
1192 b(ne, fail); | 1192 b(ne, fail); |
1193 } | 1193 } |
1194 | 1194 |
1195 | 1195 |
1196 #ifdef ENABLE_DEBUGGER_SUPPORT | 1196 #ifdef ENABLE_DEBUGGER_SUPPORT |
1197 void MacroAssembler::DebugBreak() { | 1197 void MacroAssembler::DebugBreak() { |
1198 mov(r0, Operand(0, RelocInfo::NONE)); | 1198 mov(r0, Operand::Zero()); |
1199 mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate()))); | 1199 mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate()))); |
1200 CEntryStub ces(1); | 1200 CEntryStub ces(1); |
1201 ASSERT(AllowThisStubCall(&ces)); | 1201 ASSERT(AllowThisStubCall(&ces)); |
1202 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); | 1202 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); |
1203 } | 1203 } |
1204 #endif | 1204 #endif |
1205 | 1205 |
1206 | 1206 |
1207 void MacroAssembler::PushTryHandler(StackHandler::Kind kind, | 1207 void MacroAssembler::PushTryHandler(StackHandler::Kind kind, |
1208 int handler_index) { | 1208 int handler_index) { |
(...skipping 10 matching lines...)
1219 // Set up the code object (r5) and the state (r6) for pushing. | 1219 // Set up the code object (r5) and the state (r6) for pushing. |
1220 unsigned state = | 1220 unsigned state = |
1221 StackHandler::IndexField::encode(handler_index) | | 1221 StackHandler::IndexField::encode(handler_index) | |
1222 StackHandler::KindField::encode(kind); | 1222 StackHandler::KindField::encode(kind); |
1223 mov(r5, Operand(CodeObject())); | 1223 mov(r5, Operand(CodeObject())); |
1224 mov(r6, Operand(state)); | 1224 mov(r6, Operand(state)); |
1225 | 1225 |
1226 // Push the frame pointer, context, state, and code object. | 1226 // Push the frame pointer, context, state, and code object. |
1227 if (kind == StackHandler::JS_ENTRY) { | 1227 if (kind == StackHandler::JS_ENTRY) { |
1228 mov(r7, Operand(Smi::FromInt(0))); // Indicates no context. | 1228 mov(r7, Operand(Smi::FromInt(0))); // Indicates no context. |
1229 mov(ip, Operand(0, RelocInfo::NONE)); // NULL frame pointer. | 1229 mov(ip, Operand::Zero()); // NULL frame pointer. |
1230 stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | ip.bit()); | 1230 stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | ip.bit()); |
1231 } else { | 1231 } else { |
1232 stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit()); | 1232 stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit()); |
1233 } | 1233 } |
1234 | 1234 |
1235 // Link the current handler as the next handler. | 1235 // Link the current handler as the next handler. |
1236 mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 1236 mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); |
1237 ldr(r5, MemOperand(r6)); | 1237 ldr(r5, MemOperand(r6)); |
1238 push(r5); | 1238 push(r5); |
1239 // Set this new handler as the current one. | 1239 // Set this new handler as the current one. |
(...skipping 103 matching lines...)
1343 Label same_contexts; | 1343 Label same_contexts; |
1344 | 1344 |
1345 ASSERT(!holder_reg.is(scratch)); | 1345 ASSERT(!holder_reg.is(scratch)); |
1346 ASSERT(!holder_reg.is(ip)); | 1346 ASSERT(!holder_reg.is(ip)); |
1347 ASSERT(!scratch.is(ip)); | 1347 ASSERT(!scratch.is(ip)); |
1348 | 1348 |
1349 // Load current lexical context from the stack frame. | 1349 // Load current lexical context from the stack frame. |
1350 ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 1350 ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
1351 // In debug mode, make sure the lexical context is set. | 1351 // In debug mode, make sure the lexical context is set. |
1352 #ifdef DEBUG | 1352 #ifdef DEBUG |
1353 cmp(scratch, Operand(0, RelocInfo::NONE)); | 1353 cmp(scratch, Operand::Zero()); |
1354 Check(ne, "we should not have an empty lexical context"); | 1354 Check(ne, "we should not have an empty lexical context"); |
1355 #endif | 1355 #endif |
1356 | 1356 |
1357 // Load the native context of the current context. | 1357 // Load the native context of the current context. |
1358 int offset = | 1358 int offset = |
1359 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; | 1359 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; |
1360 ldr(scratch, FieldMemOperand(scratch, offset)); | 1360 ldr(scratch, FieldMemOperand(scratch, offset)); |
1361 ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); | 1361 ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); |
1362 | 1362 |
1363 // Check the context is a native context. | 1363 // Check the context is a native context. |
(...skipping 605 matching lines...)
1969 str(mantissa_reg, FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize)); | 1969 str(mantissa_reg, FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize)); |
1970 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); | 1970 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); |
1971 str(exponent_reg, FieldMemOperand(scratch1, offset)); | 1971 str(exponent_reg, FieldMemOperand(scratch1, offset)); |
1972 jmp(&done); | 1972 jmp(&done); |
1973 | 1973 |
1974 bind(&maybe_nan); | 1974 bind(&maybe_nan); |
1975 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise | 1975 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise |
1976 // it's an Infinity, and the non-NaN code path applies. | 1976 // it's an Infinity, and the non-NaN code path applies. |
1977 b(gt, &is_nan); | 1977 b(gt, &is_nan); |
1978 ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset)); | 1978 ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset)); |
1979 cmp(mantissa_reg, Operand(0)); | 1979 cmp(mantissa_reg, Operand::Zero()); |
1980 b(eq, &have_double_value); | 1980 b(eq, &have_double_value); |
1981 bind(&is_nan); | 1981 bind(&is_nan); |
1982 // Load canonical NaN for storing into the double array. | 1982 // Load canonical NaN for storing into the double array. |
1983 uint64_t nan_int64 = BitCast<uint64_t>( | 1983 uint64_t nan_int64 = BitCast<uint64_t>( |
1984 FixedDoubleArray::canonical_not_the_hole_nan_as_double()); | 1984 FixedDoubleArray::canonical_not_the_hole_nan_as_double()); |
1985 mov(mantissa_reg, Operand(static_cast<uint32_t>(nan_int64))); | 1985 mov(mantissa_reg, Operand(static_cast<uint32_t>(nan_int64))); |
1986 mov(exponent_reg, Operand(static_cast<uint32_t>(nan_int64 >> 32))); | 1986 mov(exponent_reg, Operand(static_cast<uint32_t>(nan_int64 >> 32))); |
1987 jmp(&have_double_value); | 1987 jmp(&have_double_value); |
1988 | 1988 |
1989 bind(&smi_value); | 1989 bind(&smi_value); |
(...skipping 212 matching lines...)
2202 // DirectCEntry stub itself is generated early and never moves. | 2202 // DirectCEntry stub itself is generated early and never moves. |
2203 DirectCEntryStub stub; | 2203 DirectCEntryStub stub; |
2204 stub.GenerateCall(this, function); | 2204 stub.GenerateCall(this, function); |
2205 | 2205 |
2206 Label promote_scheduled_exception; | 2206 Label promote_scheduled_exception; |
2207 Label delete_allocated_handles; | 2207 Label delete_allocated_handles; |
2208 Label leave_exit_frame; | 2208 Label leave_exit_frame; |
2209 | 2209 |
2210 // If result is non-zero, dereference to get the result value | 2210 // If result is non-zero, dereference to get the result value |
2211 // otherwise set it to undefined. | 2211 // otherwise set it to undefined. |
2212 cmp(r0, Operand(0)); | 2212 cmp(r0, Operand::Zero()); |
2213 LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | 2213 LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); |
2214 ldr(r0, MemOperand(r0), ne); | 2214 ldr(r0, MemOperand(r0), ne); |
2215 | 2215 |
2216 // No more valid handles (the result handle was the last one). Restore | 2216 // No more valid handles (the result handle was the last one). Restore |
2217 // previous handle scope. | 2217 // previous handle scope. |
2218 str(r4, MemOperand(r7, kNextOffset)); | 2218 str(r4, MemOperand(r7, kNextOffset)); |
2219 if (emit_debug_code()) { | 2219 if (emit_debug_code()) { |
2220 ldr(r1, MemOperand(r7, kLevelOffset)); | 2220 ldr(r1, MemOperand(r7, kLevelOffset)); |
2221 cmp(r1, r6); | 2221 cmp(r1, r6); |
2222 Check(eq, "Unexpected level after return from api call"); | 2222 Check(eq, "Unexpected level after return from api call"); |
(...skipping 158 matching lines...)
2381 Label right_exponent, done; | 2381 Label right_exponent, done; |
2382 // Get exponent word. | 2382 // Get exponent word. |
2383 ldr(scratch, FieldMemOperand(source, HeapNumber::kExponentOffset)); | 2383 ldr(scratch, FieldMemOperand(source, HeapNumber::kExponentOffset)); |
2384 // Get exponent alone in scratch2. | 2384 // Get exponent alone in scratch2. |
2385 Ubfx(scratch2, | 2385 Ubfx(scratch2, |
2386 scratch, | 2386 scratch, |
2387 HeapNumber::kExponentShift, | 2387 HeapNumber::kExponentShift, |
2388 HeapNumber::kExponentBits); | 2388 HeapNumber::kExponentBits); |
2389 // Load dest with zero. We use this either for the final shift or | 2389 // Load dest with zero. We use this either for the final shift or |
2390 // for the answer. | 2390 // for the answer. |
2391 mov(dest, Operand(0, RelocInfo::NONE)); | 2391 mov(dest, Operand::Zero()); |
2392 // Check whether the exponent matches a 32 bit signed int that is not a Smi. | 2392 // Check whether the exponent matches a 32 bit signed int that is not a Smi. |
2393 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). This is | 2393 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). This is |
2394 // the exponent that we are fastest at and also the highest exponent we can | 2394 // the exponent that we are fastest at and also the highest exponent we can |
2395 // handle here. | 2395 // handle here. |
2396 const uint32_t non_smi_exponent = HeapNumber::kExponentBias + 30; | 2396 const uint32_t non_smi_exponent = HeapNumber::kExponentBias + 30; |
2397 // The non_smi_exponent, 0x41d, is too big for ARM's immediate field so we | 2397 // The non_smi_exponent, 0x41d, is too big for ARM's immediate field so we |
2398 // split it up to avoid a constant pool entry. You can't do that in general | 2398 // split it up to avoid a constant pool entry. You can't do that in general |
2399 // for cmp because of the overflow flag, but we know the exponent is in the | 2399 // for cmp because of the overflow flag, but we know the exponent is in the |
2400 // range 0-2047 so there is no overflow. | 2400 // range 0-2047 so there is no overflow. |
2401 int fudge_factor = 0x400; | 2401 int fudge_factor = 0x400; |
(...skipping 33 matching lines...)
2435 tst(scratch, Operand(HeapNumber::kSignMask)); | 2435 tst(scratch, Operand(HeapNumber::kSignMask)); |
2436 // Get the second half of the double. For some exponents we don't | 2436 // Get the second half of the double. For some exponents we don't |
2437 // actually need this because the bits get shifted out again, but | 2437 // actually need this because the bits get shifted out again, but |
2438 // it's probably slower to test than just to do it. | 2438 // it's probably slower to test than just to do it. |
2439 ldr(scratch, FieldMemOperand(source, HeapNumber::kMantissaOffset)); | 2439 ldr(scratch, FieldMemOperand(source, HeapNumber::kMantissaOffset)); |
2440 // Shift down 22 bits to get the last 10 bits. | 2440 // Shift down 22 bits to get the last 10 bits. |
2441 orr(scratch, scratch2, Operand(scratch, LSR, 32 - shift_distance)); | 2441 orr(scratch, scratch2, Operand(scratch, LSR, 32 - shift_distance)); |
2442 // Move down according to the exponent. | 2442 // Move down according to the exponent. |
2443 mov(dest, Operand(scratch, LSR, dest)); | 2443 mov(dest, Operand(scratch, LSR, dest)); |
2444 // Fix sign if sign bit was set. | 2444 // Fix sign if sign bit was set. |
2445 rsb(dest, dest, Operand(0, RelocInfo::NONE), LeaveCC, ne); | 2445 rsb(dest, dest, Operand::Zero(), LeaveCC, ne); |
2446 bind(&done); | 2446 bind(&done); |
2447 } | 2447 } |
2448 } | 2448 } |
2449 | 2449 |
2450 | 2450 |
2451 void MacroAssembler::EmitVFPTruncate(VFPRoundingMode rounding_mode, | 2451 void MacroAssembler::EmitVFPTruncate(VFPRoundingMode rounding_mode, |
2452 Register result, | 2452 Register result, |
2453 DwVfpRegister double_input, | 2453 DwVfpRegister double_input, |
2454 Register scratch, | 2454 Register scratch, |
2455 DwVfpRegister double_scratch, | 2455 DwVfpRegister double_scratch, |
(...skipping 60 matching lines...)
2516 Label done, normal_exponent, restore_sign; | 2516 Label done, normal_exponent, restore_sign; |
2517 | 2517 |
2518 // Extract the biased exponent in result. | 2518 // Extract the biased exponent in result. |
2519 Ubfx(result, | 2519 Ubfx(result, |
2520 input_high, | 2520 input_high, |
2521 HeapNumber::kExponentShift, | 2521 HeapNumber::kExponentShift, |
2522 HeapNumber::kExponentBits); | 2522 HeapNumber::kExponentBits); |
2523 | 2523 |
2524 // Check for Infinity and NaNs, which should return 0. | 2524 // Check for Infinity and NaNs, which should return 0. |
2525 cmp(result, Operand(HeapNumber::kExponentMask)); | 2525 cmp(result, Operand(HeapNumber::kExponentMask)); |
2526 mov(result, Operand(0), LeaveCC, eq); | 2526 mov(result, Operand::Zero(), LeaveCC, eq); |
2527 b(eq, &done); | 2527 b(eq, &done); |
2528 | 2528 |
2529 // Express exponent as delta to (number of mantissa bits + 31). | 2529 // Express exponent as delta to (number of mantissa bits + 31). |
2530 sub(result, | 2530 sub(result, |
2531 result, | 2531 result, |
2532 Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31), | 2532 Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31), |
2533 SetCC); | 2533 SetCC); |
2534 | 2534 |
2535 // If the delta is strictly positive, all bits would be shifted away, | 2535 // If the delta is strictly positive, all bits would be shifted away, |
2536 // which means that we can return 0. | 2536 // which means that we can return 0. |
2537 b(le, &normal_exponent); | 2537 b(le, &normal_exponent); |
2538 mov(result, Operand(0)); | 2538 mov(result, Operand::Zero()); |
2539 b(&done); | 2539 b(&done); |
2540 | 2540 |
2541 bind(&normal_exponent); | 2541 bind(&normal_exponent); |
2542 const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1; | 2542 const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1; |
2543 // Calculate shift. | 2543 // Calculate shift. |
2544 add(scratch, result, Operand(kShiftBase + HeapNumber::kMantissaBits), SetCC); | 2544 add(scratch, result, Operand(kShiftBase + HeapNumber::kMantissaBits), SetCC); |
2545 | 2545 |
2546 // Save the sign. | 2546 // Save the sign. |
2547 Register sign = result; | 2547 Register sign = result; |
2548 result = no_reg; | 2548 result = no_reg; |
2549 and_(sign, input_high, Operand(HeapNumber::kSignMask)); | 2549 and_(sign, input_high, Operand(HeapNumber::kSignMask)); |
2550 | 2550 |
2551 // Set the implicit 1 before the mantissa part in input_high. | 2551 // Set the implicit 1 before the mantissa part in input_high. |
2552 orr(input_high, | 2552 orr(input_high, |
2553 input_high, | 2553 input_high, |
2554 Operand(1 << HeapNumber::kMantissaBitsInTopWord)); | 2554 Operand(1 << HeapNumber::kMantissaBitsInTopWord)); |
2555 // Shift the mantissa bits to the correct position. | 2555 // Shift the mantissa bits to the correct position. |
2556 // We don't need to clear non-mantissa bits as they will be shifted away. | 2556 // We don't need to clear non-mantissa bits as they will be shifted away. |
2557 // If they weren't, it would mean that the answer is in the 32bit range. | 2557 // If they weren't, it would mean that the answer is in the 32bit range. |
2558 mov(input_high, Operand(input_high, LSL, scratch)); | 2558 mov(input_high, Operand(input_high, LSL, scratch)); |
2559 | 2559 |
2560 // Replace the shifted bits with bits from the lower mantissa word. | 2560 // Replace the shifted bits with bits from the lower mantissa word. |
2561 Label pos_shift, shift_done; | 2561 Label pos_shift, shift_done; |
2562 rsb(scratch, scratch, Operand(32), SetCC); | 2562 rsb(scratch, scratch, Operand(32), SetCC); |
2563 b(&pos_shift, ge); | 2563 b(&pos_shift, ge); |
2564 | 2564 |
2565 // Negate scratch. | 2565 // Negate scratch. |
2566 rsb(scratch, scratch, Operand(0)); | 2566 rsb(scratch, scratch, Operand::Zero()); |
2567 mov(input_low, Operand(input_low, LSL, scratch)); | 2567 mov(input_low, Operand(input_low, LSL, scratch)); |
2568 b(&shift_done); | 2568 b(&shift_done); |
2569 | 2569 |
2570 bind(&pos_shift); | 2570 bind(&pos_shift); |
2571 mov(input_low, Operand(input_low, LSR, scratch)); | 2571 mov(input_low, Operand(input_low, LSR, scratch)); |
2572 | 2572 |
2573 bind(&shift_done); | 2573 bind(&shift_done); |
2574 orr(input_high, input_high, Operand(input_low)); | 2574 orr(input_high, input_high, Operand(input_low)); |
2575 // Restore sign if necessary. | 2575 // Restore sign if necessary. |
2576 cmp(sign, Operand(0)); | 2576 cmp(sign, Operand::Zero()); |
2577 result = sign; | 2577 result = sign; |
2578 sign = no_reg; | 2578 sign = no_reg; |
2579 rsb(result, input_high, Operand(0), LeaveCC, ne); | 2579 rsb(result, input_high, Operand::Zero(), LeaveCC, ne); |
2580 mov(result, input_high, LeaveCC, eq); | 2580 mov(result, input_high, LeaveCC, eq); |
2581 bind(&done); | 2581 bind(&done); |
2582 } | 2582 } |
2583 | 2583 |
2584 | 2584 |
2585 void MacroAssembler::EmitECMATruncate(Register result, | 2585 void MacroAssembler::EmitECMATruncate(Register result, |
2586 DwVfpRegister double_input, | 2586 DwVfpRegister double_input, |
2587 SwVfpRegister single_scratch, | 2587 SwVfpRegister single_scratch, |
2588 Register scratch, | 2588 Register scratch, |
2589 Register input_high, | 2589 Register input_high, |
(...skipping 615 matching lines...)
3205 | 3205 |
3206 | 3206 |
3207 void MacroAssembler::CopyBytes(Register src, | 3207 void MacroAssembler::CopyBytes(Register src, |
3208 Register dst, | 3208 Register dst, |
3209 Register length, | 3209 Register length, |
3210 Register scratch) { | 3210 Register scratch) { |
3211 Label align_loop, align_loop_1, word_loop, byte_loop, byte_loop_1, done; | 3211 Label align_loop, align_loop_1, word_loop, byte_loop, byte_loop_1, done; |
3212 | 3212 |
3213 // Align src before copying in word size chunks. | 3213 // Align src before copying in word size chunks. |
3214 bind(&align_loop); | 3214 bind(&align_loop); |
3215 cmp(length, Operand(0)); | 3215 cmp(length, Operand::Zero()); |
3216 b(eq, &done); | 3216 b(eq, &done); |
3217 bind(&align_loop_1); | 3217 bind(&align_loop_1); |
3218 tst(src, Operand(kPointerSize - 1)); | 3218 tst(src, Operand(kPointerSize - 1)); |
3219 b(eq, &word_loop); | 3219 b(eq, &word_loop); |
3220 ldrb(scratch, MemOperand(src, 1, PostIndex)); | 3220 ldrb(scratch, MemOperand(src, 1, PostIndex)); |
3221 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3221 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3222 sub(length, length, Operand(1), SetCC); | 3222 sub(length, length, Operand(1), SetCC); |
3223 b(ne, &byte_loop_1); | 3223 b(ne, &byte_loop_1); |
3224 | 3224 |
3225 // Copy bytes in word size chunks. | 3225 // Copy bytes in word size chunks. |
(...skipping 14 matching lines...)
3240 mov(scratch, Operand(scratch, LSR, 8)); | 3240 mov(scratch, Operand(scratch, LSR, 8)); |
3241 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3241 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3242 mov(scratch, Operand(scratch, LSR, 8)); | 3242 mov(scratch, Operand(scratch, LSR, 8)); |
3243 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3243 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3244 } | 3244 } |
3245 sub(length, length, Operand(kPointerSize)); | 3245 sub(length, length, Operand(kPointerSize)); |
3246 b(&word_loop); | 3246 b(&word_loop); |
3247 | 3247 |
3248 // Copy the last bytes if any left. | 3248 // Copy the last bytes if any left. |
3249 bind(&byte_loop); | 3249 bind(&byte_loop); |
3250 cmp(length, Operand(0)); | 3250 cmp(length, Operand::Zero()); |
3251 b(eq, &done); | 3251 b(eq, &done); |
3252 bind(&byte_loop_1); | 3252 bind(&byte_loop_1); |
3253 ldrb(scratch, MemOperand(src, 1, PostIndex)); | 3253 ldrb(scratch, MemOperand(src, 1, PostIndex)); |
3254 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3254 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3255 sub(length, length, Operand(1), SetCC); | 3255 sub(length, length, Operand(1), SetCC); |
3256 b(ne, &byte_loop_1); | 3256 b(ne, &byte_loop_1); |
3257 bind(&done); | 3257 bind(&done); |
3258 } | 3258 } |
3259 | 3259 |
3260 | 3260 |
(...skipping 17 matching lines...)
3278 ASSERT(!zeros.is(scratch)); | 3278 ASSERT(!zeros.is(scratch)); |
3279 ASSERT(!scratch.is(ip)); | 3279 ASSERT(!scratch.is(ip)); |
3280 ASSERT(!source.is(ip)); | 3280 ASSERT(!source.is(ip)); |
3281 ASSERT(!zeros.is(ip)); | 3281 ASSERT(!zeros.is(ip)); |
3282 #ifdef CAN_USE_ARMV5_INSTRUCTIONS | 3282 #ifdef CAN_USE_ARMV5_INSTRUCTIONS |
3283 clz(zeros, source); // This instruction is only supported after ARM5. | 3283 clz(zeros, source); // This instruction is only supported after ARM5. |
3284 #else | 3284 #else |
3285 // Order of the next two lines is important: zeros register | 3285 // Order of the next two lines is important: zeros register |
3286 // can be the same as source register. | 3286 // can be the same as source register. |
3287 Move(scratch, source); | 3287 Move(scratch, source); |
3288 mov(zeros, Operand(0, RelocInfo::NONE)); | 3288 mov(zeros, Operand::Zero()); |
3289 // Top 16. | 3289 // Top 16. |
3290 tst(scratch, Operand(0xffff0000)); | 3290 tst(scratch, Operand(0xffff0000)); |
3291 add(zeros, zeros, Operand(16), LeaveCC, eq); | 3291 add(zeros, zeros, Operand(16), LeaveCC, eq); |
3292 mov(scratch, Operand(scratch, LSL, 16), LeaveCC, eq); | 3292 mov(scratch, Operand(scratch, LSL, 16), LeaveCC, eq); |
3293 // Top 8. | 3293 // Top 8. |
3294 tst(scratch, Operand(0xff000000)); | 3294 tst(scratch, Operand(0xff000000)); |
3295 add(zeros, zeros, Operand(8), LeaveCC, eq); | 3295 add(zeros, zeros, Operand(8), LeaveCC, eq); |
3296 mov(scratch, Operand(scratch, LSL, 8), LeaveCC, eq); | 3296 mov(scratch, Operand(scratch, LSL, 8), LeaveCC, eq); |
3297 // Top 4. | 3297 // Top 4. |
3298 tst(scratch, Operand(0xf0000000)); | 3298 tst(scratch, Operand(0xf0000000)); |
(...skipping 407 matching lines...)
3706 DoubleRegister temp_double_reg) { | 3706 DoubleRegister temp_double_reg) { |
3707 Label above_zero; | 3707 Label above_zero; |
3708 Label done; | 3708 Label done; |
3709 Label in_bounds; | 3709 Label in_bounds; |
3710 | 3710 |
3711 Vmov(temp_double_reg, 0.0); | 3711 Vmov(temp_double_reg, 0.0); |
3712 VFPCompareAndSetFlags(input_reg, temp_double_reg); | 3712 VFPCompareAndSetFlags(input_reg, temp_double_reg); |
3713 b(gt, &above_zero); | 3713 b(gt, &above_zero); |
3714 | 3714 |
3715 // Double value is less than zero, NaN or Inf, return 0. | 3715 // Double value is less than zero, NaN or Inf, return 0. |
3716 mov(result_reg, Operand(0)); | 3716 mov(result_reg, Operand::Zero()); |
3717 b(al, &done); | 3717 b(al, &done); |
3718 | 3718 |
3719 // Double value is >= 255, return 255. | 3719 // Double value is >= 255, return 255. |
3720 bind(&above_zero); | 3720 bind(&above_zero); |
3721 Vmov(temp_double_reg, 255.0, result_reg); | 3721 Vmov(temp_double_reg, 255.0, result_reg); |
3722 VFPCompareAndSetFlags(input_reg, temp_double_reg); | 3722 VFPCompareAndSetFlags(input_reg, temp_double_reg); |
3723 b(le, &in_bounds); | 3723 b(le, &in_bounds); |
3724 mov(result_reg, Operand(255)); | 3724 mov(result_reg, Operand(255)); |
3725 b(al, &done); | 3725 b(al, &done); |
3726 | 3726 |
(...skipping 129 matching lines...)
3856 void CodePatcher::EmitCondition(Condition cond) { | 3856 void CodePatcher::EmitCondition(Condition cond) { |
3857 Instr instr = Assembler::instr_at(masm_.pc_); | 3857 Instr instr = Assembler::instr_at(masm_.pc_); |
3858 instr = (instr & ~kCondMask) | cond; | 3858 instr = (instr & ~kCondMask) | cond; |
3859 masm_.emit(instr); | 3859 masm_.emit(instr); |
3860 } | 3860 } |
3861 | 3861 |
3862 | 3862 |
3863 } } // namespace v8::internal | 3863 } } // namespace v8::internal |
3864 | 3864 |
3865 #endif // V8_TARGET_ARCH_ARM | 3865 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |
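
Reviewer note: the change in this file is mechanical — every zero immediate spelled as Operand(0) or Operand(0, RelocInfo::NONE) in the OLD column becomes the Operand::Zero() factory in the NEW column. As a rough illustration of the idiom only (a standalone sketch with simplified, illustrative names, not the actual assembler-arm.h declarations):

// Standalone sketch of the named-constructor idiom adopted by this CL.
// Names (RelocMode, immediate_) are illustrative, not V8's real header.
#include <cstdint>

enum class RelocMode { NONE };

class Operand {
 public:
  explicit Operand(int32_t immediate, RelocMode rmode = RelocMode::NONE)
      : immediate_(immediate), rmode_(rmode) {}

  // Named constructor: the one canonical spelling of a zero immediate.
  static Operand Zero() { return Operand(0); }

  int32_t immediate() const { return immediate_; }

 private:
  int32_t immediate_;
  RelocMode rmode_;
};

int main() {
  Operand zero = Operand::Zero();  // replaces Operand(0, RelocInfo::NONE)
  return zero.immediate();         // 0
}

The benefit is that the reloc-info detail lives in one place: call sites read as intent (Zero()) rather than as a constructor call carrying a reloc-mode argument, which is what makes a sweep like this one safe to do mechanically.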