| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 195 matching lines...) |
| 206 } | 206 } |
| 207 | 207 |
| 208 Label valid_result; | 208 Label valid_result; |
| 209 Label return_result; | 209 Label return_result; |
| 210 // If Invalid Operand or Zero Division exceptions are set, | 210 // If Invalid Operand or Zero Division exceptions are set, |
| 211 // return NaN. | 211 // return NaN. |
| 212 __ testb(rax, Immediate(5)); | 212 __ testb(rax, Immediate(5)); |
| 213 __ j(zero, &valid_result); | 213 __ j(zero, &valid_result); |
| 214 __ fstp(0); // Drop result in st(0). | 214 __ fstp(0); // Drop result in st(0). |
| 215 int64_t kNaNValue = V8_INT64_C(0x7ff8000000000000); | 215 int64_t kNaNValue = V8_INT64_C(0x7ff8000000000000); |
| 216 __ movq(rcx, kNaNValue, RelocInfo::NONE64); | 216 __ movq(rcx, kNaNValue); |
| 217 __ movq(Operand(rsp, kPointerSize), rcx); | 217 __ movq(Operand(rsp, kPointerSize), rcx); |
| 218 __ movsd(xmm0, Operand(rsp, kPointerSize)); | 218 __ movsd(xmm0, Operand(rsp, kPointerSize)); |
| 219 __ jmp(&return_result); | 219 __ jmp(&return_result); |
| 220 | 220 |
| 221 // If result is valid, return that. | 221 // If result is valid, return that. |
| 222 __ bind(&valid_result); | 222 __ bind(&valid_result); |
| 223 __ fstp_d(Operand(rsp, kPointerSize)); | 223 __ fstp_d(Operand(rsp, kPointerSize)); |
| 224 __ movsd(xmm0, Operand(rsp, kPointerSize)); | 224 __ movsd(xmm0, Operand(rsp, kPointerSize)); |
| 225 | 225 |
| 226 // Clean up FPU stack and exceptions and return xmm0 | 226 // Clean up FPU stack and exceptions and return xmm0 |
| (...skipping 104 matching lines...) |
| 331 kDontSaveFPRegs, | 331 kDontSaveFPRegs, |
| 332 EMIT_REMEMBERED_SET, | 332 EMIT_REMEMBERED_SET, |
| 333 OMIT_SMI_CHECK); | 333 OMIT_SMI_CHECK); |
| 334 | 334 |
| 335 // Convert smis to doubles and holes to hole NaNs. The Array's length | 335 // Convert smis to doubles and holes to hole NaNs. The Array's length |
| 336 // remains unchanged. | 336 // remains unchanged. |
| 337 STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset); | 337 STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset); |
| 338 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); | 338 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); |
| 339 | 339 |
| 340 Label loop, entry, convert_hole; | 340 Label loop, entry, convert_hole; |
| 341 __ movq(r15, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE64); | 341 __ movq(r15, BitCast<int64_t, uint64_t>(kHoleNanInt64)); |
| 342 // r15: the-hole NaN | 342 // r15: the-hole NaN |
| 343 __ jmp(&entry); | 343 __ jmp(&entry); |
| 344 | 344 |
| 345 // Allocate new backing store. | 345 // Allocate new backing store. |
| 346 __ bind(&new_backing_store); | 346 __ bind(&new_backing_store); |
| 347 __ lea(rdi, Operand(r9, times_8, FixedArray::kHeaderSize)); | 347 __ lea(rdi, Operand(r9, times_8, FixedArray::kHeaderSize)); |
| 348 __ Allocate(rdi, r14, r11, r15, fail, TAG_OBJECT); | 348 __ Allocate(rdi, r14, r11, r15, fail, TAG_OBJECT); |
| 349 // Set backing store's map | 349 // Set backing store's map |
| 350 __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); | 350 __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); |
| 351 __ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi); | 351 __ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi); |
| (...skipping 81 matching lines...) |
| 433 // r9 : number of elements | 433 // r9 : number of elements |
| 434 __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize)); | 434 __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize)); |
| 435 __ Allocate(rdi, r11, r14, r15, &gc_required, TAG_OBJECT); | 435 __ Allocate(rdi, r11, r14, r15, &gc_required, TAG_OBJECT); |
| 436 // r11: destination FixedArray | 436 // r11: destination FixedArray |
| 437 __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex); | 437 __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex); |
| 438 __ movq(FieldOperand(r11, HeapObject::kMapOffset), rdi); | 438 __ movq(FieldOperand(r11, HeapObject::kMapOffset), rdi); |
| 439 __ Integer32ToSmi(r14, r9); | 439 __ Integer32ToSmi(r14, r9); |
| 440 __ movq(FieldOperand(r11, FixedArray::kLengthOffset), r14); | 440 __ movq(FieldOperand(r11, FixedArray::kLengthOffset), r14); |
| 441 | 441 |
| 442 // Prepare for conversion loop. | 442 // Prepare for conversion loop. |
| 443 __ movq(rsi, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE64); | 443 __ movq(rsi, BitCast<int64_t, uint64_t>(kHoleNanInt64)); |
| 444 __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex); | 444 __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex); |
| 445 // rsi: the-hole NaN | 445 // rsi: the-hole NaN |
| 446 // rdi: pointer to the-hole | 446 // rdi: pointer to the-hole |
| 447 __ jmp(&entry); | 447 __ jmp(&entry); |
| 448 | 448 |
| 449 // Call into runtime if GC is required. | 449 // Call into runtime if GC is required. |
| 450 __ bind(&gc_required); | 450 __ bind(&gc_required); |
| 451 __ pop(rax); | 451 __ pop(rax); |
| 452 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 452 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 453 __ jmp(fail); | 453 __ jmp(fail); |
| 454 | 454 |
| 455 // Box doubles into heap numbers. | 455 // Box doubles into heap numbers. |
| 456 __ bind(&loop); | 456 __ bind(&loop); |
| 457 __ movq(r14, FieldOperand(r8, | 457 __ movq(r14, FieldOperand(r8, |
| 458 r9, | 458 r9, |
| 459 times_8, | 459 times_8, |
| 460 FixedDoubleArray::kHeaderSize)); | 460 FixedDoubleArray::kHeaderSize)); |
| 461 // r9 : current element's index | 461 // r9 : current element's index |
| 462 // r14: current element | 462 // r14: current element |
| 463 __ cmpq(r14, rsi); | 463 __ cmpq(r14, rsi); |
| 464 __ j(equal, &convert_hole); | 464 __ j(equal, &convert_hole); |
| 465 | 465 |
| 466 // Non-hole double, copy value into a heap number. | 466 // Non-hole double, copy value into a heap number. |
| 467 __ AllocateHeapNumber(rax, r15, &gc_required); | 467 __ AllocateHeapNumber(rax, r15, &gc_required); |
| 468 // rax: new heap number | 468 // rax: new heap number |
| 469 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), r14); | 469 __ MoveDouble(FieldOperand(rax, HeapNumber::kValueOffset), r14); |
| 470 __ movq(FieldOperand(r11, | 470 __ movq(FieldOperand(r11, |
| 471 r9, | 471 r9, |
| 472 times_pointer_size, | 472 times_pointer_size, |
| 473 FixedArray::kHeaderSize), | 473 FixedArray::kHeaderSize), |
| 474 rax); | 474 rax); |
| 475 __ movq(r15, r9); | 475 __ movq(r15, r9); |
| 476 __ RecordWriteArray(r11, | 476 __ RecordWriteArray(r11, |
| 477 rax, | 477 rax, |
| 478 r15, | 478 r15, |
| 479 kDontSaveFPRegs, | 479 kDontSaveFPRegs, |
| (...skipping 148 matching lines...) |
| 628 Register temp1, | 628 Register temp1, |
| 629 Register temp2) { | 629 Register temp2) { |
| 630 ASSERT(!input.is(result)); | 630 ASSERT(!input.is(result)); |
| 631 ASSERT(!input.is(double_scratch)); | 631 ASSERT(!input.is(double_scratch)); |
| 632 ASSERT(!result.is(double_scratch)); | 632 ASSERT(!result.is(double_scratch)); |
| 633 ASSERT(!temp1.is(temp2)); | 633 ASSERT(!temp1.is(temp2)); |
| 634 ASSERT(ExternalReference::math_exp_constants(0).address() != NULL); | 634 ASSERT(ExternalReference::math_exp_constants(0).address() != NULL); |
| 635 | 635 |
| 636 Label done; | 636 Label done; |
| 637 | 637 |
| 638 __ movq(kScratchRegister, ExternalReference::math_exp_constants(0)); | 638 __ Move(kScratchRegister, ExternalReference::math_exp_constants(0)); |
| 639 __ movsd(double_scratch, Operand(kScratchRegister, 0 * kDoubleSize)); | 639 __ movsd(double_scratch, Operand(kScratchRegister, 0 * kDoubleSize)); |
| 640 __ xorpd(result, result); | 640 __ xorpd(result, result); |
| 641 __ ucomisd(double_scratch, input); | 641 __ ucomisd(double_scratch, input); |
| 642 __ j(above_equal, &done); | 642 __ j(above_equal, &done); |
| 643 __ ucomisd(input, Operand(kScratchRegister, 1 * kDoubleSize)); | 643 __ ucomisd(input, Operand(kScratchRegister, 1 * kDoubleSize)); |
| 644 __ movsd(result, Operand(kScratchRegister, 2 * kDoubleSize)); | 644 __ movsd(result, Operand(kScratchRegister, 2 * kDoubleSize)); |
| 645 __ j(above_equal, &done); | 645 __ j(above_equal, &done); |
| 646 __ movsd(double_scratch, Operand(kScratchRegister, 3 * kDoubleSize)); | 646 __ movsd(double_scratch, Operand(kScratchRegister, 3 * kDoubleSize)); |
| 647 __ movsd(result, Operand(kScratchRegister, 4 * kDoubleSize)); | 647 __ movsd(result, Operand(kScratchRegister, 4 * kDoubleSize)); |
| 648 __ mulsd(double_scratch, input); | 648 __ mulsd(double_scratch, input); |
| 649 __ addsd(double_scratch, result); | 649 __ addsd(double_scratch, result); |
| 650 __ movq(temp2, double_scratch); | 650 __ movq(temp2, double_scratch); |
| 651 __ subsd(double_scratch, result); | 651 __ subsd(double_scratch, result); |
| 652 __ movsd(result, Operand(kScratchRegister, 6 * kDoubleSize)); | 652 __ movsd(result, Operand(kScratchRegister, 6 * kDoubleSize)); |
| 653 __ lea(temp1, Operand(temp2, 0x1ff800)); | 653 __ lea(temp1, Operand(temp2, 0x1ff800)); |
| 654 __ and_(temp2, Immediate(0x7ff)); | 654 __ and_(temp2, Immediate(0x7ff)); |
| 655 __ shr(temp1, Immediate(11)); | 655 __ shr(temp1, Immediate(11)); |
| 656 __ mulsd(double_scratch, Operand(kScratchRegister, 5 * kDoubleSize)); | 656 __ mulsd(double_scratch, Operand(kScratchRegister, 5 * kDoubleSize)); |
| 657 __ movq(kScratchRegister, ExternalReference::math_exp_log_table()); | 657 __ Move(kScratchRegister, ExternalReference::math_exp_log_table()); |
| 658 __ shl(temp1, Immediate(52)); | 658 __ shl(temp1, Immediate(52)); |
| 659 __ or_(temp1, Operand(kScratchRegister, temp2, times_8, 0)); | 659 __ or_(temp1, Operand(kScratchRegister, temp2, times_8, 0)); |
| 660 __ movq(kScratchRegister, ExternalReference::math_exp_constants(0)); | 660 __ Move(kScratchRegister, ExternalReference::math_exp_constants(0)); |
| 661 __ subsd(double_scratch, input); | 661 __ subsd(double_scratch, input); |
| 662 __ movsd(input, double_scratch); | 662 __ movsd(input, double_scratch); |
| 663 __ subsd(result, double_scratch); | 663 __ subsd(result, double_scratch); |
| 664 __ mulsd(input, double_scratch); | 664 __ mulsd(input, double_scratch); |
| 665 __ mulsd(result, input); | 665 __ mulsd(result, input); |
| 666 __ movq(input, temp1); | 666 __ movq(input, temp1); |
| 667 __ mulsd(result, Operand(kScratchRegister, 7 * kDoubleSize)); | 667 __ mulsd(result, Operand(kScratchRegister, 7 * kDoubleSize)); |
| 668 __ subsd(result, double_scratch); | 668 __ subsd(result, double_scratch); |
| 669 __ addsd(result, Operand(kScratchRegister, 8 * kDoubleSize)); | 669 __ addsd(result, Operand(kScratchRegister, 8 * kDoubleSize)); |
| 670 __ mulsd(result, input); | 670 __ mulsd(result, input); |
| (...skipping 83 matching lines...) |
| 754 // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize. | 754 // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize. |
| 755 return Operand(base_reg_, argument_count_reg_, times_pointer_size, | 755 return Operand(base_reg_, argument_count_reg_, times_pointer_size, |
| 756 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); | 756 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); |
| 757 } | 757 } |
| 758 } | 758 } |
| 759 | 759 |
| 760 | 760 |
| 761 } } // namespace v8::internal | 761 } } // namespace v8::internal |
| 762 | 762 |
| 763 #endif // V8_TARGET_ARCH_X64 | 763 #endif // V8_TARGET_ARCH_X64 |