| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 139 matching lines...) |
| 150 // Reserve space for the stack slots needed by the code. | 150 // Reserve space for the stack slots needed by the code. |
| 151 int slots = GetStackSlotCount(); | 151 int slots = GetStackSlotCount(); |
| 152 if (slots > 0) { | 152 if (slots > 0) { |
| 153 if (FLAG_debug_code) { | 153 if (FLAG_debug_code) { |
| 154 __ subq(rsp, Immediate(slots * kPointerSize)); | 154 __ subq(rsp, Immediate(slots * kPointerSize)); |
| 155 #ifdef _MSC_VER | 155 #ifdef _MSC_VER |
| 156 MakeSureStackPagesMapped(slots * kPointerSize); | 156 MakeSureStackPagesMapped(slots * kPointerSize); |
| 157 #endif | 157 #endif |
| 158 __ push(rax); | 158 __ push(rax); |
| 159 __ Set(rax, slots); | 159 __ Set(rax, slots); |
| 160 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE64); | 160 __ movq(kScratchRegister, kSlotsZapValue); |
| 161 Label loop; | 161 Label loop; |
| 162 __ bind(&loop); | 162 __ bind(&loop); |
| 163 __ movq(MemOperand(rsp, rax, times_pointer_size, 0), | 163 __ movq(MemOperand(rsp, rax, times_pointer_size, 0), |
| 164 kScratchRegister); | 164 kScratchRegister); |
| 165 __ decl(rax); | 165 __ decl(rax); |
| 166 __ j(not_zero, &loop); | 166 __ j(not_zero, &loop); |
| 167 __ pop(rax); | 167 __ pop(rax); |
| 168 } else { | 168 } else { |
| 169 __ subq(rsp, Immediate(slots * kPointerSize)); | 169 __ subq(rsp, Immediate(slots * kPointerSize)); |
| 170 #ifdef _MSC_VER | 170 #ifdef _MSC_VER |
| (...skipping 83 matching lines...) |
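Aside on the debug-code prologue hunk above: with --debug-code the prologue fills every freshly reserved stack slot with kSlotsZapValue so that reads of uninitialized slots stand out. A minimal C++ sketch of the emitted countdown loop (the function and parameter names are illustrative, not V8 API):

```cpp
#include <cstdint>

// rax counts down from `slots` to 1; each iteration stores the zap pattern
// at rsp + rax * kPointerSize, exactly as the movq/decl/j(not_zero) loop does.
void ZapReservedSlots(uint64_t* rsp, int slots, uint64_t zap_value) {
  for (int i = slots; i > 0; --i) {
    rsp[i] = zap_value;
  }
}
```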
| 254 __ bind(&jump_table_[i].label); | 254 __ bind(&jump_table_[i].label); |
| 255 Address entry = jump_table_[i].address; | 255 Address entry = jump_table_[i].address; |
| 256 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; | 256 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; |
| 257 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 257 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
| 258 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 258 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
| 259 Comment(";;; jump table entry %d.", i); | 259 Comment(";;; jump table entry %d.", i); |
| 260 } else { | 260 } else { |
| 261 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 261 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 262 } | 262 } |
| 263 if (jump_table_[i].needs_frame) { | 263 if (jump_table_[i].needs_frame) { |
| 264 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); | 264 __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); |
| 265 if (needs_frame.is_bound()) { | 265 if (needs_frame.is_bound()) { |
| 266 __ jmp(&needs_frame); | 266 __ jmp(&needs_frame); |
| 267 } else { | 267 } else { |
| 268 __ bind(&needs_frame); | 268 __ bind(&needs_frame); |
| 269 __ movq(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset)); |
| 269 __ push(rbp); | 270 __ push(rbp); |
| 270 __ movq(rbp, rsp); | 271 __ movq(rbp, rsp); |
| 271 __ push(rsi); | 272 __ push(rsi); |
| 272 // This variant of deopt can only be used with stubs. Since we don't | 273 // This variant of deopt can only be used with stubs. Since we don't |
| 273 // have a function pointer to install in the stack frame that we're | 274 // have a function pointer to install in the stack frame that we're |
| 274 // building, install a special marker there instead. | 275 // building, install a special marker there instead. |
| 275 ASSERT(info()->IsStub()); | 276 ASSERT(info()->IsStub()); |
| 276 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); | 277 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); |
| 277 __ push(rsi); | 278 __ push(rsi); |
| 278 __ movq(rsi, MemOperand(rsp, kPointerSize)); | 279 __ movq(rsi, MemOperand(rsp, kPointerSize)); |
| (...skipping 303 matching lines...) |
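For readers following the needs_frame path in the hunk above: since a stub has no JSFunction to install, the code builds the frame by hand, reloading the context from the caller's frame first (the newly added line 269) and pushing a STUB marker where the function would normally go. An illustrative layout, in push order (not a real V8 type):

```cpp
#include <cstdint>

// Field order mirrors the pushes, from higher to lower stack addresses.
struct StubFrameLayout {
  void* saved_rbp;       // push(rbp); movq(rbp, rsp)
  void* context;         // push(rsi), reloaded from the caller frame
  intptr_t stub_marker;  // push(Smi::FromInt(StackFrame::STUB))
};
```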
| 582 SaveFPRegsMode save_doubles) { | 583 SaveFPRegsMode save_doubles) { |
| 583 ASSERT(instr != NULL); | 584 ASSERT(instr != NULL); |
| 584 ASSERT(instr->HasPointerMap()); | 585 ASSERT(instr->HasPointerMap()); |
| 585 | 586 |
| 586 __ CallRuntime(function, num_arguments, save_doubles); | 587 __ CallRuntime(function, num_arguments, save_doubles); |
| 587 | 588 |
| 588 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); | 589 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 589 } | 590 } |
| 590 | 591 |
| 591 | 592 |
| 593 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
| 594 if (context->IsRegister()) { |
| 595 if (!ToRegister(context).is(rsi)) { |
| 596 __ movq(rsi, ToRegister(context)); |
| 597 } |
| 598 } else if (context->IsStackSlot()) { |
| 599 __ movq(rsi, ToOperand(context)); |
| 600 } else if (context->IsConstantOperand()) { |
| 601 HConstant* constant = |
| 602 chunk_->LookupConstant(LConstantOperand::cast(context)); |
| 603 __ Move(rsi, Handle<Object>::cast(constant->handle(isolate()))); |
| 604 } else { |
| 605 UNREACHABLE(); |
| 606 } |
| 607 } |
| 608 |
| 609 |
| 610 |
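The new LoadContextFromDeferred helper above centralizes how deferred code materializes its context in rsi, dispatching on the operand's form. The same dispatch in plain C++ (the enum and parameters are stand-ins for the LOperand API, purely for illustration):

```cpp
#include <cassert>
#include <cstdint>

enum class OperandKind { kRegister, kStackSlot, kConstant };

uint64_t LoadContext(OperandKind kind, uint64_t reg_value,
                     uint64_t slot_value, uint64_t constant_value) {
  switch (kind) {
    case OperandKind::kRegister:  return reg_value;       // movq(rsi, reg)
    case OperandKind::kStackSlot: return slot_value;      // movq(rsi, ToOperand)
    case OperandKind::kConstant:  return constant_value;  // Move(rsi, handle)
  }
  assert(false);  // UNREACHABLE()
  return 0;
}
```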
| 592 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 611 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
| 593 int argc, | 612 int argc, |
| 594 LInstruction* instr) { | 613 LInstruction* instr, |
| 595 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 614 LOperand* context) { |
| 615 LoadContextFromDeferred(context); |
| 616 |
| 596 __ CallRuntimeSaveDoubles(id); | 617 __ CallRuntimeSaveDoubles(id); |
| 597 RecordSafepointWithRegisters( | 618 RecordSafepointWithRegisters( |
| 598 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); | 619 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); |
| 599 } | 620 } |
| 600 | 621 |
| 601 | 622 |
| 602 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, | 623 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, |
| 603 Safepoint::DeoptMode mode) { | 624 Safepoint::DeoptMode mode) { |
| 604 if (!environment->HasBeenRegistered()) { | 625 if (!environment->HasBeenRegistered()) { |
| 605 // Physical stack frame layout: | 626 // Physical stack frame layout: |
| (...skipping 36 matching lines...) |
| 642 ASSERT(environment->HasBeenRegistered()); | 663 ASSERT(environment->HasBeenRegistered()); |
| 643 int id = environment->deoptimization_index(); | 664 int id = environment->deoptimization_index(); |
| 644 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 665 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
| 645 Address entry = | 666 Address entry = |
| 646 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 667 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 647 if (entry == NULL) { | 668 if (entry == NULL) { |
| 648 Abort(kBailoutWasNotPrepared); | 669 Abort(kBailoutWasNotPrepared); |
| 649 return; | 670 return; |
| 650 } | 671 } |
| 651 | 672 |
| 652 ASSERT(FLAG_deopt_every_n_times == 0); // Not yet implemented on x64. | 673 if (DeoptEveryNTimes()) { |
| 674 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); |
| 675 Label no_deopt; |
| 676 __ pushfq(); |
| 677 __ push(rax); |
| 678 Operand count_operand = masm()->ExternalOperand(count, kScratchRegister); |
| 679 __ movl(rax, count_operand); |
| 680 __ subl(rax, Immediate(1)); |
| 681 __ j(not_zero, &no_deopt, Label::kNear); |
| 682 if (FLAG_trap_on_deopt) __ int3(); |
| 683 __ movl(rax, Immediate(FLAG_deopt_every_n_times)); |
| 684 __ movl(count_operand, rax); |
| 685 __ pop(rax); |
| 686 __ popfq(); |
| 687 ASSERT(frame_is_built_); |
| 688 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
| 689 __ bind(&no_deopt); |
| 690 __ movl(count_operand, rax); |
| 691 __ pop(rax); |
| 692 __ popfq(); |
| 693 } |
| 653 | 694 |
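The DeoptEveryNTimes() block above implements --deopt-every-n-times: a global countdown that forces a bailout each time it hits zero, then reloads itself. The pushfq/push(rax) and pop(rax)/popfq bracketing on both exits exists only to keep rax and RFLAGS intact around the check. The counter logic itself, as a sketch (stress_deopt_count stands in for the external reference the generated code reads and writes):

```cpp
// Returns true when this call should be turned into a forced deopt.
bool ShouldForceDeopt(int* stress_deopt_count, int deopt_every_n_times) {
  if (--*stress_deopt_count != 0) return false;  // Fast path: keep running.
  *stress_deopt_count = deopt_every_n_times;     // Reload the counter,
  return true;                                   // then take the bailout.
}
```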
| 654 if (info()->ShouldTrapOnDeopt()) { | 695 if (info()->ShouldTrapOnDeopt()) { |
| 655 Label done; | 696 Label done; |
| 656 if (cc != no_condition) { | 697 if (cc != no_condition) { |
| 657 __ j(NegateCondition(cc), &done, Label::kNear); | 698 __ j(NegateCondition(cc), &done, Label::kNear); |
| 658 } | 699 } |
| 659 __ int3(); | 700 __ int3(); |
| 660 __ bind(&done); | 701 __ bind(&done); |
| 661 } | 702 } |
| 662 | 703 |
| (...skipping 146 matching lines...) |
| 809 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 850 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 810 kind, arguments, deopt_mode); | 851 kind, arguments, deopt_mode); |
| 811 for (int i = 0; i < operands->length(); i++) { | 852 for (int i = 0; i < operands->length(); i++) { |
| 812 LOperand* pointer = operands->at(i); | 853 LOperand* pointer = operands->at(i); |
| 813 if (pointer->IsStackSlot()) { | 854 if (pointer->IsStackSlot()) { |
| 814 safepoint.DefinePointerSlot(pointer->index(), zone()); | 855 safepoint.DefinePointerSlot(pointer->index(), zone()); |
| 815 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 856 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 816 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); | 857 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); |
| 817 } | 858 } |
| 818 } | 859 } |
| 819 if (kind & Safepoint::kWithRegisters) { | |
| 820 // Register rsi always contains a pointer to the context. | |
| 821 safepoint.DefinePointerRegister(rsi, zone()); | |
| 822 } | |
| 823 } | 860 } |
| 824 | 861 |
| 825 | 862 |
| 826 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 863 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
| 827 Safepoint::DeoptMode deopt_mode) { | 864 Safepoint::DeoptMode deopt_mode) { |
| 828 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); | 865 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); |
| 829 } | 866 } |
| 830 | 867 |
| 831 | 868 |
| 832 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { | 869 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { |
| (...skipping 55 matching lines...) |
| 888 DoGap(instr); | 925 DoGap(instr); |
| 889 } | 926 } |
| 890 | 927 |
| 891 | 928 |
| 892 void LCodeGen::DoParameter(LParameter* instr) { | 929 void LCodeGen::DoParameter(LParameter* instr) { |
| 893 // Nothing to do. | 930 // Nothing to do. |
| 894 } | 931 } |
| 895 | 932 |
| 896 | 933 |
| 897 void LCodeGen::DoCallStub(LCallStub* instr) { | 934 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 935 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 898 ASSERT(ToRegister(instr->result()).is(rax)); | 936 ASSERT(ToRegister(instr->result()).is(rax)); |
| 899 switch (instr->hydrogen()->major_key()) { | 937 switch (instr->hydrogen()->major_key()) { |
| 900 case CodeStub::RegExpConstructResult: { | 938 case CodeStub::RegExpConstructResult: { |
| 901 RegExpConstructResultStub stub; | 939 RegExpConstructResultStub stub; |
| 902 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 940 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 903 break; | 941 break; |
| 904 } | 942 } |
| 905 case CodeStub::RegExpExec: { | 943 case CodeStub::RegExpExec: { |
| 906 RegExpExecStub stub; | 944 RegExpExecStub stub; |
| 907 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 945 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| (...skipping 208 matching lines...) |
| 1116 // The multiplier is a uint32. | 1154 // The multiplier is a uint32. |
| 1117 ASSERT(multiplier > 0 && | 1155 ASSERT(multiplier > 0 && |
| 1118 multiplier < (static_cast<int64_t>(1) << 32)); | 1156 multiplier < (static_cast<int64_t>(1) << 32)); |
| 1119 // The multiply is int64, so sign-extend to r64. | 1157 // The multiply is int64, so sign-extend to r64. |
| 1120 __ movsxlq(reg1, dividend); | 1158 __ movsxlq(reg1, dividend); |
| 1121 if (divisor < 0 && | 1159 if (divisor < 0 && |
| 1122 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1160 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1123 __ neg(reg1); | 1161 __ neg(reg1); |
| 1124 DeoptimizeIf(zero, instr->environment()); | 1162 DeoptimizeIf(zero, instr->environment()); |
| 1125 } | 1163 } |
| 1126 __ movq(reg2, multiplier, RelocInfo::NONE64); | 1164 __ Set(reg2, multiplier); |
| 1127 // Result just fits in r64, because it's int32 * uint32. | 1165 // Result just fits in r64, because it's int32 * uint32. |
| 1128 __ imul(reg2, reg1); | 1166 __ imul(reg2, reg1); |
| 1129 | 1167 |
| 1130 __ addq(reg2, Immediate(1 << 30)); | 1168 __ addq(reg2, Immediate(1 << 30)); |
| 1131 __ sar(reg2, Immediate(shift)); | 1169 __ sar(reg2, Immediate(shift)); |
| 1132 } | 1170 } |
| 1133 } | 1171 } |
| 1134 | 1172 |
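The constant-divisor path above avoids idiv: the int32 dividend is sign-extended, multiplied by a precomputed uint32 magic multiplier (the int32 * uint32 product always fits in 64 bits), biased by 1 << 30, and arithmetically shifted. A sketch that mirrors only the instruction sequence; the multiplier/shift pair must come from the compiler's precomputation, which is not derived here:

```cpp
#include <cstdint>

int64_t DivideByConstant(int32_t dividend, uint32_t multiplier, int shift) {
  int64_t product = static_cast<int64_t>(dividend) *
                    static_cast<int64_t>(multiplier);  // movsxlq + imul
  product += INT64_C(1) << 30;                         // addq reg, 1 << 30
  return product >> shift;                             // sar reg, shift
}
```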
| 1135 | 1173 |
| 1136 void LCodeGen::DoDivI(LDivI* instr) { | 1174 void LCodeGen::DoDivI(LDivI* instr) { |
| (...skipping 475 matching lines...) |
| 1612 } else { | 1650 } else { |
| 1613 if (index->value() < JSDate::kFirstUncachedField) { | 1651 if (index->value() < JSDate::kFirstUncachedField) { |
| 1614 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 1652 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
| 1615 Operand stamp_operand = __ ExternalOperand(stamp); | 1653 Operand stamp_operand = __ ExternalOperand(stamp); |
| 1616 __ movq(kScratchRegister, stamp_operand); | 1654 __ movq(kScratchRegister, stamp_operand); |
| 1617 __ cmpq(kScratchRegister, FieldOperand(object, | 1655 __ cmpq(kScratchRegister, FieldOperand(object, |
| 1618 JSDate::kCacheStampOffset)); | 1656 JSDate::kCacheStampOffset)); |
| 1619 __ j(not_equal, &runtime, Label::kNear); | 1657 __ j(not_equal, &runtime, Label::kNear); |
| 1620 __ movq(result, FieldOperand(object, JSDate::kValueOffset + | 1658 __ movq(result, FieldOperand(object, JSDate::kValueOffset + |
| 1621 kPointerSize * index->value())); | 1659 kPointerSize * index->value())); |
| 1622 __ jmp(&done); | 1660 __ jmp(&done, Label::kNear); |
| 1623 } | 1661 } |
| 1624 __ bind(&runtime); | 1662 __ bind(&runtime); |
| 1625 __ PrepareCallCFunction(2); | 1663 __ PrepareCallCFunction(2); |
| 1626 __ movq(arg_reg_1, object); | 1664 __ movq(arg_reg_1, object); |
| 1627 __ movq(arg_reg_2, index, RelocInfo::NONE64); | 1665 __ movq(arg_reg_2, index, RelocInfo::NONE64); |
| 1628 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 1666 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
| 1629 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 1630 __ bind(&done); | 1667 __ bind(&done); |
| 1631 } | 1668 } |
| 1632 } | 1669 } |
| 1633 | 1670 |
| 1634 | 1671 |
| 1635 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { | 1672 Operand LCodeGen::BuildSeqStringOperand(Register string, |
| 1673 LOperand* index, |
| 1674 String::Encoding encoding) { |
| 1675 if (index->IsConstantOperand()) { |
| 1676 int offset = ToInteger32(LConstantOperand::cast(index)); |
| 1677 if (encoding == String::TWO_BYTE_ENCODING) { |
| 1678 offset *= kUC16Size; |
| 1679 } |
| 1680 STATIC_ASSERT(kCharSize == 1); |
| 1681 return FieldOperand(string, SeqString::kHeaderSize + offset); |
| 1682 } |
| 1683 return FieldOperand( |
| 1684 string, ToRegister(index), |
| 1685 encoding == String::ONE_BYTE_ENCODING ? times_1 : times_2, |
| 1686 SeqString::kHeaderSize); |
| 1687 } |
| 1688 |
| 1689 |
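BuildSeqStringOperand above folds the encoding into the address arithmetic: a constant index becomes a fixed offset past SeqString::kHeaderSize, a register index becomes a scaled-index operand (times_1 for one-byte, times_2 for two-byte strings). The effective address it produces, sketched with the tag subtraction FieldOperand performs (kHeapObjectTag == 1 is assumed):

```cpp
#include <cstdint>

uintptr_t SeqStringCharAddress(uintptr_t tagged_string, intptr_t index,
                               bool one_byte, uintptr_t header_size) {
  const uintptr_t kHeapObjectTag = 1;      // Assumed tag value.
  uintptr_t char_size = one_byte ? 1 : 2;  // kCharSize vs. kUC16Size.
  return tagged_string - kHeapObjectTag + header_size + index * char_size;
}
```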
| 1690 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) { |
| 1691 String::Encoding encoding = instr->hydrogen()->encoding(); |
| 1692 Register result = ToRegister(instr->result()); |
| 1636 Register string = ToRegister(instr->string()); | 1693 Register string = ToRegister(instr->string()); |
| 1637 Register index = ToRegister(instr->index()); | |
| 1638 Register value = ToRegister(instr->value()); | |
| 1639 String::Encoding encoding = instr->encoding(); | |
| 1640 | 1694 |
| 1641 if (FLAG_debug_code) { | 1695 if (FLAG_debug_code) { |
| 1642 __ push(value); | 1696 __ push(string); |
| 1643 __ movq(value, FieldOperand(string, HeapObject::kMapOffset)); | 1697 __ movq(string, FieldOperand(string, HeapObject::kMapOffset)); |
| 1644 __ movzxbq(value, FieldOperand(value, Map::kInstanceTypeOffset)); | 1698 __ movzxbq(string, FieldOperand(string, Map::kInstanceTypeOffset)); |
| 1645 | 1699 |
| 1646 __ andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask)); | 1700 __ andb(string, Immediate(kStringRepresentationMask | kStringEncodingMask)); |
| 1647 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 1701 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1648 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 1702 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1649 __ cmpq(value, Immediate(encoding == String::ONE_BYTE_ENCODING | 1703 __ cmpq(string, Immediate(encoding == String::ONE_BYTE_ENCODING |
| 1650 ? one_byte_seq_type : two_byte_seq_type)); | 1704 ? one_byte_seq_type : two_byte_seq_type)); |
| 1651 __ Check(equal, kUnexpectedStringType); | 1705 __ Check(equal, kUnexpectedStringType); |
| 1652 __ pop(value); | 1706 __ pop(string); |
| 1653 } | 1707 } |
| 1654 | 1708 |
| 1709 Operand operand = BuildSeqStringOperand(string, instr->index(), encoding); |
| 1655 if (encoding == String::ONE_BYTE_ENCODING) { | 1710 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1656 __ movb(FieldOperand(string, index, times_1, SeqString::kHeaderSize), | 1711 __ movzxbl(result, operand); |
| 1657 value); | |
| 1658 } else { | 1712 } else { |
| 1659 __ movw(FieldOperand(string, index, times_2, SeqString::kHeaderSize), | 1713 __ movzxwl(result, operand); |
| 1660 value); | 1714 } |
| 1715 } |
| 1716 |
| 1717 |
| 1718 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { |
| 1719 String::Encoding encoding = instr->hydrogen()->encoding(); |
| 1720 Register string = ToRegister(instr->string()); |
| 1721 |
| 1722 if (FLAG_debug_code) { |
| 1723 __ push(string); |
| 1724 __ movq(string, FieldOperand(string, HeapObject::kMapOffset)); |
| 1725 __ movzxbq(string, FieldOperand(string, Map::kInstanceTypeOffset)); |
| 1726 |
| 1727 __ andb(string, Immediate(kStringRepresentationMask | kStringEncodingMask)); |
| 1728 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1729 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1730 __ cmpq(string, Immediate(encoding == String::ONE_BYTE_ENCODING |
| 1731 ? one_byte_seq_type : two_byte_seq_type)); |
| 1732 __ Check(equal, kUnexpectedStringType); |
| 1733 __ pop(string); |
| 1734 } |
| 1735 |
| 1736 Operand operand = BuildSeqStringOperand(string, instr->index(), encoding); |
| 1737 if (instr->value()->IsConstantOperand()) { |
| 1738 int value = ToInteger32(LConstantOperand::cast(instr->value())); |
| 1739 ASSERT_LE(0, value); |
| 1740 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1741 ASSERT_LE(value, String::kMaxOneByteCharCode); |
| 1742 __ movb(operand, Immediate(value)); |
| 1743 } else { |
| 1744 ASSERT_LE(value, String::kMaxUtf16CodeUnit); |
| 1745 __ movw(operand, Immediate(value)); |
| 1746 } |
| 1747 } else { |
| 1748 Register value = ToRegister(instr->value()); |
| 1749 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1750 __ movb(operand, value); |
| 1751 } else { |
| 1752 __ movw(operand, value); |
| 1753 } |
| 1661 } | 1754 } |
| 1662 } | 1755 } |
| 1663 | 1756 |
| 1664 | 1757 |
| 1665 void LCodeGen::DoThrow(LThrow* instr) { | 1758 void LCodeGen::DoThrow(LThrow* instr) { |
| 1666 __ push(ToRegister(instr->value())); | 1759 __ push(ToRegister(instr->value())); |
| 1760 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 1667 CallRuntime(Runtime::kThrow, 1, instr); | 1761 CallRuntime(Runtime::kThrow, 1, instr); |
| 1668 | 1762 |
| 1669 if (FLAG_debug_code) { | 1763 if (FLAG_debug_code) { |
| 1670 Comment("Unreachable code."); | 1764 Comment("Unreachable code."); |
| 1671 __ int3(); | 1765 __ int3(); |
| 1672 } | 1766 } |
| 1673 } | 1767 } |
| 1674 | 1768 |
| 1675 | 1769 |
| 1676 void LCodeGen::DoAddI(LAddI* instr) { | 1770 void LCodeGen::DoAddI(LAddI* instr) { |
| (...skipping 87 matching lines...) |
| 1764 __ j(condition, &return_left, Label::kNear); | 1858 __ j(condition, &return_left, Label::kNear); |
| 1765 __ jmp(&return_right, Label::kNear); | 1859 __ jmp(&return_right, Label::kNear); |
| 1766 | 1860 |
| 1767 __ bind(&check_zero); | 1861 __ bind(&check_zero); |
| 1768 XMMRegister xmm_scratch = double_scratch0(); | 1862 XMMRegister xmm_scratch = double_scratch0(); |
| 1769 __ xorps(xmm_scratch, xmm_scratch); | 1863 __ xorps(xmm_scratch, xmm_scratch); |
| 1770 __ ucomisd(left_reg, xmm_scratch); | 1864 __ ucomisd(left_reg, xmm_scratch); |
| 1771 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. | 1865 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. |
| 1772 // At this point, both left and right are either 0 or -0. | 1866 // At this point, both left and right are either 0 or -0. |
| 1773 if (operation == HMathMinMax::kMathMin) { | 1867 if (operation == HMathMinMax::kMathMin) { |
| 1774 __ orpd(left_reg, right_reg); | 1868 __ orps(left_reg, right_reg); |
| 1775 } else { | 1869 } else { |
| 1776 // Since we operate on +0 and/or -0, addsd and andsd have the same effect. | 1870 // Since we operate on +0 and/or -0, addsd and andsd have the same effect. |
| 1777 __ addsd(left_reg, right_reg); | 1871 __ addsd(left_reg, right_reg); |
| 1778 } | 1872 } |
| 1779 __ jmp(&return_left, Label::kNear); | 1873 __ jmp(&return_left, Label::kNear); |
| 1780 | 1874 |
| 1781 __ bind(&check_nan_left); | 1875 __ bind(&check_nan_left); |
| 1782 __ ucomisd(left_reg, left_reg); // NaN check. | 1876 __ ucomisd(left_reg, left_reg); // NaN check. |
| 1783 __ j(parity_even, &return_left, Label::kNear); | 1877 __ j(parity_even, &return_left, Label::kNear); |
| 1784 __ bind(&return_right); | 1878 __ bind(&return_right); |
| 1785 __ movsd(left_reg, right_reg); | 1879 __ movaps(left_reg, right_reg); |
| 1786 | 1880 |
| 1787 __ bind(&return_left); | 1881 __ bind(&return_left); |
| 1788 } | 1882 } |
| 1789 } | 1883 } |
| 1790 | 1884 |
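On the orps/addsd pair above: once both inputs are known to be +0 or -0, the result is decided entirely by the sign bits. ORing the bit patterns yields -0 whenever either input is -0, which is what min wants; addsd yields +0 under round-to-nearest unless both inputs are -0, matching an AND of the sign bits, which is what max wants. Over raw bit patterns:

```cpp
#include <cstdint>

// Inputs are the bit patterns of +0.0 (0) or -0.0 (1ULL << 63).
uint64_t MinOfSignedZeros(uint64_t l, uint64_t r) { return l | r; }  // orps
uint64_t MaxOfSignedZeros(uint64_t l, uint64_t r) { return l & r; }  // addsd
```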
| 1791 | 1885 |
| 1792 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { | 1886 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { |
| 1793 XMMRegister left = ToDoubleRegister(instr->left()); | 1887 XMMRegister left = ToDoubleRegister(instr->left()); |
| 1794 XMMRegister right = ToDoubleRegister(instr->right()); | 1888 XMMRegister right = ToDoubleRegister(instr->right()); |
| 1795 XMMRegister result = ToDoubleRegister(instr->result()); | 1889 XMMRegister result = ToDoubleRegister(instr->result()); |
| (...skipping 15 matching lines...) |
| 1811 // when there is a mulsd depending on the result | 1905 // when there is a mulsd depending on the result |
| 1812 __ movaps(left, left); | 1906 __ movaps(left, left); |
| 1813 break; | 1907 break; |
| 1814 case Token::MOD: { | 1908 case Token::MOD: { |
| 1815 XMMRegister xmm_scratch = double_scratch0(); | 1909 XMMRegister xmm_scratch = double_scratch0(); |
| 1816 __ PrepareCallCFunction(2); | 1910 __ PrepareCallCFunction(2); |
| 1817 __ movaps(xmm_scratch, left); | 1911 __ movaps(xmm_scratch, left); |
| 1818 ASSERT(right.is(xmm1)); | 1912 ASSERT(right.is(xmm1)); |
| 1819 __ CallCFunction( | 1913 __ CallCFunction( |
| 1820 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); | 1914 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); |
| 1821 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 1822 __ movaps(result, xmm_scratch); | 1915 __ movaps(result, xmm_scratch); |
| 1823 break; | 1916 break; |
| 1824 } | 1917 } |
| 1825 default: | 1918 default: |
| 1826 UNREACHABLE(); | 1919 UNREACHABLE(); |
| 1827 break; | 1920 break; |
| 1828 } | 1921 } |
| 1829 } | 1922 } |
| 1830 | 1923 |
| 1831 | 1924 |
| 1832 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1925 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1926 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 1833 ASSERT(ToRegister(instr->left()).is(rdx)); | 1927 ASSERT(ToRegister(instr->left()).is(rdx)); |
| 1834 ASSERT(ToRegister(instr->right()).is(rax)); | 1928 ASSERT(ToRegister(instr->right()).is(rax)); |
| 1835 ASSERT(ToRegister(instr->result()).is(rax)); | 1929 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1836 | 1930 |
| 1837 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1931 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 1838 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1932 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1839 __ nop(); // Signals no inlined code. | 1933 __ nop(); // Signals no inlined code. |
| 1840 } | 1934 } |
| 1841 | 1935 |
| 1842 | 1936 |
| (...skipping 302 matching lines...) |
| 2145 __ subq(rsp, Immediate(kDoubleSize)); | 2239 __ subq(rsp, Immediate(kDoubleSize)); |
| 2146 __ movsd(MemOperand(rsp, 0), input_reg); | 2240 __ movsd(MemOperand(rsp, 0), input_reg); |
| 2147 __ addq(rsp, Immediate(kDoubleSize)); | 2241 __ addq(rsp, Immediate(kDoubleSize)); |
| 2148 | 2242 |
| 2149 int offset = sizeof(kHoleNanUpper32); | 2243 int offset = sizeof(kHoleNanUpper32); |
| 2150 __ cmpl(MemOperand(rsp, -offset), Immediate(kHoleNanUpper32)); | 2244 __ cmpl(MemOperand(rsp, -offset), Immediate(kHoleNanUpper32)); |
| 2151 EmitBranch(instr, equal); | 2245 EmitBranch(instr, equal); |
| 2152 } | 2246 } |
| 2153 | 2247 |
| 2154 | 2248 |
| 2249 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) { |
| 2250 Representation rep = instr->hydrogen()->value()->representation(); |
| 2251 ASSERT(!rep.IsInteger32()); |
| 2252 |
| 2253 if (rep.IsDouble()) { |
| 2254 XMMRegister value = ToDoubleRegister(instr->value()); |
| 2255 XMMRegister xmm_scratch = double_scratch0(); |
| 2256 __ xorps(xmm_scratch, xmm_scratch); |
| 2257 __ ucomisd(xmm_scratch, value); |
| 2258 EmitFalseBranch(instr, not_equal); |
| 2259 __ movmskpd(kScratchRegister, value); |
| 2260 __ testl(kScratchRegister, Immediate(1)); |
| 2261 EmitBranch(instr, not_zero); |
| 2262 } else { |
| 2263 Register value = ToRegister(instr->value()); |
| 2264 Handle<Map> map = masm()->isolate()->factory()->heap_number_map(); |
| 2265 __ CheckMap(value, map, instr->FalseLabel(chunk()), DO_SMI_CHECK); |
| 2266 __ cmpl(FieldOperand(value, HeapNumber::kExponentOffset), |
| 2267 Immediate(0x80000000)); |
| 2268 EmitFalseBranch(instr, not_equal); |
| 2269 __ cmpl(FieldOperand(value, HeapNumber::kMantissaOffset), |
| 2270 Immediate(0x00000000)); |
| 2271 EmitBranch(instr, equal); |
| 2272 } |
| 2273 } |
| 2274 |
| 2275 |
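The double path of the new DoCompareMinusZeroAndBranch above takes two steps because ucomisd treats +0 and -0 as equal: first rule out values that are not zero at all, then read the sign bit via movmskpd. Equivalent C++ for ordinary (non-NaN) inputs:

```cpp
#include <cstdint>
#include <cstring>

bool IsMinusZero(double value) {
  if (value != 0.0) return false;   // ucomisd against a zeroed xmm_scratch.
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);
  return (bits >> 63) != 0;         // movmskpd bit 0 is the double's sign.
}
```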
| 2155 Condition LCodeGen::EmitIsObject(Register input, | 2276 Condition LCodeGen::EmitIsObject(Register input, |
| 2156 Label* is_not_object, | 2277 Label* is_not_object, |
| 2157 Label* is_object) { | 2278 Label* is_object) { |
| 2158 ASSERT(!input.is(kScratchRegister)); | 2279 ASSERT(!input.is(kScratchRegister)); |
| 2159 | 2280 |
| 2160 __ JumpIfSmi(input, is_not_object); | 2281 __ JumpIfSmi(input, is_not_object); |
| 2161 | 2282 |
| 2162 __ CompareRoot(input, Heap::kNullValueRootIndex); | 2283 __ CompareRoot(input, Heap::kNullValueRootIndex); |
| 2163 __ j(equal, is_object); | 2284 __ j(equal, is_object); |
| 2164 | 2285 |
| (...skipping 72 matching lines...) |
| 2237 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); | 2358 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); |
| 2238 } | 2359 } |
| 2239 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); | 2360 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); |
| 2240 __ testb(FieldOperand(temp, Map::kBitFieldOffset), | 2361 __ testb(FieldOperand(temp, Map::kBitFieldOffset), |
| 2241 Immediate(1 << Map::kIsUndetectable)); | 2362 Immediate(1 << Map::kIsUndetectable)); |
| 2242 EmitBranch(instr, not_zero); | 2363 EmitBranch(instr, not_zero); |
| 2243 } | 2364 } |
| 2244 | 2365 |
| 2245 | 2366 |
| 2246 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { | 2367 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { |
| 2368 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 2247 Token::Value op = instr->op(); | 2369 Token::Value op = instr->op(); |
| 2248 | 2370 |
| 2249 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); | 2371 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
| 2250 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2372 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2251 | 2373 |
| 2252 Condition condition = TokenToCondition(op, false); | 2374 Condition condition = TokenToCondition(op, false); |
| 2253 __ testq(rax, rax); | 2375 __ testq(rax, rax); |
| 2254 | 2376 |
| 2255 EmitBranch(instr, condition); | 2377 EmitBranch(instr, condition); |
| 2256 } | 2378 } |
| (...skipping 136 matching lines...) |
| 2393 | 2515 |
| 2394 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { | 2516 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
| 2395 Register reg = ToRegister(instr->value()); | 2517 Register reg = ToRegister(instr->value()); |
| 2396 | 2518 |
| 2397 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 2519 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
| 2398 EmitBranch(instr, equal); | 2520 EmitBranch(instr, equal); |
| 2399 } | 2521 } |
| 2400 | 2522 |
| 2401 | 2523 |
| 2402 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2524 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2525 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 2403 InstanceofStub stub(InstanceofStub::kNoFlags); | 2526 InstanceofStub stub(InstanceofStub::kNoFlags); |
| 2404 __ push(ToRegister(instr->left())); | 2527 __ push(ToRegister(instr->left())); |
| 2405 __ push(ToRegister(instr->right())); | 2528 __ push(ToRegister(instr->right())); |
| 2406 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2529 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2407 Label true_value, done; | 2530 Label true_value, done; |
| 2408 __ testq(rax, rax); | 2531 __ testq(rax, rax); |
| 2409 __ j(zero, &true_value, Label::kNear); | 2532 __ j(zero, &true_value, Label::kNear); |
| 2410 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); | 2533 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); |
| 2411 __ jmp(&done, Label::kNear); | 2534 __ jmp(&done, Label::kNear); |
| 2412 __ bind(&true_value); | 2535 __ bind(&true_value); |
| (...skipping 11 matching lines...) |
| 2424 virtual void Generate() V8_OVERRIDE { | 2547 virtual void Generate() V8_OVERRIDE { |
| 2425 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); | 2548 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); |
| 2426 } | 2549 } |
| 2427 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 2550 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
| 2428 Label* map_check() { return &map_check_; } | 2551 Label* map_check() { return &map_check_; } |
| 2429 private: | 2552 private: |
| 2430 LInstanceOfKnownGlobal* instr_; | 2553 LInstanceOfKnownGlobal* instr_; |
| 2431 Label map_check_; | 2554 Label map_check_; |
| 2432 }; | 2555 }; |
| 2433 | 2556 |
| 2434 | 2557 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 2435 DeferredInstanceOfKnownGlobal* deferred; | 2558 DeferredInstanceOfKnownGlobal* deferred; |
| 2436 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); | 2559 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); |
| 2437 | 2560 |
| 2438 Label done, false_result; | 2561 Label done, false_result; |
| 2439 Register object = ToRegister(instr->value()); | 2562 Register object = ToRegister(instr->value()); |
| 2440 | 2563 |
| 2441 // A Smi is not an instance of anything. | 2564 // A Smi is not an instance of anything. |
| 2442 __ JumpIfSmi(object, &false_result); | 2565 __ JumpIfSmi(object, &false_result, Label::kNear); |
| 2443 | 2566 |
| 2444 // This is the inlined call site instanceof cache. The two occurrences of the | 2567 // This is the inlined call site instanceof cache. The two occurrences of the |
| 2445 // hole value will be patched to the last map/result pair generated by the | 2568 // hole value will be patched to the last map/result pair generated by the |
| 2446 // instanceof stub. | 2569 // instanceof stub. |
| 2447 Label cache_miss; | 2570 Label cache_miss; |
| 2448 // Use a temp register to avoid memory operands with variable lengths. | 2571 // Use a temp register to avoid memory operands with variable lengths. |
| 2449 Register map = ToRegister(instr->temp()); | 2572 Register map = ToRegister(instr->temp()); |
| 2450 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); | 2573 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); |
| 2451 __ bind(deferred->map_check()); // Label for calculating code patching. | 2574 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 2452 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); | 2575 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); |
| 2453 __ movq(kScratchRegister, cache_cell, RelocInfo::CELL); | 2576 __ movq(kScratchRegister, cache_cell, RelocInfo::CELL); |
| 2454 __ cmpq(map, Operand(kScratchRegister, 0)); | 2577 __ cmpq(map, Operand(kScratchRegister, 0)); |
| 2455 __ j(not_equal, &cache_miss, Label::kNear); | 2578 __ j(not_equal, &cache_miss, Label::kNear); |
| 2456 // Patched to load either true or false. | 2579 // Patched to load either true or false. |
| 2457 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); | 2580 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); |
| 2458 #ifdef DEBUG | 2581 #ifdef DEBUG |
| 2459 // Check that the code size between patch label and patch sites is invariant. | 2582 // Check that the code size between patch label and patch sites is invariant. |
| 2460 Label end_of_patched_code; | 2583 Label end_of_patched_code; |
| 2461 __ bind(&end_of_patched_code); | 2584 __ bind(&end_of_patched_code); |
| 2462 ASSERT(true); | 2585 ASSERT(true); |
| 2463 #endif | 2586 #endif |
| 2464 __ jmp(&done); | 2587 __ jmp(&done, Label::kNear); |
| 2465 | 2588 |
| 2466 // The inlined call site cache did not match. Check for null and string | 2589 // The inlined call site cache did not match. Check for null and string |
| 2467 // before calling the deferred code. | 2590 // before calling the deferred code. |
| 2468 __ bind(&cache_miss); // Null is not an instance of anything. | 2591 __ bind(&cache_miss); // Null is not an instance of anything. |
| 2469 __ CompareRoot(object, Heap::kNullValueRootIndex); | 2592 __ CompareRoot(object, Heap::kNullValueRootIndex); |
| 2470 __ j(equal, &false_result, Label::kNear); | 2593 __ j(equal, &false_result, Label::kNear); |
| 2471 | 2594 |
| 2472 // String values are not instances of anything. | 2595 // String values are not instances of anything. |
| 2473 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); | 2596 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); |
| 2474 | 2597 |
| (...skipping 34 matching lines...) |
| 2509 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); | 2632 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); |
| 2510 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | 2633 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); |
| 2511 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 2634 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 2512 // Move result to a register that survives the end of the | 2635 // Move result to a register that survives the end of the |
| 2513 // PushSafepointRegisterScope. | 2636 // PushSafepointRegisterScope. |
| 2514 __ movq(kScratchRegister, rax); | 2637 __ movq(kScratchRegister, rax); |
| 2515 } | 2638 } |
| 2516 __ testq(kScratchRegister, kScratchRegister); | 2639 __ testq(kScratchRegister, kScratchRegister); |
| 2517 Label load_false; | 2640 Label load_false; |
| 2518 Label done; | 2641 Label done; |
| 2519 __ j(not_zero, &load_false); | 2642 __ j(not_zero, &load_false, Label::kNear); |
| 2520 __ LoadRoot(rax, Heap::kTrueValueRootIndex); | 2643 __ LoadRoot(rax, Heap::kTrueValueRootIndex); |
| 2521 __ jmp(&done); | 2644 __ jmp(&done, Label::kNear); |
| 2522 __ bind(&load_false); | 2645 __ bind(&load_false); |
| 2523 __ LoadRoot(rax, Heap::kFalseValueRootIndex); | 2646 __ LoadRoot(rax, Heap::kFalseValueRootIndex); |
| 2524 __ bind(&done); | 2647 __ bind(&done); |
| 2525 } | 2648 } |
| 2526 | 2649 |
| 2527 | 2650 |
| 2528 void LCodeGen::DoCmpT(LCmpT* instr) { | 2651 void LCodeGen::DoCmpT(LCmpT* instr) { |
| 2652 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 2529 Token::Value op = instr->op(); | 2653 Token::Value op = instr->op(); |
| 2530 | 2654 |
| 2531 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); | 2655 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
| 2532 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2656 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2533 | 2657 |
| 2534 Condition condition = TokenToCondition(op, false); | 2658 Condition condition = TokenToCondition(op, false); |
| 2535 Label true_value, done; | 2659 Label true_value, done; |
| 2536 __ testq(rax, rax); | 2660 __ testq(rax, rax); |
| 2537 __ j(condition, &true_value, Label::kNear); | 2661 __ j(condition, &true_value, Label::kNear); |
| 2538 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); | 2662 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); |
| 2539 __ jmp(&done, Label::kNear); | 2663 __ jmp(&done, Label::kNear); |
| 2540 __ bind(&true_value); | 2664 __ bind(&true_value); |
| 2541 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); | 2665 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); |
| 2542 __ bind(&done); | 2666 __ bind(&done); |
| 2543 } | 2667 } |
| 2544 | 2668 |
| 2545 | 2669 |
| 2546 void LCodeGen::DoReturn(LReturn* instr) { | 2670 void LCodeGen::DoReturn(LReturn* instr) { |
| 2547 if (FLAG_trace && info()->IsOptimizing()) { | 2671 if (FLAG_trace && info()->IsOptimizing()) { |
| 2548 // Preserve the return value on the stack and rely on the runtime | 2672 // Preserve the return value on the stack and rely on the runtime call |
| 2549 // call to return the value in the same register. | 2673 // to return the value in the same register. We're leaving the code |
| 2674 // managed by the register allocator and tearing down the frame, so it's |
| 2675 // safe to write to the context register. |
| 2550 __ push(rax); | 2676 __ push(rax); |
| 2677 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 2551 __ CallRuntime(Runtime::kTraceExit, 1); | 2678 __ CallRuntime(Runtime::kTraceExit, 1); |
| 2552 } | 2679 } |
| 2553 if (info()->saves_caller_doubles()) { | 2680 if (info()->saves_caller_doubles()) { |
| 2554 ASSERT(NeedsEagerFrame()); | 2681 ASSERT(NeedsEagerFrame()); |
| 2555 BitVector* doubles = chunk()->allocated_double_registers(); | 2682 BitVector* doubles = chunk()->allocated_double_registers(); |
| 2556 BitVector::Iterator save_iterator(doubles); | 2683 BitVector::Iterator save_iterator(doubles); |
| 2557 int count = 0; | 2684 int count = 0; |
| 2558 while (!save_iterator.Done()) { | 2685 while (!save_iterator.Done()) { |
| 2559 __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()), | 2686 __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()), |
| 2560 MemOperand(rsp, count * kDoubleSize)); | 2687 MemOperand(rsp, count * kDoubleSize)); |
| (...skipping 30 matching lines...) |
| 2591 Register result = ToRegister(instr->result()); | 2718 Register result = ToRegister(instr->result()); |
| 2592 __ LoadGlobalCell(result, instr->hydrogen()->cell().handle()); | 2719 __ LoadGlobalCell(result, instr->hydrogen()->cell().handle()); |
| 2593 if (instr->hydrogen()->RequiresHoleCheck()) { | 2720 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2594 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2721 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 2595 DeoptimizeIf(equal, instr->environment()); | 2722 DeoptimizeIf(equal, instr->environment()); |
| 2596 } | 2723 } |
| 2597 } | 2724 } |
| 2598 | 2725 |
| 2599 | 2726 |
| 2600 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { | 2727 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { |
| 2728 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 2601 ASSERT(ToRegister(instr->global_object()).is(rax)); | 2729 ASSERT(ToRegister(instr->global_object()).is(rax)); |
| 2602 ASSERT(ToRegister(instr->result()).is(rax)); | 2730 ASSERT(ToRegister(instr->result()).is(rax)); |
| 2603 | 2731 |
| 2604 __ Move(rcx, instr->name()); | 2732 __ Move(rcx, instr->name()); |
| 2605 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : | 2733 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : |
| 2606 RelocInfo::CODE_TARGET_CONTEXT; | 2734 RelocInfo::CODE_TARGET_CONTEXT; |
| 2607 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2735 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 2608 CallCode(ic, mode, instr); | 2736 CallCode(ic, mode, instr); |
| 2609 } | 2737 } |
| 2610 | 2738 |
| (...skipping 18 matching lines...) |
| 2629 } else { | 2757 } else { |
| 2630 // Store the value. | 2758 // Store the value. |
| 2631 __ movq(kScratchRegister, cell_handle, RelocInfo::CELL); | 2759 __ movq(kScratchRegister, cell_handle, RelocInfo::CELL); |
| 2632 __ movq(Operand(kScratchRegister, 0), value); | 2760 __ movq(Operand(kScratchRegister, 0), value); |
| 2633 } | 2761 } |
| 2634 // Cells are always rescanned, so no write barrier here. | 2762 // Cells are always rescanned, so no write barrier here. |
| 2635 } | 2763 } |
| 2636 | 2764 |
| 2637 | 2765 |
| 2638 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { | 2766 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { |
| 2767 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 2639 ASSERT(ToRegister(instr->global_object()).is(rdx)); | 2768 ASSERT(ToRegister(instr->global_object()).is(rdx)); |
| 2640 ASSERT(ToRegister(instr->value()).is(rax)); | 2769 ASSERT(ToRegister(instr->value()).is(rax)); |
| 2641 | 2770 |
| 2642 __ Move(rcx, instr->name()); | 2771 __ Move(rcx, instr->name()); |
| 2643 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 2772 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
| 2644 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 2773 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 2645 : isolate()->builtins()->StoreIC_Initialize(); | 2774 : isolate()->builtins()->StoreIC_Initialize(); |
| 2646 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); | 2775 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); |
| 2647 } | 2776 } |
| 2648 | 2777 |
| (...skipping 79 matching lines...) |
| 2728 Register result = ToRegister(instr->result()); | 2857 Register result = ToRegister(instr->result()); |
| 2729 if (!access.IsInobject()) { | 2858 if (!access.IsInobject()) { |
| 2730 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 2859 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 2731 object = result; | 2860 object = result; |
| 2732 } | 2861 } |
| 2733 __ Load(result, FieldOperand(object, offset), access.representation()); | 2862 __ Load(result, FieldOperand(object, offset), access.representation()); |
| 2734 } | 2863 } |
| 2735 | 2864 |
| 2736 | 2865 |
| 2737 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2866 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 2867 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 2738 ASSERT(ToRegister(instr->object()).is(rax)); | 2868 ASSERT(ToRegister(instr->object()).is(rax)); |
| 2739 ASSERT(ToRegister(instr->result()).is(rax)); | 2869 ASSERT(ToRegister(instr->result()).is(rax)); |
| 2740 | 2870 |
| 2741 __ Move(rcx, instr->name()); | 2871 __ Move(rcx, instr->name()); |
| 2742 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2872 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 2743 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2873 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2744 } | 2874 } |
| 2745 | 2875 |
| 2746 | 2876 |
| 2747 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 2877 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
| (...skipping 259 matching lines...) |
| 3007 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); | 3137 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); |
| 3008 return Operand(elements_pointer_reg, | 3138 return Operand(elements_pointer_reg, |
| 3009 ToRegister(key), | 3139 ToRegister(key), |
| 3010 scale_factor, | 3140 scale_factor, |
| 3011 offset + (additional_index << shift_size)); | 3141 offset + (additional_index << shift_size)); |
| 3012 } | 3142 } |
| 3013 } | 3143 } |
| 3014 | 3144 |
| 3015 | 3145 |
| 3016 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 3146 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 3147 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3017 ASSERT(ToRegister(instr->object()).is(rdx)); | 3148 ASSERT(ToRegister(instr->object()).is(rdx)); |
| 3018 ASSERT(ToRegister(instr->key()).is(rax)); | 3149 ASSERT(ToRegister(instr->key()).is(rax)); |
| 3019 | 3150 |
| 3020 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | 3151 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
| 3021 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3152 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3022 } | 3153 } |
| 3023 | 3154 |
| 3024 | 3155 |
| 3025 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 3156 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| 3026 Register result = ToRegister(instr->result()); | 3157 Register result = ToRegister(instr->result()); |
| (...skipping 49 matching lines...) |
| 3076 | 3207 |
| 3077 | 3208 |
| 3078 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { | 3209 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { |
| 3079 Register receiver = ToRegister(instr->receiver()); | 3210 Register receiver = ToRegister(instr->receiver()); |
| 3080 Register function = ToRegister(instr->function()); | 3211 Register function = ToRegister(instr->function()); |
| 3081 | 3212 |
| 3082 // If the receiver is null or undefined, we have to pass the global | 3213 // If the receiver is null or undefined, we have to pass the global |
| 3083 // object as a receiver to normal functions. Values have to be | 3214 // object as a receiver to normal functions. Values have to be |
| 3084 // passed unchanged to builtins and strict-mode functions. | 3215 // passed unchanged to builtins and strict-mode functions. |
| 3085 Label global_object, receiver_ok; | 3216 Label global_object, receiver_ok; |
| 3217 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 3086 | 3218 |
| 3087 // Do not transform the receiver to object for strict mode | 3219 // Do not transform the receiver to object for strict mode |
| 3088 // functions. | 3220 // functions. |
| 3089 __ movq(kScratchRegister, | 3221 __ movq(kScratchRegister, |
| 3090 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 3222 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
| 3091 __ testb(FieldOperand(kScratchRegister, | 3223 __ testb(FieldOperand(kScratchRegister, |
| 3092 SharedFunctionInfo::kStrictModeByteOffset), | 3224 SharedFunctionInfo::kStrictModeByteOffset), |
| 3093 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); | 3225 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 3094 __ j(not_equal, &receiver_ok, Label::kNear); | 3226 __ j(not_equal, &receiver_ok, dist); |
| 3095 | 3227 |
| 3096 // Do not transform the receiver to object for builtins. | 3228 // Do not transform the receiver to object for builtins. |
| 3097 __ testb(FieldOperand(kScratchRegister, | 3229 __ testb(FieldOperand(kScratchRegister, |
| 3098 SharedFunctionInfo::kNativeByteOffset), | 3230 SharedFunctionInfo::kNativeByteOffset), |
| 3099 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); | 3231 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); |
| 3100 __ j(not_equal, &receiver_ok, Label::kNear); | 3232 __ j(not_equal, &receiver_ok, dist); |
| 3101 | 3233 |
| 3102 // Normal function. Replace undefined or null with global receiver. | 3234 // Normal function. Replace undefined or null with global receiver. |
| 3103 __ CompareRoot(receiver, Heap::kNullValueRootIndex); | 3235 __ CompareRoot(receiver, Heap::kNullValueRootIndex); |
| 3104 __ j(equal, &global_object, Label::kNear); | 3236 __ j(equal, &global_object, Label::kNear); |
| 3105 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); | 3237 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); |
| 3106 __ j(equal, &global_object, Label::kNear); | 3238 __ j(equal, &global_object, Label::kNear); |
| 3107 | 3239 |
| 3108 // The receiver should be a JS object. | 3240 // The receiver should be a JS object. |
| 3109 Condition is_smi = __ CheckSmi(receiver); | 3241 Condition is_smi = __ CheckSmi(receiver); |
| 3110 DeoptimizeIf(is_smi, instr->environment()); | 3242 DeoptimizeIf(is_smi, instr->environment()); |
| 3111 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); | 3243 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); |
| 3112 DeoptimizeIf(below, instr->environment()); | 3244 DeoptimizeIf(below, instr->environment()); |
| 3113 __ jmp(&receiver_ok, Label::kNear); | 3245 __ jmp(&receiver_ok, Label::kNear); |
| 3114 | 3246 |
| 3115 __ bind(&global_object); | 3247 __ bind(&global_object); |
| 3116 // TODO(kmillikin): We have a hydrogen value for the global object. See | 3248 // TODO(kmillikin): We have a hydrogen value for the global object. See |
| 3117 // if it's better to use it than to explicitly fetch it from the context | 3249 // if it's better to use it than to explicitly fetch it from the context |
| 3118 // here. | 3250 // here. |
| 3119 __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); | 3251 __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 3252 __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_OBJECT_INDEX)); |
| 3120 __ movq(receiver, | 3253 __ movq(receiver, |
| 3121 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset)); | 3254 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset)); |
| 3122 __ bind(&receiver_ok); | 3255 __ bind(&receiver_ok); |
| 3123 } | 3256 } |
| 3124 | 3257 |
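Summarizing the DoWrapReceiver control flow above (note the receiver_ok branch distance now widens to Label::kFar under DeoptEveryNTimes(), since stress deopts add code between the jumps and their target): strict-mode and native callees keep the receiver unchanged, null and undefined are replaced by the global receiver, and anything that is not a spec object deopts. As a decision table with placeholder types:

```cpp
enum class Receiver { kNullOrUndefined, kSmi, kSpecObject, kOther };
enum class Action { kKeep, kUseGlobalReceiver, kDeopt };

Action WrapReceiver(Receiver r, bool strict_mode, bool native) {
  if (strict_mode || native) return Action::kKeep;  // First two testb's.
  if (r == Receiver::kNullOrUndefined) {
    return Action::kUseGlobalReceiver;              // global_object path.
  }
  if (r == Receiver::kSpecObject) return Action::kKeep;
  return Action::kDeopt;                            // CheckSmi / CmpObjectType.
}
```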
| 3125 | 3258 |
| 3126 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 3259 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 3127 Register receiver = ToRegister(instr->receiver()); | 3260 Register receiver = ToRegister(instr->receiver()); |
| 3128 Register function = ToRegister(instr->function()); | 3261 Register function = ToRegister(instr->function()); |
| 3129 Register length = ToRegister(instr->length()); | 3262 Register length = ToRegister(instr->length()); |
| (...skipping 26 matching lines...) |
| 3156 | 3289 |
| 3157 // Invoke the function. | 3290 // Invoke the function. |
| 3158 __ bind(&invoke); | 3291 __ bind(&invoke); |
| 3159 ASSERT(instr->HasPointerMap()); | 3292 ASSERT(instr->HasPointerMap()); |
| 3160 LPointerMap* pointers = instr->pointer_map(); | 3293 LPointerMap* pointers = instr->pointer_map(); |
| 3161 SafepointGenerator safepoint_generator( | 3294 SafepointGenerator safepoint_generator( |
| 3162 this, pointers, Safepoint::kLazyDeopt); | 3295 this, pointers, Safepoint::kLazyDeopt); |
| 3163 ParameterCount actual(rax); | 3296 ParameterCount actual(rax); |
| 3164 __ InvokeFunction(function, actual, CALL_FUNCTION, | 3297 __ InvokeFunction(function, actual, CALL_FUNCTION, |
| 3165 safepoint_generator, CALL_AS_METHOD); | 3298 safepoint_generator, CALL_AS_METHOD); |
| 3166 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 3167 } | 3299 } |
| 3168 | 3300 |
| 3169 | 3301 |
| 3170 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 3302 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 3171 LOperand* argument = instr->value(); | 3303 LOperand* argument = instr->value(); |
| 3172 EmitPushTaggedOperand(argument); | 3304 EmitPushTaggedOperand(argument); |
| 3173 } | 3305 } |
| 3174 | 3306 |
| 3175 | 3307 |
| 3176 void LCodeGen::DoDrop(LDrop* instr) { | 3308 void LCodeGen::DoDrop(LDrop* instr) { |
| 3177 __ Drop(instr->count()); | 3309 __ Drop(instr->count()); |
| 3178 } | 3310 } |
| 3179 | 3311 |
| 3180 | 3312 |
| 3181 void LCodeGen::DoThisFunction(LThisFunction* instr) { | 3313 void LCodeGen::DoThisFunction(LThisFunction* instr) { |
| 3182 Register result = ToRegister(instr->result()); | 3314 Register result = ToRegister(instr->result()); |
| 3183 __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 3315 __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3184 } | 3316 } |
| 3185 | 3317 |
| 3186 | 3318 |
| 3187 void LCodeGen::DoContext(LContext* instr) { | 3319 void LCodeGen::DoContext(LContext* instr) { |
| 3188 Register result = ToRegister(instr->result()); | 3320 Register result = ToRegister(instr->result()); |
| 3189 __ movq(result, rsi); | 3321 if (info()->IsOptimizing()) { |
| 3322 __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 3323 } else { |
| 3324 // If there is no frame, the context must be in rsi. |
| 3325 ASSERT(result.is(rsi)); |
| 3326 } |
| 3190 } | 3327 } |
| 3191 | 3328 |
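DoContext above captures the core of this change: optimized code may no longer assume rsi still holds the context, so it reloads it from the frame's context slot; only when there is no frame does the value stay live in rsi. A sketch, assuming StandardFrameConstants::kContextOffset is -1 * kPointerSize in this vintage of V8:

```cpp
#include <cstdint>

intptr_t CurrentContext(intptr_t* fp, intptr_t rsi, bool is_optimizing) {
  return is_optimizing ? fp[-1]  // movq(result, Operand(rbp, kContextOffset))
                       : rsi;    // Without a frame, the context is in rsi.
}
```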
| 3192 | 3329 |
| 3193 void LCodeGen::DoOuterContext(LOuterContext* instr) { | 3330 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| 3194 Register context = ToRegister(instr->context()); | 3331 Register context = ToRegister(instr->context()); |
| 3195 Register result = ToRegister(instr->result()); | 3332 Register result = ToRegister(instr->result()); |
| 3196 __ movq(result, | 3333 __ movq(result, |
| 3197 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 3334 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 3198 } | 3335 } |
| 3199 | 3336 |
| 3200 | 3337 |
| 3201 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3338 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
| 3339 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3202 __ push(rsi); // The context is the first argument. | 3340 __ push(rsi); // The context is the first argument. |
| 3203 __ Push(instr->hydrogen()->pairs()); | 3341 __ Push(instr->hydrogen()->pairs()); |
| 3204 __ Push(Smi::FromInt(instr->hydrogen()->flags())); | 3342 __ Push(Smi::FromInt(instr->hydrogen()->flags())); |
| 3205 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3343 CallRuntime(Runtime::kDeclareGlobals, 3, instr); |
| 3206 } | 3344 } |
| 3207 | 3345 |
| 3208 | 3346 |
| 3209 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 3347 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 3348 Register context = ToRegister(instr->context()); |
| 3210 Register result = ToRegister(instr->result()); | 3349 Register result = ToRegister(instr->result()); |
| 3211 __ movq(result, GlobalObjectOperand()); | 3350 __ movq(result, |
| 3351 Operand(context, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 3212 } | 3352 } |
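The rewrite above replaces GlobalObjectOperand() with an explicit slot load off the context register. A minimal sketch of the offset arithmetic behind Context::SlotOffset, assuming a context is laid out like a FixedArray (header, then pointer-sized slots) and that tagged pointers carry a one-bit tag; the constants here are assumptions for illustration, not the V8 headers:

    #include <cstdint>

    constexpr int kPointerSize = 8;          // x64
    constexpr int kHeapObjectTag = 1;        // tagged pointer is address + 1
    constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

    constexpr int SlotOffset(int index) {
      // Slot address relative to the *tagged* context pointer.
      return kFixedArrayHeaderSize + index * kPointerSize - kHeapObjectTag;
    }

    static_assert(SlotOffset(0) == 15, "first slot sits right after the header");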
| 3213 | 3353 |
| 3214 | 3354 |
| 3215 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { | 3355 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { |
| 3216 Register global = ToRegister(instr->global()); | 3356 Register global = ToRegister(instr->global()); |
| 3217 Register result = ToRegister(instr->result()); | 3357 Register result = ToRegister(instr->result()); |
| 3218 __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); | 3358 __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); |
| 3219 } | 3359 } |
| 3220 | 3360 |
| 3221 | 3361 |
| (...skipping 36 matching lines...) |
| 3258 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); | 3398 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 3259 } else { | 3399 } else { |
| 3260 // We need to adapt arguments. | 3400 // We need to adapt arguments. |
| 3261 SafepointGenerator generator( | 3401 SafepointGenerator generator( |
| 3262 this, pointers, Safepoint::kLazyDeopt); | 3402 this, pointers, Safepoint::kLazyDeopt); |
| 3263 ParameterCount count(arity); | 3403 ParameterCount count(arity); |
| 3264 ParameterCount expected(formal_parameter_count); | 3404 ParameterCount expected(formal_parameter_count); |
| 3265 __ InvokeFunction( | 3405 __ InvokeFunction( |
| 3266 function, expected, count, CALL_FUNCTION, generator, call_kind); | 3406 function, expected, count, CALL_FUNCTION, generator, call_kind); |
| 3267 } | 3407 } |
| 3268 | |
| 3269 // Restore context. | |
| 3270 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 3271 } | 3408 } |
| 3272 | 3409 |
| 3273 | 3410 |
| 3274 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 3411 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 3275 ASSERT(ToRegister(instr->result()).is(rax)); | 3412 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3276 CallKnownFunction(instr->hydrogen()->function(), | 3413 CallKnownFunction(instr->hydrogen()->function(), |
| 3277 instr->hydrogen()->formal_parameter_count(), | 3414 instr->hydrogen()->formal_parameter_count(), |
| 3278 instr->arity(), | 3415 instr->arity(), |
| 3279 instr, | 3416 instr, |
| 3280 CALL_AS_METHOD, | 3417 CALL_AS_METHOD, |
| (...skipping 20 matching lines...) |
| 3301 // |result| are the same register and |input| will be restored | 3438 // |result| are the same register and |input| will be restored |
| 3302 // unchanged by popping safepoint registers. | 3439 // unchanged by popping safepoint registers. |
| 3303 __ testl(tmp, Immediate(HeapNumber::kSignMask)); | 3440 __ testl(tmp, Immediate(HeapNumber::kSignMask)); |
| 3304 __ j(zero, &done); | 3441 __ j(zero, &done); |
| 3305 | 3442 |
| 3306 __ AllocateHeapNumber(tmp, tmp2, &slow); | 3443 __ AllocateHeapNumber(tmp, tmp2, &slow); |
| 3307 __ jmp(&allocated, Label::kNear); | 3444 __ jmp(&allocated, Label::kNear); |
| 3308 | 3445 |
| 3309 // Slow case: Call the runtime system to do the number allocation. | 3446 // Slow case: Call the runtime system to do the number allocation. |
| 3310 __ bind(&slow); | 3447 __ bind(&slow); |
| 3311 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 3448 CallRuntimeFromDeferred( |
| 3449 Runtime::kAllocateHeapNumber, 0, instr, instr->context()); |
| 3312 // Set the pointer to the new heap number in tmp. | 3450 // Set the pointer to the new heap number in tmp. |
| 3313 if (!tmp.is(rax)) __ movq(tmp, rax); | 3451 if (!tmp.is(rax)) __ movq(tmp, rax); |
| 3314 // Restore input_reg after call to runtime. | 3452 // Restore input_reg after call to runtime. |
| 3315 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); | 3453 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); |
| 3316 | 3454 |
| 3317 __ bind(&allocated); | 3455 __ bind(&allocated); |
| 3318 __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 3456 __ MoveDouble(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 3319 __ shl(tmp2, Immediate(1)); | 3457 __ shl(tmp2, Immediate(1)); |
| 3320 __ shr(tmp2, Immediate(1)); | 3458 __ shr(tmp2, Immediate(1)); |
| 3321 __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); | 3459 __ MoveDouble(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); |
| 3322 __ StoreToSafepointRegisterSlot(input_reg, tmp); | 3460 __ StoreToSafepointRegisterSlot(input_reg, tmp); |
| 3323 | 3461 |
| 3324 __ bind(&done); | 3462 __ bind(&done); |
| 3325 } | 3463 } |
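The shift pair above (shl by 1, then shr by 1 on the raw 64-bit value) clears just the sign bit, which is exactly Math.abs on the double's bit pattern. A host-side C++ sketch of the same trick, for illustration only (not V8 code):

    #include <cstdint>
    #include <cstring>

    static double AbsViaBitShifts(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      bits = (bits << 1) >> 1;  // drop the sign bit, keep exponent + mantissa
      std::memcpy(&value, &bits, sizeof value);
      return value;
    }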
| 3326 | 3464 |
| 3327 | 3465 |
| 3328 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 3466 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { |
| 3329 Register input_reg = ToRegister(instr->value()); | 3467 Register input_reg = ToRegister(instr->value()); |
| 3330 __ testl(input_reg, input_reg); | 3468 __ testl(input_reg, input_reg); |
| 3331 Label is_positive; | 3469 Label is_positive; |
| (...skipping 30 matching lines...) |
| 3362 }; | 3500 }; |
| 3363 | 3501 |
| 3364 ASSERT(instr->value()->Equals(instr->result())); | 3502 ASSERT(instr->value()->Equals(instr->result())); |
| 3365 Representation r = instr->hydrogen()->value()->representation(); | 3503 Representation r = instr->hydrogen()->value()->representation(); |
| 3366 | 3504 |
| 3367 if (r.IsDouble()) { | 3505 if (r.IsDouble()) { |
| 3368 XMMRegister scratch = double_scratch0(); | 3506 XMMRegister scratch = double_scratch0(); |
| 3369 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3507 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3370 __ xorps(scratch, scratch); | 3508 __ xorps(scratch, scratch); |
| 3371 __ subsd(scratch, input_reg); | 3509 __ subsd(scratch, input_reg); |
| 3372 __ andpd(input_reg, scratch); | 3510 __ andps(input_reg, scratch); |
| 3373 } else if (r.IsInteger32()) { | 3511 } else if (r.IsInteger32()) { |
| 3374 EmitIntegerMathAbs(instr); | 3512 EmitIntegerMathAbs(instr); |
| 3375 } else if (r.IsSmi()) { | 3513 } else if (r.IsSmi()) { |
| 3376 EmitSmiMathAbs(instr); | 3514 EmitSmiMathAbs(instr); |
| 3377 } else { // Tagged case. | 3515 } else { // Tagged case. |
| 3378 DeferredMathAbsTaggedHeapNumber* deferred = | 3516 DeferredMathAbsTaggedHeapNumber* deferred = |
| 3379 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); | 3517 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); |
| 3380 Register input_reg = ToRegister(instr->value()); | 3518 Register input_reg = ToRegister(instr->value()); |
| 3381 // Smi check. | 3519 // Smi check. |
| 3382 __ JumpIfNotSmi(input_reg, deferred->entry()); | 3520 __ JumpIfNotSmi(input_reg, deferred->entry()); |
| (...skipping 29 matching lines...) |
| 3412 __ j(below, &negative_sign, Label::kNear); | 3550 __ j(below, &negative_sign, Label::kNear); |
| 3413 | 3551 |
| 3414 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3552 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3415 // Check for negative zero. | 3553 // Check for negative zero. |
| 3416 Label positive_sign; | 3554 Label positive_sign; |
| 3417 __ j(above, &positive_sign, Label::kNear); | 3555 __ j(above, &positive_sign, Label::kNear); |
| 3418 __ movmskpd(output_reg, input_reg); | 3556 __ movmskpd(output_reg, input_reg); |
| 3419 __ testq(output_reg, Immediate(1)); | 3557 __ testq(output_reg, Immediate(1)); |
| 3420 DeoptimizeIf(not_zero, instr->environment()); | 3558 DeoptimizeIf(not_zero, instr->environment()); |
| 3421 __ Set(output_reg, 0); | 3559 __ Set(output_reg, 0); |
| 3422 __ jmp(&done); | 3560 __ jmp(&done, Label::kNear); |
| 3423 __ bind(&positive_sign); | 3561 __ bind(&positive_sign); |
| 3424 } | 3562 } |
| 3425 | 3563 |
| 3426 // Use truncating instruction (OK because input is positive). | 3564 // Use truncating instruction (OK because input is positive). |
| 3427 __ cvttsd2si(output_reg, input_reg); | 3565 __ cvttsd2si(output_reg, input_reg); |
| 3428 // Overflow is signalled with minint. | 3566 // Overflow is signalled with minint. |
| 3429 __ cmpl(output_reg, Immediate(0x80000000)); | 3567 __ cmpl(output_reg, Immediate(0x80000000)); |
| 3430 DeoptimizeIf(equal, instr->environment()); | 3568 DeoptimizeIf(equal, instr->environment()); |
| 3431 __ jmp(&done, Label::kNear); | 3569 __ jmp(&done, Label::kNear); |
| 3432 | 3570 |
| (...skipping 13 matching lines...) |
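DoMathFloor above relies on cvttsd2si's convention of producing 0x80000000 for any input it cannot represent. A minimal C++ model of that contract, with the deoptimization modeled as a false return (illustrative only; the in-range cast mirrors the instruction's truncation toward zero):

    #include <cstdint>
    #include <limits>

    static bool TruncateLikeCvttsd2si(double x, int32_t* out) {
      if (!(x >= -2147483648.0 && x < 2147483648.0)) {
        return false;  // hardware would return the 0x80000000 sentinel
      }
      int32_t t = static_cast<int32_t>(x);  // truncates toward zero
      if (t == std::numeric_limits<int32_t>::min()) {
        return false;  // indistinguishable from the sentinel; stay conservative
      }
      *out = t;
      return true;
    }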
| 3446 | 3584 |
| 3447 | 3585 |
| 3448 void LCodeGen::DoMathRound(LMathRound* instr) { | 3586 void LCodeGen::DoMathRound(LMathRound* instr) { |
| 3449 const XMMRegister xmm_scratch = double_scratch0(); | 3587 const XMMRegister xmm_scratch = double_scratch0(); |
| 3450 Register output_reg = ToRegister(instr->result()); | 3588 Register output_reg = ToRegister(instr->result()); |
| 3451 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3589 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3452 static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5 | 3590 static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5 |
| 3453 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5 | 3591 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5 |
| 3454 | 3592 |
| 3455 Label done, round_to_zero, below_one_half, do_not_compensate, restore; | 3593 Label done, round_to_zero, below_one_half, do_not_compensate, restore; |
| 3456 __ movq(kScratchRegister, one_half, RelocInfo::NONE64); | 3594 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 3595 __ movq(kScratchRegister, one_half); |
| 3457 __ movq(xmm_scratch, kScratchRegister); | 3596 __ movq(xmm_scratch, kScratchRegister); |
| 3458 __ ucomisd(xmm_scratch, input_reg); | 3597 __ ucomisd(xmm_scratch, input_reg); |
| 3459 __ j(above, &below_one_half); | 3598 __ j(above, &below_one_half, Label::kNear); |
| 3460 | 3599 |
| 3461 // CVTTSD2SI rounds towards zero; since 0.5 <= x, we use floor(0.5 + x). | 3600 // CVTTSD2SI rounds towards zero; since 0.5 <= x, we use floor(0.5 + x). |
| 3462 __ addsd(xmm_scratch, input_reg); | 3601 __ addsd(xmm_scratch, input_reg); |
| 3463 __ cvttsd2si(output_reg, xmm_scratch); | 3602 __ cvttsd2si(output_reg, xmm_scratch); |
| 3464 // Overflow is signalled with minint. | 3603 // Overflow is signalled with minint. |
| 3465 __ cmpl(output_reg, Immediate(0x80000000)); | 3604 __ cmpl(output_reg, Immediate(0x80000000)); |
| 3466 __ RecordComment("D2I conversion overflow"); | 3605 __ RecordComment("D2I conversion overflow"); |
| 3467 DeoptimizeIf(equal, instr->environment()); | 3606 DeoptimizeIf(equal, instr->environment()); |
| 3468 __ jmp(&done); | 3607 __ jmp(&done, dist); |
| 3469 | 3608 |
| 3470 __ bind(&below_one_half); | 3609 __ bind(&below_one_half); |
| 3471 __ movq(kScratchRegister, minus_one_half, RelocInfo::NONE64); | 3610 __ movq(kScratchRegister, minus_one_half); |
| 3472 __ movq(xmm_scratch, kScratchRegister); | 3611 __ movq(xmm_scratch, kScratchRegister); |
| 3473 __ ucomisd(xmm_scratch, input_reg); | 3612 __ ucomisd(xmm_scratch, input_reg); |
| 3474 __ j(below_equal, &round_to_zero); | 3613 __ j(below_equal, &round_to_zero, Label::kNear); |
| 3475 | 3614 |
| 3476 // CVTTSD2SI rounds towards zero; we use ceil(x - (-0.5)) and then | 3615 // CVTTSD2SI rounds towards zero; we use ceil(x - (-0.5)) and then |
| 3477 // compare and compensate. | 3616 // compare and compensate. |
| 3478 __ movq(kScratchRegister, input_reg); // Back up input_reg. | 3617 __ movq(kScratchRegister, input_reg); // Back up input_reg. |
| 3479 __ subsd(input_reg, xmm_scratch); | 3618 __ subsd(input_reg, xmm_scratch); |
| 3480 __ cvttsd2si(output_reg, input_reg); | 3619 __ cvttsd2si(output_reg, input_reg); |
| 3481 // Catch minint due to overflow, and to prevent overflow when compensating. | 3620 // Catch minint due to overflow, and to prevent overflow when compensating. |
| 3482 __ cmpl(output_reg, Immediate(0x80000000)); | 3621 __ cmpl(output_reg, Immediate(0x80000000)); |
| 3483 __ RecordComment("D2I conversion overflow"); | 3622 __ RecordComment("D2I conversion overflow"); |
| 3484 DeoptimizeIf(equal, instr->environment()); | 3623 DeoptimizeIf(equal, instr->environment()); |
| 3485 | 3624 |
| 3486 __ Cvtlsi2sd(xmm_scratch, output_reg); | 3625 __ Cvtlsi2sd(xmm_scratch, output_reg); |
| 3487 __ ucomisd(input_reg, xmm_scratch); | 3626 __ ucomisd(input_reg, xmm_scratch); |
| 3488 __ j(equal, &restore, Label::kNear); | 3627 __ j(equal, &restore, Label::kNear); |
| 3489 __ subl(output_reg, Immediate(1)); | 3628 __ subl(output_reg, Immediate(1)); |
| 3490 // No overflow because we already ruled out minint. | 3629 // No overflow because we already ruled out minint. |
| 3491 __ bind(&restore); | 3630 __ bind(&restore); |
| 3492 __ movq(input_reg, kScratchRegister); // Restore input_reg. | 3631 __ movq(input_reg, kScratchRegister); // Restore input_reg. |
| 3493 __ jmp(&done); | 3632 __ jmp(&done, dist); |
| 3494 | 3633 |
| 3495 __ bind(&round_to_zero); | 3634 __ bind(&round_to_zero); |
| 3496 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if | 3635 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if |
| 3497 // we can ignore the difference between a result of -0 and +0. | 3636 // we can ignore the difference between a result of -0 and +0. |
| 3498 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3637 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3499 __ movq(output_reg, input_reg); | 3638 __ movq(output_reg, input_reg); |
| 3500 __ testq(output_reg, output_reg); | 3639 __ testq(output_reg, output_reg); |
| 3501 __ RecordComment("Minus zero"); | 3640 __ RecordComment("Minus zero"); |
| 3502 DeoptimizeIf(negative, instr->environment()); | 3641 DeoptimizeIf(negative, instr->environment()); |
| 3503 } | 3642 } |
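All three ranges carved out above (plus the tail of the function, which falls in the skipped hunk below) compute the ECMAScript rule round(x) = floor(x + 0.5). A plain-C++ restatement under that assumption, with the overflow and minus-zero deopts omitted:

    #include <cmath>

    static double RoundLikeDoMathRound(double x) {
      if (x >= 0.5) return std::floor(x + 0.5);  // addsd + cvttsd2si fast path
      if (x > -0.5) return 0.0;                  // round_to_zero path (the sign
                                                 // test handles the -0 bailout)
      // For x <= -0.5: truncation toward zero is ceil on negatives; the
      // codegen subtracts 1 when truncation was inexact, yielding floor.
      double sum = x + 0.5;
      double t = std::trunc(sum);
      return (t == sum) ? t : t - 1.0;
    }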
| (...skipping 13 matching lines...) |
| 3517 XMMRegister xmm_scratch = double_scratch0(); | 3656 XMMRegister xmm_scratch = double_scratch0(); |
| 3518 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3657 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3519 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 3658 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
| 3520 | 3659 |
| 3521 // Note that according to ECMA-262 15.8.2.13: | 3660 // Note that according to ECMA-262 15.8.2.13: |
| 3522 // Math.pow(-Infinity, 0.5) == Infinity | 3661 // Math.pow(-Infinity, 0.5) == Infinity |
| 3523 // Math.sqrt(-Infinity) == NaN | 3662 // Math.sqrt(-Infinity) == NaN |
| 3524 Label done, sqrt; | 3663 Label done, sqrt; |
| 3525 // Check base for -Infinity. According to IEEE-754, double-precision | 3664 // Check base for -Infinity. According to IEEE-754, double-precision |
| 3526 // -Infinity has the highest 12 bits set and the lowest 52 bits cleared. | 3665 // -Infinity has the highest 12 bits set and the lowest 52 bits cleared. |
| 3527 __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE64); | 3666 __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000)); |
| 3528 __ movq(xmm_scratch, kScratchRegister); | 3667 __ movq(xmm_scratch, kScratchRegister); |
| 3529 __ ucomisd(xmm_scratch, input_reg); | 3668 __ ucomisd(xmm_scratch, input_reg); |
| 3530 // Comparing -Infinity with NaN results in "unordered", which sets the | 3669 // Comparing -Infinity with NaN results in "unordered", which sets the |
| 3531 // zero flag as if both were equal. However, it also sets the carry flag. | 3670 // zero flag as if both were equal. However, it also sets the carry flag. |
| 3532 __ j(not_equal, &sqrt, Label::kNear); | 3671 __ j(not_equal, &sqrt, Label::kNear); |
| 3533 __ j(carry, &sqrt, Label::kNear); | 3672 __ j(carry, &sqrt, Label::kNear); |
| 3534 // If input is -Infinity, return Infinity. | 3673 // If input is -Infinity, return Infinity. |
| 3535 __ xorps(input_reg, input_reg); | 3674 __ xorps(input_reg, input_reg); |
| 3536 __ subsd(input_reg, xmm_scratch); | 3675 __ subsd(input_reg, xmm_scratch); |
| 3537 __ jmp(&done, Label::kNear); | 3676 __ jmp(&done, Label::kNear); |
| (...skipping 18 matching lines...) |
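The function above (its tail is in the skipped hunk) implements x ** 0.5 with one spec-mandated exception. A hedged C++ equivalent; the add-zero step is assumed to live in the skipped lines, since sqrt(-0) would otherwise return -0 where Math.pow(-0, 0.5) must return +0:

    #include <cmath>
    #include <limits>

    static double PowHalf(double base) {
      // ES5 15.8.2.13: Math.pow(-Infinity, 0.5) is +Infinity, even though
      // sqrt(-Infinity) is NaN, so -Infinity is filtered before sqrtsd.
      if (base == -std::numeric_limits<double>::infinity()) {
        return std::numeric_limits<double>::infinity();
      }
      return std::sqrt(base + 0.0);  // -0 + 0 == +0, then the sqrtsd path
    }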
| 3556 ASSERT(!instr->right()->IsDoubleRegister() || | 3695 ASSERT(!instr->right()->IsDoubleRegister() || |
| 3557 ToDoubleRegister(instr->right()).is(xmm1)); | 3696 ToDoubleRegister(instr->right()).is(xmm1)); |
| 3558 ASSERT(ToDoubleRegister(instr->left()).is(xmm2)); | 3697 ASSERT(ToDoubleRegister(instr->left()).is(xmm2)); |
| 3559 ASSERT(ToDoubleRegister(instr->result()).is(xmm3)); | 3698 ASSERT(ToDoubleRegister(instr->result()).is(xmm3)); |
| 3560 | 3699 |
| 3561 if (exponent_type.IsSmi()) { | 3700 if (exponent_type.IsSmi()) { |
| 3562 MathPowStub stub(MathPowStub::TAGGED); | 3701 MathPowStub stub(MathPowStub::TAGGED); |
| 3563 __ CallStub(&stub); | 3702 __ CallStub(&stub); |
| 3564 } else if (exponent_type.IsTagged()) { | 3703 } else if (exponent_type.IsTagged()) { |
| 3565 Label no_deopt; | 3704 Label no_deopt; |
| 3566 __ JumpIfSmi(exponent, &no_deopt); | 3705 __ JumpIfSmi(exponent, &no_deopt, Label::kNear); |
| 3567 __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx); | 3706 __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx); |
| 3568 DeoptimizeIf(not_equal, instr->environment()); | 3707 DeoptimizeIf(not_equal, instr->environment()); |
| 3569 __ bind(&no_deopt); | 3708 __ bind(&no_deopt); |
| 3570 MathPowStub stub(MathPowStub::TAGGED); | 3709 MathPowStub stub(MathPowStub::TAGGED); |
| 3571 __ CallStub(&stub); | 3710 __ CallStub(&stub); |
| 3572 } else if (exponent_type.IsInteger32()) { | 3711 } else if (exponent_type.IsInteger32()) { |
| 3573 MathPowStub stub(MathPowStub::INTEGER); | 3712 MathPowStub stub(MathPowStub::INTEGER); |
| 3574 __ CallStub(&stub); | 3713 __ CallStub(&stub); |
| 3575 } else { | 3714 } else { |
| 3576 ASSERT(exponent_type.IsDouble()); | 3715 ASSERT(exponent_type.IsDouble()); |
| (...skipping 48 matching lines...) |
| 3625 Register random = state0; | 3764 Register random = state0; |
| 3626 __ shll(random, Immediate(14)); | 3765 __ shll(random, Immediate(14)); |
| 3627 __ andl(state1, Immediate(0x3FFFF)); | 3766 __ andl(state1, Immediate(0x3FFFF)); |
| 3628 __ addl(random, state1); | 3767 __ addl(random, state1); |
| 3629 | 3768 |
| 3630 // Convert 32 random bits in rax to 0.(32 random bits) in a double | 3769 // Convert 32 random bits in rax to 0.(32 random bits) in a double |
| 3631 // by computing: | 3770 // by computing: |
| 3632 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20). | 3771 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20). |
| 3633 XMMRegister result = ToDoubleRegister(instr->result()); | 3772 XMMRegister result = ToDoubleRegister(instr->result()); |
| 3634 XMMRegister scratch4 = double_scratch0(); | 3773 XMMRegister scratch4 = double_scratch0(); |
| 3635 __ movq(scratch3, V8_INT64_C(0x4130000000000000), | 3774 __ movq(scratch3, V8_INT64_C(0x4130000000000000)); // 1.0 x 2^20 as double |
| 3636 RelocInfo::NONE64); // 1.0 x 2^20 as double | |
| 3637 __ movq(scratch4, scratch3); | 3775 __ movq(scratch4, scratch3); |
| 3638 __ movd(result, random); | 3776 __ movd(result, random); |
| 3639 __ xorps(result, scratch4); | 3777 __ xorps(result, scratch4); |
| 3640 __ subsd(result, scratch4); | 3778 __ subsd(result, scratch4); |
| 3641 } | 3779 } |
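The comment inside DoRandom describes a classic bit trick: OR 32 random bits into the low mantissa of 1.0 x 2^20, then subtract 1.0 x 2^20, leaving random/2^32 in [0, 1). A host-side C++ version for illustration (not the generated code):

    #include <cstdint>
    #include <cstring>

    static double RandomBitsToDouble(uint32_t random) {
      const uint64_t kOneTimes2Pow20 = 0x4130000000000000ULL;  // 1.0 x 2^20
      uint64_t bits = kOneTimes2Pow20 | random;  // 1.(20 zeros)(32 bits) x 2^20
      double with_payload, base;
      std::memcpy(&with_payload, &bits, sizeof with_payload);
      std::memcpy(&base, &kOneTimes2Pow20, sizeof base);
      return with_payload - base;  // == random / 2^32, so in [0, 1)
    }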
| 3642 | 3780 |
| 3643 | 3781 |
| 3644 void LCodeGen::DoMathExp(LMathExp* instr) { | 3782 void LCodeGen::DoMathExp(LMathExp* instr) { |
| 3645 XMMRegister input = ToDoubleRegister(instr->value()); | 3783 XMMRegister input = ToDoubleRegister(instr->value()); |
| 3646 XMMRegister result = ToDoubleRegister(instr->result()); | 3784 XMMRegister result = ToDoubleRegister(instr->result()); |
| (...skipping 33 matching lines...) |
| 3680 __ fyl2x(); | 3818 __ fyl2x(); |
| 3681 __ fstp_d(Operand(rsp, 0)); | 3819 __ fstp_d(Operand(rsp, 0)); |
| 3682 __ movsd(input_reg, Operand(rsp, 0)); | 3820 __ movsd(input_reg, Operand(rsp, 0)); |
| 3683 __ addq(rsp, Immediate(kDoubleSize)); | 3821 __ addq(rsp, Immediate(kDoubleSize)); |
| 3684 __ bind(&done); | 3822 __ bind(&done); |
| 3685 } | 3823 } |
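Only the tail of DoMathLog is visible above. Assuming the skipped prologue pushes ln 2 (the usual fldln2 pairing), fyl2x computes st(1) * log2(st(0)), which yields the natural logarithm; in plain C++:

    #include <cmath>

    static double LogViaFyl2x(double x) {
      const double kLn2 = 0.6931471805599453;  // what fldln2 would push
      return kLn2 * std::log2(x);              // fyl2x: st(1) * log2(st(0))
    }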
| 3686 | 3824 |
| 3687 | 3825 |
| 3688 void LCodeGen::DoMathTan(LMathTan* instr) { | 3826 void LCodeGen::DoMathTan(LMathTan* instr) { |
| 3689 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 3827 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 3828 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3829 // OK because this instruction is marked as a call. |
| 3830 __ Set(rsi, 0); |
| 3690 TranscendentalCacheStub stub(TranscendentalCache::TAN, | 3831 TranscendentalCacheStub stub(TranscendentalCache::TAN, |
| 3691 TranscendentalCacheStub::UNTAGGED); | 3832 TranscendentalCacheStub::UNTAGGED); |
| 3692 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3833 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3693 } | 3834 } |
| 3694 | 3835 |
| 3695 | 3836 |
| 3696 void LCodeGen::DoMathCos(LMathCos* instr) { | 3837 void LCodeGen::DoMathCos(LMathCos* instr) { |
| 3697 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 3838 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 3839 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3840 // OK because this instruction is marked as a call. |
| 3841 __ Set(rsi, 0); |
| 3698 TranscendentalCacheStub stub(TranscendentalCache::COS, | 3842 TranscendentalCacheStub stub(TranscendentalCache::COS, |
| 3699 TranscendentalCacheStub::UNTAGGED); | 3843 TranscendentalCacheStub::UNTAGGED); |
| 3700 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3844 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3701 } | 3845 } |
| 3702 | 3846 |
| 3703 | 3847 |
| 3704 void LCodeGen::DoMathSin(LMathSin* instr) { | 3848 void LCodeGen::DoMathSin(LMathSin* instr) { |
| 3705 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 3849 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 3850 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3851 // OK because this instruction is marked as a call. |
| 3852 __ Set(rsi, 0); |
| 3706 TranscendentalCacheStub stub(TranscendentalCache::SIN, | 3853 TranscendentalCacheStub stub(TranscendentalCache::SIN, |
| 3707 TranscendentalCacheStub::UNTAGGED); | 3854 TranscendentalCacheStub::UNTAGGED); |
| 3708 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3855 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3709 } | 3856 } |
| 3710 | 3857 |
| 3711 | 3858 |
| 3712 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3859 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
| 3860 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3713 ASSERT(ToRegister(instr->function()).is(rdi)); | 3861 ASSERT(ToRegister(instr->function()).is(rdi)); |
| 3714 ASSERT(instr->HasPointerMap()); | 3862 ASSERT(instr->HasPointerMap()); |
| 3715 | 3863 |
| 3716 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); | 3864 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); |
| 3717 if (known_function.is_null()) { | 3865 if (known_function.is_null()) { |
| 3718 LPointerMap* pointers = instr->pointer_map(); | 3866 LPointerMap* pointers = instr->pointer_map(); |
| 3719 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3867 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
| 3720 ParameterCount count(instr->arity()); | 3868 ParameterCount count(instr->arity()); |
| 3721 __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 3869 __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); |
| 3722 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 3723 } else { | 3870 } else { |
| 3724 CallKnownFunction(known_function, | 3871 CallKnownFunction(known_function, |
| 3725 instr->hydrogen()->formal_parameter_count(), | 3872 instr->hydrogen()->formal_parameter_count(), |
| 3726 instr->arity(), | 3873 instr->arity(), |
| 3727 instr, | 3874 instr, |
| 3728 CALL_AS_METHOD, | 3875 CALL_AS_METHOD, |
| 3729 RDI_CONTAINS_TARGET); | 3876 RDI_CONTAINS_TARGET); |
| 3730 } | 3877 } |
| 3731 } | 3878 } |
| 3732 | 3879 |
| 3733 | 3880 |
| 3734 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 3881 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
| 3882 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3735 ASSERT(ToRegister(instr->key()).is(rcx)); | 3883 ASSERT(ToRegister(instr->key()).is(rcx)); |
| 3736 ASSERT(ToRegister(instr->result()).is(rax)); | 3884 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3737 | 3885 |
| 3738 int arity = instr->arity(); | 3886 int arity = instr->arity(); |
| 3739 Handle<Code> ic = | 3887 Handle<Code> ic = |
| 3740 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); | 3888 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); |
| 3741 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3889 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3742 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 3743 } | 3890 } |
| 3744 | 3891 |
| 3745 | 3892 |
| 3746 void LCodeGen::DoCallNamed(LCallNamed* instr) { | 3893 void LCodeGen::DoCallNamed(LCallNamed* instr) { |
| 3894 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3747 ASSERT(ToRegister(instr->result()).is(rax)); | 3895 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3748 | 3896 |
| 3749 int arity = instr->arity(); | 3897 int arity = instr->arity(); |
| 3750 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; | 3898 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; |
| 3751 Handle<Code> ic = | 3899 Handle<Code> ic = |
| 3752 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); | 3900 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); |
| 3753 __ Move(rcx, instr->name()); | 3901 __ Move(rcx, instr->name()); |
| 3754 CallCode(ic, mode, instr); | 3902 CallCode(ic, mode, instr); |
| 3755 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 3756 } | 3903 } |
| 3757 | 3904 |
| 3758 | 3905 |
| 3759 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 3906 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 3907 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3760 ASSERT(ToRegister(instr->function()).is(rdi)); | 3908 ASSERT(ToRegister(instr->function()).is(rdi)); |
| 3761 ASSERT(ToRegister(instr->result()).is(rax)); | 3909 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3762 | 3910 |
| 3763 int arity = instr->arity(); | 3911 int arity = instr->arity(); |
| 3764 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); | 3912 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); |
| 3765 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3913 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3766 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 3767 } | 3914 } |
| 3768 | 3915 |
| 3769 | 3916 |
| 3770 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 3917 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
| 3918 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3771 ASSERT(ToRegister(instr->result()).is(rax)); | 3919 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3772 int arity = instr->arity(); | 3920 int arity = instr->arity(); |
| 3773 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; | 3921 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; |
| 3774 Handle<Code> ic = | 3922 Handle<Code> ic = |
| 3775 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); | 3923 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); |
| 3776 __ Move(rcx, instr->name()); | 3924 __ Move(rcx, instr->name()); |
| 3777 CallCode(ic, mode, instr); | 3925 CallCode(ic, mode, instr); |
| 3778 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 3779 } | 3926 } |
| 3780 | 3927 |
| 3781 | 3928 |
| 3782 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 3929 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
| 3783 ASSERT(ToRegister(instr->result()).is(rax)); | 3930 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3784 CallKnownFunction(instr->hydrogen()->target(), | 3931 CallKnownFunction(instr->hydrogen()->target(), |
| 3785 instr->hydrogen()->formal_parameter_count(), | 3932 instr->hydrogen()->formal_parameter_count(), |
| 3786 instr->arity(), | 3933 instr->arity(), |
| 3787 instr, | 3934 instr, |
| 3788 CALL_AS_FUNCTION, | 3935 CALL_AS_FUNCTION, |
| 3789 RDI_UNINITIALIZED); | 3936 RDI_UNINITIALIZED); |
| 3790 } | 3937 } |
| 3791 | 3938 |
| 3792 | 3939 |
| 3793 void LCodeGen::DoCallNew(LCallNew* instr) { | 3940 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 3941 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3794 ASSERT(ToRegister(instr->constructor()).is(rdi)); | 3942 ASSERT(ToRegister(instr->constructor()).is(rdi)); |
| 3795 ASSERT(ToRegister(instr->result()).is(rax)); | 3943 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3796 | 3944 |
| 3797 __ Set(rax, instr->arity()); | 3945 __ Set(rax, instr->arity()); |
| 3798 // No cell in rbx for construct type feedback in optimized code | 3946 // No cell in rbx for construct type feedback in optimized code |
| 3799 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); | 3947 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); |
| 3800 __ Move(rbx, undefined_value); | 3948 __ Move(rbx, undefined_value); |
| 3801 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 3949 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); |
| 3802 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3950 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 3803 } | 3951 } |
| 3804 | 3952 |
| 3805 | 3953 |
| 3806 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 3954 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
| 3955 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3807 ASSERT(ToRegister(instr->constructor()).is(rdi)); | 3956 ASSERT(ToRegister(instr->constructor()).is(rdi)); |
| 3808 ASSERT(ToRegister(instr->result()).is(rax)); | 3957 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3809 | 3958 |
| 3810 __ Set(rax, instr->arity()); | 3959 __ Set(rax, instr->arity()); |
| 3811 __ Move(rbx, instr->hydrogen()->property_cell()); | 3960 __ Move(rbx, instr->hydrogen()->property_cell()); |
| 3812 ElementsKind kind = instr->hydrogen()->elements_kind(); | 3961 ElementsKind kind = instr->hydrogen()->elements_kind(); |
| 3813 AllocationSiteOverrideMode override_mode = | 3962 AllocationSiteOverrideMode override_mode = |
| 3814 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) | 3963 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) |
| 3815 ? DISABLE_ALLOCATION_SITES | 3964 ? DISABLE_ALLOCATION_SITES |
| 3816 : DONT_OVERRIDE; | 3965 : DONT_OVERRIDE; |
| 3817 ContextCheckMode context_mode = CONTEXT_CHECK_NOT_REQUIRED; | 3966 ContextCheckMode context_mode = CONTEXT_CHECK_NOT_REQUIRED; |
| 3818 | 3967 |
| 3819 if (instr->arity() == 0) { | 3968 if (instr->arity() == 0) { |
| 3820 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); | 3969 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); |
| 3821 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3970 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 3822 } else if (instr->arity() == 1) { | 3971 } else if (instr->arity() == 1) { |
| 3823 Label done; | 3972 Label done; |
| 3824 if (IsFastPackedElementsKind(kind)) { | 3973 if (IsFastPackedElementsKind(kind)) { |
| 3825 Label packed_case; | 3974 Label packed_case; |
| 3826 // We might need a different elements kind here; | 3975 // We might need a different elements kind here; |
| 3827 // look at the first argument. | 3976 // look at the first argument. |
| 3828 __ movq(rcx, Operand(rsp, 0)); | 3977 __ movq(rcx, Operand(rsp, 0)); |
| 3829 __ testq(rcx, rcx); | 3978 __ testq(rcx, rcx); |
| 3830 __ j(zero, &packed_case); | 3979 __ j(zero, &packed_case, Label::kNear); |
| 3831 | 3980 |
| 3832 ElementsKind holey_kind = GetHoleyElementsKind(kind); | 3981 ElementsKind holey_kind = GetHoleyElementsKind(kind); |
| 3833 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, | 3982 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, |
| 3834 override_mode); | 3983 override_mode); |
| 3835 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3984 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 3836 __ jmp(&done); | 3985 __ jmp(&done, Label::kNear); |
| 3837 __ bind(&packed_case); | 3986 __ bind(&packed_case); |
| 3838 } | 3987 } |
| 3839 | 3988 |
| 3840 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); | 3989 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); |
| 3841 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3990 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 3842 __ bind(&done); | 3991 __ bind(&done); |
| 3843 } else { | 3992 } else { |
| 3844 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); | 3993 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); |
| 3845 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3994 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 3846 } | 3995 } |
| 3847 } | 3996 } |
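A sketch of the arity-1 dispatch above, with hypothetical names (this is not V8's API): new Array(n) with a nonzero n starts life with n holes, so a packed request is downgraded based on the runtime value of the first argument:

    enum class Kind { kFastPacked, kFastHoley };

    static Kind KindForSingleArgument(Kind requested, long length_arg) {
      if (requested == Kind::kFastPacked && length_arg != 0) {
        return Kind::kFastHoley;  // mirrors the branch around &packed_case
      }
      return requested;  // zero length: the packed stub is safe
    }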
| 3848 | 3997 |
| 3849 | 3998 |
| 3850 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 3999 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 4000 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3851 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); | 4001 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); |
| 3852 } | 4002 } |
| 3853 | 4003 |
| 3854 | 4004 |
| 3855 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { | 4005 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { |
| 3856 Register function = ToRegister(instr->function()); | 4006 Register function = ToRegister(instr->function()); |
| 3857 Register code_object = ToRegister(instr->code_object()); | 4007 Register code_object = ToRegister(instr->code_object()); |
| 3858 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); | 4008 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); |
| 3859 __ movq(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); | 4009 __ movq(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); |
| 3860 } | 4010 } |
| (...skipping 86 matching lines...) |
| 3947 if (!access.IsInobject()) { | 4097 if (!access.IsInobject()) { |
| 3948 write_register = ToRegister(instr->temp()); | 4098 write_register = ToRegister(instr->temp()); |
| 3949 __ movq(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); | 4099 __ movq(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 3950 } | 4100 } |
| 3951 | 4101 |
| 3952 if (instr->value()->IsConstantOperand()) { | 4102 if (instr->value()->IsConstantOperand()) { |
| 3953 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 4103 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 3954 if (operand_value->IsRegister()) { | 4104 if (operand_value->IsRegister()) { |
| 3955 Register value = ToRegister(operand_value); | 4105 Register value = ToRegister(operand_value); |
| 3956 __ Store(FieldOperand(write_register, offset), value, representation); | 4106 __ Store(FieldOperand(write_register, offset), value, representation); |
| 4107 } else if (representation.IsInteger32()) { |
| 4108 int32_t value = ToInteger32(operand_value); |
| 4109 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); |
| 4110 __ movl(FieldOperand(write_register, offset), Immediate(value)); |
| 3957 } else { | 4111 } else { |
| 3958 Handle<Object> handle_value = ToHandle(operand_value); | 4112 Handle<Object> handle_value = ToHandle(operand_value); |
| 3959 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 4113 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); |
| 3960 __ Move(FieldOperand(write_register, offset), handle_value); | 4114 __ Move(FieldOperand(write_register, offset), handle_value); |
| 3961 } | 4115 } |
| 3962 } else { | 4116 } else { |
| 3963 Register value = ToRegister(instr->value()); | 4117 Register value = ToRegister(instr->value()); |
| 3964 __ Store(FieldOperand(write_register, offset), value, representation); | 4118 __ Store(FieldOperand(write_register, offset), value, representation); |
| 3965 } | 4119 } |
| 3966 | 4120 |
| 3967 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4121 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 3968 Register value = ToRegister(instr->value()); | 4122 Register value = ToRegister(instr->value()); |
| 3969 Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object; | 4123 Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object; |
| 3970 // Update the write barrier for the object for in-object properties. | 4124 // Update the write barrier for the object for in-object properties. |
| 3971 __ RecordWriteField(write_register, | 4125 __ RecordWriteField(write_register, |
| 3972 offset, | 4126 offset, |
| 3973 value, | 4127 value, |
| 3974 temp, | 4128 temp, |
| 3975 kSaveFPRegs, | 4129 kSaveFPRegs, |
| 3976 EMIT_REMEMBERED_SET, | 4130 EMIT_REMEMBERED_SET, |
| 3977 check_needed); | 4131 check_needed); |
| 3978 } | 4132 } |
| 3979 } | 4133 } |
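The new constant-store branch above (the movl-with-immediate path) never needs a write barrier, because an untagged int32 is not a heap pointer. A C++ sketch with a hypothetical field handle, for illustration only:

    #include <cstdint>
    #include <cstring>

    struct FieldRef { uint8_t* base; int offset; };  // hypothetical helper type

    static void StoreInt32Constant(FieldRef field, int32_t value) {
      // Equivalent of: movl FieldOperand(write_register, offset), imm32.
      std::memcpy(field.base + field.offset, &value, sizeof value);
      // No RecordWriteField: the GC has no pointer to track here.
    }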
| 3980 | 4134 |
| 3981 | 4135 |
| 3982 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 4136 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
| 4137 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3983 ASSERT(ToRegister(instr->object()).is(rdx)); | 4138 ASSERT(ToRegister(instr->object()).is(rdx)); |
| 3984 ASSERT(ToRegister(instr->value()).is(rax)); | 4139 ASSERT(ToRegister(instr->value()).is(rax)); |
| 3985 | 4140 |
| 3986 __ Move(rcx, instr->hydrogen()->name()); | 4141 __ Move(rcx, instr->hydrogen()->name()); |
| 3987 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4142 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
| 3988 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 4143 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 3989 : isolate()->builtins()->StoreIC_Initialize(); | 4144 : isolate()->builtins()->StoreIC_Initialize(); |
| 3990 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4145 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3991 } | 4146 } |
| 3992 | 4147 |
| (...skipping 129 matching lines...) |
| 4122 // Sign-extend the key because it could be a 32-bit negative value | 4277 // Sign-extend the key because it could be a 32-bit negative value |
| 4123 // and the dehoisted address computation happens in 64 bits | 4278 // and the dehoisted address computation happens in 64 bits |
| 4124 __ movsxlq(key_reg, key_reg); | 4279 __ movsxlq(key_reg, key_reg); |
| 4125 } | 4280 } |
| 4126 } | 4281 } |
| 4127 | 4282 |
| 4128 if (instr->NeedsCanonicalization()) { | 4283 if (instr->NeedsCanonicalization()) { |
| 4129 Label have_value; | 4284 Label have_value; |
| 4130 | 4285 |
| 4131 __ ucomisd(value, value); | 4286 __ ucomisd(value, value); |
| 4132 __ j(parity_odd, &have_value); // Not NaN. | 4287 __ j(parity_odd, &have_value, Label::kNear); // Not NaN. |
| 4133 | 4288 |
| 4134 __ Set(kScratchRegister, BitCast<uint64_t>( | 4289 __ Set(kScratchRegister, BitCast<uint64_t>( |
| 4135 FixedDoubleArray::canonical_not_the_hole_nan_as_double())); | 4290 FixedDoubleArray::canonical_not_the_hole_nan_as_double())); |
| 4136 __ movq(value, kScratchRegister); | 4291 __ movq(value, kScratchRegister); |
| 4137 | 4292 |
| 4138 __ bind(&have_value); | 4293 __ bind(&have_value); |
| 4139 } | 4294 } |
| 4140 | 4295 |
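The canonicalization block just above rewrites every NaN to a single bit pattern before the store, so no stored value can alias the hole sentinel (itself a reserved NaN pattern). A C++ restatement; the canonical pattern shown is an assumption for illustration:

    #include <cstdint>
    #include <cstring>

    static uint64_t CanonicalizeDoubleForStore(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      if (value == value) return bits;  // ordered, i.e. not NaN: keep as-is
      return 0x7FF8000000000000ULL;     // assumed canonical quiet NaN
    }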
| 4141 Operand double_store_operand = BuildFastArrayOperand( | 4296 Operand double_store_operand = BuildFastArrayOperand( |
| 4142 instr->elements(), | 4297 instr->elements(), |
| (...skipping 67 matching lines...) |
| 4210 DoStoreKeyedExternalArray(instr); | 4365 DoStoreKeyedExternalArray(instr); |
| 4211 } else if (instr->hydrogen()->value()->representation().IsDouble()) { | 4366 } else if (instr->hydrogen()->value()->representation().IsDouble()) { |
| 4212 DoStoreKeyedFixedDoubleArray(instr); | 4367 DoStoreKeyedFixedDoubleArray(instr); |
| 4213 } else { | 4368 } else { |
| 4214 DoStoreKeyedFixedArray(instr); | 4369 DoStoreKeyedFixedArray(instr); |
| 4215 } | 4370 } |
| 4216 } | 4371 } |
| 4217 | 4372 |
| 4218 | 4373 |
| 4219 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 4374 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 4375 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 4220 ASSERT(ToRegister(instr->object()).is(rdx)); | 4376 ASSERT(ToRegister(instr->object()).is(rdx)); |
| 4221 ASSERT(ToRegister(instr->key()).is(rcx)); | 4377 ASSERT(ToRegister(instr->key()).is(rcx)); |
| 4222 ASSERT(ToRegister(instr->value()).is(rax)); | 4378 ASSERT(ToRegister(instr->value()).is(rax)); |
| 4223 | 4379 |
| 4224 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4380 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
| 4225 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() | 4381 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 4226 : isolate()->builtins()->KeyedStoreIC_Initialize(); | 4382 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 4227 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4383 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 4228 } | 4384 } |
| 4229 | 4385 |
| (...skipping 15 matching lines...) |
| 4245 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); | 4401 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); |
| 4246 // Write barrier. | 4402 // Write barrier. |
| 4247 ASSERT_NE(instr->temp(), NULL); | 4403 ASSERT_NE(instr->temp(), NULL); |
| 4248 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4404 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
| 4249 ToRegister(instr->temp()), kDontSaveFPRegs); | 4405 ToRegister(instr->temp()), kDontSaveFPRegs); |
| 4250 } else { | 4406 } else { |
| 4251 PushSafepointRegistersScope scope(this); | 4407 PushSafepointRegistersScope scope(this); |
| 4252 if (!object_reg.is(rax)) { | 4408 if (!object_reg.is(rax)) { |
| 4253 __ movq(rax, object_reg); | 4409 __ movq(rax, object_reg); |
| 4254 } | 4410 } |
| 4411 LoadContextFromDeferred(instr->context()); |
| 4255 __ Move(rbx, to_map); | 4412 __ Move(rbx, to_map); |
| 4256 TransitionElementsKindStub stub(from_kind, to_kind); | 4413 TransitionElementsKindStub stub(from_kind, to_kind); |
| 4257 __ CallStub(&stub); | 4414 __ CallStub(&stub); |
| 4258 RecordSafepointWithRegisters( | 4415 RecordSafepointWithRegisters( |
| 4259 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4416 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4260 } | 4417 } |
| 4261 __ bind(¬_applicable); | 4418 __ bind(¬_applicable); |
| 4262 } | 4419 } |
| 4263 | 4420 |
| 4264 | 4421 |
| 4265 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4422 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 4266 Register object = ToRegister(instr->object()); | 4423 Register object = ToRegister(instr->object()); |
| 4267 Register temp = ToRegister(instr->temp()); | 4424 Register temp = ToRegister(instr->temp()); |
| 4268 Label no_memento_found; | 4425 Label no_memento_found; |
| 4269 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); | 4426 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); |
| 4270 DeoptimizeIf(equal, instr->environment()); | 4427 DeoptimizeIf(equal, instr->environment()); |
| 4271 __ bind(&no_memento_found); | 4428 __ bind(&no_memento_found); |
| 4272 } | 4429 } |
| 4273 | 4430 |
| 4274 | 4431 |
| 4275 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4432 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4276 EmitPushTaggedOperand(instr->left()); | 4433 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 4277 EmitPushTaggedOperand(instr->right()); | 4434 if (FLAG_new_string_add) { |
| 4278 StringAddStub stub(instr->hydrogen()->flags()); | 4435 ASSERT(ToRegister(instr->left()).is(rdx)); |
| 4279 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 4436 ASSERT(ToRegister(instr->right()).is(rax)); |
| 4437 NewStringAddStub stub(instr->hydrogen()->flags(), |
| 4438 isolate()->heap()->GetPretenureMode()); |
| 4439 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4440 } else { |
| 4441 EmitPushTaggedOperand(instr->left()); |
| 4442 EmitPushTaggedOperand(instr->right()); |
| 4443 StringAddStub stub(instr->hydrogen()->flags()); |
| 4444 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4445 } |
| 4280 } | 4446 } |
| 4281 | 4447 |
| 4282 | 4448 |
| 4283 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 4449 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 4284 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { | 4450 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { |
| 4285 public: | 4451 public: |
| 4286 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 4452 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
| 4287 : LDeferredCode(codegen), instr_(instr) { } | 4453 : LDeferredCode(codegen), instr_(instr) { } |
| 4288 virtual void Generate() V8_OVERRIDE { | 4454 virtual void Generate() V8_OVERRIDE { |
| 4289 codegen()->DoDeferredStringCharCodeAt(instr_); | 4455 codegen()->DoDeferredStringCharCodeAt(instr_); |
| (...skipping 30 matching lines...) |
| 4320 // DoStringCharCodeAt above. | 4486 // DoStringCharCodeAt above. |
| 4321 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); | 4487 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); |
| 4322 if (instr->index()->IsConstantOperand()) { | 4488 if (instr->index()->IsConstantOperand()) { |
| 4323 int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index())); | 4489 int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
| 4324 __ Push(Smi::FromInt(const_index)); | 4490 __ Push(Smi::FromInt(const_index)); |
| 4325 } else { | 4491 } else { |
| 4326 Register index = ToRegister(instr->index()); | 4492 Register index = ToRegister(instr->index()); |
| 4327 __ Integer32ToSmi(index, index); | 4493 __ Integer32ToSmi(index, index); |
| 4328 __ push(index); | 4494 __ push(index); |
| 4329 } | 4495 } |
| 4330 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); | 4496 CallRuntimeFromDeferred( |
| 4497 Runtime::kStringCharCodeAt, 2, instr, instr->context()); |
| 4331 __ AssertSmi(rax); | 4498 __ AssertSmi(rax); |
| 4332 __ SmiToInteger32(rax, rax); | 4499 __ SmiToInteger32(rax, rax); |
| 4333 __ StoreToSafepointRegisterSlot(result, rax); | 4500 __ StoreToSafepointRegisterSlot(result, rax); |
| 4334 } | 4501 } |
| 4335 | 4502 |
| 4336 | 4503 |
| 4337 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { | 4504 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { |
| 4338 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode { | 4505 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode { |
| 4339 public: | 4506 public: |
| 4340 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) | 4507 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) |
| (...skipping 32 matching lines...) |
| 4373 Register result = ToRegister(instr->result()); | 4540 Register result = ToRegister(instr->result()); |
| 4374 | 4541 |
| 4375 // TODO(3095996): Get rid of this. For now, we need to make the | 4542 // TODO(3095996): Get rid of this. For now, we need to make the |
| 4376 // result register contain a valid pointer because it is already | 4543 // result register contain a valid pointer because it is already |
| 4377 // contained in the register pointer map. | 4544 // contained in the register pointer map. |
| 4378 __ Set(result, 0); | 4545 __ Set(result, 0); |
| 4379 | 4546 |
| 4380 PushSafepointRegistersScope scope(this); | 4547 PushSafepointRegistersScope scope(this); |
| 4381 __ Integer32ToSmi(char_code, char_code); | 4548 __ Integer32ToSmi(char_code, char_code); |
| 4382 __ push(char_code); | 4549 __ push(char_code); |
| 4383 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); | 4550 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); |
| 4384 __ StoreToSafepointRegisterSlot(result, rax); | 4551 __ StoreToSafepointRegisterSlot(result, rax); |
| 4385 } | 4552 } |
| 4386 | 4553 |
| 4387 | 4554 |
| 4388 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 4555 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
| 4389 LOperand* input = instr->value(); | 4556 LOperand* input = instr->value(); |
| 4390 ASSERT(input->IsRegister() || input->IsStackSlot()); | 4557 ASSERT(input->IsRegister() || input->IsStackSlot()); |
| 4391 LOperand* output = instr->result(); | 4558 LOperand* output = instr->result(); |
| 4392 ASSERT(output->IsDoubleRegister()); | 4559 ASSERT(output->IsDoubleRegister()); |
| 4393 if (input->IsRegister()) { | 4560 if (input->IsRegister()) { |
| (...skipping 20 matching lines...) |
| 4414 LOperand* input = instr->value(); | 4581 LOperand* input = instr->value(); |
| 4415 LOperand* output = instr->result(); | 4582 LOperand* output = instr->result(); |
| 4416 LOperand* temp = instr->temp(); | 4583 LOperand* temp = instr->temp(); |
| 4417 | 4584 |
| 4418 __ LoadUint32(ToDoubleRegister(output), | 4585 __ LoadUint32(ToDoubleRegister(output), |
| 4419 ToRegister(input), | 4586 ToRegister(input), |
| 4420 ToDoubleRegister(temp)); | 4587 ToDoubleRegister(temp)); |
| 4421 } | 4588 } |
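A note on the helper used above: cvtlsi2sd interprets its 32-bit input as signed, so a uint32 with the high bit set cannot be converted directly. One standard fix, sketched here in plain C++, is to zero-extend to 64 bits and use the 64-bit conversion:

    #include <cstdint>

    static double Uint32ToDouble(uint32_t value) {
      return static_cast<double>(static_cast<int64_t>(value));  // zero-extend first
    }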
| 4422 | 4589 |
| 4423 | 4590 |
| 4591 void LCodeGen::DoUint32ToSmi(LUint32ToSmi* instr) { |
| 4592 LOperand* input = instr->value(); |
| 4593 ASSERT(input->IsRegister()); |
| 4594 LOperand* output = instr->result(); |
| 4595 if (!instr->hydrogen()->value()->HasRange() || |
| 4596 !instr->hydrogen()->value()->range()->IsInSmiRange() || |
| 4597 instr->hydrogen()->value()->range()->upper() == kMaxInt) { |
| 4598 // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32] |
| 4599 // interval, so we treat kMaxInt as a sentinel for this entire interval. |
| 4600 __ testl(ToRegister(input), Immediate(0x80000000)); |
| 4601 DeoptimizeIf(not_zero, instr->environment()); |
| 4602 } |
| 4603 __ Integer32ToSmi(ToRegister(output), ToRegister(input)); |
| 4604 } |
| 4605 |
| 4606 |
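The new DoUint32ToSmi above deoptimizes when the high bit is set, since such a value has no int32 (and hence no smi) representation. A sketch of the check plus the x64 tagging step, assuming the payload-in-the-upper-word smi layout that Integer32ToSmi implements:

    #include <cstdint>

    static bool TryUint32ToSmi(uint32_t value, int64_t* smi) {
      if (value & 0x80000000u) return false;     // the testl + deopt path
      *smi = static_cast<int64_t>(value) << 32;  // payload in the upper word,
      return true;                               // tag bit stays clear
    }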
| 4424 void LCodeGen::DoNumberTagI(LNumberTagI* instr) { | 4607 void LCodeGen::DoNumberTagI(LNumberTagI* instr) { |
| 4425 LOperand* input = instr->value(); | 4608 LOperand* input = instr->value(); |
| 4426 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 4609 ASSERT(input->IsRegister() && input->Equals(instr->result())); |
| 4427 Register reg = ToRegister(input); | 4610 Register reg = ToRegister(input); |
| 4428 | 4611 |
| 4429 __ Integer32ToSmi(reg, reg); | 4612 __ Integer32ToSmi(reg, reg); |
| 4430 } | 4613 } |
| 4431 | 4614 |
| 4432 | 4615 |
| 4433 void LCodeGen::DoNumberTagU(LNumberTagU* instr) { | 4616 void LCodeGen::DoNumberTagU(LNumberTagU* instr) { |
| (...skipping 43 matching lines...) |
| 4477 } | 4660 } |
| 4478 | 4661 |
| 4479 // Slow case: Call the runtime system to do the number allocation. | 4662 // Slow case: Call the runtime system to do the number allocation. |
| 4480 __ bind(&slow); | 4663 __ bind(&slow); |
| 4481 | 4664 |
| 4482 // Put a valid pointer value in the stack slot where the result | 4665 // Put a valid pointer value in the stack slot where the result |
| 4483 // register is stored, as this register is in the pointer map but contains an | 4666 // register is stored, as this register is in the pointer map but contains an |
| 4484 // integer value. | 4667 // integer value. |
| 4485 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); | 4668 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); |
| 4486 | 4669 |
| 4487 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 4670 // NumberTagU uses the context from the frame, rather than |
| 4671 // the environment's HContext or HInlinedContext value. |
| 4672 // It only calls Runtime::kAllocateHeapNumber. |
| 4673 // The corresponding HChange instructions are added in a phase that does |
| 4674 // not have easy access to the local context. |
| 4675 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4676 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 4677 RecordSafepointWithRegisters( |
| 4678 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4679 |
| 4488 if (!reg.is(rax)) __ movq(reg, rax); | 4680 if (!reg.is(rax)) __ movq(reg, rax); |
| 4489 | 4681 |
| 4490 // Done. Put the value in temp_xmm into the value of the allocated heap | 4682 // Done. Put the value in temp_xmm into the value of the allocated heap |
| 4491 // number. | 4683 // number. |
| 4492 __ bind(&done); | 4684 __ bind(&done); |
| 4493 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); | 4685 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); |
| 4494 __ StoreToSafepointRegisterSlot(reg, reg); | 4686 __ StoreToSafepointRegisterSlot(reg, reg); |
| 4495 } | 4687 } |
| 4496 | 4688 |
| 4497 | 4689 |
| (...skipping 27 matching lines...) |
| 4525 | 4717 |
| 4526 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 4718 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
| 4527 // TODO(3095996): Get rid of this. For now, we need to make the | 4719 // TODO(3095996): Get rid of this. For now, we need to make the |
| 4528 // result register contain a valid pointer because it is already | 4720 // result register contain a valid pointer because it is already |
| 4529 // contained in the register pointer map. | 4721 // contained in the register pointer map. |
| 4530 Register reg = ToRegister(instr->result()); | 4722 Register reg = ToRegister(instr->result()); |
| 4531 __ Move(reg, Smi::FromInt(0)); | 4723 __ Move(reg, Smi::FromInt(0)); |
| 4532 | 4724 |
| 4533 { | 4725 { |
| 4534 PushSafepointRegistersScope scope(this); | 4726 PushSafepointRegistersScope scope(this); |
| 4535 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 4727 // NumberTagD uses the context from the frame, rather than |
| 4536 // Ensure that value in rax survives popping registers. | 4728 // the environment's HContext or HInlinedContext value. |
| 4729 // It only calls Runtime::kAllocateHeapNumber. |
| 4730 // The corresponding HChange instructions are added in a phase that does |
| 4731 // not have easy access to the local context. |
| 4732 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4733 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 4734 RecordSafepointWithRegisters( |
| 4735 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4537 __ movq(kScratchRegister, rax); | 4736 __ movq(kScratchRegister, rax); |
| 4538 } | 4737 } |
| 4539 __ movq(reg, kScratchRegister); | 4738 __ movq(reg, kScratchRegister); |
| 4540 } | 4739 } |
| 4541 | 4740 |
| 4542 | 4741 |
| 4543 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4742 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 4544 ASSERT(instr->value()->Equals(instr->result())); | 4743 ASSERT(instr->value()->Equals(instr->result())); |
| 4545 Register input = ToRegister(instr->value()); | 4744 Register input = ToRegister(instr->value()); |
| 4546 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 4745 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
| (...skipping 28 matching lines...) |
| 4575 | 4774 |
| 4576 // Heap number map check. | 4775 // Heap number map check. |
| 4577 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), | 4776 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 4578 Heap::kHeapNumberMapRootIndex); | 4777 Heap::kHeapNumberMapRootIndex); |
| 4579 | 4778 |
| 4580 // On x64 it is safe to load at heap number offset before evaluating the map | 4779 // On x64 it is safe to load at heap number offset before evaluating the map |
| 4581 // check, since all heap objects are at least two words long. | 4780 // check, since all heap objects are at least two words long. |
| 4582 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4781 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 4583 | 4782 |
| 4584 if (can_convert_undefined_to_nan) { | 4783 if (can_convert_undefined_to_nan) { |
| 4585 __ j(not_equal, &convert); | 4784 __ j(not_equal, &convert, Label::kNear); |
| 4586 } else { | 4785 } else { |
| 4587 DeoptimizeIf(not_equal, env); | 4786 DeoptimizeIf(not_equal, env); |
| 4588 } | 4787 } |
| 4589 | 4788 |
| 4590 if (deoptimize_on_minus_zero) { | 4789 if (deoptimize_on_minus_zero) { |
| 4591 XMMRegister xmm_scratch = double_scratch0(); | 4790 XMMRegister xmm_scratch = double_scratch0(); |
| 4592 __ xorps(xmm_scratch, xmm_scratch); | 4791 __ xorps(xmm_scratch, xmm_scratch); |
| 4593 __ ucomisd(xmm_scratch, result_reg); | 4792 __ ucomisd(xmm_scratch, result_reg); |
| 4594 __ j(not_equal, &done, Label::kNear); | 4793 __ j(not_equal, &done, Label::kNear); |
| 4595 __ movmskpd(kScratchRegister, result_reg); | 4794 __ movmskpd(kScratchRegister, result_reg); |
| (...skipping 236 matching lines...) |
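The minus-zero path above leans on two SSE facts: ucomisd reports -0.0 equal to +0.0, so the equality test alone cannot reject -0, and movmskpd exposes the sign bit that can. A runnable sketch of the same two-step test in plain C++; IsMinusZero is an illustrative helper, not a V8 function:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    bool IsMinusZero(double x) {
      if (x != 0.0) return false;  // the ucomisd + j(not_equal, &done) step
      uint64_t bits;
      std::memcpy(&bits, &x, sizeof bits);
      return (bits >> 63) != 0;    // the movmskpd sign-bit extraction
    }

    int main() {
      assert(IsMinusZero(-0.0));
      assert(!IsMinusZero(0.0));
      assert(!IsMinusZero(1.5));
    }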
| 4832 Register reg = ToRegister(instr->value()); | 5031 Register reg = ToRegister(instr->value()); |
| 4833 __ Cmp(reg, instr->hydrogen()->object().handle()); | 5032 __ Cmp(reg, instr->hydrogen()->object().handle()); |
| 4834 DeoptimizeIf(not_equal, instr->environment()); | 5033 DeoptimizeIf(not_equal, instr->environment()); |
| 4835 } | 5034 } |
| 4836 | 5035 |
| 4837 | 5036 |
| 4838 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 5037 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
| 4839 { | 5038 { |
| 4840 PushSafepointRegistersScope scope(this); | 5039 PushSafepointRegistersScope scope(this); |
| 4841 __ push(object); | 5040 __ push(object); |
| 4842 CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr); | 5041 __ Set(rsi, 0); |
| 5042 __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance); |
| 5043 RecordSafepointWithRegisters( |
| 5044 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
| 5045 |
| 4843 __ testq(rax, Immediate(kSmiTagMask)); | 5046 __ testq(rax, Immediate(kSmiTagMask)); |
| 4844 } | 5047 } |
| 4845 DeoptimizeIf(zero, instr->environment()); | 5048 DeoptimizeIf(zero, instr->environment()); |
| 4846 } | 5049 } |
| 4847 | 5050 |
| 4848 | 5051 |
| 4849 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5052 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
| 4850 class DeferredCheckMaps V8_FINAL : public LDeferredCode { | 5053 class DeferredCheckMaps V8_FINAL : public LDeferredCode { |
| 4851 public: | 5054 public: |
| 4852 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 5055 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) |
| (...skipping 20 matching lines...) |
| 4873 DeferredCheckMaps* deferred = NULL; | 5076 DeferredCheckMaps* deferred = NULL; |
| 4874 if (instr->hydrogen()->has_migration_target()) { | 5077 if (instr->hydrogen()->has_migration_target()) { |
| 4875 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); | 5078 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); |
| 4876 __ bind(deferred->check_maps()); | 5079 __ bind(deferred->check_maps()); |
| 4877 } | 5080 } |
| 4878 | 5081 |
| 4879 UniqueSet<Map> map_set = instr->hydrogen()->map_set(); | 5082 UniqueSet<Map> map_set = instr->hydrogen()->map_set(); |
| 4880 Label success; | 5083 Label success; |
| 4881 for (int i = 0; i < map_set.size() - 1; i++) { | 5084 for (int i = 0; i < map_set.size() - 1; i++) { |
| 4882 Handle<Map> map = map_set.at(i).handle(); | 5085 Handle<Map> map = map_set.at(i).handle(); |
| 4883 __ CompareMap(reg, map, &success); | 5086 __ CompareMap(reg, map); |
| 4884 __ j(equal, &success); | 5087 __ j(equal, &success, Label::kNear); |
| 4885 } | 5088 } |
| 4886 | 5089 |
| 4887 Handle<Map> map = map_set.at(map_set.size() - 1).handle(); | 5090 Handle<Map> map = map_set.at(map_set.size() - 1).handle(); |
| 4888 __ CompareMap(reg, map, &success); | 5091 __ CompareMap(reg, map); |
| 4889 if (instr->hydrogen()->has_migration_target()) { | 5092 if (instr->hydrogen()->has_migration_target()) { |
| 4890 __ j(not_equal, deferred->entry()); | 5093 __ j(not_equal, deferred->entry()); |
| 4891 } else { | 5094 } else { |
| 4892 DeoptimizeIf(not_equal, instr->environment()); | 5095 DeoptimizeIf(not_equal, instr->environment()); |
| 4893 } | 5096 } |
| 4894 | 5097 |
| 4895 __ bind(&success); | 5098 __ bind(&success); |
| 4896 } | 5099 } |
| 4897 | 5100 |
| 4898 | 5101 |
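DoCheckMaps compares the first n-1 maps with an early jump to success and lets only the final comparison fail the whole check (deopt, or the migration stub when one exists). A sketch of that control shape over plain pointers; CheckMaps here is a stand-in for the generated code, not a V8 API:

    #include <cstddef>
    #include <vector>

    using Map = const void*;

    bool CheckMaps(Map object_map, const std::vector<Map>& map_set) {
      for (std::size_t i = 0; i + 1 < map_set.size(); ++i) {
        if (object_map == map_set[i]) return true;  // j(equal, &success)
      }
      return object_map == map_set.back();          // only the last compare can fail
    }

    int main() {
      int a, b;
      std::vector<Map> maps{&a, &b};
      return CheckMaps(&b, maps) ? 0 : 1;
    }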
| (...skipping 11 matching lines...) |
| 4910 __ ClampUint8(value_reg); | 5113 __ ClampUint8(value_reg); |
| 4911 } | 5114 } |
| 4912 | 5115 |
| 4913 | 5116 |
| 4914 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { | 5117 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { |
| 4915 ASSERT(instr->unclamped()->Equals(instr->result())); | 5118 ASSERT(instr->unclamped()->Equals(instr->result())); |
| 4916 Register input_reg = ToRegister(instr->unclamped()); | 5119 Register input_reg = ToRegister(instr->unclamped()); |
| 4917 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); | 5120 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); |
| 4918 XMMRegister xmm_scratch = double_scratch0(); | 5121 XMMRegister xmm_scratch = double_scratch0(); |
| 4919 Label is_smi, done, heap_number; | 5122 Label is_smi, done, heap_number; |
| 4920 | 5123 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 4921 __ JumpIfSmi(input_reg, &is_smi); | 5124 __ JumpIfSmi(input_reg, &is_smi, dist); |
| 4922 | 5125 |
| 4923 // Check for heap number. | 5126 // Check for heap number. |
| 4924 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 5127 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 4925 factory()->heap_number_map()); | 5128 factory()->heap_number_map()); |
| 4926 __ j(equal, &heap_number, Label::kNear); | 5129 __ j(equal, &heap_number, Label::kNear); |
| 4927 | 5130 |
| 4928 // Check for undefined. Undefined is converted to zero for clamping | 5131 // Check for undefined. Undefined is converted to zero for clamping |
| 4929 // conversions. | 5132 // conversions. |
| 4930 __ Cmp(input_reg, factory()->undefined_value()); | 5133 __ Cmp(input_reg, factory()->undefined_value()); |
| 4931 DeoptimizeIf(not_equal, instr->environment()); | 5134 DeoptimizeIf(not_equal, instr->environment()); |
| (...skipping 92 matching lines...) |
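DoClampTToUint8 above handles three input shapes: a smi clamps directly, a heap number clamps its double payload, and undefined clamps to zero. A sketch of the double case, assuming round-to-nearest and folding NaN to zero; ClampDoubleToUint8 here is an illustrative reimplementation, not V8's helper:

    #include <cassert>
    #include <cmath>
    #include <cstdint>

    uint8_t ClampDoubleToUint8(double value) {
      if (!(value > 0.0)) return 0;    // NaN and non-positive inputs clamp to 0
      if (value >= 255.0) return 255;  // saturate at the top of the range
      return static_cast<uint8_t>(std::lrint(value));  // round-to-nearest
    }

    int main() {
      assert(ClampDoubleToUint8(-3.5) == 0);
      assert(ClampDoubleToUint8(300.0) == 255);
      assert(ClampDoubleToUint8(1.25) == 1);
    }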
| 5024 __ Integer32ToSmi(size, size); | 5227 __ Integer32ToSmi(size, size); |
| 5025 __ push(size); | 5228 __ push(size); |
| 5026 } else { | 5229 } else { |
| 5027 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5230 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
| 5028 __ Push(Smi::FromInt(size)); | 5231 __ Push(Smi::FromInt(size)); |
| 5029 } | 5232 } |
| 5030 | 5233 |
| 5031 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { | 5234 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
| 5032 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); | 5235 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
| 5033 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5236 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5034 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr); | 5237 CallRuntimeFromDeferred( |
| 5238 Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context()); |
| 5035 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5239 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
| 5036 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5240 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5037 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr); | 5241 CallRuntimeFromDeferred( |
| 5242 Runtime::kAllocateInOldDataSpace, 1, instr, instr->context()); |
| 5038 } else { | 5243 } else { |
| 5039 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); | 5244 CallRuntimeFromDeferred( |
| 5245 Runtime::kAllocateInNewSpace, 1, instr, instr->context()); |
| 5040 } | 5246 } |
| 5041 __ StoreToSafepointRegisterSlot(result, rax); | 5247 __ StoreToSafepointRegisterSlot(result, rax); |
| 5042 } | 5248 } |
| 5043 | 5249 |
| 5044 | 5250 |
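The deferred allocation pushes a smi-tagged size and then picks exactly one runtime entry per space, with the ASSERTs ruling out overlapping flags. A sketch of that selection, with an enum standing in for the three Runtime::kAllocateIn* entries; the names are illustrative:

    #include <cassert>

    enum class Space { kNew, kOldPointer, kOldData };

    Space PickAllocationSpace(bool old_pointer, bool old_data) {
      if (old_pointer) return Space::kOldPointer;  // kAllocateInOldPointerSpace
      if (old_data) return Space::kOldData;        // kAllocateInOldDataSpace
      return Space::kNew;                          // kAllocateInNewSpace
    }

    int main() {
      assert(PickAllocationSpace(false, false) == Space::kNew);
      assert(PickAllocationSpace(true, false) == Space::kOldPointer);
    }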
| 5045 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { | 5251 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { |
| 5046 ASSERT(ToRegister(instr->value()).is(rax)); | 5252 ASSERT(ToRegister(instr->value()).is(rax)); |
| 5047 __ push(rax); | 5253 __ push(rax); |
| 5048 CallRuntime(Runtime::kToFastProperties, 1, instr); | 5254 CallRuntime(Runtime::kToFastProperties, 1, instr); |
| 5049 } | 5255 } |
| 5050 | 5256 |
| 5051 | 5257 |
| 5052 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 5258 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 5259 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5053 Label materialized; | 5260 Label materialized; |
| 5054 // Registers will be used as follows: | 5261 // Registers will be used as follows: |
| 5055 // rcx = literals array. | 5262 // rcx = literals array. |
| 5056 // rbx = regexp literal. | 5263 // rbx = regexp literal. |
| 5057 // rax = regexp literal clone. | 5264 // rax = regexp literal clone. |
| 5058 int literal_offset = | 5265 int literal_offset = |
| 5059 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); | 5266 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); |
| 5060 __ Move(rcx, instr->hydrogen()->literals()); | 5267 __ Move(rcx, instr->hydrogen()->literals()); |
| 5061 __ movq(rbx, FieldOperand(rcx, literal_offset)); | 5268 __ movq(rbx, FieldOperand(rcx, literal_offset)); |
| 5062 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 5269 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 5063 __ j(not_equal, &materialized, Label::kNear); | 5270 __ j(not_equal, &materialized, Label::kNear); |
| 5064 | 5271 |
| 5065 // Create the regexp literal using a runtime function. | 5272 // Create the regexp literal using a runtime function. |
| 5066 // Result will be in rax. | 5273 // Result will be in rax. |
| 5067 __ push(rcx); | 5274 __ push(rcx); |
| 5068 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); | 5275 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); |
| 5069 __ Push(instr->hydrogen()->pattern()); | 5276 __ Push(instr->hydrogen()->pattern()); |
| 5070 __ Push(instr->hydrogen()->flags()); | 5277 __ Push(instr->hydrogen()->flags()); |
| 5071 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 5278 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); |
| 5072 __ movq(rbx, rax); | 5279 __ movq(rbx, rax); |
| 5073 | 5280 |
| 5074 __ bind(&materialized); | 5281 __ bind(&materialized); |
| 5075 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 5282 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 5076 Label allocated, runtime_allocate; | 5283 Label allocated, runtime_allocate; |
| 5077 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); | 5284 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); |
| 5078 __ jmp(&allocated); | 5285 __ jmp(&allocated, Label::kNear); |
| 5079 | 5286 |
| 5080 __ bind(&runtime_allocate); | 5287 __ bind(&runtime_allocate); |
| 5081 __ push(rbx); | 5288 __ push(rbx); |
| 5082 __ Push(Smi::FromInt(size)); | 5289 __ Push(Smi::FromInt(size)); |
| 5083 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5290 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
| 5084 __ pop(rbx); | 5291 __ pop(rbx); |
| 5085 | 5292 |
| 5086 __ bind(&allocated); | 5293 __ bind(&allocated); |
| 5087 // Copy the content into the newly allocated memory. | 5294 // Copy the content into the newly allocated memory. |
| 5088 // (Unroll copy loop once for better throughput). | 5295 // (Unroll copy loop once for better throughput). |
| 5089 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { | 5296 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { |
| 5090 __ movq(rdx, FieldOperand(rbx, i)); | 5297 __ movq(rdx, FieldOperand(rbx, i)); |
| 5091 __ movq(rcx, FieldOperand(rbx, i + kPointerSize)); | 5298 __ movq(rcx, FieldOperand(rbx, i + kPointerSize)); |
| 5092 __ movq(FieldOperand(rax, i), rdx); | 5299 __ movq(FieldOperand(rax, i), rdx); |
| 5093 __ movq(FieldOperand(rax, i + kPointerSize), rcx); | 5300 __ movq(FieldOperand(rax, i + kPointerSize), rcx); |
| 5094 } | 5301 } |
| 5095 if ((size % (2 * kPointerSize)) != 0) { | 5302 if ((size % (2 * kPointerSize)) != 0) { |
| 5096 __ movq(rdx, FieldOperand(rbx, size - kPointerSize)); | 5303 __ movq(rdx, FieldOperand(rbx, size - kPointerSize)); |
| 5097 __ movq(FieldOperand(rax, size - kPointerSize), rdx); | 5304 __ movq(FieldOperand(rax, size - kPointerSize), rdx); |
| 5098 } | 5305 } |
| 5099 } | 5306 } |
| 5100 | 5307 |
| 5101 | 5308 |
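The literal copy above moves two pointer-sized words per iteration and patches up a single odd word at the end. A host-side sketch of the same schedule over uint64_t words, assuming kPointerSize == 8; CopyWords is an illustrative helper:

    #include <cassert>
    #include <cstdint>

    constexpr int kPointerSize = 8;

    void CopyWords(uint64_t* dst, const uint64_t* src, int size_in_bytes) {
      int i = 0;
      // Two words per iteration, mirroring the rdx/rcx movq pairs.
      for (; i < size_in_bytes - kPointerSize; i += 2 * kPointerSize) {
        dst[i / 8] = src[i / 8];
        dst[i / 8 + 1] = src[i / 8 + 1];
      }
      // Odd tail word when the size is not a multiple of two pointers.
      if ((size_in_bytes % (2 * kPointerSize)) != 0) {
        dst[size_in_bytes / 8 - 1] = src[size_in_bytes / 8 - 1];
      }
    }

    int main() {
      uint64_t src[5] = {1, 2, 3, 4, 5}, dst[5] = {0};
      CopyWords(dst, src, sizeof src);
      for (int i = 0; i < 5; ++i) assert(dst[i] == src[i]);
    }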
| 5102 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5309 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 5310 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5103 // Use the fast case closure allocation code that allocates in new | 5311 // Use the fast case closure allocation code that allocates in new |
| 5104 // space for nested functions that don't need literals cloning. | 5312 // space for nested functions that don't need literals cloning. |
| 5105 bool pretenure = instr->hydrogen()->pretenure(); | 5313 bool pretenure = instr->hydrogen()->pretenure(); |
| 5106 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5314 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
| 5107 FastNewClosureStub stub(instr->hydrogen()->language_mode(), | 5315 FastNewClosureStub stub(instr->hydrogen()->language_mode(), |
| 5108 instr->hydrogen()->is_generator()); | 5316 instr->hydrogen()->is_generator()); |
| 5109 __ Move(rbx, instr->hydrogen()->shared_info()); | 5317 __ Move(rbx, instr->hydrogen()->shared_info()); |
| 5110 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5318 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5111 } else { | 5319 } else { |
| 5112 __ push(rsi); | 5320 __ push(rsi); |
| 5113 __ Push(instr->hydrogen()->shared_info()); | 5321 __ Push(instr->hydrogen()->shared_info()); |
| 5114 __ PushRoot(pretenure ? Heap::kTrueValueRootIndex : | 5322 __ PushRoot(pretenure ? Heap::kTrueValueRootIndex : |
| 5115 Heap::kFalseValueRootIndex); | 5323 Heap::kFalseValueRootIndex); |
| 5116 CallRuntime(Runtime::kNewClosure, 3, instr); | 5324 CallRuntime(Runtime::kNewClosure, 3, instr); |
| 5117 } | 5325 } |
| 5118 } | 5326 } |
| 5119 | 5327 |
| 5120 | 5328 |
| 5121 void LCodeGen::DoTypeof(LTypeof* instr) { | 5329 void LCodeGen::DoTypeof(LTypeof* instr) { |
| 5330 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5122 LOperand* input = instr->value(); | 5331 LOperand* input = instr->value(); |
| 5123 EmitPushTaggedOperand(input); | 5332 EmitPushTaggedOperand(input); |
| 5124 CallRuntime(Runtime::kTypeof, 1, instr); | 5333 CallRuntime(Runtime::kTypeof, 1, instr); |
| 5125 } | 5334 } |
| 5126 | 5335 |
| 5127 | 5336 |
| 5128 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { | 5337 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { |
| 5129 ASSERT(!operand->IsDoubleRegister()); | 5338 ASSERT(!operand->IsDoubleRegister()); |
| 5130 if (operand->IsConstantOperand()) { | 5339 if (operand->IsConstantOperand()) { |
| 5131 __ Push(ToHandle(LConstantOperand::cast(operand))); | 5340 __ Push(ToHandle(LConstantOperand::cast(operand))); |
| (...skipping 149 matching lines...) |
| 5281 // the special case below. | 5490 // the special case below. |
| 5282 if (info()->IsStub() && type == Deoptimizer::EAGER) { | 5491 if (info()->IsStub() && type == Deoptimizer::EAGER) { |
| 5283 type = Deoptimizer::LAZY; | 5492 type = Deoptimizer::LAZY; |
| 5284 } | 5493 } |
| 5285 | 5494 |
| 5286 Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); | 5495 Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); |
| 5287 DeoptimizeIf(no_condition, instr->environment(), type); | 5496 DeoptimizeIf(no_condition, instr->environment(), type); |
| 5288 } | 5497 } |
| 5289 | 5498 |
| 5290 | 5499 |
| 5500 void LCodeGen::DoDummy(LDummy* instr) { |
| 5501 // Nothing to see here, move on! |
| 5502 } |
| 5503 |
| 5504 |
| 5291 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5505 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
| 5292 // Nothing to see here, move on! | 5506 // Nothing to see here, move on! |
| 5293 } | 5507 } |
| 5294 | 5508 |
| 5295 | 5509 |
| 5296 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5510 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
| 5297 PushSafepointRegistersScope scope(this); | 5511 PushSafepointRegistersScope scope(this); |
| 5298 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 5512 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 5299 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 5513 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 5300 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); | 5514 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); |
| (...skipping 18 matching lines...) |
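DoDeferredStackCheck reloads rsi from the caller frame rather than trusting a register value across the safepoint. A sketch of a fixed-offset frame-slot read; the slot index below is a stand-in for illustration, not StandardFrameConstants::kContextOffset's actual value:

    #include <cassert>
    #include <cstdint>

    constexpr int kContextSlot = -1;  // assumed slot below the frame pointer

    uint64_t LoadContextFromFrame(const uint64_t* rbp) {
      // movq rsi, Operand(rbp, StandardFrameConstants::kContextOffset)
      return rbp[kContextSlot];
    }

    int main() {
      uint64_t frame[4] = {0xC0FFEE, 0xDEAD, 0xBEEF, 0};
      // Pretend rbp points at frame[1]; the context slot sits one word below.
      assert(LoadContextFromFrame(&frame[1]) == 0xC0FFEE);
    }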
| 5319 | 5533 |
| 5320 ASSERT(instr->HasEnvironment()); | 5534 ASSERT(instr->HasEnvironment()); |
| 5321 LEnvironment* env = instr->environment(); | 5535 LEnvironment* env = instr->environment(); |
| 5322 // There is no LLazyBailout instruction for stack-checks. We have to | 5536 // There is no LLazyBailout instruction for stack-checks. We have to |
| 5323 // prepare for lazy deoptimization explicitly here. | 5537 // prepare for lazy deoptimization explicitly here. |
| 5324 if (instr->hydrogen()->is_function_entry()) { | 5538 if (instr->hydrogen()->is_function_entry()) { |
| 5325 // Perform stack overflow check. | 5539 // Perform stack overflow check. |
| 5326 Label done; | 5540 Label done; |
| 5327 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 5541 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 5328 __ j(above_equal, &done, Label::kNear); | 5542 __ j(above_equal, &done, Label::kNear); |
| 5543 |
| 5544 ASSERT(instr->context()->IsRegister()); |
| 5545 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5329 CallCode(isolate()->builtins()->StackCheck(), | 5546 CallCode(isolate()->builtins()->StackCheck(), |
| 5330 RelocInfo::CODE_TARGET, | 5547 RelocInfo::CODE_TARGET, |
| 5331 instr); | 5548 instr); |
| 5332 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5549 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5333 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5550 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5334 __ bind(&done); | 5551 __ bind(&done); |
| 5335 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5552 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5336 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5553 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5337 } else { | 5554 } else { |
| 5338 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5555 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| (...skipping 23 matching lines...) |
| 5362 // If the environment were already registered, we would have no way of | 5579 // If the environment were already registered, we would have no way of |
| 5363 // backpatching it with the spill slot operands. | 5580 // backpatching it with the spill slot operands. |
| 5364 ASSERT(!environment->HasBeenRegistered()); | 5581 ASSERT(!environment->HasBeenRegistered()); |
| 5365 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 5582 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 5366 | 5583 |
| 5367 GenerateOsrPrologue(); | 5584 GenerateOsrPrologue(); |
| 5368 } | 5585 } |
| 5369 | 5586 |
| 5370 | 5587 |
| 5371 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 5588 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { |
| 5589 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5372 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | 5590 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
| 5373 DeoptimizeIf(equal, instr->environment()); | 5591 DeoptimizeIf(equal, instr->environment()); |
| 5374 | 5592 |
| 5375 Register null_value = rdi; | 5593 Register null_value = rdi; |
| 5376 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 5594 __ LoadRoot(null_value, Heap::kNullValueRootIndex); |
| 5377 __ cmpq(rax, null_value); | 5595 __ cmpq(rax, null_value); |
| 5378 DeoptimizeIf(equal, instr->environment()); | 5596 DeoptimizeIf(equal, instr->environment()); |
| 5379 | 5597 |
| 5380 Condition cc = masm()->CheckSmi(rax); | 5598 Condition cc = masm()->CheckSmi(rax); |
| 5381 DeoptimizeIf(cc, instr->environment()); | 5599 DeoptimizeIf(cc, instr->environment()); |
| (...skipping 19 matching lines...) |
| 5401 __ bind(&use_cache); | 5619 __ bind(&use_cache); |
| 5402 } | 5620 } |
| 5403 | 5621 |
| 5404 | 5622 |
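DoForInPrepareMap deopts on three receivers before enumeration starts: undefined, null, and any smi. A sketch of that preflight, with fake tagged sentinels standing in for the root-list comparisons; all constants here are illustrative:

    #include <cassert>
    #include <cstdint>

    enum class Preflight { kOk, kDeopt };

    constexpr uint64_t kUndefined = 0x11, kNull = 0x21;  // fake tagged sentinels

    Preflight CheckForInReceiver(uint64_t rax) {
      if (rax == kUndefined) return Preflight::kDeopt;  // CompareRoot undefined
      if (rax == kNull) return Preflight::kDeopt;       // cmpq against null_value
      if ((rax & 1) == 0) return Preflight::kDeopt;     // CheckSmi
      return Preflight::kOk;
    }

    int main() {
      assert(CheckForInReceiver(kUndefined) == Preflight::kDeopt);
      assert(CheckForInReceiver(0x1000) == Preflight::kDeopt);  // smi receiver
      assert(CheckForInReceiver(0x1001) == Preflight::kOk);
    }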
| 5405 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { | 5623 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { |
| 5406 Register map = ToRegister(instr->map()); | 5624 Register map = ToRegister(instr->map()); |
| 5407 Register result = ToRegister(instr->result()); | 5625 Register result = ToRegister(instr->result()); |
| 5408 Label load_cache, done; | 5626 Label load_cache, done; |
| 5409 __ EnumLength(result, map); | 5627 __ EnumLength(result, map); |
| 5410 __ Cmp(result, Smi::FromInt(0)); | 5628 __ Cmp(result, Smi::FromInt(0)); |
| 5411 __ j(not_equal, &load_cache); | 5629 __ j(not_equal, &load_cache, Label::kNear); |
| 5412 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex); | 5630 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex); |
| 5413 __ jmp(&done); | 5631 __ jmp(&done, Label::kNear); |
| 5414 __ bind(&load_cache); | 5632 __ bind(&load_cache); |
| 5415 __ LoadInstanceDescriptors(map, result); | 5633 __ LoadInstanceDescriptors(map, result); |
| 5416 __ movq(result, | 5634 __ movq(result, |
| 5417 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); | 5635 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); |
| 5418 __ movq(result, | 5636 __ movq(result, |
| 5419 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); | 5637 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); |
| 5420 __ bind(&done); | 5638 __ bind(&done); |
| 5421 Condition cc = masm()->CheckSmi(result); | 5639 Condition cc = masm()->CheckSmi(result); |
| 5422 DeoptimizeIf(cc, instr->environment()); | 5640 DeoptimizeIf(cc, instr->environment()); |
| 5423 } | 5641 } |
| 5424 | 5642 |
| 5425 | 5643 |
| 5426 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5644 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
| 5427 Register object = ToRegister(instr->value()); | 5645 Register object = ToRegister(instr->value()); |
| 5428 __ cmpq(ToRegister(instr->map()), | 5646 __ cmpq(ToRegister(instr->map()), |
| 5429 FieldOperand(object, HeapObject::kMapOffset)); | 5647 FieldOperand(object, HeapObject::kMapOffset)); |
| 5430 DeoptimizeIf(not_equal, instr->environment()); | 5648 DeoptimizeIf(not_equal, instr->environment()); |
| 5431 } | 5649 } |
| 5432 | 5650 |
| 5433 | 5651 |
| 5434 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { | 5652 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { |
| 5435 Register object = ToRegister(instr->object()); | 5653 Register object = ToRegister(instr->object()); |
| 5436 Register index = ToRegister(instr->index()); | 5654 Register index = ToRegister(instr->index()); |
| 5437 | 5655 |
| 5438 Label out_of_object, done; | 5656 Label out_of_object, done; |
| 5439 __ SmiToInteger32(index, index); | 5657 __ SmiToInteger32(index, index); |
| 5440 __ cmpl(index, Immediate(0)); | 5658 __ cmpl(index, Immediate(0)); |
| 5441 __ j(less, &out_of_object); | 5659 __ j(less, &out_of_object, Label::kNear); |
| 5442 __ movq(object, FieldOperand(object, | 5660 __ movq(object, FieldOperand(object, |
| 5443 index, | 5661 index, |
| 5444 times_pointer_size, | 5662 times_pointer_size, |
| 5445 JSObject::kHeaderSize)); | 5663 JSObject::kHeaderSize)); |
| 5446 __ jmp(&done, Label::kNear); | 5664 __ jmp(&done, Label::kNear); |
| 5447 | 5665 |
| 5448 __ bind(&out_of_object); | 5666 __ bind(&out_of_object); |
| 5449 __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset)); | 5667 __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 5450 __ negl(index); | 5668 __ negl(index); |
| 5451 // Index is now equal to the out-of-object property index plus 1. | 5669 // Index is now equal to the out-of-object property index plus 1. |
| 5452 __ movq(object, FieldOperand(object, | 5670 __ movq(object, FieldOperand(object, |
| 5453 index, | 5671 index, |
| 5454 times_pointer_size, | 5672 times_pointer_size, |
| 5455 FixedArray::kHeaderSize - kPointerSize)); | 5673 FixedArray::kHeaderSize - kPointerSize)); |
| 5456 __ bind(&done); | 5674 __ bind(&done); |
| 5457 } | 5675 } |
| 5458 | 5676 |
| 5459 | 5677 |
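DoLoadFieldByIndex decodes the sign of its index: a non-negative index selects an in-object field, while a negative index selects the properties backing store, with the negl plus minus-one-pointer offset recovering the array slot. A sketch over plain vectors; FakeObject is illustrative, not V8's layout:

    #include <cassert>
    #include <vector>

    struct FakeObject {
      std::vector<int> in_object;   // fields stored inside the object
      std::vector<int> properties;  // out-of-object backing store
    };

    int LoadFieldByIndex(const FakeObject& obj, int index) {
      if (index >= 0) return obj.in_object[index];  // JSObject::kHeaderSize path
      index = -index;                    // negl: index is now property + 1
      return obj.properties[index - 1];  // header offset minus one pointer
    }

    int main() {
      FakeObject o{{10, 11}, {20, 21}};
      assert(LoadFieldByIndex(o, 1) == 11);
      assert(LoadFieldByIndex(o, -2) == 21);  // -2 decodes to properties[1]
    }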
| 5460 #undef __ | 5678 #undef __ |
| 5461 | 5679 |
| 5462 } } // namespace v8::internal | 5680 } } // namespace v8::internal |
| 5463 | 5681 |
| 5464 #endif // V8_TARGET_ARCH_X64 | 5682 #endif // V8_TARGET_ARCH_X64 |