| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 348 matching lines...) |
| 359 if (r.IsInteger32()) { | 359 if (r.IsInteger32()) { |
| 360 ASSERT(literal->IsNumber()); | 360 ASSERT(literal->IsNumber()); |
| 361 __ li(scratch, Operand(static_cast<int32_t>(literal->Number()))); | 361 __ li(scratch, Operand(static_cast<int32_t>(literal->Number()))); |
| 362 } else if (r.IsSmi()) { | 362 } else if (r.IsSmi()) { |
| 363 ASSERT(constant->HasSmiValue()); | 363 ASSERT(constant->HasSmiValue()); |
| 364 __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value()))); | 364 __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value()))); |
| 365 } else if (r.IsDouble()) { | 365 } else if (r.IsDouble()) { |
| 366 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate); | 366 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate); |
| 367 } else { | 367 } else { |
| 368 ASSERT(r.IsSmiOrTagged()); | 368 ASSERT(r.IsSmiOrTagged()); |
| 369 __ LoadObject(scratch, literal); | 369 __ li(scratch, literal); |
| 370 } | 370 } |
| 371 return scratch; | 371 return scratch; |
| 372 } else if (op->IsStackSlot() || op->IsArgument()) { | 372 } else if (op->IsStackSlot() || op->IsArgument()) { |
| 373 __ lw(scratch, ToMemOperand(op)); | 373 __ lw(scratch, ToMemOperand(op)); |
| 374 return scratch; | 374 return scratch; |
| 375 } | 375 } |
| 376 UNREACHABLE(); | 376 UNREACHABLE(); |
| 377 return scratch; | 377 return scratch; |
| 378 } | 378 } |
| 379 | 379 |
| (...skipping 281 matching lines...) |
| 661 | 661 |
| 662 | 662 |
| 663 void LCodeGen::LoadContextFromDeferred(LOperand* context) { | 663 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
| 664 if (context->IsRegister()) { | 664 if (context->IsRegister()) { |
| 665 __ Move(cp, ToRegister(context)); | 665 __ Move(cp, ToRegister(context)); |
| 666 } else if (context->IsStackSlot()) { | 666 } else if (context->IsStackSlot()) { |
| 667 __ lw(cp, ToMemOperand(context)); | 667 __ lw(cp, ToMemOperand(context)); |
| 668 } else if (context->IsConstantOperand()) { | 668 } else if (context->IsConstantOperand()) { |
| 669 HConstant* constant = | 669 HConstant* constant = |
| 670 chunk_->LookupConstant(LConstantOperand::cast(context)); | 670 chunk_->LookupConstant(LConstantOperand::cast(context)); |
| 671 __ LoadObject(cp, Handle<Object>::cast(constant->handle(isolate()))); | 671 __ li(cp, Handle<Object>::cast(constant->handle(isolate()))); |
| 672 } else { | 672 } else { |
| 673 UNREACHABLE(); | 673 UNREACHABLE(); |
| 674 } | 674 } |
| 675 } | 675 } |
| 676 | 676 |
| 677 | 677 |
| 678 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 678 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
| 679 int argc, | 679 int argc, |
| 680 LInstruction* instr, | 680 LInstruction* instr, |
| 681 LOperand* context) { | 681 LOperand* context) { |
| (...skipping 49 matching lines...) |
| 731 ASSERT(environment->HasBeenRegistered()); | 731 ASSERT(environment->HasBeenRegistered()); |
| 732 int id = environment->deoptimization_index(); | 732 int id = environment->deoptimization_index(); |
| 733 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 733 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
| 734 Address entry = | 734 Address entry = |
| 735 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 735 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 736 if (entry == NULL) { | 736 if (entry == NULL) { |
| 737 Abort(kBailoutWasNotPrepared); | 737 Abort(kBailoutWasNotPrepared); |
| 738 return; | 738 return; |
| 739 } | 739 } |
| 740 | 740 |
| 741 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. | 741 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) { |
| 742 if (FLAG_deopt_every_n_times == 1 && | 742 Register scratch = scratch0(); |
| 743 !info()->IsStub() && | 743 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); |
| 744 info()->opt_count() == id) { | 744 Label no_deopt; |
| 745 ASSERT(frame_is_built_); | 745 __ Push(a1, scratch); |
| 746 __ li(scratch, Operand(count)); |
| 747 __ lw(a1, MemOperand(scratch)); |
| 748 __ Subu(a1, a1, Operand(1)); |
| 749 __ Branch(&no_deopt, ne, a1, Operand(zero_reg)); |
| 750 __ li(a1, Operand(FLAG_deopt_every_n_times)); |
| 751 __ sw(a1, MemOperand(scratch)); |
| 752 __ Pop(a1, scratch); |
| 753 |
| 746 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 754 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 747 return; | 755 __ bind(&no_deopt); |
| 756 __ sw(a1, MemOperand(scratch)); |
| 757 __ Pop(a1, scratch); |
| 748 } | 758 } |
| 749 | 759 |
| 750 if (info()->ShouldTrapOnDeopt()) { | 760 if (info()->ShouldTrapOnDeopt()) { |
| 751 Label skip; | 761 Label skip; |
| 752 if (condition != al) { | 762 if (condition != al) { |
| 753 __ Branch(&skip, NegateCondition(condition), src1, src2); | 763 __ Branch(&skip, NegateCondition(condition), src1, src2); |
| 754 } | 764 } |
| 755 __ stop("trap_on_deopt"); | 765 __ stop("trap_on_deopt"); |
| 756 __ bind(&skip); | 766 __ bind(&skip); |
| 757 } | 767 } |
| (...skipping 882 matching lines...) |
| 1640 | 1650 |
| 1641 | 1651 |
| 1642 void LCodeGen::DoConstantE(LConstantE* instr) { | 1652 void LCodeGen::DoConstantE(LConstantE* instr) { |
| 1643 __ li(ToRegister(instr->result()), Operand(instr->value())); | 1653 __ li(ToRegister(instr->result()), Operand(instr->value())); |
| 1644 } | 1654 } |
| 1645 | 1655 |
| 1646 | 1656 |
| 1647 void LCodeGen::DoConstantT(LConstantT* instr) { | 1657 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 1648 Handle<Object> value = instr->value(isolate()); | 1658 Handle<Object> value = instr->value(isolate()); |
| 1649 AllowDeferredHandleDereference smi_check; | 1659 AllowDeferredHandleDereference smi_check; |
| 1650 __ LoadObject(ToRegister(instr->result()), value); | 1660 __ li(ToRegister(instr->result()), value); |
| 1651 } | 1661 } |
| 1652 | 1662 |
| 1653 | 1663 |
| 1654 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { | 1664 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { |
| 1655 Register result = ToRegister(instr->result()); | 1665 Register result = ToRegister(instr->result()); |
| 1656 Register map = ToRegister(instr->value()); | 1666 Register map = ToRegister(instr->value()); |
| 1657 __ EnumLength(result, map); | 1667 __ EnumLength(result, map); |
| 1658 } | 1668 } |
| 1659 | 1669 |
| 1660 | 1670 |
| (...skipping 63 matching lines...) |
| 1724 } | 1734 } |
| 1725 __ bind(&runtime); | 1735 __ bind(&runtime); |
| 1726 __ PrepareCallCFunction(2, scratch); | 1736 __ PrepareCallCFunction(2, scratch); |
| 1727 __ li(a1, Operand(index)); | 1737 __ li(a1, Operand(index)); |
| 1728 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 1738 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
| 1729 __ bind(&done); | 1739 __ bind(&done); |
| 1730 } | 1740 } |
| 1731 } | 1741 } |
| 1732 | 1742 |
| 1733 | 1743 |
| 1744 MemOperand LCodeGen::BuildSeqStringOperand(Register string, |
| 1745 LOperand* index, |
| 1746 String::Encoding encoding) { |
| 1747 if (index->IsConstantOperand()) { |
| 1748 int offset = ToInteger32(LConstantOperand::cast(index)); |
| 1749 if (encoding == String::TWO_BYTE_ENCODING) { |
| 1750 offset *= kUC16Size; |
| 1751 } |
| 1752 STATIC_ASSERT(kCharSize == 1); |
| 1753 return FieldMemOperand(string, SeqString::kHeaderSize + offset); |
| 1754 } |
| 1755 Register scratch = scratch0(); |
| 1756 ASSERT(!scratch.is(string)); |
| 1757 ASSERT(!scratch.is(ToRegister(index))); |
| 1758 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1759 __ Addu(scratch, string, ToRegister(index)); |
| 1760 } else { |
| 1761 STATIC_ASSERT(kUC16Size == 2); |
| 1762 __ sll(scratch, ToRegister(index), 1); |
| 1763 __ Addu(scratch, string, scratch); |
| 1764 } |
| 1765 return FieldMemOperand(scratch, SeqString::kHeaderSize); |
| 1766 } |
| 1767 |
| 1768 |
| 1769 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) { |
| 1770 String::Encoding encoding = instr->hydrogen()->encoding(); |
| 1771 Register string = ToRegister(instr->string()); |
| 1772 Register result = ToRegister(instr->result()); |
| 1773 |
| 1774 if (FLAG_debug_code) { |
| 1775 Register scratch = scratch0(); |
| 1776 __ lw(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 1777 __ lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 1778 |
| 1779 __ And(scratch, scratch, |
| 1780 Operand(kStringRepresentationMask | kStringEncodingMask)); |
| 1781 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1782 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1783 __ Subu(at, scratch, Operand(encoding == String::ONE_BYTE_ENCODING |
| 1784 ? one_byte_seq_type : two_byte_seq_type)); |
| 1785 __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg)); |
| 1786 } |
| 1787 |
| 1788 MemOperand operand = BuildSeqStringOperand(string, instr->index(), encoding); |
| 1789 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1790 __ lbu(result, operand); |
| 1791 } else { |
| 1792 __ lhu(result, operand); |
| 1793 } |
| 1794 } |
| 1795 |
| 1796 |
| 1734 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { | 1797 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { |
| 1798 String::Encoding encoding = instr->hydrogen()->encoding(); |
| 1735 Register string = ToRegister(instr->string()); | 1799 Register string = ToRegister(instr->string()); |
| 1736 LOperand* index_op = instr->index(); | |
| 1737 Register value = ToRegister(instr->value()); | 1800 Register value = ToRegister(instr->value()); |
| 1738 Register scratch = scratch0(); | |
| 1739 String::Encoding encoding = instr->encoding(); | |
| 1740 | 1801 |
| 1741 if (FLAG_debug_code) { | 1802 if (FLAG_debug_code) { |
| 1803 Register scratch = scratch0(); |
| 1742 __ lw(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); | 1804 __ lw(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 1743 __ lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 1805 __ lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 1744 | 1806 |
| 1745 __ And(scratch, scratch, | 1807 __ And(scratch, scratch, |
| 1746 Operand(kStringRepresentationMask | kStringEncodingMask)); | 1808 Operand(kStringRepresentationMask | kStringEncodingMask)); |
| 1747 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 1809 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1748 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 1810 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1749 __ Subu(at, scratch, Operand(encoding == String::ONE_BYTE_ENCODING | 1811 __ Subu(at, scratch, Operand(encoding == String::ONE_BYTE_ENCODING |
| 1750 ? one_byte_seq_type : two_byte_seq_type)); | 1812 ? one_byte_seq_type : two_byte_seq_type)); |
| 1751 __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg)); | 1813 __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg)); |
| 1752 } | 1814 } |
| 1753 | 1815 |
| 1754 if (index_op->IsConstantOperand()) { | 1816 MemOperand operand = BuildSeqStringOperand(string, instr->index(), encoding); |
| 1755 int constant_index = ToInteger32(LConstantOperand::cast(index_op)); | 1817 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1756 if (encoding == String::ONE_BYTE_ENCODING) { | 1818 __ sb(value, operand); |
| 1757 __ sb(value, | |
| 1758 FieldMemOperand(string, SeqString::kHeaderSize + constant_index)); | |
| 1759 } else { | |
| 1760 __ sh(value, | |
| 1761 FieldMemOperand(string, SeqString::kHeaderSize + constant_index * 2)); | |
| 1762 } | |
| 1763 } else { | 1819 } else { |
| 1764 Register index = ToRegister(index_op); | 1820 __ sh(value, operand); |
| 1765 if (encoding == String::ONE_BYTE_ENCODING) { | |
| 1766 __ Addu(scratch, string, Operand(index)); | |
| 1767 __ sb(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); | |
| 1768 } else { | |
| 1769 __ sll(scratch, index, 1); | |
| 1770 __ Addu(scratch, string, scratch); | |
| 1771 __ sh(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); | |
| 1772 } | |
| 1773 } | 1821 } |
| 1774 } | 1822 } |
| 1775 | 1823 |
| 1776 | 1824 |
| 1777 void LCodeGen::DoThrow(LThrow* instr) { | 1825 void LCodeGen::DoThrow(LThrow* instr) { |
| 1778 Register input_reg = EmitLoadRegister(instr->value(), at); | 1826 Register input_reg = EmitLoadRegister(instr->value(), at); |
| 1779 __ push(input_reg); | 1827 __ push(input_reg); |
| 1780 ASSERT(ToRegister(instr->context()).is(cp)); | 1828 ASSERT(ToRegister(instr->context()).is(cp)); |
| 1781 CallRuntime(Runtime::kThrow, 1, instr); | 1829 CallRuntime(Runtime::kThrow, 1, instr); |
| 1782 | 1830 |
| (...skipping 201 matching lines...) |
| 1984 condition, src1, src2); | 2032 condition, src1, src2); |
| 1985 } else { | 2033 } else { |
| 1986 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, | 2034 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, |
| 1987 condition, src1, src2); | 2035 condition, src1, src2); |
| 1988 __ Branch(chunk_->GetAssemblyLabel(right_block)); | 2036 __ Branch(chunk_->GetAssemblyLabel(right_block)); |
| 1989 } | 2037 } |
| 1990 } | 2038 } |
| 1991 | 2039 |
| 1992 | 2040 |
| 1993 template<class InstrType> | 2041 template<class InstrType> |
| 2042 void LCodeGen::EmitFalseBranch(InstrType instr, |
| 2043 Condition condition, |
| 2044 Register src1, |
| 2045 const Operand& src2) { |
| 2046 int false_block = instr->FalseDestination(chunk_); |
| 2047 __ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2); |
| 2048 } |
| 2049 |
| 2050 |
| 2051 template<class InstrType> |
| 1994 void LCodeGen::EmitFalseBranchF(InstrType instr, | 2052 void LCodeGen::EmitFalseBranchF(InstrType instr, |
| 1995 Condition condition, | 2053 Condition condition, |
| 1996 FPURegister src1, | 2054 FPURegister src1, |
| 1997 FPURegister src2) { | 2055 FPURegister src2) { |
| 1998 int false_block = instr->FalseDestination(chunk_); | 2056 int false_block = instr->FalseDestination(chunk_); |
| 1999 __ BranchF(chunk_->GetAssemblyLabel(false_block), NULL, | 2057 __ BranchF(chunk_->GetAssemblyLabel(false_block), NULL, |
| 2000 condition, src1, src2); | 2058 condition, src1, src2); |
| 2001 } | 2059 } |
| 2002 | 2060 |
| 2003 | 2061 |
| (...skipping 252 matching lines...) |
| 2256 | 2314 |
| 2257 DoubleRegister input_reg = ToDoubleRegister(instr->object()); | 2315 DoubleRegister input_reg = ToDoubleRegister(instr->object()); |
| 2258 EmitFalseBranchF(instr, eq, input_reg, input_reg); | 2316 EmitFalseBranchF(instr, eq, input_reg, input_reg); |
| 2259 | 2317 |
| 2260 Register scratch = scratch0(); | 2318 Register scratch = scratch0(); |
| 2261 __ FmoveHigh(scratch, input_reg); | 2319 __ FmoveHigh(scratch, input_reg); |
| 2262 EmitBranch(instr, eq, scratch, Operand(kHoleNanUpper32)); | 2320 EmitBranch(instr, eq, scratch, Operand(kHoleNanUpper32)); |
| 2263 } | 2321 } |
| 2264 | 2322 |
| 2265 | 2323 |
| 2324 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) { |
| 2325 Representation rep = instr->hydrogen()->value()->representation(); |
| 2326 ASSERT(!rep.IsInteger32()); |
| 2327 Register scratch = ToRegister(instr->temp()); |
| 2328 |
| 2329 if (rep.IsDouble()) { |
| 2330 DoubleRegister value = ToDoubleRegister(instr->value()); |
| 2331 EmitFalseBranchF(instr, ne, value, kDoubleRegZero); |
| 2332 __ FmoveHigh(scratch, value); |
| 2333 __ li(at, 0x80000000); |
| 2334 } else { |
| 2335 Register value = ToRegister(instr->value()); |
| 2336 __ CheckMap(value, |
| 2337 scratch, |
| 2338 Heap::kHeapNumberMapRootIndex, |
| 2339 instr->FalseLabel(chunk()), |
| 2340 DO_SMI_CHECK); |
| 2341 __ lw(scratch, FieldMemOperand(value, HeapNumber::kExponentOffset)); |
| 2342 EmitFalseBranch(instr, ne, scratch, Operand(0x80000000)); |
| 2343 __ lw(scratch, FieldMemOperand(value, HeapNumber::kMantissaOffset)); |
| 2344 __ mov(at, zero_reg); |
| 2345 } |
| 2346 EmitBranch(instr, eq, scratch, Operand(at)); |
| 2347 } |
| 2348 |
| 2349 |
| 2266 Condition LCodeGen::EmitIsObject(Register input, | 2350 Condition LCodeGen::EmitIsObject(Register input, |
| 2267 Register temp1, | 2351 Register temp1, |
| 2268 Register temp2, | 2352 Register temp2, |
| 2269 Label* is_not_object, | 2353 Label* is_not_object, |
| 2270 Label* is_object) { | 2354 Label* is_object) { |
| 2271 __ JumpIfSmi(input, is_not_object); | 2355 __ JumpIfSmi(input, is_not_object); |
| 2272 | 2356 |
| 2273 __ LoadRoot(temp2, Heap::kNullValueRootIndex); | 2357 __ LoadRoot(temp2, Heap::kNullValueRootIndex); |
| 2274 __ Branch(is_object, eq, input, Operand(temp2)); | 2358 __ Branch(is_object, eq, input, Operand(temp2)); |
| 2275 | 2359 |
| (...skipping 368 matching lines...) |
| 2644 InstanceofStub stub(flags); | 2728 InstanceofStub stub(flags); |
| 2645 | 2729 |
| 2646 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 2730 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 2647 LoadContextFromDeferred(instr->context()); | 2731 LoadContextFromDeferred(instr->context()); |
| 2648 | 2732 |
| 2649 // Get the temp register reserved by the instruction. This needs to be t0 as | 2733 // Get the temp register reserved by the instruction. This needs to be t0 as |
| 2650 // its slot of the pushing of safepoint registers is used to communicate the | 2734 // its slot of the pushing of safepoint registers is used to communicate the |
| 2651 // offset to the location of the map check. | 2735 // offset to the location of the map check. |
| 2652 Register temp = ToRegister(instr->temp()); | 2736 Register temp = ToRegister(instr->temp()); |
| 2653 ASSERT(temp.is(t0)); | 2737 ASSERT(temp.is(t0)); |
| 2654 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | 2738 __ li(InstanceofStub::right(), instr->function()); |
| 2655 static const int kAdditionalDelta = 7; | 2739 static const int kAdditionalDelta = 7; |
| 2656 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; | 2740 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; |
| 2657 Label before_push_delta; | 2741 Label before_push_delta; |
| 2658 __ bind(&before_push_delta); | 2742 __ bind(&before_push_delta); |
| 2659 { | 2743 { |
| 2660 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 2744 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 2661 __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE); | 2745 __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE); |
| 2662 __ StoreToSafepointRegisterSlot(temp, temp); | 2746 __ StoreToSafepointRegisterSlot(temp, temp); |
| 2663 } | 2747 } |
| 2664 CallCodeGeneric(stub.GetCode(isolate()), | 2748 CallCodeGeneric(stub.GetCode(isolate()), |
| (...skipping 201 matching lines...) |
| 2866 | 2950 |
| 2867 | 2951 |
| 2868 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { | 2952 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| 2869 HObjectAccess access = instr->hydrogen()->access(); | 2953 HObjectAccess access = instr->hydrogen()->access(); |
| 2870 int offset = access.offset(); | 2954 int offset = access.offset(); |
| 2871 Register object = ToRegister(instr->object()); | 2955 Register object = ToRegister(instr->object()); |
| 2872 | 2956 |
| 2873 if (access.IsExternalMemory()) { | 2957 if (access.IsExternalMemory()) { |
| 2874 Register result = ToRegister(instr->result()); | 2958 Register result = ToRegister(instr->result()); |
| 2875 MemOperand operand = MemOperand(object, offset); | 2959 MemOperand operand = MemOperand(object, offset); |
| 2876 if (access.representation().IsByte()) { | 2960 __ Load(result, operand, access.representation()); |
| 2877 __ lb(result, operand); | |
| 2878 } else { | |
| 2879 __ lw(result, operand); | |
| 2880 } | |
| 2881 return; | 2961 return; |
| 2882 } | 2962 } |
| 2883 | 2963 |
| 2884 if (instr->hydrogen()->representation().IsDouble()) { | 2964 if (instr->hydrogen()->representation().IsDouble()) { |
| 2885 DoubleRegister result = ToDoubleRegister(instr->result()); | 2965 DoubleRegister result = ToDoubleRegister(instr->result()); |
| 2886 __ ldc1(result, FieldMemOperand(object, offset)); | 2966 __ ldc1(result, FieldMemOperand(object, offset)); |
| 2887 return; | 2967 return; |
| 2888 } | 2968 } |
| 2889 | 2969 |
| 2890 Register result = ToRegister(instr->result()); | 2970 Register result = ToRegister(instr->result()); |
| 2891 if (!access.IsInobject()) { | 2971 if (!access.IsInobject()) { |
| 2892 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 2972 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 2893 object = result; | 2973 object = result; |
| 2894 } | 2974 } |
| 2895 MemOperand operand = FieldMemOperand(object, offset); | 2975 MemOperand operand = FieldMemOperand(object, offset); |
| 2896 if (access.representation().IsByte()) { | 2976 __ Load(result, operand, access.representation()); |
| 2897 __ lb(result, operand); | |
| 2898 } else { | |
| 2899 __ lw(result, operand); | |
| 2900 } | |
| 2901 } | 2977 } |
| 2902 | 2978 |
| 2903 | 2979 |
| 2904 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2980 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 2905 ASSERT(ToRegister(instr->context()).is(cp)); | 2981 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2906 ASSERT(ToRegister(instr->object()).is(a0)); | 2982 ASSERT(ToRegister(instr->object()).is(a0)); |
| 2907 ASSERT(ToRegister(instr->result()).is(v0)); | 2983 ASSERT(ToRegister(instr->result()).is(v0)); |
| 2908 | 2984 |
| 2909 // Name is always in a2. | 2985 // Name is always in a2. |
| 2910 __ li(a2, Operand(instr->name())); | 2986 __ li(a2, Operand(instr->name())); |
| (...skipping 485 matching lines...) |
| 3396 void LCodeGen::DoOuterContext(LOuterContext* instr) { | 3472 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| 3397 Register context = ToRegister(instr->context()); | 3473 Register context = ToRegister(instr->context()); |
| 3398 Register result = ToRegister(instr->result()); | 3474 Register result = ToRegister(instr->result()); |
| 3399 __ lw(result, | 3475 __ lw(result, |
| 3400 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 3476 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 3401 } | 3477 } |
| 3402 | 3478 |
| 3403 | 3479 |
| 3404 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3480 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
| 3405 ASSERT(ToRegister(instr->context()).is(cp)); | 3481 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3406 __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs()); | 3482 __ li(scratch0(), instr->hydrogen()->pairs()); |
| 3407 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); | 3483 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); |
| 3408 // The context is the first argument. | 3484 // The context is the first argument. |
| 3409 __ Push(cp, scratch0(), scratch1()); | 3485 __ Push(cp, scratch0(), scratch1()); |
| 3410 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3486 CallRuntime(Runtime::kDeclareGlobals, 3, instr); |
| 3411 } | 3487 } |
| 3412 | 3488 |
| 3413 | 3489 |
| 3414 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 3490 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 3415 Register context = ToRegister(instr->context()); | 3491 Register context = ToRegister(instr->context()); |
| 3416 Register result = ToRegister(instr->result()); | 3492 Register result = ToRegister(instr->result()); |
| (...skipping 16 matching lines...) |
| 3433 A1State a1_state) { | 3509 A1State a1_state) { |
| 3434 bool dont_adapt_arguments = | 3510 bool dont_adapt_arguments = |
| 3435 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 3511 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; |
| 3436 bool can_invoke_directly = | 3512 bool can_invoke_directly = |
| 3437 dont_adapt_arguments || formal_parameter_count == arity; | 3513 dont_adapt_arguments || formal_parameter_count == arity; |
| 3438 | 3514 |
| 3439 LPointerMap* pointers = instr->pointer_map(); | 3515 LPointerMap* pointers = instr->pointer_map(); |
| 3440 | 3516 |
| 3441 if (can_invoke_directly) { | 3517 if (can_invoke_directly) { |
| 3442 if (a1_state == A1_UNINITIALIZED) { | 3518 if (a1_state == A1_UNINITIALIZED) { |
| 3443 __ LoadHeapObject(a1, function); | 3519 __ li(a1, function); |
| 3444 } | 3520 } |
| 3445 | 3521 |
| 3446 // Change context. | 3522 // Change context. |
| 3447 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | 3523 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 3448 | 3524 |
| 3449 // Set r0 to arguments count if adaption is not needed. Assumes that r0 | 3525 // Set r0 to arguments count if adaption is not needed. Assumes that r0 |
| 3450 // is available to write to at this point. | 3526 // is available to write to at this point. |
| 3451 if (dont_adapt_arguments) { | 3527 if (dont_adapt_arguments) { |
| 3452 __ li(a0, Operand(arity)); | 3528 __ li(a0, Operand(arity)); |
| 3453 } | 3529 } |
| (...skipping 597 matching lines...) |
| 4051 Representation representation = instr->representation(); | 4127 Representation representation = instr->representation(); |
| 4052 | 4128 |
| 4053 Register object = ToRegister(instr->object()); | 4129 Register object = ToRegister(instr->object()); |
| 4054 Register scratch = scratch0(); | 4130 Register scratch = scratch0(); |
| 4055 HObjectAccess access = instr->hydrogen()->access(); | 4131 HObjectAccess access = instr->hydrogen()->access(); |
| 4056 int offset = access.offset(); | 4132 int offset = access.offset(); |
| 4057 | 4133 |
| 4058 if (access.IsExternalMemory()) { | 4134 if (access.IsExternalMemory()) { |
| 4059 Register value = ToRegister(instr->value()); | 4135 Register value = ToRegister(instr->value()); |
| 4060 MemOperand operand = MemOperand(object, offset); | 4136 MemOperand operand = MemOperand(object, offset); |
| 4061 if (representation.IsByte()) { | 4137 __ Store(value, operand, representation); |
| 4062 __ sb(value, operand); | |
| 4063 } else { | |
| 4064 __ sw(value, operand); | |
| 4065 } | |
| 4066 return; | 4138 return; |
| 4067 } | 4139 } |
| 4068 | 4140 |
| 4069 Handle<Map> transition = instr->transition(); | 4141 Handle<Map> transition = instr->transition(); |
| 4070 | 4142 |
| 4071 if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { | 4143 if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { |
| 4072 Register value = ToRegister(instr->value()); | 4144 Register value = ToRegister(instr->value()); |
| 4073 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 4145 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
| 4074 __ And(scratch, value, Operand(kSmiTagMask)); | 4146 __ And(scratch, value, Operand(kSmiTagMask)); |
| 4075 DeoptimizeIf(eq, instr->environment(), scratch, Operand(zero_reg)); | 4147 DeoptimizeIf(eq, instr->environment(), scratch, Operand(zero_reg)); |
| (...skipping 25 matching lines...) |
| 4101 } | 4173 } |
| 4102 | 4174 |
| 4103 // Do the store. | 4175 // Do the store. |
| 4104 Register value = ToRegister(instr->value()); | 4176 Register value = ToRegister(instr->value()); |
| 4105 ASSERT(!object.is(value)); | 4177 ASSERT(!object.is(value)); |
| 4106 SmiCheck check_needed = | 4178 SmiCheck check_needed = |
| 4107 instr->hydrogen()->value()->IsHeapObject() | 4179 instr->hydrogen()->value()->IsHeapObject() |
| 4108 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 4180 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 4109 if (access.IsInobject()) { | 4181 if (access.IsInobject()) { |
| 4110 MemOperand operand = FieldMemOperand(object, offset); | 4182 MemOperand operand = FieldMemOperand(object, offset); |
| 4111 if (representation.IsByte()) { | 4183 __ Store(value, operand, representation); |
| 4112 __ sb(value, operand); | |
| 4113 } else { | |
| 4114 __ sw(value, operand); | |
| 4115 } | |
| 4116 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4184 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 4117 // Update the write barrier for the object for in-object properties. | 4185 // Update the write barrier for the object for in-object properties. |
| 4118 __ RecordWriteField(object, | 4186 __ RecordWriteField(object, |
| 4119 offset, | 4187 offset, |
| 4120 value, | 4188 value, |
| 4121 scratch, | 4189 scratch, |
| 4122 GetRAState(), | 4190 GetRAState(), |
| 4123 kSaveFPRegs, | 4191 kSaveFPRegs, |
| 4124 EMIT_REMEMBERED_SET, | 4192 EMIT_REMEMBERED_SET, |
| 4125 check_needed); | 4193 check_needed); |
| 4126 } | 4194 } |
| 4127 } else { | 4195 } else { |
| 4128 __ lw(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 4196 __ lw(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 4129 MemOperand operand = FieldMemOperand(scratch, offset); | 4197 MemOperand operand = FieldMemOperand(scratch, offset); |
| 4130 if (representation.IsByte()) { | 4198 __ Store(value, operand, representation); |
| 4131 __ sb(value, operand); | |
| 4132 } else { | |
| 4133 __ sw(value, operand); | |
| 4134 } | |
| 4135 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4199 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 4136 // Update the write barrier for the properties array. | 4200 // Update the write barrier for the properties array. |
| 4137 // object is used as a scratch register. | 4201 // object is used as a scratch register. |
| 4138 __ RecordWriteField(scratch, | 4202 __ RecordWriteField(scratch, |
| 4139 offset, | 4203 offset, |
| 4140 value, | 4204 value, |
| 4141 object, | 4205 object, |
| 4142 GetRAState(), | 4206 GetRAState(), |
| 4143 kSaveFPRegs, | 4207 kSaveFPRegs, |
| 4144 EMIT_REMEMBERED_SET, | 4208 EMIT_REMEMBERED_SET, |
| (...skipping 300 matching lines...) |
| 4445 Label no_memento_found; | 4509 Label no_memento_found; |
| 4446 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, | 4510 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, |
| 4447 ne, &no_memento_found); | 4511 ne, &no_memento_found); |
| 4448 DeoptimizeIf(al, instr->environment()); | 4512 DeoptimizeIf(al, instr->environment()); |
| 4449 __ bind(&no_memento_found); | 4513 __ bind(&no_memento_found); |
| 4450 } | 4514 } |
| 4451 | 4515 |
| 4452 | 4516 |
| 4453 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4517 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4454 ASSERT(ToRegister(instr->context()).is(cp)); | 4518 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4455 __ push(ToRegister(instr->left())); | 4519 if (FLAG_new_string_add) { |
| 4456 __ push(ToRegister(instr->right())); | 4520 ASSERT(ToRegister(instr->left()).is(a1)); |
| 4457 StringAddStub stub(instr->hydrogen()->flags()); | 4521 ASSERT(ToRegister(instr->right()).is(a0)); |
| 4458 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 4522 NewStringAddStub stub(instr->hydrogen()->flags(), |
| 4523 isolate()->heap()->GetPretenureMode()); |
| 4524 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4525 } else { |
| 4526 __ push(ToRegister(instr->left())); |
| 4527 __ push(ToRegister(instr->right())); |
| 4528 StringAddStub stub(instr->hydrogen()->flags()); |
| 4529 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4530 } |
| 4459 } | 4531 } |
| 4460 | 4532 |
| 4461 | 4533 |
| 4462 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 4534 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 4463 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { | 4535 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { |
| 4464 public: | 4536 public: |
| 4465 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 4537 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
| 4466 : LDeferredCode(codegen), instr_(instr) { } | 4538 : LDeferredCode(codegen), instr_(instr) { } |
| 4467 virtual void Generate() V8_OVERRIDE { | 4539 virtual void Generate() V8_OVERRIDE { |
| 4468 codegen()->DoDeferredStringCharCodeAt(instr_); | 4540 codegen()->DoDeferredStringCharCodeAt(instr_); |
| (...skipping 108 matching lines...) |
| 4577 __ mtc1(scratch, single_scratch); | 4649 __ mtc1(scratch, single_scratch); |
| 4578 } else { | 4650 } else { |
| 4579 __ mtc1(ToRegister(input), single_scratch); | 4651 __ mtc1(ToRegister(input), single_scratch); |
| 4580 } | 4652 } |
| 4581 __ cvt_d_w(ToDoubleRegister(output), single_scratch); | 4653 __ cvt_d_w(ToDoubleRegister(output), single_scratch); |
| 4582 } | 4654 } |
| 4583 | 4655 |
| 4584 | 4656 |
| 4585 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) { | 4657 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) { |
| 4586 LOperand* input = instr->value(); | 4658 LOperand* input = instr->value(); |
| 4587 ASSERT(input->IsRegister()); | |
| 4588 LOperand* output = instr->result(); | 4659 LOperand* output = instr->result(); |
| 4589 ASSERT(output->IsRegister()); | |
| 4590 Register scratch = scratch0(); | 4660 Register scratch = scratch0(); |
| 4591 | 4661 |
| 4592 __ SmiTagCheckOverflow(ToRegister(output), ToRegister(input), scratch); | 4662 __ SmiTagCheckOverflow(ToRegister(output), ToRegister(input), scratch); |
| 4593 if (!instr->hydrogen()->value()->HasRange() || | 4663 if (!instr->hydrogen()->value()->HasRange() || |
| 4594 !instr->hydrogen()->value()->range()->IsInSmiRange()) { | 4664 !instr->hydrogen()->value()->range()->IsInSmiRange()) { |
| 4595 DeoptimizeIf(lt, instr->environment(), scratch, Operand(zero_reg)); | 4665 DeoptimizeIf(lt, instr->environment(), scratch, Operand(zero_reg)); |
| 4596 } | 4666 } |
| 4597 } | 4667 } |
| 4598 | 4668 |
| 4599 | 4669 |
| 4600 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { | 4670 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { |
| 4601 LOperand* input = instr->value(); | 4671 LOperand* input = instr->value(); |
| 4602 LOperand* output = instr->result(); | 4672 LOperand* output = instr->result(); |
| 4603 | 4673 |
| 4604 FPURegister dbl_scratch = double_scratch0(); | 4674 FPURegister dbl_scratch = double_scratch0(); |
| 4605 __ mtc1(ToRegister(input), dbl_scratch); | 4675 __ mtc1(ToRegister(input), dbl_scratch); |
| 4606 __ Cvt_d_uw(ToDoubleRegister(output), dbl_scratch, f22); | 4676 __ Cvt_d_uw(ToDoubleRegister(output), dbl_scratch, f22); |
| 4607 } | 4677 } |
| 4608 | 4678 |
| 4609 | 4679 |
| 4680 void LCodeGen::DoUint32ToSmi(LUint32ToSmi* instr) { |
| 4681 LOperand* input = instr->value(); |
| 4682 LOperand* output = instr->result(); |
| 4683 if (!instr->hydrogen()->value()->HasRange() || |
| 4684 !instr->hydrogen()->value()->range()->IsInSmiRange()) { |
| 4685 Register scratch = scratch0(); |
| 4686 __ And(scratch, ToRegister(input), Operand(0xc0000000)); |
| 4687 DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg)); |
| 4688 } |
| 4689 __ SmiTag(ToRegister(output), ToRegister(input)); |
| 4690 } |
| 4691 |
| 4692 |
| 4610 void LCodeGen::DoNumberTagI(LNumberTagI* instr) { | 4693 void LCodeGen::DoNumberTagI(LNumberTagI* instr) { |
| 4611 class DeferredNumberTagI V8_FINAL : public LDeferredCode { | 4694 class DeferredNumberTagI V8_FINAL : public LDeferredCode { |
| 4612 public: | 4695 public: |
| 4613 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr) | 4696 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr) |
| 4614 : LDeferredCode(codegen), instr_(instr) { } | 4697 : LDeferredCode(codegen), instr_(instr) { } |
| 4615 virtual void Generate() V8_OVERRIDE { | 4698 virtual void Generate() V8_OVERRIDE { |
| 4616 codegen()->DoDeferredNumberTagI(instr_, | 4699 codegen()->DoDeferredNumberTagI(instr_, |
| 4617 instr_->value(), | 4700 instr_->value(), |
| 4618 SIGNED_INT32); | 4701 SIGNED_INT32); |
| 4619 } | 4702 } |
| (...skipping 736 matching lines...) |
| 5356 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 5439 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 5357 ASSERT(ToRegister(instr->context()).is(cp)); | 5440 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5358 Label materialized; | 5441 Label materialized; |
| 5359 // Registers will be used as follows: | 5442 // Registers will be used as follows: |
| 5360 // t3 = literals array. | 5443 // t3 = literals array. |
| 5361 // a1 = regexp literal. | 5444 // a1 = regexp literal. |
| 5362 // a0 = regexp literal clone. | 5445 // a0 = regexp literal clone. |
| 5363 // a2 and t0-t2 are used as temporaries. | 5446 // a2 and t0-t2 are used as temporaries. |
| 5364 int literal_offset = | 5447 int literal_offset = |
| 5365 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); | 5448 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); |
| 5366 __ LoadHeapObject(t3, instr->hydrogen()->literals()); | 5449 __ li(t3, instr->hydrogen()->literals()); |
| 5367 __ lw(a1, FieldMemOperand(t3, literal_offset)); | 5450 __ lw(a1, FieldMemOperand(t3, literal_offset)); |
| 5368 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5451 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 5369 __ Branch(&materialized, ne, a1, Operand(at)); | 5452 __ Branch(&materialized, ne, a1, Operand(at)); |
| 5370 | 5453 |
| 5371 // Create regexp literal using runtime function | 5454 // Create regexp literal using runtime function |
| 5372 // Result will be in v0. | 5455 // Result will be in v0. |
| 5373 __ li(t2, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); | 5456 __ li(t2, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 5374 __ li(t1, Operand(instr->hydrogen()->pattern())); | 5457 __ li(t1, Operand(instr->hydrogen()->pattern())); |
| 5375 __ li(t0, Operand(instr->hydrogen()->flags())); | 5458 __ li(t0, Operand(instr->hydrogen()->flags())); |
| 5376 __ Push(t3, t2, t1, t0); | 5459 __ Push(t3, t2, t1, t0); |
| (...skipping 247 matching lines...) |
| 5624 // the special case below. | 5707 // the special case below. |
| 5625 if (info()->IsStub() && type == Deoptimizer::EAGER) { | 5708 if (info()->IsStub() && type == Deoptimizer::EAGER) { |
| 5626 type = Deoptimizer::LAZY; | 5709 type = Deoptimizer::LAZY; |
| 5627 } | 5710 } |
| 5628 | 5711 |
| 5629 Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); | 5712 Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); |
| 5630 DeoptimizeIf(al, instr->environment(), type, zero_reg, Operand(zero_reg)); | 5713 DeoptimizeIf(al, instr->environment(), type, zero_reg, Operand(zero_reg)); |
| 5631 } | 5714 } |
| 5632 | 5715 |
| 5633 | 5716 |
| 5717 void LCodeGen::DoDummy(LDummy* instr) { |
| 5718 // Nothing to see here, move on! |
| 5719 } |
| 5720 |
| 5721 |
| 5634 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5722 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
| 5635 // Nothing to see here, move on! | 5723 // Nothing to see here, move on! |
| 5636 } | 5724 } |
| 5637 | 5725 |
| 5638 | 5726 |
| 5639 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5727 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
| 5640 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 5728 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 5641 LoadContextFromDeferred(instr->context()); | 5729 LoadContextFromDeferred(instr->context()); |
| 5642 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 5730 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 5643 RecordSafepointWithLazyDeopt( | 5731 RecordSafepointWithLazyDeopt( |
| (...skipping 158 matching lines...) |
| 5802 __ Subu(scratch, result, scratch); | 5890 __ Subu(scratch, result, scratch); |
| 5803 __ lw(result, FieldMemOperand(scratch, | 5891 __ lw(result, FieldMemOperand(scratch, |
| 5804 FixedArray::kHeaderSize - kPointerSize)); | 5892 FixedArray::kHeaderSize - kPointerSize)); |
| 5805 __ bind(&done); | 5893 __ bind(&done); |
| 5806 } | 5894 } |
| 5807 | 5895 |
| 5808 | 5896 |
| 5809 #undef __ | 5897 #undef __ |
| 5810 | 5898 |
| 5811 } } // namespace v8::internal | 5899 } } // namespace v8::internal |