| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 103 matching lines...) |
| 114 // r1: Callee's JS function. | 114 // r1: Callee's JS function. |
| 115 // cp: Callee's context. | 115 // cp: Callee's context. |
| 116 // fp: Caller's frame pointer. | 116 // fp: Caller's frame pointer. |
| 117 // lr: Caller's pc. | 117 // lr: Caller's pc. |
| 118 | 118 |
| 119 // Strict mode functions and builtins need to replace the receiver | 119 // Strict mode functions and builtins need to replace the receiver |
| 120 // with undefined when called as functions (without an explicit | 120 // with undefined when called as functions (without an explicit |
| 121 // receiver object). r5 is zero for method calls and non-zero for | 121 // receiver object). r5 is zero for method calls and non-zero for |
| 122 // function calls. | 122 // function calls. |
| 123 if (!info_->is_classic_mode() || info_->is_native()) { | 123 if (!info_->is_classic_mode() || info_->is_native()) { |
| 124 Label ok; | |
| 125 __ cmp(r5, Operand::Zero()); | 124 __ cmp(r5, Operand::Zero()); |
| 126 __ b(eq, &ok); | |
| 127 int receiver_offset = scope()->num_parameters() * kPointerSize; | 125 int receiver_offset = scope()->num_parameters() * kPointerSize; |
| 128 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 126 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 129 __ str(r2, MemOperand(sp, receiver_offset)); | 127 __ str(r2, MemOperand(sp, receiver_offset), ne); |
| 130 __ bind(&ok); | |
| 131 } | 128 } |
| 132 } | 129 } |
| 133 | 130 |
| 134 info()->set_prologue_offset(masm_->pc_offset()); | 131 info()->set_prologue_offset(masm_->pc_offset()); |
| 135 if (NeedsEagerFrame()) { | 132 if (NeedsEagerFrame()) { |
| 136 __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME); | 133 __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME); |
| 137 frame_is_built_ = true; | 134 frame_is_built_ = true; |
| 138 info_->AddNoFrameRange(0, masm_->pc_offset()); | 135 info_->AddNoFrameRange(0, masm_->pc_offset()); |
| 139 } | 136 } |
| 140 | 137 |
| (...skipping 240 matching lines...) |
| 381 HConstant* constant = chunk_->LookupConstant(const_op); | 378 HConstant* constant = chunk_->LookupConstant(const_op); |
| 382 Handle<Object> literal = constant->handle(isolate()); | 379 Handle<Object> literal = constant->handle(isolate()); |
| 383 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 380 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 384 if (r.IsInteger32()) { | 381 if (r.IsInteger32()) { |
| 385 ASSERT(literal->IsNumber()); | 382 ASSERT(literal->IsNumber()); |
| 386 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); | 383 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); |
| 387 } else if (r.IsDouble()) { | 384 } else if (r.IsDouble()) { |
| 388 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate); | 385 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate); |
| 389 } else { | 386 } else { |
| 390 ASSERT(r.IsSmiOrTagged()); | 387 ASSERT(r.IsSmiOrTagged()); |
| 391 __ LoadObject(scratch, literal); | 388 __ Move(scratch, literal); |
| 392 } | 389 } |
| 393 return scratch; | 390 return scratch; |
| 394 } else if (op->IsStackSlot() || op->IsArgument()) { | 391 } else if (op->IsStackSlot() || op->IsArgument()) { |
| 395 __ ldr(scratch, ToMemOperand(op)); | 392 __ ldr(scratch, ToMemOperand(op)); |
| 396 return scratch; | 393 return scratch; |
| 397 } | 394 } |
| 398 UNREACHABLE(); | 395 UNREACHABLE(); |
| 399 return scratch; | 396 return scratch; |
| 400 } | 397 } |
| 401 | 398 |
| (...skipping 295 matching lines...) |
| 697 | 694 |
| 698 | 695 |
| 699 void LCodeGen::LoadContextFromDeferred(LOperand* context) { | 696 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
| 700 if (context->IsRegister()) { | 697 if (context->IsRegister()) { |
| 701 __ Move(cp, ToRegister(context)); | 698 __ Move(cp, ToRegister(context)); |
| 702 } else if (context->IsStackSlot()) { | 699 } else if (context->IsStackSlot()) { |
| 703 __ ldr(cp, ToMemOperand(context)); | 700 __ ldr(cp, ToMemOperand(context)); |
| 704 } else if (context->IsConstantOperand()) { | 701 } else if (context->IsConstantOperand()) { |
| 705 HConstant* constant = | 702 HConstant* constant = |
| 706 chunk_->LookupConstant(LConstantOperand::cast(context)); | 703 chunk_->LookupConstant(LConstantOperand::cast(context)); |
| 707 __ LoadObject(cp, Handle<Object>::cast(constant->handle(isolate()))); | 704 __ Move(cp, Handle<Object>::cast(constant->handle(isolate()))); |
| 708 } else { | 705 } else { |
| 709 UNREACHABLE(); | 706 UNREACHABLE(); |
| 710 } | 707 } |
| 711 } | 708 } |
| 712 | 709 |
| 713 | 710 |
| 714 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 711 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
| 715 int argc, | 712 int argc, |
| 716 LInstruction* instr, | 713 LInstruction* instr, |
| 717 LOperand* context) { | 714 LOperand* context) { |
| (...skipping 47 matching lines...) |
| 765 ASSERT(environment->HasBeenRegistered()); | 762 ASSERT(environment->HasBeenRegistered()); |
| 766 int id = environment->deoptimization_index(); | 763 int id = environment->deoptimization_index(); |
| 767 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 764 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
| 768 Address entry = | 765 Address entry = |
| 769 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 766 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 770 if (entry == NULL) { | 767 if (entry == NULL) { |
| 771 Abort(kBailoutWasNotPrepared); | 768 Abort(kBailoutWasNotPrepared); |
| 772 return; | 769 return; |
| 773 } | 770 } |
| 774 | 771 |
| 775 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM. | 772 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) { |
| 776 if (FLAG_deopt_every_n_times == 1 && | 773 Register scratch = scratch0(); |
| 777 !info()->IsStub() && | 774 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); |
| 778 info()->opt_count() == id) { | 775 |
| 779 ASSERT(frame_is_built_); | 776 // Store the condition on the stack if necessary |
| 780 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 777 if (condition != al) { |
| 781 return; | 778 __ mov(scratch, Operand::Zero(), LeaveCC, NegateCondition(condition)); |
| 779 __ mov(scratch, Operand(1), LeaveCC, condition); |
| 780 __ push(scratch); |
| 781 } |
| 782 |
| 783 __ push(r1); |
| 784 __ mov(scratch, Operand(count)); |
| 785 __ ldr(r1, MemOperand(scratch)); |
| 786 __ sub(r1, r1, Operand(1), SetCC); |
| 787 __ movw(r1, FLAG_deopt_every_n_times, eq); |
| 788 __ str(r1, MemOperand(scratch)); |
| 789 __ pop(r1); |
| 790 |
| 791 if (condition != al) { |
| 792 // Clean up the stack before the deoptimizer call |
| 793 __ pop(scratch); |
| 794 } |
| 795 |
| 796 __ Call(entry, RelocInfo::RUNTIME_ENTRY, eq); |
| 797 |
| 798 // 'Restore' the condition in a slightly hacky way. (It would be better |
| 799 // to use 'msr' and 'mrs' instructions here, but they are not supported by |
| 800 // our ARM simulator). |
| 801 if (condition != al) { |
| 802 condition = ne; |
| 803 __ cmp(scratch, Operand::Zero()); |
| 804 } |
| 782 } | 805 } |
| 783 | 806 |
| 784 if (info()->ShouldTrapOnDeopt()) { | 807 if (info()->ShouldTrapOnDeopt()) { |
| 785 __ stop("trap_on_deopt", condition); | 808 __ stop("trap_on_deopt", condition); |
| 786 } | 809 } |
| 787 | 810 |
| 788 ASSERT(info()->IsStub() || frame_is_built_); | 811 ASSERT(info()->IsStub() || frame_is_built_); |
| 789 if (condition == al && frame_is_built_) { | 812 if (condition == al && frame_is_built_) { |
| 790 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 813 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 791 } else { | 814 } else { |
| (...skipping 1041 matching lines...) |
| 1833 | 1856 |
| 1834 | 1857 |
| 1835 void LCodeGen::DoConstantE(LConstantE* instr) { | 1858 void LCodeGen::DoConstantE(LConstantE* instr) { |
| 1836 __ mov(ToRegister(instr->result()), Operand(instr->value())); | 1859 __ mov(ToRegister(instr->result()), Operand(instr->value())); |
| 1837 } | 1860 } |
| 1838 | 1861 |
| 1839 | 1862 |
| 1840 void LCodeGen::DoConstantT(LConstantT* instr) { | 1863 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 1841 Handle<Object> value = instr->value(isolate()); | 1864 Handle<Object> value = instr->value(isolate()); |
| 1842 AllowDeferredHandleDereference smi_check; | 1865 AllowDeferredHandleDereference smi_check; |
| 1843 __ LoadObject(ToRegister(instr->result()), value); | 1866 __ Move(ToRegister(instr->result()), value); |
| 1844 } | 1867 } |
| 1845 | 1868 |
| 1846 | 1869 |
| 1847 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { | 1870 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { |
| 1848 Register result = ToRegister(instr->result()); | 1871 Register result = ToRegister(instr->result()); |
| 1849 Register map = ToRegister(instr->value()); | 1872 Register map = ToRegister(instr->value()); |
| 1850 __ EnumLength(result, map); | 1873 __ EnumLength(result, map); |
| 1851 } | 1874 } |
| 1852 | 1875 |
| 1853 | 1876 |
| (...skipping 66 matching lines...) |
| 1920 } | 1943 } |
| 1921 __ bind(&runtime); | 1944 __ bind(&runtime); |
| 1922 __ PrepareCallCFunction(2, scratch); | 1945 __ PrepareCallCFunction(2, scratch); |
| 1923 __ mov(r1, Operand(index)); | 1946 __ mov(r1, Operand(index)); |
| 1924 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 1947 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
| 1925 __ bind(&done); | 1948 __ bind(&done); |
| 1926 } | 1949 } |
| 1927 } | 1950 } |
| 1928 | 1951 |
| 1929 | 1952 |
| 1953 MemOperand LCodeGen::BuildSeqStringOperand(Register string, |
| 1954 LOperand* index, |
| 1955 String::Encoding encoding) { |
| 1956 if (index->IsConstantOperand()) { |
| 1957 int offset = ToInteger32(LConstantOperand::cast(index)); |
| 1958 if (encoding == String::TWO_BYTE_ENCODING) { |
| 1959 offset *= kUC16Size; |
| 1960 } |
| 1961 STATIC_ASSERT(kCharSize == 1); |
| 1962 return FieldMemOperand(string, SeqString::kHeaderSize + offset); |
| 1963 } |
| 1964 Register scratch = scratch0(); |
| 1965 ASSERT(!scratch.is(string)); |
| 1966 ASSERT(!scratch.is(ToRegister(index))); |
| 1967 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1968 __ add(scratch, string, Operand(ToRegister(index))); |
| 1969 } else { |
| 1970 STATIC_ASSERT(kUC16Size == 2); |
| 1971 __ add(scratch, string, Operand(ToRegister(index), LSL, 1)); |
| 1972 } |
| 1973 return FieldMemOperand(scratch, SeqString::kHeaderSize); |
| 1974 } |
| 1975 |
| 1976 |
| 1977 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) { |
| 1978 String::Encoding encoding = instr->hydrogen()->encoding(); |
| 1979 Register string = ToRegister(instr->string()); |
| 1980 Register result = ToRegister(instr->result()); |
| 1981 |
| 1982 if (FLAG_debug_code) { |
| 1983 Register scratch = scratch0(); |
| 1984 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 1985 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 1986 |
| 1987 __ and_(scratch, scratch, |
| 1988 Operand(kStringRepresentationMask | kStringEncodingMask)); |
| 1989 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1990 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1991 __ cmp(scratch, Operand(encoding == String::ONE_BYTE_ENCODING |
| 1992 ? one_byte_seq_type : two_byte_seq_type)); |
| 1993 __ Check(eq, kUnexpectedStringType); |
| 1994 } |
| 1995 |
| 1996 MemOperand operand = BuildSeqStringOperand(string, instr->index(), encoding); |
| 1997 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1998 __ ldrb(result, operand); |
| 1999 } else { |
| 2000 __ ldrh(result, operand); |
| 2001 } |
| 2002 } |
| 2003 |
| 2004 |
| 1930 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { | 2005 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { |
| 2006 String::Encoding encoding = instr->hydrogen()->encoding(); |
| 1931 Register string = ToRegister(instr->string()); | 2007 Register string = ToRegister(instr->string()); |
| 1932 LOperand* index_op = instr->index(); | |
| 1933 Register value = ToRegister(instr->value()); | 2008 Register value = ToRegister(instr->value()); |
| 1934 Register scratch = scratch0(); | |
| 1935 String::Encoding encoding = instr->encoding(); | |
| 1936 | 2009 |
| 1937 if (FLAG_debug_code) { | 2010 if (FLAG_debug_code) { |
| 2011 Register scratch = scratch0(); |
| 1938 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); | 2012 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 1939 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 2013 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 1940 | 2014 |
| 1941 __ and_(scratch, scratch, | 2015 __ and_(scratch, scratch, |
| 1942 Operand(kStringRepresentationMask | kStringEncodingMask)); | 2016 Operand(kStringRepresentationMask | kStringEncodingMask)); |
| 1943 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 2017 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1944 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 2018 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1945 __ cmp(scratch, Operand(encoding == String::ONE_BYTE_ENCODING | 2019 __ cmp(scratch, Operand(encoding == String::ONE_BYTE_ENCODING |
| 1946 ? one_byte_seq_type : two_byte_seq_type)); | 2020 ? one_byte_seq_type : two_byte_seq_type)); |
| 1947 __ Check(eq, kUnexpectedStringType); | 2021 __ Check(eq, kUnexpectedStringType); |
| 1948 } | 2022 } |
| 1949 | 2023 |
| 1950 if (index_op->IsConstantOperand()) { | 2024 MemOperand operand = BuildSeqStringOperand(string, instr->index(), encoding); |
| 1951 int constant_index = ToInteger32(LConstantOperand::cast(index_op)); | 2025 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1952 if (encoding == String::ONE_BYTE_ENCODING) { | 2026 __ strb(value, operand); |
| 1953 __ strb(value, | |
| 1954 FieldMemOperand(string, SeqString::kHeaderSize + constant_index)); | |
| 1955 } else { | |
| 1956 __ strh(value, | |
| 1957 FieldMemOperand(string, SeqString::kHeaderSize + constant_index * 2)); | |
| 1958 } | |
| 1959 } else { | 2027 } else { |
| 1960 Register index = ToRegister(index_op); | 2028 __ strh(value, operand); |
| 1961 if (encoding == String::ONE_BYTE_ENCODING) { | |
| 1962 __ add(scratch, string, Operand(index)); | |
| 1963 __ strb(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); | |
| 1964 } else { | |
| 1965 __ add(scratch, string, Operand(index, LSL, 1)); | |
| 1966 __ strh(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); | |
| 1967 } | |
| 1968 } | 2029 } |
| 1969 } | 2030 } |
| 1970 | 2031 |
| 1971 | 2032 |
| 1972 void LCodeGen::DoThrow(LThrow* instr) { | 2033 void LCodeGen::DoThrow(LThrow* instr) { |
| 1973 Register input_reg = EmitLoadRegister(instr->value(), ip); | 2034 Register input_reg = EmitLoadRegister(instr->value(), ip); |
| 1974 __ push(input_reg); | 2035 __ push(input_reg); |
| 1975 ASSERT(ToRegister(instr->context()).is(cp)); | 2036 ASSERT(ToRegister(instr->context()).is(cp)); |
| 1976 CallRuntime(Runtime::kThrow, 1, instr); | 2037 CallRuntime(Runtime::kThrow, 1, instr); |
| 1977 | 2038 |
| (...skipping 426 matching lines...) |
| 2404 __ VFPCompareAndSetFlags(input_reg, input_reg); | 2465 __ VFPCompareAndSetFlags(input_reg, input_reg); |
| 2405 EmitFalseBranch(instr, vc); | 2466 EmitFalseBranch(instr, vc); |
| 2406 | 2467 |
| 2407 Register scratch = scratch0(); | 2468 Register scratch = scratch0(); |
| 2408 __ VmovHigh(scratch, input_reg); | 2469 __ VmovHigh(scratch, input_reg); |
| 2409 __ cmp(scratch, Operand(kHoleNanUpper32)); | 2470 __ cmp(scratch, Operand(kHoleNanUpper32)); |
| 2410 EmitBranch(instr, eq); | 2471 EmitBranch(instr, eq); |
| 2411 } | 2472 } |
| 2412 | 2473 |
| 2413 | 2474 |
| 2475 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) { |
| 2476 Representation rep = instr->hydrogen()->value()->representation(); |
| 2477 ASSERT(!rep.IsInteger32()); |
| 2478 Register scratch = ToRegister(instr->temp()); |
| 2479 |
| 2480 if (rep.IsDouble()) { |
| 2481 DwVfpRegister value = ToDoubleRegister(instr->value()); |
| 2482 __ VFPCompareAndSetFlags(value, 0.0); |
| 2483 EmitFalseBranch(instr, ne); |
| 2484 __ VmovHigh(scratch, value); |
| 2485 __ cmp(scratch, Operand(0x80000000)); |
| 2486 } else { |
| 2487 Register value = ToRegister(instr->value()); |
| 2488 __ CheckMap(value, |
| 2489 scratch, |
| 2490 Heap::kHeapNumberMapRootIndex, |
| 2491 instr->FalseLabel(chunk()), |
| 2492 DO_SMI_CHECK); |
| 2493 __ ldr(scratch, FieldMemOperand(value, HeapNumber::kExponentOffset)); |
| 2494 __ ldr(ip, FieldMemOperand(value, HeapNumber::kMantissaOffset)); |
| 2495 __ cmp(scratch, Operand(0x80000000)); |
| 2496 __ cmp(ip, Operand(0x00000000), eq); |
| 2497 } |
| 2498 EmitBranch(instr, eq); |
| 2499 } |
| 2500 |
| 2501 |
| 2414 Condition LCodeGen::EmitIsObject(Register input, | 2502 Condition LCodeGen::EmitIsObject(Register input, |
| 2415 Register temp1, | 2503 Register temp1, |
| 2416 Label* is_not_object, | 2504 Label* is_not_object, |
| 2417 Label* is_object) { | 2505 Label* is_object) { |
| 2418 Register temp2 = scratch0(); | 2506 Register temp2 = scratch0(); |
| 2419 __ JumpIfSmi(input, is_not_object); | 2507 __ JumpIfSmi(input, is_not_object); |
| 2420 | 2508 |
| 2421 __ LoadRoot(temp2, Heap::kNullValueRootIndex); | 2509 __ LoadRoot(temp2, Heap::kNullValueRootIndex); |
| 2422 __ cmp(input, temp2); | 2510 __ cmp(input, temp2); |
| 2423 __ b(eq, is_object); | 2511 __ b(eq, is_object); |
| (...skipping 366 matching lines...) |
| 2790 InstanceofStub stub(flags); | 2878 InstanceofStub stub(flags); |
| 2791 | 2879 |
| 2792 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 2880 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 2793 LoadContextFromDeferred(instr->context()); | 2881 LoadContextFromDeferred(instr->context()); |
| 2794 | 2882 |
| 2795 // Get the temp register reserved by the instruction. This needs to be r4 as | 2883 // Get the temp register reserved by the instruction. This needs to be r4 as |
| 2796 // its slot in the pushed safepoint registers is used to communicate the | 2884 // its slot in the pushed safepoint registers is used to communicate the |
| 2797 // offset to the location of the map check. | 2885 // offset to the location of the map check. |
| 2798 Register temp = ToRegister(instr->temp()); | 2886 Register temp = ToRegister(instr->temp()); |
| 2799 ASSERT(temp.is(r4)); | 2887 ASSERT(temp.is(r4)); |
| 2800 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | 2888 __ Move(InstanceofStub::right(), instr->function()); |
| 2801 static const int kAdditionalDelta = 5; | 2889 static const int kAdditionalDelta = 5; |
| 2802 // Make sure that code size is predictable, since we use specific constant | 2890 // Make sure that code size is predictable, since we use specific constant |
| 2803 // offsets in the code to find embedded values. | 2891 // offsets in the code to find embedded values. |
| 2804 PredictableCodeSizeScope predictable(masm_, 6 * Assembler::kInstrSize); | 2892 PredictableCodeSizeScope predictable(masm_, 6 * Assembler::kInstrSize); |
| 2805 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; | 2893 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; |
| 2806 Label before_push_delta; | 2894 Label before_push_delta; |
| 2807 __ bind(&before_push_delta); | 2895 __ bind(&before_push_delta); |
| 2808 __ BlockConstPoolFor(kAdditionalDelta); | 2896 __ BlockConstPoolFor(kAdditionalDelta); |
| 2809 __ mov(temp, Operand(delta * kPointerSize)); | 2897 __ mov(temp, Operand(delta * kPointerSize)); |
| 2810 // The mov above can generate one or two instructions. The delta was computed | 2898 // The mov above can generate one or two instructions. The delta was computed |
| (...skipping 201 matching lines...) |
| 3012 | 3100 |
| 3013 | 3101 |
| 3014 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { | 3102 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| 3015 HObjectAccess access = instr->hydrogen()->access(); | 3103 HObjectAccess access = instr->hydrogen()->access(); |
| 3016 int offset = access.offset(); | 3104 int offset = access.offset(); |
| 3017 Register object = ToRegister(instr->object()); | 3105 Register object = ToRegister(instr->object()); |
| 3018 | 3106 |
| 3019 if (access.IsExternalMemory()) { | 3107 if (access.IsExternalMemory()) { |
| 3020 Register result = ToRegister(instr->result()); | 3108 Register result = ToRegister(instr->result()); |
| 3021 MemOperand operand = MemOperand(object, offset); | 3109 MemOperand operand = MemOperand(object, offset); |
| 3022 if (access.representation().IsByte()) { | 3110 __ Load(result, operand, access.representation()); |
| 3023 __ ldrb(result, operand); | |
| 3024 } else { | |
| 3025 __ ldr(result, operand); | |
| 3026 } | |
| 3027 return; | 3111 return; |
| 3028 } | 3112 } |
| 3029 | 3113 |
| 3030 if (instr->hydrogen()->representation().IsDouble()) { | 3114 if (instr->hydrogen()->representation().IsDouble()) { |
| 3031 DwVfpRegister result = ToDoubleRegister(instr->result()); | 3115 DwVfpRegister result = ToDoubleRegister(instr->result()); |
| 3032 __ vldr(result, FieldMemOperand(object, offset)); | 3116 __ vldr(result, FieldMemOperand(object, offset)); |
| 3033 return; | 3117 return; |
| 3034 } | 3118 } |
| 3035 | 3119 |
| 3036 Register result = ToRegister(instr->result()); | 3120 Register result = ToRegister(instr->result()); |
| 3037 if (!access.IsInobject()) { | 3121 if (!access.IsInobject()) { |
| 3038 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 3122 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 3039 object = result; | 3123 object = result; |
| 3040 } | 3124 } |
| 3041 MemOperand operand = FieldMemOperand(object, offset); | 3125 MemOperand operand = FieldMemOperand(object, offset); |
| 3042 if (access.representation().IsByte()) { | 3126 __ Load(result, operand, access.representation()); |
| 3043 __ ldrb(result, operand); | |
| 3044 } else { | |
| 3045 __ ldr(result, operand); | |
| 3046 } | |
| 3047 } | 3127 } |
| 3048 | 3128 |
| 3049 | 3129 |
| 3050 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 3130 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 3051 ASSERT(ToRegister(instr->context()).is(cp)); | 3131 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3052 ASSERT(ToRegister(instr->object()).is(r0)); | 3132 ASSERT(ToRegister(instr->object()).is(r0)); |
| 3053 ASSERT(ToRegister(instr->result()).is(r0)); | 3133 ASSERT(ToRegister(instr->result()).is(r0)); |
| 3054 | 3134 |
| 3055 // Name is always in r2. | 3135 // Name is always in r2. |
| 3056 __ mov(r2, Operand(instr->name())); | 3136 __ mov(r2, Operand(instr->name())); |
| (...skipping 473 matching lines...) |
| 3530 Register context = ToRegister(instr->context()); | 3610 Register context = ToRegister(instr->context()); |
| 3531 Register result = ToRegister(instr->result()); | 3611 Register result = ToRegister(instr->result()); |
| 3532 __ ldr(result, | 3612 __ ldr(result, |
| 3533 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 3613 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 3534 } | 3614 } |
| 3535 | 3615 |
| 3536 | 3616 |
| 3537 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3617 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
| 3538 ASSERT(ToRegister(instr->context()).is(cp)); | 3618 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3539 __ push(cp); // The context is the first argument. | 3619 __ push(cp); // The context is the first argument. |
| 3540 __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs()); | 3620 __ Move(scratch0(), instr->hydrogen()->pairs()); |
| 3541 __ push(scratch0()); | 3621 __ push(scratch0()); |
| 3542 __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); | 3622 __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); |
| 3543 __ push(scratch0()); | 3623 __ push(scratch0()); |
| 3544 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3624 CallRuntime(Runtime::kDeclareGlobals, 3, instr); |
| 3545 } | 3625 } |
| 3546 | 3626 |
| 3547 | 3627 |
| 3548 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 3628 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 3549 Register context = ToRegister(instr->context()); | 3629 Register context = ToRegister(instr->context()); |
| 3550 Register result = ToRegister(instr->result()); | 3630 Register result = ToRegister(instr->result()); |
| (...skipping 16 matching lines...) |
| 3567 R1State r1_state) { | 3647 R1State r1_state) { |
| 3568 bool dont_adapt_arguments = | 3648 bool dont_adapt_arguments = |
| 3569 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 3649 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; |
| 3570 bool can_invoke_directly = | 3650 bool can_invoke_directly = |
| 3571 dont_adapt_arguments || formal_parameter_count == arity; | 3651 dont_adapt_arguments || formal_parameter_count == arity; |
| 3572 | 3652 |
| 3573 LPointerMap* pointers = instr->pointer_map(); | 3653 LPointerMap* pointers = instr->pointer_map(); |
| 3574 | 3654 |
| 3575 if (can_invoke_directly) { | 3655 if (can_invoke_directly) { |
| 3576 if (r1_state == R1_UNINITIALIZED) { | 3656 if (r1_state == R1_UNINITIALIZED) { |
| 3577 __ LoadHeapObject(r1, function); | 3657 __ Move(r1, function); |
| 3578 } | 3658 } |
| 3579 | 3659 |
| 3580 // Change context. | 3660 // Change context. |
| 3581 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 3661 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 3582 | 3662 |
| 3583 // Set r0 to arguments count if adaption is not needed. Assumes that r0 | 3663 // Set r0 to arguments count if adaption is not needed. Assumes that r0 |
| 3584 // is available to write to at this point. | 3664 // is available to write to at this point. |
| 3585 if (dont_adapt_arguments) { | 3665 if (dont_adapt_arguments) { |
| 3586 __ mov(r0, Operand(arity)); | 3666 __ mov(r0, Operand(arity)); |
| 3587 } | 3667 } |
| (...skipping 551 matching lines...) |
| 4139 Representation representation = instr->representation(); | 4219 Representation representation = instr->representation(); |
| 4140 | 4220 |
| 4141 Register object = ToRegister(instr->object()); | 4221 Register object = ToRegister(instr->object()); |
| 4142 Register scratch = scratch0(); | 4222 Register scratch = scratch0(); |
| 4143 HObjectAccess access = instr->hydrogen()->access(); | 4223 HObjectAccess access = instr->hydrogen()->access(); |
| 4144 int offset = access.offset(); | 4224 int offset = access.offset(); |
| 4145 | 4225 |
| 4146 if (access.IsExternalMemory()) { | 4226 if (access.IsExternalMemory()) { |
| 4147 Register value = ToRegister(instr->value()); | 4227 Register value = ToRegister(instr->value()); |
| 4148 MemOperand operand = MemOperand(object, offset); | 4228 MemOperand operand = MemOperand(object, offset); |
| 4149 if (representation.IsByte()) { | 4229 __ Store(value, operand, representation); |
| 4150 __ strb(value, operand); | |
| 4151 } else { | |
| 4152 __ str(value, operand); | |
| 4153 } | |
| 4154 return; | 4230 return; |
| 4155 } | 4231 } |
| 4156 | 4232 |
| 4157 Handle<Map> transition = instr->transition(); | 4233 Handle<Map> transition = instr->transition(); |
| 4158 | 4234 |
| 4159 if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { | 4235 if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { |
| 4160 Register value = ToRegister(instr->value()); | 4236 Register value = ToRegister(instr->value()); |
| 4161 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 4237 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
| 4162 __ SmiTst(value); | 4238 __ SmiTst(value); |
| 4163 DeoptimizeIf(eq, instr->environment()); | 4239 DeoptimizeIf(eq, instr->environment()); |
| (...skipping 25 matching lines...) Expand all Loading... |
| 4189 } | 4265 } |
| 4190 | 4266 |
| 4191 // Do the store. | 4267 // Do the store. |
| 4192 Register value = ToRegister(instr->value()); | 4268 Register value = ToRegister(instr->value()); |
| 4193 ASSERT(!object.is(value)); | 4269 ASSERT(!object.is(value)); |
| 4194 SmiCheck check_needed = | 4270 SmiCheck check_needed = |
| 4195 instr->hydrogen()->value()->IsHeapObject() | 4271 instr->hydrogen()->value()->IsHeapObject() |
| 4196 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 4272 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 4197 if (access.IsInobject()) { | 4273 if (access.IsInobject()) { |
| 4198 MemOperand operand = FieldMemOperand(object, offset); | 4274 MemOperand operand = FieldMemOperand(object, offset); |
| 4199 if (representation.IsByte()) { | 4275 __ Store(value, operand, representation); |
| 4200 __ strb(value, operand); | |
| 4201 } else { | |
| 4202 __ str(value, operand); | |
| 4203 } | |
| 4204 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4276 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 4205 // Update the write barrier for the object for in-object properties. | 4277 // Update the write barrier for the object for in-object properties. |
| 4206 __ RecordWriteField(object, | 4278 __ RecordWriteField(object, |
| 4207 offset, | 4279 offset, |
| 4208 value, | 4280 value, |
| 4209 scratch, | 4281 scratch, |
| 4210 GetLinkRegisterState(), | 4282 GetLinkRegisterState(), |
| 4211 kSaveFPRegs, | 4283 kSaveFPRegs, |
| 4212 EMIT_REMEMBERED_SET, | 4284 EMIT_REMEMBERED_SET, |
| 4213 check_needed); | 4285 check_needed); |
| 4214 } | 4286 } |
| 4215 } else { | 4287 } else { |
| 4216 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 4288 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 4217 MemOperand operand = FieldMemOperand(scratch, offset); | 4289 MemOperand operand = FieldMemOperand(scratch, offset); |
| 4218 if (representation.IsByte()) { | 4290 __ Store(value, operand, representation); |
| 4219 __ strb(value, operand); | |
| 4220 } else { | |
| 4221 __ str(value, operand); | |
| 4222 } | |
| 4223 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4291 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 4224 // Update the write barrier for the properties array. | 4292 // Update the write barrier for the properties array. |
| 4225 // object is used as a scratch register. | 4293 // object is used as a scratch register. |
| 4226 __ RecordWriteField(scratch, | 4294 __ RecordWriteField(scratch, |
| 4227 offset, | 4295 offset, |
| 4228 value, | 4296 value, |
| 4229 object, | 4297 object, |
| 4230 GetLinkRegisterState(), | 4298 GetLinkRegisterState(), |
| 4231 kSaveFPRegs, | 4299 kSaveFPRegs, |
| 4232 EMIT_REMEMBERED_SET, | 4300 EMIT_REMEMBERED_SET, |
| (...skipping 281 matching lines...) |
| 4514 Register temp = ToRegister(instr->temp()); | 4582 Register temp = ToRegister(instr->temp()); |
| 4515 Label no_memento_found; | 4583 Label no_memento_found; |
| 4516 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); | 4584 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); |
| 4517 DeoptimizeIf(eq, instr->environment()); | 4585 DeoptimizeIf(eq, instr->environment()); |
| 4518 __ bind(&no_memento_found); | 4586 __ bind(&no_memento_found); |
| 4519 } | 4587 } |
| 4520 | 4588 |
| 4521 | 4589 |
| 4522 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4590 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4523 ASSERT(ToRegister(instr->context()).is(cp)); | 4591 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4524 __ push(ToRegister(instr->left())); | 4592 if (FLAG_new_string_add) { |
| 4525 __ push(ToRegister(instr->right())); | 4593 ASSERT(ToRegister(instr->left()).is(r1)); |
| 4526 StringAddStub stub(instr->hydrogen()->flags()); | 4594 ASSERT(ToRegister(instr->right()).is(r0)); |
| 4527 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 4595 NewStringAddStub stub(instr->hydrogen()->flags(), |
| 4596 isolate()->heap()->GetPretenureMode()); |
| 4597 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4598 } else { |
| 4599 __ push(ToRegister(instr->left())); |
| 4600 __ push(ToRegister(instr->right())); |
| 4601 StringAddStub stub(instr->hydrogen()->flags()); |
| 4602 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4603 } |
| 4528 } | 4604 } |
| 4529 | 4605 |
| 4530 | 4606 |
| 4531 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 4607 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 4532 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { | 4608 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { |
| 4533 public: | 4609 public: |
| 4534 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 4610 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
| 4535 : LDeferredCode(codegen), instr_(instr) { } | 4611 : LDeferredCode(codegen), instr_(instr) { } |
| 4536 virtual void Generate() V8_OVERRIDE { | 4612 virtual void Generate() V8_OVERRIDE { |
| 4537 codegen()->DoDeferredStringCharCodeAt(instr_); | 4613 codegen()->DoDeferredStringCharCodeAt(instr_); |
| (...skipping 108 matching lines...) |
| 4646 __ vmov(single_scratch, scratch); | 4722 __ vmov(single_scratch, scratch); |
| 4647 } else { | 4723 } else { |
| 4648 __ vmov(single_scratch, ToRegister(input)); | 4724 __ vmov(single_scratch, ToRegister(input)); |
| 4649 } | 4725 } |
| 4650 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch); | 4726 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch); |
| 4651 } | 4727 } |
| 4652 | 4728 |
| 4653 | 4729 |
| 4654 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) { | 4730 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) { |
| 4655 LOperand* input = instr->value(); | 4731 LOperand* input = instr->value(); |
| 4656 ASSERT(input->IsRegister()); | |
| 4657 LOperand* output = instr->result(); | 4732 LOperand* output = instr->result(); |
| 4658 ASSERT(output->IsRegister()); | |
| 4659 __ SmiTag(ToRegister(output), ToRegister(input), SetCC); | 4733 __ SmiTag(ToRegister(output), ToRegister(input), SetCC); |
| 4660 if (!instr->hydrogen()->value()->HasRange() || | 4734 if (!instr->hydrogen()->value()->HasRange() || |
| 4661 !instr->hydrogen()->value()->range()->IsInSmiRange()) { | 4735 !instr->hydrogen()->value()->range()->IsInSmiRange()) { |
| 4662 DeoptimizeIf(vs, instr->environment()); | 4736 DeoptimizeIf(vs, instr->environment()); |
| 4663 } | 4737 } |
| 4664 } | 4738 } |
| 4665 | 4739 |
| 4666 | 4740 |
| 4667 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { | 4741 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { |
| 4668 LOperand* input = instr->value(); | 4742 LOperand* input = instr->value(); |
| 4669 LOperand* output = instr->result(); | 4743 LOperand* output = instr->result(); |
| 4670 | 4744 |
| 4671 SwVfpRegister flt_scratch = double_scratch0().low(); | 4745 SwVfpRegister flt_scratch = double_scratch0().low(); |
| 4672 __ vmov(flt_scratch, ToRegister(input)); | 4746 __ vmov(flt_scratch, ToRegister(input)); |
| 4673 __ vcvt_f64_u32(ToDoubleRegister(output), flt_scratch); | 4747 __ vcvt_f64_u32(ToDoubleRegister(output), flt_scratch); |
| 4674 } | 4748 } |
| 4675 | 4749 |
| 4676 | 4750 |
| 4751 void LCodeGen::DoUint32ToSmi(LUint32ToSmi* instr) { |
| 4752 LOperand* input = instr->value(); |
| 4753 LOperand* output = instr->result(); |
| 4754 if (!instr->hydrogen()->value()->HasRange() || |
| 4755 !instr->hydrogen()->value()->range()->IsInSmiRange()) { |
| 4756 __ tst(ToRegister(input), Operand(0xc0000000)); |
| 4757 DeoptimizeIf(ne, instr->environment()); |
| 4758 } |
| 4759 __ SmiTag(ToRegister(output), ToRegister(input)); |
| 4760 } |
| 4761 |
| 4762 |
| 4677 void LCodeGen::DoNumberTagI(LNumberTagI* instr) { | 4763 void LCodeGen::DoNumberTagI(LNumberTagI* instr) { |
| 4678 class DeferredNumberTagI V8_FINAL : public LDeferredCode { | 4764 class DeferredNumberTagI V8_FINAL : public LDeferredCode { |
| 4679 public: | 4765 public: |
| 4680 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr) | 4766 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr) |
| 4681 : LDeferredCode(codegen), instr_(instr) { } | 4767 : LDeferredCode(codegen), instr_(instr) { } |
| 4682 virtual void Generate() V8_OVERRIDE { | 4768 virtual void Generate() V8_OVERRIDE { |
| 4683 codegen()->DoDeferredNumberTagI(instr_, | 4769 codegen()->DoDeferredNumberTagI(instr_, |
| 4684 instr_->value(), | 4770 instr_->value(), |
| 4685 SIGNED_INT32); | 4771 SIGNED_INT32); |
| 4686 } | 4772 } |
| (...skipping 727 matching lines...) |
| 5414 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 5500 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 5415 ASSERT(ToRegister(instr->context()).is(cp)); | 5501 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5416 Label materialized; | 5502 Label materialized; |
| 5417 // Registers will be used as follows: | 5503 // Registers will be used as follows: |
| 5418 // r6 = literals array. | 5504 // r6 = literals array. |
| 5419 // r1 = regexp literal. | 5505 // r1 = regexp literal. |
| 5420 // r0 = regexp literal clone. | 5506 // r0 = regexp literal clone. |
| 5421 // r2-5 are used as temporaries. | 5507 // r2-5 are used as temporaries. |
| 5422 int literal_offset = | 5508 int literal_offset = |
| 5423 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); | 5509 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); |
| 5424 __ LoadHeapObject(r6, instr->hydrogen()->literals()); | 5510 __ Move(r6, instr->hydrogen()->literals()); |
| 5425 __ ldr(r1, FieldMemOperand(r6, literal_offset)); | 5511 __ ldr(r1, FieldMemOperand(r6, literal_offset)); |
| 5426 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 5512 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 5427 __ cmp(r1, ip); | 5513 __ cmp(r1, ip); |
| 5428 __ b(ne, &materialized); | 5514 __ b(ne, &materialized); |
| 5429 | 5515 |
| 5430 // Create regexp literal using runtime function | 5516 // Create regexp literal using runtime function |
| 5431 // Result will be in r0. | 5517 // Result will be in r0. |
| 5432 __ mov(r5, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); | 5518 __ mov(r5, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 5433 __ mov(r4, Operand(instr->hydrogen()->pattern())); | 5519 __ mov(r4, Operand(instr->hydrogen()->pattern())); |
| 5434 __ mov(r3, Operand(instr->hydrogen()->flags())); | 5520 __ mov(r3, Operand(instr->hydrogen()->flags())); |
| (...skipping 201 matching lines...) |
| 5636 // the special case below. | 5722 // the special case below. |
| 5637 if (info()->IsStub() && type == Deoptimizer::EAGER) { | 5723 if (info()->IsStub() && type == Deoptimizer::EAGER) { |
| 5638 type = Deoptimizer::LAZY; | 5724 type = Deoptimizer::LAZY; |
| 5639 } | 5725 } |
| 5640 | 5726 |
| 5641 Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); | 5727 Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); |
| 5642 DeoptimizeIf(al, instr->environment(), type); | 5728 DeoptimizeIf(al, instr->environment(), type); |
| 5643 } | 5729 } |
| 5644 | 5730 |
| 5645 | 5731 |
| 5732 void LCodeGen::DoDummy(LDummy* instr) { |
| 5733 // Nothing to see here, move on! |
| 5734 } |
| 5735 |
| 5736 |
| 5646 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5737 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
| 5647 // Nothing to see here, move on! | 5738 // Nothing to see here, move on! |
| 5648 } | 5739 } |
| 5649 | 5740 |
| 5650 | 5741 |
| 5651 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5742 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
| 5652 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 5743 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 5653 LoadContextFromDeferred(instr->context()); | 5744 LoadContextFromDeferred(instr->context()); |
| 5654 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 5745 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 5655 RecordSafepointWithLazyDeopt( | 5746 RecordSafepointWithLazyDeopt( |
| (...skipping 163 matching lines...) |
| 5819 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5910 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
| 5820 __ ldr(result, FieldMemOperand(scratch, | 5911 __ ldr(result, FieldMemOperand(scratch, |
| 5821 FixedArray::kHeaderSize - kPointerSize)); | 5912 FixedArray::kHeaderSize - kPointerSize)); |
| 5822 __ bind(&done); | 5913 __ bind(&done); |
| 5823 } | 5914 } |
| 5824 | 5915 |
| 5825 | 5916 |
| 5826 #undef __ | 5917 #undef __ |
| 5827 | 5918 |
| 5828 } } // namespace v8::internal | 5919 } } // namespace v8::internal |
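The stress-deopt path added to `DeoptimizeIf` (new lines 772-805) decrements a per-isolate counter at every potential deoptimization site and forces a deoptimization each time the counter reaches zero. As a minimal sketch of that counter logic in plain C++, assuming the counter starts out equal to the `FLAG_deopt_every_n_times` value (the function and parameter names below are illustrative, not part of the V8 API):

```cpp
#include <cstdint>

// Returns true when this deopt site should be forced to deoptimize.
// 'counter' mirrors the cell behind ExternalReference::stress_deopt_count and
// is assumed to start out equal to deopt_every_n_times.
bool ShouldForceStressDeopt(uint32_t* counter, uint32_t deopt_every_n_times) {
  if (--*counter != 0) return false;  // not yet at the Nth deopt site
  *counter = deopt_every_n_times;     // reset the countdown
  return true;                        // force a deoptimization here
}
```

In the generated ARM code this check is done with predicated instructions: the original branch condition is materialized into the scratch register and saved on the stack, the forced deoptimizer call is issued under the `eq` condition produced by the counter decrement, and the condition is then re-derived from the saved scratch value instead of using `msr`/`mrs`, which the ARM simulator does not support.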