| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 17 matching lines...) |
| 28 #include <limits.h> // For LONG_MIN, LONG_MAX. | 28 #include <limits.h> // For LONG_MIN, LONG_MAX. |
| 29 | 29 |
| 30 #include "v8.h" | 30 #include "v8.h" |
| 31 | 31 |
| 32 #if V8_TARGET_ARCH_MIPS | 32 #if V8_TARGET_ARCH_MIPS |
| 33 | 33 |
| 34 #include "bootstrapper.h" | 34 #include "bootstrapper.h" |
| 35 #include "codegen.h" | 35 #include "codegen.h" |
| 36 #include "cpu-profiler.h" | 36 #include "cpu-profiler.h" |
| 37 #include "debug.h" | 37 #include "debug.h" |
| 38 #include "isolate-inl.h" |
| 38 #include "runtime.h" | 39 #include "runtime.h" |
| 39 | 40 |
| 40 namespace v8 { | 41 namespace v8 { |
| 41 namespace internal { | 42 namespace internal { |
| 42 | 43 |
| 43 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) | 44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) |
| 44 : Assembler(arg_isolate, buffer, size), | 45 : Assembler(arg_isolate, buffer, size), |
| 45 generating_stub_(false), | 46 generating_stub_(false), |
| 46 allow_stub_calls_(true), | 47 allow_stub_calls_(true), |
| 47 has_frame_(false) { | 48 has_frame_(false) { |
| 48 if (isolate() != NULL) { | 49 if (isolate() != NULL) { |
| 49 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), | 50 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), |
| 50 isolate()); | 51 isolate()); |
| 51 } | 52 } |
| 52 } | 53 } |
| 53 | 54 |
| 54 | 55 |
| 56 void MacroAssembler::Load(Register dst, |
| 57 const MemOperand& src, |
| 58 Representation r) { |
| 59 ASSERT(!r.IsDouble()); |
| 60 if (r.IsInteger8()) { |
| 61 lb(dst, src); |
| 62 } else if (r.IsUInteger8()) { |
| 63 lbu(dst, src); |
| 64 } else if (r.IsInteger16()) { |
| 65 lh(dst, src); |
| 66 } else if (r.IsUInteger16()) { |
| 67 lhu(dst, src); |
| 68 } else { |
| 69 lw(dst, src); |
| 70 } |
| 71 } |
| 72 |
| 73 |
| 74 void MacroAssembler::Store(Register src, |
| 75 const MemOperand& dst, |
| 76 Representation r) { |
| 77 ASSERT(!r.IsDouble()); |
| 78 if (r.IsInteger8() || r.IsUInteger8()) { |
| 79 sb(src, dst); |
| 80 } else if (r.IsInteger16() || r.IsUInteger16()) { |
| 81 sh(src, dst); |
| 82 } else { |
| 83 sw(src, dst); |
| 84 } |
| 85 } |
| 86 |
| 87 |
| 55 void MacroAssembler::LoadRoot(Register destination, | 88 void MacroAssembler::LoadRoot(Register destination, |
| 56 Heap::RootListIndex index) { | 89 Heap::RootListIndex index) { |
| 57 lw(destination, MemOperand(s6, index << kPointerSizeLog2)); | 90 lw(destination, MemOperand(s6, index << kPointerSizeLog2)); |
| 58 } | 91 } |
| 59 | 92 |
| 60 | 93 |
| 61 void MacroAssembler::LoadRoot(Register destination, | 94 void MacroAssembler::LoadRoot(Register destination, |
| 62 Heap::RootListIndex index, | 95 Heap::RootListIndex index, |
| 63 Condition cond, | 96 Condition cond, |
| 64 Register src1, const Operand& src2) { | 97 Register src1, const Operand& src2) { |
| (...skipping 10 matching lines...) |
| 75 | 108 |
| 76 void MacroAssembler::StoreRoot(Register source, | 109 void MacroAssembler::StoreRoot(Register source, |
| 77 Heap::RootListIndex index, | 110 Heap::RootListIndex index, |
| 78 Condition cond, | 111 Condition cond, |
| 79 Register src1, const Operand& src2) { | 112 Register src1, const Operand& src2) { |
| 80 Branch(2, NegateCondition(cond), src1, src2); | 113 Branch(2, NegateCondition(cond), src1, src2); |
| 81 sw(source, MemOperand(s6, index << kPointerSizeLog2)); | 114 sw(source, MemOperand(s6, index << kPointerSizeLog2)); |
| 82 } | 115 } |
| 83 | 116 |
| 84 | 117 |
| 85 void MacroAssembler::LoadHeapObject(Register result, | |
| 86 Handle<HeapObject> object) { | |
| 87 AllowDeferredHandleDereference using_raw_address; | |
| 88 if (isolate()->heap()->InNewSpace(*object)) { | |
| 89 Handle<Cell> cell = isolate()->factory()->NewCell(object); | |
| 90 li(result, Operand(cell)); | |
| 91 lw(result, FieldMemOperand(result, Cell::kValueOffset)); | |
| 92 } else { | |
| 93 li(result, Operand(object)); | |
| 94 } | |
| 95 } | |
| 96 | |
| 97 | |
| 98 // Push and pop all registers that can hold pointers. | 118 // Push and pop all registers that can hold pointers. |
| 99 void MacroAssembler::PushSafepointRegisters() { | 119 void MacroAssembler::PushSafepointRegisters() { |
| 100 // Safepoints expect a block of kNumSafepointRegisters values on the | 120 // Safepoints expect a block of kNumSafepointRegisters values on the |
| 101 // stack, so adjust the stack for unsaved registers. | 121 // stack, so adjust the stack for unsaved registers. |
| 102 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 122 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
| 103 ASSERT(num_unsaved >= 0); | 123 ASSERT(num_unsaved >= 0); |
| 104 if (num_unsaved > 0) { | 124 if (num_unsaved > 0) { |
| 105 Subu(sp, sp, Operand(num_unsaved * kPointerSize)); | 125 Subu(sp, sp, Operand(num_unsaved * kPointerSize)); |
| 106 } | 126 } |
| 107 MultiPush(kSafepointSavedRegisters); | 127 MultiPush(kSafepointSavedRegisters); |
| (...skipping 652 matching lines...) |
| 760 sll(rd, rs, (0x20 - rt.imm32_) & 0x1f); | 780 sll(rd, rs, (0x20 - rt.imm32_) & 0x1f); |
| 761 or_(rd, rd, at); | 781 or_(rd, rd, at); |
| 762 } | 782 } |
| 763 } | 783 } |
| 764 } | 784 } |
| 765 } | 785 } |
| 766 | 786 |
| 767 | 787 |
| 768 //------------Pseudo-instructions------------- | 788 //------------Pseudo-instructions------------- |
| 769 | 789 |
| 790 void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) { |
| 791 AllowDeferredHandleDereference smi_check; |
| 792 if (value->IsSmi()) { |
| 793 li(dst, Operand(value), mode); |
| 794 } else { |
| 795 ASSERT(value->IsHeapObject()); |
| 796 if (isolate()->heap()->InNewSpace(*value)) { |
| 797 Handle<Cell> cell = isolate()->factory()->NewCell(value); |
| 798 li(dst, Operand(cell)); |
| 799 lw(dst, FieldMemOperand(dst, Cell::kValueOffset)); |
| 800 } else { |
| 801 li(dst, Operand(value)); |
| 802 } |
| 803 } |
| 804 } |
| 805 |
| 806 |
| 770 void MacroAssembler::li(Register rd, Operand j, LiFlags mode) { | 807 void MacroAssembler::li(Register rd, Operand j, LiFlags mode) { |
| 771 ASSERT(!j.is_reg()); | 808 ASSERT(!j.is_reg()); |
| 772 BlockTrampolinePoolScope block_trampoline_pool(this); | 809 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 773 if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) { | 810 if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) { |
| 774 // Normal load of an immediate value which does not need Relocation Info. | 811 // Normal load of an immediate value which does not need Relocation Info. |
| 775 if (is_int16(j.imm32_)) { | 812 if (is_int16(j.imm32_)) { |
| 776 addiu(rd, zero_reg, j.imm32_); | 813 addiu(rd, zero_reg, j.imm32_); |
| 777 } else if (!(j.imm32_ & kHiMask)) { | 814 } else if (!(j.imm32_ & kHiMask)) { |
| 778 ori(rd, zero_reg, j.imm32_); | 815 ori(rd, zero_reg, j.imm32_); |
| 779 } else if (!(j.imm32_ & kImm16Mask)) { | 816 } else if (!(j.imm32_ & kImm16Mask)) { |
| (...skipping 2909 matching lines...) |
| 3689 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, | 3726 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, |
| 3690 const ParameterCount& expected, | 3727 const ParameterCount& expected, |
| 3691 const ParameterCount& actual, | 3728 const ParameterCount& actual, |
| 3692 InvokeFlag flag, | 3729 InvokeFlag flag, |
| 3693 const CallWrapper& call_wrapper, | 3730 const CallWrapper& call_wrapper, |
| 3694 CallKind call_kind) { | 3731 CallKind call_kind) { |
| 3695 // You can't call a function without a valid frame. | 3732 // You can't call a function without a valid frame. |
| 3696 ASSERT(flag == JUMP_FUNCTION || has_frame()); | 3733 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 3697 | 3734 |
| 3698 // Get the function and set up the context. | 3735 // Get the function and set up the context. |
| 3699 LoadHeapObject(a1, function); | 3736 li(a1, function); |
| 3700 lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | 3737 lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 3701 | 3738 |
| 3702 // We call indirectly through the code field in the function to | 3739 // We call indirectly through the code field in the function to |
| 3703 // allow recompilation to take effect without changing any of the | 3740 // allow recompilation to take effect without changing any of the |
| 3704 // call sites. | 3741 // call sites. |
| 3705 lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | 3742 lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |
| 3706 InvokeCode(a3, expected, actual, flag, call_wrapper, call_kind); | 3743 InvokeCode(a3, expected, actual, flag, call_wrapper, call_kind); |
| 3707 } | 3744 } |
| 3708 | 3745 |
| 3709 | 3746 |
| (...skipping 879 matching lines...) |
| 4589 if (frame_mode == BUILD_STUB_FRAME) { | 4626 if (frame_mode == BUILD_STUB_FRAME) { |
| 4590 Push(ra, fp, cp); | 4627 Push(ra, fp, cp); |
| 4591 Push(Smi::FromInt(StackFrame::STUB)); | 4628 Push(Smi::FromInt(StackFrame::STUB)); |
| 4592 // Adjust FP to point to saved FP. | 4629 // Adjust FP to point to saved FP. |
| 4593 Addu(fp, sp, Operand(2 * kPointerSize)); | 4630 Addu(fp, sp, Operand(2 * kPointerSize)); |
| 4594 } else { | 4631 } else { |
| 4595 PredictableCodeSizeScope predictable_code_size_scope( | 4632 PredictableCodeSizeScope predictable_code_size_scope( |
| 4596 this, kNoCodeAgeSequenceLength * Assembler::kInstrSize); | 4633 this, kNoCodeAgeSequenceLength * Assembler::kInstrSize); |
| 4597 // The following three instructions must remain together and unmodified | 4634 // The following three instructions must remain together and unmodified |
| 4598 // for code aging to work properly. | 4635 // for code aging to work properly. |
| 4599 if (FLAG_optimize_for_size && FLAG_age_code) { | 4636 if (isolate()->IsCodePreAgingActive()) { |
| 4600 // Pre-age the code. | 4637 // Pre-age the code. |
| 4601 Code* stub = Code::GetPreAgedCodeAgeStub(isolate()); | 4638 Code* stub = Code::GetPreAgedCodeAgeStub(isolate()); |
| 4602 nop(Assembler::CODE_AGE_MARKER_NOP); | 4639 nop(Assembler::CODE_AGE_MARKER_NOP); |
| 4603 // Save the function's original return address | 4640 // Save the function's original return address |
| 4604 // (it will be clobbered by Call(t9)) | 4641 // (it will be clobbered by Call(t9)). |
| 4605 mov(at, ra); | 4642 mov(at, ra); |
| 4606 // Load the stub address to t9 and call it | 4643 // Load the stub address to t9 and call it. |
| 4607 li(t9, | 4644 li(t9, |
| 4608 Operand(reinterpret_cast<uint32_t>(stub->instruction_start()))); | 4645 Operand(reinterpret_cast<uint32_t>(stub->instruction_start()))); |
| 4609 Call(t9); | 4646 Call(t9); |
| 4610 // Record the stub address in the empty space for GetCodeAgeAndParity() | 4647 // Record the stub address in the empty space for GetCodeAgeAndParity(). |
| 4611 dd(reinterpret_cast<uint32_t>(stub->instruction_start())); | 4648 emit_code_stub_address(stub); |
| 4612 } else { | 4649 } else { |
| 4613 Push(ra, fp, cp, a1); | 4650 Push(ra, fp, cp, a1); |
| 4614 nop(Assembler::CODE_AGE_SEQUENCE_NOP); | 4651 nop(Assembler::CODE_AGE_SEQUENCE_NOP); |
| 4615 // Adjust fp to point to caller's fp. | 4652 // Adjust fp to point to caller's fp. |
| 4616 Addu(fp, sp, Operand(2 * kPointerSize)); | 4653 Addu(fp, sp, Operand(2 * kPointerSize)); |
| 4617 } | 4654 } |
| 4618 } | 4655 } |
| 4619 } | 4656 } |
| 4620 | 4657 |
| 4621 | 4658 |
| (...skipping 1020 matching lines...) |
| 5642 for (int i = 0; i < Register::NumAllocatableRegisters(); i++) { | 5679 for (int i = 0; i < Register::NumAllocatableRegisters(); i++) { |
| 5643 Register candidate = Register::FromAllocationIndex(i); | 5680 Register candidate = Register::FromAllocationIndex(i); |
| 5644 if (regs & candidate.bit()) continue; | 5681 if (regs & candidate.bit()) continue; |
| 5645 return candidate; | 5682 return candidate; |
| 5646 } | 5683 } |
| 5647 UNREACHABLE(); | 5684 UNREACHABLE(); |
| 5648 return no_reg; | 5685 return no_reg; |
| 5649 } | 5686 } |
| 5650 | 5687 |
| 5651 | 5688 |
| 5689 void MacroAssembler::JumpIfDictionaryInPrototypeChain( |
| 5690 Register object, |
| 5691 Register scratch0, |
| 5692 Register scratch1, |
| 5693 Label* found) { |
| 5694 ASSERT(!scratch1.is(scratch0)); |
| 5695 Factory* factory = isolate()->factory(); |
| 5696 Register current = scratch0; |
| 5697 Label loop_again; |
| 5698 |
| 5699 // current (alias of scratch0) walks up the prototype chain. |
| 5700 Move(current, object); |
| 5701 |
| 5702 // Loop based on the map going up the prototype chain. |
| 5703 bind(&loop_again); |
| 5704 lw(current, FieldMemOperand(current, HeapObject::kMapOffset)); |
| 5705 lb(scratch1, FieldMemOperand(current, Map::kBitField2Offset)); |
| 5706 Ext(scratch1, scratch1, Map::kElementsKindShift, Map::kElementsKindBitCount); |
| 5707 Branch(found, eq, scratch1, Operand(DICTIONARY_ELEMENTS)); |
| 5708 lw(current, FieldMemOperand(current, Map::kPrototypeOffset)); |
| 5709 Branch(&loop_again, ne, current, Operand(factory->null_value())); |
| 5710 } |
| 5711 |
| 5712 |
| 5652 bool AreAliased(Register r1, Register r2, Register r3, Register r4) { | 5713 bool AreAliased(Register r1, Register r2, Register r3, Register r4) { |
| 5653 if (r1.is(r2)) return true; | 5714 if (r1.is(r2)) return true; |
| 5654 if (r1.is(r3)) return true; | 5715 if (r1.is(r3)) return true; |
| 5655 if (r1.is(r4)) return true; | 5716 if (r1.is(r4)) return true; |
| 5656 if (r2.is(r3)) return true; | 5717 if (r2.is(r3)) return true; |
| 5657 if (r2.is(r4)) return true; | 5718 if (r2.is(r4)) return true; |
| 5658 if (r3.is(r4)) return true; | 5719 if (r3.is(r4)) return true; |
| 5659 return false; | 5720 return false; |
| 5660 } | 5721 } |
| 5661 | 5722 |
| (...skipping 47 matching lines...) |
| 5709 opcode == BGTZL); | 5770 opcode == BGTZL); |
| 5710 opcode = (cond == eq) ? BEQ : BNE; | 5771 opcode = (cond == eq) ? BEQ : BNE; |
| 5711 instr = (instr & ~kOpcodeMask) | opcode; | 5772 instr = (instr & ~kOpcodeMask) | opcode; |
| 5712 masm_.emit(instr); | 5773 masm_.emit(instr); |
| 5713 } | 5774 } |
| 5714 | 5775 |
| 5715 | 5776 |
| 5716 } } // namespace v8::internal | 5777 } } // namespace v8::internal |
| 5717 | 5778 |
| 5718 #endif // V8_TARGET_ARCH_MIPS | 5779 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |
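
A note on the new Load/Store helpers (new lines 56-85): they pick the MIPS load/store width from the value's Representation, so sign- and zero-extension fall out of the instruction choice (lb vs. lbu, lh vs. lhu) rather than from explicit masking. Below is a minimal standalone sketch of that mapping, not V8 code; the Rep enum is a simplified stand-in for V8's Representation class, which carries more kinds (Double, Tagged, ...) and query methods like IsInteger8().

#include <cstdio>

// Simplified stand-in for V8's Representation (assumption, see note above).
enum class Rep { kInt8, kUint8, kInt16, kUint16, kWord };

// Mirrors MacroAssembler::Load's dispatch: signed sub-word loads
// sign-extend (lb/lh), unsigned ones zero-extend (lbu/lhu), and
// everything else falls through to a full-word lw.
const char* LoadMnemonic(Rep r) {
  switch (r) {
    case Rep::kInt8:   return "lb";
    case Rep::kUint8:  return "lbu";
    case Rep::kInt16:  return "lh";
    case Rep::kUint16: return "lhu";
    default:           return "lw";
  }
}

// Stores need no signedness distinction: sb/sh/sw simply truncate,
// which is why Store collapses the signed/unsigned cases above.
const char* StoreMnemonic(Rep r) {
  switch (r) {
    case Rep::kInt8:
    case Rep::kUint8:  return "sb";
    case Rep::kInt16:
    case Rep::kUint16: return "sh";
    default:           return "sw";
  }
}

int main() {
  std::printf("%s / %s\n", LoadMnemonic(Rep::kUint16),   // lhu
              StoreMnemonic(Rep::kUint16));              // sh
  return 0;
}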
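On the reworked li (new lines 790-816): the new Handle<Object> overload replaces the removed LoadHeapObject, layering the new-space cell indirection on top of the plain li, which in the OPTIMIZE_SIZE path splits a 32-bit immediate into the fewest instructions when no relocation info is needed. The hunk is truncated after the ori and kImm16Mask branches, so the following standalone sketch fills in the remaining cases from standard MIPS practice (the lui and lui+ori branches are assumptions, not shown in this diff); the mask names mirror the V8 constants used in the hunk.

#include <cstdint>
#include <cstdio>

// Halfword masks, named after the V8 constants in the hunk (values assumed).
const uint32_t kHiMask = 0xFFFF0000;
const uint32_t kImm16Mask = 0x0000FFFF;

bool IsInt16(int32_t v) { return v >= -32768 && v <= 32767; }

// Prints the instruction sequence li would pick for |imm| when no
// relocation info is required (the OPTIMIZE_SIZE path).
void EmitLoadImmediate(int32_t imm) {
  uint32_t bits = static_cast<uint32_t>(imm);
  if (IsInt16(imm)) {
    // Fits in a signed 16-bit field: one add-immediate against $zero.
    std::printf("addiu rd, zero, %d\n", imm);
  } else if ((bits & kHiMask) == 0) {
    // Upper halfword clear: one zero-extending OR against $zero.
    std::printf("ori   rd, zero, 0x%04x\n", bits);
  } else if ((bits & kImm16Mask) == 0) {
    // Lower halfword clear (branch truncated in the hunk): a single
    // load-upper-immediate suffices.
    std::printf("lui   rd, 0x%04x\n", bits >> 16);
  } else {
    // General case: lui for the top half, ori for the bottom.
    std::printf("lui   rd, 0x%04x\n", bits >> 16);
    std::printf("ori   rd, rd, 0x%04x\n", bits & kImm16Mask);
  }
}

int main() {
  EmitLoadImmediate(-42);         // addiu
  EmitLoadImmediate(0xBEEF);      // ori
  EmitLoadImmediate(0x12340000);  // lui
  EmitLoadImmediate(0x12345678);  // lui + ori
  return 0;
}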