| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 261 matching lines...) |
| 272 | 272 |
| 273 // Jump unconditionally to given label. | 273 // Jump unconditionally to given label. |
| 274 // We NEED a nop in the branch delay slot, as it is used by v8, for example in | 274 // We NEED a nop in the branch delay slot, as it is used by v8, for example in |
| 275 // CodeGenerator::ProcessDeferred(). | 275 // CodeGenerator::ProcessDeferred(). |
| 276 // Currently the branch delay slot is filled by the MacroAssembler. | 276 // Currently the branch delay slot is filled by the MacroAssembler. |
| 277 // Rather, use b(Label) for code generation. | 277 // Rather, use b(Label) for code generation. |
| 278 void jmp(Label* L) { | 278 void jmp(Label* L) { |
| 279 Branch(L); | 279 Branch(L); |
| 280 } | 280 } |
| 281 | 281 |
| 282 void Load(Register dst, const MemOperand& src, Representation r); |
| 283 void Store(Register src, const MemOperand& dst, Representation r); |
| 284 |
| 282 // Load an object from the root table. | 285 // Load an object from the root table. |
| 283 void LoadRoot(Register destination, | 286 void LoadRoot(Register destination, |
| 284 Heap::RootListIndex index); | 287 Heap::RootListIndex index); |
| 285 void LoadRoot(Register destination, | 288 void LoadRoot(Register destination, |
| 286 Heap::RootListIndex index, | 289 Heap::RootListIndex index, |
| 287 Condition cond, Register src1, const Operand& src2); | 290 Condition cond, Register src1, const Operand& src2); |
| 288 | 291 |
| 289 // Store an object to the root table. | 292 // Store an object to the root table. |
| 290 void StoreRoot(Register source, | 293 void StoreRoot(Register source, |
| 291 Heap::RootListIndex index); | 294 Heap::RootListIndex index); |
| 292 void StoreRoot(Register source, | 295 void StoreRoot(Register source, |
| 293 Heap::RootListIndex index, | 296 Heap::RootListIndex index, |
| 294 Condition cond, Register src1, const Operand& src2); | 297 Condition cond, Register src1, const Operand& src2); |
| 295 | 298 |
| 296 void LoadHeapObject(Register dst, Handle<HeapObject> object); | |
| 297 | |
| 298 void LoadObject(Register result, Handle<Object> object) { | |
| 299 AllowDeferredHandleDereference heap_object_check; | |
| 300 if (object->IsHeapObject()) { | |
| 301 LoadHeapObject(result, Handle<HeapObject>::cast(object)); | |
| 302 } else { | |
| 303 li(result, object); | |
| 304 } | |
| 305 } | |
| 306 | |
| 307 // --------------------------------------------------------------------------- | 299 // --------------------------------------------------------------------------- |
| 308 // GC Support | 300 // GC Support |
| 309 | 301 |
| 310 void IncrementalMarkingRecordWriteHelper(Register object, | 302 void IncrementalMarkingRecordWriteHelper(Register object, |
| 311 Register value, | 303 Register value, |
| 312 Register address); | 304 Register address); |
| 313 | 305 |
| 314 enum RememberedSetFinalAction { | 306 enum RememberedSetFinalAction { |
| 315 kReturnAtEnd, | 307 kReturnAtEnd, |
| 316 kFallThroughAtEnd | 308 kFallThroughAtEnd |
| (...skipping 296 matching lines...) |
| 613 // --------------------------------------------------------------------------- | 605 // --------------------------------------------------------------------------- |
| 614 // Pseudo-instructions. | 606 // Pseudo-instructions. |
| 615 | 607 |
| 616 void mov(Register rd, Register rt) { or_(rd, rt, zero_reg); } | 608 void mov(Register rd, Register rt) { or_(rd, rt, zero_reg); } |
| 617 | 609 |
| 618 // Load int32 in the rd register. | 610 // Load int32 in the rd register. |
| 619 void li(Register rd, Operand j, LiFlags mode = OPTIMIZE_SIZE); | 611 void li(Register rd, Operand j, LiFlags mode = OPTIMIZE_SIZE); |
| 620 inline void li(Register rd, int32_t j, LiFlags mode = OPTIMIZE_SIZE) { | 612 inline void li(Register rd, int32_t j, LiFlags mode = OPTIMIZE_SIZE) { |
| 621 li(rd, Operand(j), mode); | 613 li(rd, Operand(j), mode); |
| 622 } | 614 } |
| 623 inline void li(Register dst, Handle<Object> value, | 615 void li(Register dst, Handle<Object> value, LiFlags mode = OPTIMIZE_SIZE); |
| 624 LiFlags mode = OPTIMIZE_SIZE) { | |
| 625 li(dst, Operand(value), mode); | |
| 626 } | |
| 627 | 616 |
| 628 // Push multiple registers on the stack. | 617 // Push multiple registers on the stack. |
| 629 // Registers are saved in numerical order, with higher numbered registers | 618 // Registers are saved in numerical order, with higher numbered registers |
| 630 // saved in higher memory addresses. | 619 // saved in higher memory addresses. |
| 631 void MultiPush(RegList regs); | 620 void MultiPush(RegList regs); |
| 632 void MultiPushReversed(RegList regs); | 621 void MultiPushReversed(RegList regs); |
| 633 | 622 |
| 634 void MultiPushFPU(RegList regs); | 623 void MultiPushFPU(RegList regs); |
| 635 void MultiPushReversedFPU(RegList regs); | 624 void MultiPushReversedFPU(RegList regs); |
| 636 | 625 |
| (...skipping 896 matching lines...) |
| 1533 | 1522 |
| 1534 void JumpIfJSArrayHasAllocationMemento(Register receiver_reg, | 1523 void JumpIfJSArrayHasAllocationMemento(Register receiver_reg, |
| 1535 Register scratch_reg, | 1524 Register scratch_reg, |
| 1536 Label* memento_found) { | 1525 Label* memento_found) { |
| 1537 Label no_memento_found; | 1526 Label no_memento_found; |
| 1538 TestJSArrayForAllocationMemento(receiver_reg, scratch_reg, | 1527 TestJSArrayForAllocationMemento(receiver_reg, scratch_reg, |
| 1539 &no_memento_found, eq, memento_found); | 1528 &no_memento_found, eq, memento_found); |
| 1540 bind(&no_memento_found); | 1529 bind(&no_memento_found); |
| 1541 } | 1530 } |
| 1542 | 1531 |
| 1532 // Jumps to found label if a prototype map has dictionary elements. |
| 1533 void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0, |
| 1534 Register scratch1, Label* found); |
| 1535 |
| 1543 private: | 1536 private: |
| 1544 void CallCFunctionHelper(Register function, | 1537 void CallCFunctionHelper(Register function, |
| 1545 int num_reg_arguments, | 1538 int num_reg_arguments, |
| 1546 int num_double_arguments); | 1539 int num_double_arguments); |
| 1547 | 1540 |
| 1548 void BranchShort(int16_t offset, BranchDelaySlot bdslot = PROTECT); | 1541 void BranchShort(int16_t offset, BranchDelaySlot bdslot = PROTECT); |
| 1549 void BranchShort(int16_t offset, Condition cond, Register rs, | 1542 void BranchShort(int16_t offset, Condition cond, Register rs, |
| 1550 const Operand& rt, | 1543 const Operand& rt, |
| 1551 BranchDelaySlot bdslot = PROTECT); | 1544 BranchDelaySlot bdslot = PROTECT); |
| 1552 void BranchShort(Label* L, BranchDelaySlot bdslot = PROTECT); | 1545 void BranchShort(Label* L, BranchDelaySlot bdslot = PROTECT); |
| (...skipping 103 matching lines...) |
| 1656 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x) | 1649 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x) |
| 1657 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) | 1650 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) |
| 1658 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> | 1651 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> |
| 1659 #else | 1652 #else |
| 1660 #define ACCESS_MASM(masm) masm-> | 1653 #define ACCESS_MASM(masm) masm-> |
| 1661 #endif | 1654 #endif |
| 1662 | 1655 |
| 1663 } } // namespace v8::internal | 1656 } } // namespace v8::internal |
| 1664 | 1657 |
| 1665 #endif // V8_MIPS_MACRO_ASSEMBLER_MIPS_H_ | 1658 #endif // V8_MIPS_MACRO_ASSEMBLER_MIPS_H_ |
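For context, a minimal sketch (not part of this CL) of how a call site might use the new representation-aware Load/Store helpers declared in this header. The wrapper function name, the field offset, and the chosen Representation below are assumptions made purely for illustration.

    // Hypothetical call-site sketch: the new helpers presumably pick the
    // load/store width from the Representation instead of hard-coding lw/sw
    // at every call site.
    void LoadAndStoreField(MacroAssembler* masm, Register obj, Register scratch) {
      MemOperand field(obj, 12);                 // offset is illustrative only
      Representation r = Representation::Smi();  // assumed representation
      masm->Load(scratch, field, r);   // width chosen from r
      masm->Store(scratch, field, r);  // width chosen from r
    }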