OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1672 matching lines...)
1683 if (FLAG_trace_ic) { | 1683 if (FLAG_trace_ic) { |
1684 PrintF("[CompareIC (%s->%s)#%s]\n", | 1684 PrintF("[CompareIC (%s->%s)#%s]\n", |
1685 GetStateName(previous_state), | 1685 GetStateName(previous_state), |
1686 GetStateName(state), | 1686 GetStateName(state), |
1687 Token::Name(op_)); | 1687 Token::Name(op_)); |
1688 } | 1688 } |
1689 #endif | 1689 #endif |
1690 | 1690 |
1691 // Activate inlined smi code. | 1691 // Activate inlined smi code. |
1692 if (previous_state == UNINITIALIZED) { | 1692 if (previous_state == UNINITIALIZED) { |
1693 PatchInlinedSmiCode(address()); | 1693 PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK); |
1694 } | 1694 } |
1695 } | 1695 } |
1696 | 1696 |
1697 | 1697 |
1698 void PatchInlinedSmiCode(Address address) { | 1698 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) { |
1699 Address cmp_instruction_address = | 1699 Address cmp_instruction_address = |
1700 address + Assembler::kCallTargetAddressOffset; | 1700 address + Assembler::kCallTargetAddressOffset; |
1701 | 1701 |
1702 // If the instruction following the call is not a cmp rx, #yyy, nothing | 1702 // If the instruction following the call is not a cmp rx, #yyy, nothing |
1703 // was inlined. | 1703 // was inlined. |
1704 Instr instr = Assembler::instr_at(cmp_instruction_address); | 1704 Instr instr = Assembler::instr_at(cmp_instruction_address); |
1705 if (!Assembler::IsCmpImmediate(instr)) { | 1705 if (!Assembler::IsCmpImmediate(instr)) { |
1706 return; | 1706 return; |
1707 } | 1707 } |
1708 | 1708 |
(...skipping 13 matching lines...)
1722 PrintF("[ patching ic at %p, cmp=%p, delta=%d\n", | 1722 PrintF("[ patching ic at %p, cmp=%p, delta=%d\n", |
1723 address, cmp_instruction_address, delta); | 1723 address, cmp_instruction_address, delta); |
1724 } | 1724 } |
1725 #endif | 1725 #endif |
1726 | 1726 |
1727 Address patch_address = | 1727 Address patch_address = |
1728 cmp_instruction_address - delta * Instruction::kInstrSize; | 1728 cmp_instruction_address - delta * Instruction::kInstrSize; |
1729 Instr instr_at_patch = Assembler::instr_at(patch_address); | 1729 Instr instr_at_patch = Assembler::instr_at(patch_address); |
1730 Instr branch_instr = | 1730 Instr branch_instr = |
1731 Assembler::instr_at(patch_address + Instruction::kInstrSize); | 1731 Assembler::instr_at(patch_address + Instruction::kInstrSize); |
1732 ASSERT(Assembler::IsCmpRegister(instr_at_patch)); | 1732 // This is patching a conditional "jump if not smi/jump if smi" site. |
1733 ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(), | 1733 // Enabling is done by changing from |
1734 Assembler::GetRm(instr_at_patch).code()); | 1734 // cmp rx, rx |
| 1735 // b eq/ne, <target> |
| 1736 // to |
| 1737 // tst rx, #kSmiTagMask |
| 1738 // b ne/eq, <target> |
 | 1739 // and vice versa to disable it again. |
| 1740 CodePatcher patcher(patch_address, 2); |
| 1741 Register reg = Assembler::GetRn(instr_at_patch); |
| 1742 if (check == ENABLE_INLINED_SMI_CHECK) { |
| 1743 ASSERT(Assembler::IsCmpRegister(instr_at_patch)); |
| 1744 ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(), |
| 1745 Assembler::GetRm(instr_at_patch).code()); |
| 1746 patcher.masm()->tst(reg, Operand(kSmiTagMask)); |
| 1747 } else { |
| 1748 ASSERT(check == DISABLE_INLINED_SMI_CHECK); |
| 1749 ASSERT(Assembler::IsTstImmediate(instr_at_patch)); |
| 1750 patcher.masm()->cmp(reg, reg); |
| 1751 } |
1735 ASSERT(Assembler::IsBranch(branch_instr)); | 1752 ASSERT(Assembler::IsBranch(branch_instr)); |
1736 if (Assembler::GetCondition(branch_instr) == eq) { | 1753 if (Assembler::GetCondition(branch_instr) == eq) { |
1737 // This is patching a "jump if not smi" site to be active. | |
1738 // Changing | |
1739 // cmp rx, rx | |
1740 // b eq, <target> | |
1741 // to | |
1742 // tst rx, #kSmiTagMask | |
1743 // b ne, <target> | |
1744 CodePatcher patcher(patch_address, 2); | |
1745 Register reg = Assembler::GetRn(instr_at_patch); | |
1746 patcher.masm()->tst(reg, Operand(kSmiTagMask)); | |
1747 patcher.EmitCondition(ne); | 1754 patcher.EmitCondition(ne); |
1748 } else { | 1755 } else { |
1749 ASSERT(Assembler::GetCondition(branch_instr) == ne); | 1756 ASSERT(Assembler::GetCondition(branch_instr) == ne); |
1750 // This is patching a "jump if smi" site to be active. | |
1751 // Changing | |
1752 // cmp rx, rx | |
1753 // b ne, <target> | |
1754 // to | |
1755 // tst rx, #kSmiTagMask | |
1756 // b eq, <target> | |
1757 CodePatcher patcher(patch_address, 2); | |
1758 Register reg = Assembler::GetRn(instr_at_patch); | |
1759 patcher.masm()->tst(reg, Operand(kSmiTagMask)); | |
1760 patcher.EmitCondition(eq); | 1757 patcher.EmitCondition(eq); |
1761 } | 1758 } |
1762 } | 1759 } |
1763 | 1760 |
1764 | 1761 |
1765 } } // namespace v8::internal | 1762 } } // namespace v8::internal |
1766 | 1763 |
1767 #endif // V8_TARGET_ARCH_ARM | 1764 #endif // V8_TARGET_ARCH_ARM |
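
Note on the patched sequence: with the check enabled, "tst rx, #kSmiTagMask" followed by "b ne, <target>" branches out of line only when the low (tag) bit of rx is set, so smis fall through to the inlined fast path; the disabled form "cmp rx, rx" / "b eq, <target>" branches unconditionally. The sketch below is a minimal standalone illustration, not V8 code, of the value-level predicate the enabled check implements. It assumes V8's usual 32-bit smi tagging (kSmiTag == 0, kSmiTagMask == 1, heap-object pointers carry the tag bit); the constants and sample values here are for illustration only.

// Standalone sketch of the predicate behind "tst rx, #kSmiTagMask".
#include <cstdint>
#include <cstdio>

namespace {

// Assumption: the low bit distinguishes smis (clear) from heap objects (set),
// matching V8's 32-bit smi tagging referenced in the patch above.
const intptr_t kSmiTagMask = 1;

// "tst rx, #kSmiTagMask" sets the Z flag exactly when the low bit of rx is
// clear, i.e. when the tagged word encodes a smi.
bool IsSmi(intptr_t tagged_word) {
  return (tagged_word & kSmiTagMask) == 0;
}

}  // namespace

int main() {
  intptr_t smi = 42 << 1;         // smi encoding: value shifted left, tag bit 0
  intptr_t heap_object = 0x1001;  // hypothetical heap pointer: tag bit set
  // With the inlined check enabled, "b ne" (formerly "b eq") is taken only for
  // non-smis, so smis stay on the inline fast path.
  std::printf("IsSmi(smi) = %d, IsSmi(heap_object) = %d\n",
              static_cast<int>(IsSmi(smi)),
              static_cast<int>(IsSmi(heap_object)));
  return 0;
}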