OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_IA32) | 6 #if defined(TARGET_ARCH_IA32) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/heap.h" | 9 #include "vm/heap.h" |
10 #include "vm/memory_region.h" | 10 #include "vm/memory_region.h" |
(...skipping 1516 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1527 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) { | 1527 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) { |
1528 // Reserve space for arguments and align frame before entering | 1528 // Reserve space for arguments and align frame before entering |
1529 // the C++ world. | 1529 // the C++ world. |
1530 AddImmediate(ESP, Immediate(-frame_space)); | 1530 AddImmediate(ESP, Immediate(-frame_space)); |
1531 if (OS::ActivationFrameAlignment() > 0) { | 1531 if (OS::ActivationFrameAlignment() > 0) { |
1532 andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 1532 andl(ESP, Immediate(~(OS::ActivationFrameAlignment() - 1))); |
1533 } | 1533 } |
1534 } | 1534 } |
1535 | 1535 |
1536 | 1536 |
// Caller-saved general-purpose registers that must be preserved around a
// runtime call: EAX, ECX and EDX (the IA32 C-calling-convention volatile set).
static const intptr_t kNumberOfVolatileCpuRegisters = 3;
static const Register volatile_cpu_registers[kNumberOfVolatileCpuRegisters] = {
  EAX, ECX, EDX
};


// XMM0 is used only as a scratch register in the optimized code. No need to
// save it.
static const intptr_t kNumberOfVolatileXmmRegisters =
    kNumberOfXmmRegisters - 1;


// Total count of registers saved by EnterCallRuntimeFrame. NOTE(review):
// this is a register COUNT, not a byte size — CPU registers are word-sized
// while XMM registers are saved as doubles, so it cannot be multiplied by a
// single element size to get the saved-area size.
static const intptr_t kNumberOfVolatileRegisters =
    kNumberOfVolatileCpuRegisters + kNumberOfVolatileXmmRegisters;
1544 void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) { | 1553 void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) { |
1545 enter(Immediate(0)); | 1554 enter(Immediate(0)); |
1546 | 1555 |
1547 // Preserve volatile registers. | 1556 // Preserve volatile CPU registers. |
1548 for (intptr_t i = 0; i < kNumberOfVolatileCpuRegisters; i++) { | 1557 for (intptr_t i = 0; i < kNumberOfVolatileCpuRegisters; i++) { |
1549 pushl(volatile_cpu_registers[i]); | 1558 pushl(volatile_cpu_registers[i]); |
1550 } | 1559 } |
1551 | 1560 |
| 1561 // Preserve all XMM registers except XMM0 |
| 1562 subl(ESP, Immediate((kNumberOfXmmRegisters - 1) * kDoubleSize)); |
| 1563 // Store XMM registers with the lowest register number at the lowest |
| 1564 // address. |
| 1565 intptr_t offset = 0; |
| 1566 for (intptr_t reg_idx = 1; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
| 1567 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
| 1568 movsd(Address(ESP, offset), xmm_reg); |
| 1569 offset += kDoubleSize; |
| 1570 } |
| 1571 |
1552 ReserveAlignedFrameSpace(frame_space); | 1572 ReserveAlignedFrameSpace(frame_space); |
1553 } | 1573 } |
1554 | 1574 |
1555 | 1575 |
1556 void Assembler::LeaveCallRuntimeFrame() { | 1576 void Assembler::LeaveCallRuntimeFrame() { |
1557 // ESP might have been modified to reserve space for arguments | 1577 // ESP might have been modified to reserve space for arguments |
1558 // and ensure proper alignment of the stack frame. | 1578 // and ensure proper alignment of the stack frame. |
1559 // We need to restore it before restoring registers. | 1579 // We need to restore it before restoring registers. |
1560 leal(ESP, Address(EBP, -kNumberOfVolatileCpuRegisters * kWordSize)); | 1580 leal(ESP, Address(EBP, -kNumberOfVolatileRegisters * kWordSize)); |
1561 | 1581 |
1562 // Restore volatile registers. | 1582 // Restore all XMM registers except XMM0 |
| 1583 // XMM registers have the lowest register number at the lowest address. |
| 1584 intptr_t offset = 0; |
| 1585 for (intptr_t reg_idx = 1; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
| 1586 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
| 1587 movsd(xmm_reg, Address(ESP, offset)); |
| 1588 offset += kDoubleSize; |
| 1589 } |
| 1590 addl(ESP, Immediate(offset)); |
| 1591 |
| 1592 // Restore volatile CPU registers. |
1563 for (intptr_t i = kNumberOfVolatileCpuRegisters - 1; i >= 0; i--) { | 1593 for (intptr_t i = kNumberOfVolatileCpuRegisters - 1; i >= 0; i--) { |
1564 popl(volatile_cpu_registers[i]); | 1594 popl(volatile_cpu_registers[i]); |
1565 } | 1595 } |
1566 | 1596 |
1567 leave(); | 1597 leave(); |
1568 } | 1598 } |
1569 | 1599 |
1570 | 1600 |
1571 void Assembler::CallRuntime(const RuntimeEntry& entry) { | 1601 void Assembler::CallRuntime(const RuntimeEntry& entry) { |
1572 entry.Call(this); | 1602 entry.Call(this); |
(...skipping 232 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1805 | 1835 |
// Returns the printable name of |reg|, indexing the xmm_reg_names table
// (defined earlier in this file, outside this view). Debug-asserts that
// |reg| is a valid XMM register number.
const char* Assembler::XmmRegisterName(XmmRegister reg) {
  ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters));
  return xmm_reg_names[reg];
}
1810 | 1840 |
1811 | 1841 |
1812 } // namespace dart | 1842 } // namespace dart |
1813 | 1843 |
1814 #endif // defined TARGET_ARCH_IA32 | 1844 #endif // defined TARGET_ARCH_IA32 |
OLD | NEW |