OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
103 __ movl(rax, Immediate(1)); | 103 __ movl(rax, Immediate(1)); |
104 supported_ = kDefaultCpuFeatures | (1 << CPUID); | 104 supported_ = kDefaultCpuFeatures | (1 << CPUID); |
105 { Scope fscope(CPUID); | 105 { Scope fscope(CPUID); |
106 __ cpuid(); | 106 __ cpuid(); |
107 // Move the result from ecx:edx to rdi. | 107 // Move the result from ecx:edx to rdi. |
108 __ movl(rdi, rdx); // Zero-extended to 64 bits. | 108 __ movl(rdi, rdx); // Zero-extended to 64 bits. |
109 __ shl(rcx, Immediate(32)); | 109 __ shl(rcx, Immediate(32)); |
110 __ or_(rdi, rcx); | 110 __ or_(rdi, rcx); |
111 | 111 |
112 // Get the sahf supported flag, from CPUID(0x80000001) | 112 // Get the sahf supported flag, from CPUID(0x80000001) |
113 __ movq(rax, 0x80000001, RelocInfo::NONE); | 113 __ movq(rax, 0x80000001, RelocInfo::NONE64); |
114 __ cpuid(); | 114 __ cpuid(); |
115 } | 115 } |
116 supported_ = kDefaultCpuFeatures; | 116 supported_ = kDefaultCpuFeatures; |
117 | 117 |
118 // Put the CPU flags in rax. | 118 // Put the CPU flags in rax. |
119 // rax = (rcx & 1) | (rdi & ~1) | (1 << CPUID). | 119 // rax = (rcx & 1) | (rdi & ~1) | (1 << CPUID). |
120 __ movl(rax, Immediate(1)); | 120 __ movl(rax, Immediate(1)); |
121 __ and_(rcx, rax); // Bit 0 is set if SAHF instruction supported. | 121 __ and_(rcx, rax); // Bit 0 is set if SAHF instruction supported. |
122 __ not_(rax); | 122 __ not_(rax); |
123 __ and_(rax, rdi); | 123 __ and_(rax, rdi); |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
166 // Create a code patcher. | 166 // Create a code patcher. |
167 CodePatcher patcher(pc_, code_size); | 167 CodePatcher patcher(pc_, code_size); |
168 | 168 |
169 // Add a label for checking the size of the code used for returning. | 169 // Add a label for checking the size of the code used for returning. |
170 #ifdef DEBUG | 170 #ifdef DEBUG |
171 Label check_codesize; | 171 Label check_codesize; |
172 patcher.masm()->bind(&check_codesize); | 172 patcher.masm()->bind(&check_codesize); |
173 #endif | 173 #endif |
174 | 174 |
175 // Patch the code. | 175 // Patch the code. |
176 patcher.masm()->movq(r10, target, RelocInfo::NONE); | 176 patcher.masm()->movq(r10, target, RelocInfo::NONE64); |
177 patcher.masm()->call(r10); | 177 patcher.masm()->call(r10); |
178 | 178 |
179 // Check that the size of the code generated is as expected. | 179 // Check that the size of the code generated is as expected. |
180 ASSERT_EQ(kCallCodeSize, | 180 ASSERT_EQ(kCallCodeSize, |
181 patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize)); | 181 patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize)); |
182 | 182 |
183 // Add the requested number of int3 instructions after the call. | 183 // Add the requested number of int3 instructions after the call. |
184 for (int i = 0; i < guard_bytes; i++) { | 184 for (int i = 0; i < guard_bytes; i++) { |
185 patcher.masm()->int3(); | 185 patcher.masm()->int3(); |
186 } | 186 } |
(...skipping 1304 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1491 ASSERT(rmode > RelocInfo::LAST_GCED_ENUM); | 1491 ASSERT(rmode > RelocInfo::LAST_GCED_ENUM); |
1492 EnsureSpace ensure_space(this); | 1492 EnsureSpace ensure_space(this); |
1493 emit_rex_64(dst); | 1493 emit_rex_64(dst); |
1494 emit(0xB8 | dst.low_bits()); | 1494 emit(0xB8 | dst.low_bits()); |
1495 emitq(reinterpret_cast<uintptr_t>(value), rmode); | 1495 emitq(reinterpret_cast<uintptr_t>(value), rmode); |
1496 } | 1496 } |
1497 | 1497 |
1498 | 1498 |
1499 void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) { | 1499 void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) { |
1500 // Non-relocatable values might not need a 64-bit representation. | 1500 // Non-relocatable values might not need a 64-bit representation. |
1501 if (rmode == RelocInfo::NONE) { | 1501 if (RelocInfo::IsNone(rmode)) { |
1502 // Sadly, there is no zero or sign extending move for 8-bit immediates. | 1502 // Sadly, there is no zero or sign extending move for 8-bit immediates. |
1503 if (is_int32(value)) { | 1503 if (is_int32(value)) { |
1504 movq(dst, Immediate(static_cast<int32_t>(value))); | 1504 movq(dst, Immediate(static_cast<int32_t>(value))); |
1505 return; | 1505 return; |
1506 } else if (is_uint32(value)) { | 1506 } else if (is_uint32(value)) { |
1507 movl(dst, Immediate(static_cast<int32_t>(value))); | 1507 movl(dst, Immediate(static_cast<int32_t>(value))); |
1508 return; | 1508 return; |
1509 } | 1509 } |
1510 // Value cannot be represented by 32 bits, so do a full 64 bit immediate | 1510 // Value cannot be represented by 32 bits, so do a full 64 bit immediate |
1511 // value. | 1511 // value. |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1551 int32_t current = pc_offset(); | 1551 int32_t current = pc_offset(); |
1552 emitl(current); | 1552 emitl(current); |
1553 src->link_to(current); | 1553 src->link_to(current); |
1554 } | 1554 } |
1555 } | 1555 } |
1556 | 1556 |
1557 | 1557 |
1558 void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) { | 1558 void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) { |
1559 // If there is no relocation info, emit the value of the handle efficiently | 1559 // If there is no relocation info, emit the value of the handle efficiently |
1560 // (possibly using less than 8 bytes for the value). | 1560 // (possibly using less than 8 bytes for the value). |
1561 if (mode == RelocInfo::NONE) { | 1561 if (RelocInfo::IsNone(mode)) { |
1562 // There is no possible reason to store a heap pointer without relocation | 1562 // There is no possible reason to store a heap pointer without relocation |
1563 // info, so it must be a smi. | 1563 // info, so it must be a smi. |
1564 ASSERT(value->IsSmi()); | 1564 ASSERT(value->IsSmi()); |
1565 movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE); | 1565 movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE64); |
1566 } else { | 1566 } else { |
1567 EnsureSpace ensure_space(this); | 1567 EnsureSpace ensure_space(this); |
1568 ASSERT(value->IsHeapObject()); | 1568 ASSERT(value->IsHeapObject()); |
1569 ASSERT(!HEAP->InNewSpace(*value)); | 1569 ASSERT(!HEAP->InNewSpace(*value)); |
1570 emit_rex_64(dst); | 1570 emit_rex_64(dst); |
1571 emit(0xB8 | dst.low_bits()); | 1571 emit(0xB8 | dst.low_bits()); |
1572 emitq(reinterpret_cast<uintptr_t>(value.location()), mode); | 1572 emitq(reinterpret_cast<uintptr_t>(value.location()), mode); |
1573 } | 1573 } |
1574 } | 1574 } |
1575 | 1575 |
(...skipping 1412 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2988 | 2988 |
2989 void Assembler::dd(uint32_t data) { | 2989 void Assembler::dd(uint32_t data) { |
2990 EnsureSpace ensure_space(this); | 2990 EnsureSpace ensure_space(this); |
2991 emitl(data); | 2991 emitl(data); |
2992 } | 2992 } |
2993 | 2993 |
2994 | 2994 |
2995 // Relocation information implementations. | 2995 // Relocation information implementations. |
2996 | 2996 |
2997 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2997 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
2998 ASSERT(rmode != RelocInfo::NONE); | 2998 ASSERT(!RelocInfo::IsNone(rmode)); |
2999 // Don't record external references unless the heap will be serialized. | 2999 // Don't record external references unless the heap will be serialized. |
3000 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { | 3000 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { |
3001 #ifdef DEBUG | 3001 #ifdef DEBUG |
3002 if (!Serializer::enabled()) { | 3002 if (!Serializer::enabled()) { |
3003 Serializer::TooLateToEnableNow(); | 3003 Serializer::TooLateToEnableNow(); |
3004 } | 3004 } |
3005 #endif | 3005 #endif |
3006 if (!Serializer::enabled() && !emit_debug_code()) { | 3006 if (!Serializer::enabled() && !emit_debug_code()) { |
3007 return; | 3007 return; |
3008 } | 3008 } |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3041 bool RelocInfo::IsCodedSpecially() { | 3041 bool RelocInfo::IsCodedSpecially() { |
3042 // The deserializer needs to know whether a pointer is specially coded. Being | 3042 // The deserializer needs to know whether a pointer is specially coded. Being |
3043 // specially coded on x64 means that it is a relative 32 bit address, as used | 3043 // specially coded on x64 means that it is a relative 32 bit address, as used |
3044 // by branch instructions. | 3044 // by branch instructions. |
3045 return (1 << rmode_) & kApplyMask; | 3045 return (1 << rmode_) & kApplyMask; |
3046 } | 3046 } |
3047 | 3047 |
3048 } } // namespace v8::internal | 3048 } } // namespace v8::internal |
3049 | 3049 |
3050 #endif // V8_TARGET_ARCH_X64 | 3050 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |