| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 93 matching lines...) |
| 104 function->ReplaceCode(function->shared()->code()); | 104 function->ReplaceCode(function->shared()->code()); |
| 105 | 105 |
| 106 if (FLAG_trace_deopt) { | 106 if (FLAG_trace_deopt) { |
| 107 PrintF("[forced deoptimization: "); | 107 PrintF("[forced deoptimization: "); |
| 108 function->PrintName(); | 108 function->PrintName(); |
| 109 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 109 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
| 110 } | 110 } |
| 111 } | 111 } |
| 112 | 112 |
| 113 | 113 |
| 114 static const byte kJnsInstruction = 0x79; |
| 115 static const byte kJnsOffset = 0x1f; |
| 116 static const byte kJnsOffsetDebugCode = 0x53; |
| 117 static const byte kJaeInstruction = 0x73; |
| 118 static const byte kJaeOffset = 0x07; |
| 119 static const byte kCallInstruction = 0xe8; |
| 120 static const byte kNopByteOne = 0x66; |
| 121 static const byte kNopByteTwo = 0x90; |
| 122 |
| 114 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, | 123 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, |
| 115 Address pc_after, | 124 Address pc_after, |
| 116 Code* check_code, | 125 Code* check_code, |
| 117 Code* replacement_code) { | 126 Code* replacement_code) { |
| 118 Address call_target_address = pc_after - kIntSize; | 127 Address call_target_address = pc_after - kIntSize; |
| 119 ASSERT(check_code->entry() == | 128 ASSERT_EQ(check_code->entry(), |
| 120 Assembler::target_address_at(call_target_address)); | 129 Assembler::target_address_at(call_target_address)); |
| 121 // The stack check code matches the pattern: | 130 // The stack check code matches the pattern: |
| 122 // | 131 // |
| 123 // cmp rsp, <limit> | 132 // cmp rsp, <limit> |
| 124 // jae ok | 133 // jae ok |
| 125 // call <stack guard> | 134 // call <stack guard> |
| 126 // test rax, <loop nesting depth> | 135 // test rax, <loop nesting depth> |
| 127 // ok: ... | 136 // ok: ... |
| 128 // | 137 // |
| 129 // We will patch away the branch so the code is: | 138 // We will patch away the branch so the code is: |
| 130 // | 139 // |
| 131 // cmp rsp, <limit> ;; Not changed | 140 // cmp rsp, <limit> ;; Not changed |
| 132 // nop | 141 // nop |
| 133 // nop | 142 // nop |
| 134 // call <on-stack replacement> | 143 // call <on-stack replacement> |
| 135 // test rax, <loop nesting depth> | 144 // test rax, <loop nesting depth> |
| 136 // ok: | 145 // ok: |
| 137 // | 146 // |
| 138 ASSERT(*(call_target_address - 3) == 0x73 && // jae | 147 if (FLAG_count_based_interrupts) { |
| 139 *(call_target_address - 2) == 0x07 && // offset | 148 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); |
| 140 *(call_target_address - 1) == 0xe8); // call | 149 if (FLAG_debug_code) { |
| 141 *(call_target_address - 3) = 0x66; // 2 byte nop part 1 | 150 // FullCodeGenerator::EmitProfilingCounterReset() makes use of |
| 142 *(call_target_address - 2) = 0x90; // 2 byte nop part 2 | 151 // masm->Move(Operand&, Smi*), which generates additional code |
| 152 // when FLAG_debug_code is set, so the jump offset is larger |
| 153 // in that case. |
| 154 ASSERT_EQ(kJnsOffsetDebugCode, *(call_target_address - 2)); |
| 155 } else { |
| 156 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); |
| 157 } |
| 158 } else { |
| 159 ASSERT_EQ(kJaeInstruction, *(call_target_address - 3)); |
| 160 ASSERT_EQ(kJaeOffset, *(call_target_address - 2)); |
| 161 } |
| 162 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
| 163 *(call_target_address - 3) = kNopByteOne; |
| 164 *(call_target_address - 2) = kNopByteTwo; |
| 143 Assembler::set_target_address_at(call_target_address, | 165 Assembler::set_target_address_at(call_target_address, |
| 144 replacement_code->entry()); | 166 replacement_code->entry()); |
| 145 | 167 |
| 146 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 168 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 147 unoptimized_code, call_target_address, replacement_code); | 169 unoptimized_code, call_target_address, replacement_code); |
| 148 } | 170 } |
| 149 | 171 |
| 150 | 172 |
| 151 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 173 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, |
| 152 Address pc_after, | 174 Address pc_after, |
| 153 Code* check_code, | 175 Code* check_code, |
| 154 Code* replacement_code) { | 176 Code* replacement_code) { |
| 155 Address call_target_address = pc_after - kIntSize; | 177 Address call_target_address = pc_after - kIntSize; |
| 156 ASSERT(replacement_code->entry() == | 178 ASSERT(replacement_code->entry() == |
| 157 Assembler::target_address_at(call_target_address)); | 179 Assembler::target_address_at(call_target_address)); |
| 158 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 180 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
| 159 // restore the conditional branch. | 181 // restore the conditional branch. |
| 160 ASSERT(*(call_target_address - 3) == 0x66 && // 2 byte nop part 1 | 182 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); |
| 161 *(call_target_address - 2) == 0x90 && // 2 byte nop part 2 | 183 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); |
| 162 *(call_target_address - 1) == 0xe8); // call | 184 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
| 163 *(call_target_address - 3) = 0x73; // jae | 185 if (FLAG_count_based_interrupts) { |
| 164 *(call_target_address - 2) = 0x07; // offset | 186 *(call_target_address - 3) = kJnsInstruction; |
| 187 if (FLAG_debug_code) { |
| 188 // See comment above: larger jump offset if debug code is generated. |
| 189 *(call_target_address - 2) = kJnsOffsetDebugCode; |
| 190 } else { |
| 191 *(call_target_address - 2) = kJnsOffset; |
| 192 } |
| 193 } else { |
| 194 *(call_target_address - 3) = kJaeInstruction; |
| 195 *(call_target_address - 2) = kJaeOffset; |
| 196 } |
| 165 Assembler::set_target_address_at(call_target_address, | 197 Assembler::set_target_address_at(call_target_address, |
| 166 check_code->entry()); | 198 check_code->entry()); |
| 167 | 199 |
| 168 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 200 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 169 unoptimized_code, call_target_address, check_code); | 201 unoptimized_code, call_target_address, check_code); |
| 170 } | 202 } |
| 171 | 203 |
| 172 | 204 |
| 173 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) { | 205 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) { |
| 174 ByteArray* translations = data->TranslationByteArray(); | 206 ByteArray* translations = data->TranslationByteArray(); |
| (...skipping 772 matching lines...) |
| 947 } | 979 } |
| 948 __ bind(&done); | 980 __ bind(&done); |
| 949 } | 981 } |
| 950 | 982 |
| 951 #undef __ | 983 #undef __ |
| 952 | 984 |
| 953 | 985 |
| 954 } } // namespace v8::internal | 986 } } // namespace v8::internal |
| 955 | 987 |
| 956 #endif // V8_TARGET_ARCH_X64 | 988 #endif // V8_TARGET_ARCH_X64 |
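For reference, both functions in this diff perform the same byte-level rewrite in opposite directions: the two bytes of the rel8 conditional jump (jns under FLAG_count_based_interrupts, jae otherwise) immediately preceding the 0xe8 call are swapped with a two-byte nop, and back. Below is a minimal standalone sketch of that round trip, assuming a plain byte buffer; the helper names PatchSite/RevertSite are hypothetical, this is not V8 code, and the FLAG_debug_code offset variant (kJnsOffsetDebugCode) is omitted for brevity.

#include <cassert>
#include <cstdint>
#include <cstdio>

namespace {

// Mirrors the constants introduced by this patch (hypothetical sketch, not V8 code).
const uint8_t kJnsInstruction  = 0x79;  // jns rel8
const uint8_t kJnsOffset       = 0x1f;
const uint8_t kJaeInstruction  = 0x73;  // jae rel8
const uint8_t kJaeOffset       = 0x07;
const uint8_t kCallInstruction = 0xe8;  // call rel32
const uint8_t kNopByteOne      = 0x66;  // 2-byte nop, byte 1
const uint8_t kNopByteTwo      = 0x90;  // 2-byte nop, byte 2

// "Patch" direction: overwrite the conditional jump that skips the call
// with a 2-byte nop, so the (retargeted) call is always taken.
void PatchSite(uint8_t* call_target_address, bool count_based_interrupts) {
  if (count_based_interrupts) {
    assert(call_target_address[-3] == kJnsInstruction);
    assert(call_target_address[-2] == kJnsOffset);
  } else {
    assert(call_target_address[-3] == kJaeInstruction);
    assert(call_target_address[-2] == kJaeOffset);
  }
  assert(call_target_address[-1] == kCallInstruction);
  call_target_address[-3] = kNopByteOne;
  call_target_address[-2] = kNopByteTwo;
}

// "Revert" direction: restore the conditional jump over the nops.
void RevertSite(uint8_t* call_target_address, bool count_based_interrupts) {
  assert(call_target_address[-3] == kNopByteOne);
  assert(call_target_address[-2] == kNopByteTwo);
  assert(call_target_address[-1] == kCallInstruction);
  if (count_based_interrupts) {
    call_target_address[-3] = kJnsInstruction;
    call_target_address[-2] = kJnsOffset;
  } else {
    call_target_address[-3] = kJaeInstruction;
    call_target_address[-2] = kJaeOffset;
  }
}

}  // namespace

int main() {
  // Fake code stream: jns <offset>; call <rel32>. As in the patch,
  // call_target_address points at the call's 4-byte operand, so the
  // bytes at offsets -3/-2/-1 are the jump opcode, jump offset, and
  // call opcode respectively.
  uint8_t code[] = { kJnsInstruction, kJnsOffset, kCallInstruction,
                     0x00, 0x00, 0x00, 0x00 };
  uint8_t* call_target_address = code + 3;
  PatchSite(call_target_address, /*count_based_interrupts=*/true);
  assert(code[0] == kNopByteOne && code[1] == kNopByteTwo);
  RevertSite(call_target_address, /*count_based_interrupts=*/true);
  assert(code[0] == kJnsInstruction && code[1] == kJnsOffset);
  std::puts("patch/revert round-trip OK");
  return 0;
}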