| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 187 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 198 function->ReplaceCode(function->shared()->code()); | 198 function->ReplaceCode(function->shared()->code()); |
| 199 | 199 |
| 200 if (FLAG_trace_deopt) { | 200 if (FLAG_trace_deopt) { |
| 201 PrintF("[forced deoptimization: "); | 201 PrintF("[forced deoptimization: "); |
| 202 function->PrintName(); | 202 function->PrintName(); |
| 203 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); | 203 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); |
| 204 } | 204 } |
| 205 } | 205 } |
| 206 | 206 |
| 207 | 207 |
// ia32 opcode bytes involved in patching the interrupt/stack check in
// unoptimized code for on-stack replacement (see PatchStackCheckCodeAt /
// RevertStackCheckCodeAt below).
static const byte kJnsInstruction = 0x79;  // jns rel8 — branch used by count-based interrupt checks.
static const byte kJnsOffset = 0x11;       // rel8 displacement emitted with the jns.
static const byte kJaeInstruction = 0x73;  // jae rel8 — branch used by stack-limit checks.
static const byte kJaeOffset = 0x07;       // rel8 displacement emitted with the jae.
static const byte kCallInstruction = 0xe8; // call rel32 — the stack-guard / OSR call.
static const byte kNopByteOne = 0x66;      // First byte of the 2-byte nop (0x66 0x90)
static const byte kNopByteTwo = 0x90;      // that replaces the conditional branch.
| 215 |
| 216 |
// Patches the stack/interrupt check at pc_after in unoptimized code so the
// guarded call targets the on-stack-replacement builtin (replacement_code)
// unconditionally: the conditional branch over the call is overwritten with
// a 2-byte nop and the call target is redirected.
void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
                                        Address pc_after,
                                        Code* check_code,
                                        Code* replacement_code) {
  // pc_after points just past the call's rel32 operand; back up to it.
  Address call_target_address = pc_after - kIntSize;
  ASSERT(check_code->entry() ==
         Assembler::target_address_at(call_target_address));
  // The stack check code matches the pattern:
  //
  //     cmp esp, <limit>
  //     jae ok
  //     call <stack guard>
  //     test eax, <loop nesting depth>
  // ok: ...
  //
  // (With FLAG_count_based_interrupts the branch is jns rather than jae;
  // see the kJns*/kJae* constants above.)
  //
  // We will patch away the branch so the code is:
  //
  //     cmp esp, <limit>  ;; Not changed
  //     nop
  //     nop
  //     call <on-stack replacement>
  //     test eax, <loop nesting depth>
  // ok:

  // Verify the expected branch encoding before clobbering it; the branch
  // flavor depends on which interrupt mechanism compiled this code.
  if (FLAG_count_based_interrupts) {
    ASSERT(*(call_target_address - 3) == kJnsInstruction);
    ASSERT(*(call_target_address - 2) == kJnsOffset);
  } else {
    ASSERT(*(call_target_address - 3) == kJaeInstruction);
    ASSERT(*(call_target_address - 2) == kJaeOffset);
  }
  ASSERT(*(call_target_address - 1) == kCallInstruction);
  // Overwrite the 2-byte conditional branch with a 2-byte nop (0x66 0x90).
  *(call_target_address - 3) = kNopByteOne;
  *(call_target_address - 2) = kNopByteTwo;
  Assembler::set_target_address_at(call_target_address,
                                   replacement_code->entry());

  // Tell the incremental marker about the rewritten code target so the
  // write barrier stays consistent.
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
| 242 | 257 |
| 243 | 258 |
| 244 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 259 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, |
| 245 Address pc_after, | 260 Address pc_after, |
| 246 Code* check_code, | 261 Code* check_code, |
| 247 Code* replacement_code) { | 262 Code* replacement_code) { |
| 248 Address call_target_address = pc_after - kIntSize; | 263 Address call_target_address = pc_after - kIntSize; |
| 249 ASSERT(replacement_code->entry() == | 264 ASSERT(replacement_code->entry() == |
| 250 Assembler::target_address_at(call_target_address)); | 265 Assembler::target_address_at(call_target_address)); |
| 266 |
| 251 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 267 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
| 252 // restore the conditional branch. | 268 // restore the conditional branch. |
| 253 ASSERT(*(call_target_address - 3) == 0x66 && // 2 byte nop part 1 | 269 ASSERT(*(call_target_address - 3) == kNopByteOne && |
| 254 *(call_target_address - 2) == 0x90 && // 2 byte nop part 2 | 270 *(call_target_address - 2) == kNopByteTwo && |
| 255 *(call_target_address - 1) == 0xe8); // call | 271 *(call_target_address - 1) == kCallInstruction); |
| 256 *(call_target_address - 3) = 0x73; // jae | 272 if (FLAG_count_based_interrupts) { |
| 257 *(call_target_address - 2) = 0x07; // offset | 273 *(call_target_address - 3) = kJnsInstruction; |
| 274 *(call_target_address - 2) = kJnsOffset; |
| 275 } else { |
| 276 *(call_target_address - 3) = kJaeInstruction; |
| 277 *(call_target_address - 2) = kJaeOffset; |
| 278 } |
| 258 Assembler::set_target_address_at(call_target_address, | 279 Assembler::set_target_address_at(call_target_address, |
| 259 check_code->entry()); | 280 check_code->entry()); |
| 260 | 281 |
| 261 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 282 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 262 unoptimized_code, call_target_address, check_code); | 283 unoptimized_code, call_target_address, check_code); |
| 263 } | 284 } |
| 264 | 285 |
| 265 | 286 |
| 266 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) { | 287 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) { |
| 267 ByteArray* translations = data->TranslationByteArray(); | 288 ByteArray* translations = data->TranslationByteArray(); |
| (...skipping 649 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 917 } | 938 } |
| 918 __ bind(&done); | 939 __ bind(&done); |
| 919 } | 940 } |
| 920 | 941 |
| 921 #undef __ | 942 #undef __ |
| 922 | 943 |
| 923 | 944 |
| 924 } } // namespace v8::internal | 945 } } // namespace v8::internal |
| 925 | 946 |
| 926 #endif // V8_TARGET_ARCH_IA32 | 947 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |