| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 221 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 232 // We will patch away the branch so the code is: | 232 // We will patch away the branch so the code is: |
| 233 // | 233 // |
| 234 // cmp esp, <limit> ;; Not changed | 234 // cmp esp, <limit> ;; Not changed |
| 235 // nop | 235 // nop |
| 236 // nop | 236 // nop |
| 237 //   call <on-stack replacement> | 237 //   call <on-stack replacement> |
| 238 // test eax, <loop nesting depth> | 238 // test eax, <loop nesting depth> |
| 239 // ok: | 239 // ok: |
| 240 | 240 |
| 241 if (FLAG_count_based_interrupts) { | 241 if (FLAG_count_based_interrupts) { |
| 242 ASSERT_EQ(*(call_target_address - 3), kJnsInstruction); | 242 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); |
| 243 ASSERT_EQ(*(call_target_address - 2), kJnsOffset); | 243 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); |
| 244 } else { | 244 } else { |
| 245 ASSERT_EQ(*(call_target_address - 3), kJaeInstruction); | 245 ASSERT_EQ(kJaeInstruction, *(call_target_address - 3)); |
| 246 ASSERT_EQ(*(call_target_address - 2), kJaeOffset); | 246 ASSERT_EQ(kJaeOffset, *(call_target_address - 2)); |
| 247 } | 247 } |
| 248 ASSERT_EQ(*(call_target_address - 1), kCallInstruction); | 248 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
| 249 *(call_target_address - 3) = kNopByteOne; | 249 *(call_target_address - 3) = kNopByteOne; |
| 250 *(call_target_address - 2) = kNopByteTwo; | 250 *(call_target_address - 2) = kNopByteTwo; |
| 251 Assembler::set_target_address_at(call_target_address, | 251 Assembler::set_target_address_at(call_target_address, |
| 252 replacement_code->entry()); | 252 replacement_code->entry()); |
| 253 | 253 |
| 254 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 254 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 255 unoptimized_code, call_target_address, replacement_code); | 255 unoptimized_code, call_target_address, replacement_code); |
| 256 } | 256 } |
| 257 | 257 |
| 258 | 258 |
| 259 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 259 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, |
| 260 Address pc_after, | 260 Address pc_after, |
| 261 Code* check_code, | 261 Code* check_code, |
| 262 Code* replacement_code) { | 262 Code* replacement_code) { |
| 263 Address call_target_address = pc_after - kIntSize; | 263 Address call_target_address = pc_after - kIntSize; |
| 264 ASSERT_EQ(replacement_code->entry(), | 264 ASSERT_EQ(replacement_code->entry(), |
| 265 Assembler::target_address_at(call_target_address)); | 265 Assembler::target_address_at(call_target_address)); |
| 266 | 266 |
| 267 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 267 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
| 268 // restore the conditional branch. | 268 // restore the conditional branch. |
| 269 ASSERT_EQ(*(call_target_address - 3), kNopByteOne); | 269 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); |
| 270 ASSERT_EQ(*(call_target_address - 2), kNopByteTwo); | 270 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); |
| 271 ASSERT_EQ(*(call_target_address - 1), kCallInstruction); | 271 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
| 272 if (FLAG_count_based_interrupts) { | 272 if (FLAG_count_based_interrupts) { |
| 273 *(call_target_address - 3) = kJnsInstruction; | 273 *(call_target_address - 3) = kJnsInstruction; |
| 274 *(call_target_address - 2) = kJnsOffset; | 274 *(call_target_address - 2) = kJnsOffset; |
| 275 } else { | 275 } else { |
| 276 *(call_target_address - 3) = kJaeInstruction; | 276 *(call_target_address - 3) = kJaeInstruction; |
| 277 *(call_target_address - 2) = kJaeOffset; | 277 *(call_target_address - 2) = kJaeOffset; |
| 278 } | 278 } |
| 279 Assembler::set_target_address_at(call_target_address, | 279 Assembler::set_target_address_at(call_target_address, |
| 280 check_code->entry()); | 280 check_code->entry()); |
| 281 | 281 |
| (...skipping 720 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1002 } | 1002 } |
| 1003 __ bind(&done); | 1003 __ bind(&done); |
| 1004 } | 1004 } |
| 1005 | 1005 |
| 1006 #undef __ | 1006 #undef __ |
| 1007 | 1007 |
| 1008 | 1008 |
| 1009 } } // namespace v8::internal | 1009 } } // namespace v8::internal |
| 1010 | 1010 |
| 1011 #endif // V8_TARGET_ARCH_IA32 | 1011 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |