| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 99 matching lines...) |
| 110 if (FLAG_trace_deopt) { | 110 if (FLAG_trace_deopt) { |
| 111 PrintF("[forced deoptimization: "); | 111 PrintF("[forced deoptimization: "); |
| 112 function->PrintName(); | 112 function->PrintName(); |
| 113 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); | 113 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); |
| 114 } | 114 } |
| 115 } | 115 } |
| 116 | 116 |
| 117 | 117 |
| 118 static const int32_t kBranchBeforeInterrupt = 0x5a000004; | 118 static const int32_t kBranchBeforeInterrupt = 0x5a000004; |
| 119 | 119 |
| 120 // The back edge bookkeeping code matches the pattern: |
| 121 // |
| 122 // <decrement profiling counter> |
| 123 // 5a 00 00 04 bpl ok |
| 124 // e5 9f c? ?? ldr ip, [pc, <interrupt stub address>] |
| 125 // e1 2f ff 3c blx ip |
| 126 // ok-label |
| 127 // |
| 128 // We patch the code to the following form: |
| 129 // |
| 130 // <decrement profiling counter> |
| 131 // e1 a0 00 00 mov r0, r0 (NOP) |
| 132 // e5 9f c? ?? ldr ip, [pc, <on-stack replacement address>] |
| 133 // e1 2f ff 3c blx ip |
| 134 // ok-label |
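For readers less familiar with ARM encodings, a minimal standalone sketch (illustrative only, not part of this CL) shows how kBranchBeforeInterrupt above decodes into the conditional branch in this pattern: the top nibble is the PL condition, the next four bits select the branch opcode, and the low 24 bits hold the branch offset in words.

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kBranchBeforeInterrupt = 0x5a000004;
      uint32_t cond  = kBranchBeforeInterrupt >> 28;          // 0x5: PL (not negative)
      uint32_t op    = (kBranchBeforeInterrupt >> 24) & 0xf;  // 0xa: B, branch without link
      uint32_t imm24 = kBranchBeforeInterrupt & 0x00ffffff;   // 4: branch offset in words
      printf("cond=0x%x op=0x%x offset=%u words\n", cond, op, imm24);
      return 0;
    }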
| 120 | 135 |
| 121 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, | 136 void Deoptimizer::PatchInterruptCodeAt(Code* unoptimized_code, |
| 122 Address pc_after, | 137 Address pc_after, |
| 123 Code* check_code, | 138 Code* interrupt_code, |
| 124 Code* replacement_code) { | 139 Code* replacement_code) { |
| 125 const int kInstrSize = Assembler::kInstrSize; | 140 ASSERT(!InterruptCodeIsPatched(unoptimized_code, |
| 126 // The back edge bookkeeping code matches the pattern: | 141 pc_after, |
| 127 // | 142 interrupt_code, |
| 128 // <decrement profiling counter> | 143 replacement_code)); |
| 129 // 2a 00 00 01 bpl ok | 144 static const int kInstrSize = Assembler::kInstrSize; |
| 130 // e5 9f c? ?? ldr ip, [pc, <stack guard address>] | 145 // Turn the jump into nops. |
| 131 // e1 2f ff 3c blx ip | |
| 132 ASSERT(Memory::int32_at(pc_after - kInstrSize) == kBlxIp); | |
| 133 ASSERT(Assembler::IsLdrPcImmediateOffset( | |
| 134 Assembler::instr_at(pc_after - 2 * kInstrSize))); | |
| 135 ASSERT_EQ(kBranchBeforeInterrupt, | |
| 136 Memory::int32_at(pc_after - 3 * kInstrSize)); | |
| 137 | |
| 138 // We patch the code to the following form: | |
| 139 // | |
| 140 // <decrement profiling counter> | |
| 141 // e1 a0 00 00 mov r0, r0 (NOP) | |
| 142 // e5 9f c? ?? ldr ip, [pc, <on-stack replacement address>] | |
| 143 // e1 2f ff 3c blx ip | |
| 144 // and overwrite the constant containing the | |
| 145 // address of the stack check stub. | |
| 146 | |
| 147 // Replace conditional jump with NOP. | |
| 148 CodePatcher patcher(pc_after - 3 * kInstrSize, 1); | 146 CodePatcher patcher(pc_after - 3 * kInstrSize, 1); |
| 149 patcher.masm()->nop(); | 147 patcher.masm()->nop(); |
| 150 | 148 // Replace the call address. |
| 151 // Replace the stack check address in the constant pool | 149 uint32_t interrupt_address_offset = Memory::uint16_at(pc_after - |
| 152 // with the entry address of the replacement code. | |
| 153 uint32_t stack_check_address_offset = Memory::uint16_at(pc_after - | |
| 154 2 * kInstrSize) & 0xfff; | 150 2 * kInstrSize) & 0xfff; |
| 155 Address stack_check_address_pointer = pc_after + stack_check_address_offset; | 151 Address interrupt_address_pointer = pc_after + interrupt_address_offset; |
| 156 ASSERT(Memory::uint32_at(stack_check_address_pointer) == | 152 Memory::uint32_at(interrupt_address_pointer) = |
| 157 reinterpret_cast<uint32_t>(check_code->entry())); | |
| 158 Memory::uint32_at(stack_check_address_pointer) = | |
| 159 reinterpret_cast<uint32_t>(replacement_code->entry()); | 153 reinterpret_cast<uint32_t>(replacement_code->entry()); |
| 160 | 154 |
| 161 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 155 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 162 unoptimized_code, pc_after - 2 * kInstrSize, replacement_code); | 156 unoptimized_code, pc_after - 2 * kInstrSize, replacement_code); |
| 163 } | 157 } |
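The offset arithmetic in PatchInterruptCodeAt leans on the ARM pipeline convention: the ldr sits two instructions before pc_after, and a PC-relative load reads the PC as the instruction's own address plus 8, which is exactly pc_after. A small standalone sketch with hypothetical addresses (not V8 code) makes the cancellation explicit:

    #include <cstdint>
    #include <cassert>

    int main() {
      const uintptr_t kInstrSize = 4;       // ARM instructions are 4 bytes wide
      uintptr_t pc_after = 0x8000;          // hypothetical address just after the blx
      uintptr_t ldr_address = pc_after - 2 * kInstrSize;
      uintptr_t offset = 0x40;              // hypothetical 12-bit immediate from the ldr
      // The load addresses [ldr_address + 8 + offset], i.e. [pc_after + offset],
      // which is why the code above never has to undo the pipeline adjustment.
      assert(ldr_address + 8 + offset == pc_after + offset);
      return 0;
    }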
| 164 | 158 |
| 165 | 159 |
| 166 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 160 void Deoptimizer::RevertInterruptCodeAt(Code* unoptimized_code, |
| 167 Address pc_after, | 161 Address pc_after, |
| 168 Code* check_code, | 162 Code* interrupt_code, |
| 169 Code* replacement_code) { | 163 Code* replacement_code) { |
| 170 const int kInstrSize = Assembler::kInstrSize; | 164 ASSERT(InterruptCodeIsPatched(unoptimized_code, |
| 171 ASSERT(Memory::int32_at(pc_after - kInstrSize) == kBlxIp); | 165 pc_after, |
| 172 ASSERT(Assembler::IsLdrPcImmediateOffset( | 166 interrupt_code, |
| 173 Assembler::instr_at(pc_after - 2 * kInstrSize))); | 167 replacement_code)); |
| 168 static const int kInstrSize = Assembler::kInstrSize; |
| 169 // Restore the original jump. |
| 170 CodePatcher patcher(pc_after - 3 * kInstrSize, 1); |
| 171 patcher.masm()->b(4 * kInstrSize, pl); // ok-label is 4 instructions later. |
| 172 // Restore the original call address. |
| 173 uint32_t interrupt_address_offset = Memory::uint16_at(pc_after - |
| 174 2 * kInstrSize) & 0xfff; |
| 175 Address interrupt_address_pointer = pc_after + interrupt_address_offset; |
| 176 Memory::uint32_at(interrupt_address_pointer) = |
| 177 reinterpret_cast<uint32_t>(interrupt_code->entry()); |
| 174 | 178 |
| 175 // Replace NOP with conditional jump. | 179 interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 176 CodePatcher patcher(pc_after - 3 * kInstrSize, 1); | 180 unoptimized_code, pc_after - 2 * kInstrSize, interrupt_code); |
| 177 patcher.masm()->b(+16, pl); | |
| 178 ASSERT_EQ(kBranchBeforeInterrupt, | |
| 179 Memory::int32_at(pc_after - 3 * kInstrSize)); | |
| 180 | |
| 181 // Replace the stack check address in the constant pool | |
| 182 // with the entry address of the replacement code. | |
| 183 uint32_t stack_check_address_offset = Memory::uint16_at(pc_after - | |
| 184 2 * kInstrSize) & 0xfff; | |
| 185 Address stack_check_address_pointer = pc_after + stack_check_address_offset; | |
| 186 ASSERT(Memory::uint32_at(stack_check_address_pointer) == | |
| 187 reinterpret_cast<uint32_t>(replacement_code->entry())); | |
| 188 Memory::uint32_at(stack_check_address_pointer) = | |
| 189 reinterpret_cast<uint32_t>(check_code->entry()); | |
| 190 | |
| 191 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | |
| 192 unoptimized_code, pc_after - 2 * kInstrSize, check_code); | |
| 193 } | 181 } |
| 194 | 182 |
| 195 | 183 |
| 184 #ifdef DEBUG |
| 185 bool Deoptimizer::InterruptCodeIsPatched(Code* unoptimized_code, |
| 186 Address pc_after, |
| 187 Code* interrupt_code, |
| 188 Code* replacement_code) { |
| 189 static const int kInstrSize = Assembler::kInstrSize; |
| 190 ASSERT_EQ(kBlxIp, Memory::int32_at(pc_after - kInstrSize)); |
| 191 |
| 192 uint32_t interrupt_address_offset = |
| 193 Memory::uint16_at(pc_after - 2 * kInstrSize) & 0xfff; |
| 194 Address interrupt_address_pointer = pc_after + interrupt_address_offset; |
| 195 |
| 196 if (Assembler::IsNop(Assembler::instr_at(pc_after - 3 * kInstrSize))) { |
| 197 ASSERT(Assembler::IsLdrPcImmediateOffset( |
| 198 Assembler::instr_at(pc_after - 2 * kInstrSize))); |
| 201 ASSERT_EQ(reinterpret_cast<uint32_t>(replacement_code->entry()), |
| 202 Memory::uint32_at(interrupt_address_pointer)); |
| 203 return true; |
| 204 } else { |
| 205 ASSERT(Assembler::IsLdrPcImmediateOffset( |
| 206 Assembler::instr_at(pc_after - 2 * kInstrSize))); |
| 207 ASSERT_EQ(kBranchBeforeInterrupt, |
| 208 Memory::int32_at(pc_after - 3 * kInstrSize)); |
| 209 ASSERT_EQ(reinterpret_cast<uint32_t>(interrupt_code->entry()), |
| 210 Memory::uint32_at(interrupt_address_pointer)); |
| 211 return false; |
| 212 } |
| 213 } |
| 214 #endif // DEBUG |
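InterruptCodeIsPatched tells the two states apart purely by the first word of the three-instruction window: a nop (mov r0, r0, e1 a0 00 00 per the comment above) means the back edge has been patched for on-stack replacement, while the original conditional branch means it has not. A rough sketch of that decision (the enum and function names are illustrative, not V8 API):

    #include <cassert>
    #include <cstdint>

    enum class BackEdgeState { kUnpatched, kPatchedForOsr };

    // first_instr is the word at pc_after - 3 * kInstrSize.
    BackEdgeState Classify(uint32_t first_instr) {
      const uint32_t kNop = 0xe1a00000;                     // mov r0, r0
      const uint32_t kBranchBeforeInterrupt = 0x5a000004;   // bpl over the interrupt call
      if (first_instr == kNop) return BackEdgeState::kPatchedForOsr;
      // Anything other than the original branch would trip the ASSERTs above.
      assert(first_instr == kBranchBeforeInterrupt);
      return BackEdgeState::kUnpatched;
    }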
| 215 |
| 216 |
| 196 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { | 217 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { |
| 197 ByteArray* translations = data->TranslationByteArray(); | 218 ByteArray* translations = data->TranslationByteArray(); |
| 198 int length = data->DeoptCount(); | 219 int length = data->DeoptCount(); |
| 199 for (int i = 0; i < length; i++) { | 220 for (int i = 0; i < length; i++) { |
| 200 if (data->AstId(i) == ast_id) { | 221 if (data->AstId(i) == ast_id) { |
| 201 TranslationIterator it(translations, data->TranslationIndex(i)->value()); | 222 TranslationIterator it(translations, data->TranslationIndex(i)->value()); |
| 202 int value = it.Next(); | 223 int value = it.Next(); |
| 203 ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value)); | 224 ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value)); |
| 204 // Read the number of frames. | 225 // Read the number of frames. |
| 205 value = it.Next(); | 226 value = it.Next(); |
| (...skipping 599 matching lines...) |
| 805 __ push(ip); | 826 __ push(ip); |
| 806 __ b(&done); | 827 __ b(&done); |
| 807 ASSERT(masm()->pc_offset() - start == table_entry_size_); | 828 ASSERT(masm()->pc_offset() - start == table_entry_size_); |
| 808 } | 829 } |
| 809 __ bind(&done); | 830 __ bind(&done); |
| 810 } | 831 } |
| 811 | 832 |
| 812 #undef __ | 833 #undef __ |
| 813 | 834 |
| 814 } } // namespace v8::internal | 835 } } // namespace v8::internal |