OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 95 matching lines...)
106 if (FLAG_trace_deopt) { | 106 if (FLAG_trace_deopt) { |
107 PrintF("[forced deoptimization: "); | 107 PrintF("[forced deoptimization: "); |
108 function->PrintName(); | 108 function->PrintName(); |
109 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 109 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
110 } | 110 } |
111 } | 111 } |
112 | 112 |
113 | 113 |
114 static const byte kJnsInstruction = 0x79; | 114 static const byte kJnsInstruction = 0x79; |
115 static const byte kJnsOffset = 0x1f; | 115 static const byte kJnsOffset = 0x1f; |
116 static const byte kJnsOffsetDebugCode = 0x53; | |
117 static const byte kJaeInstruction = 0x73; | 116 static const byte kJaeInstruction = 0x73; |
118 static const byte kJaeOffset = 0x07; | 117 static const byte kJaeOffset = 0x07; |
119 static const byte kCallInstruction = 0xe8; | 118 static const byte kCallInstruction = 0xe8; |
120 static const byte kNopByteOne = 0x66; | 119 static const byte kNopByteOne = 0x66; |
121 static const byte kNopByteTwo = 0x90; | 120 static const byte kNopByteTwo = 0x90; |
122 | 121 |
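The constants above are raw x64 opcode bytes: 0x79 encodes `jns rel8`, 0x73 encodes `jae rel8`, 0xe8 encodes `call rel32`, and the pair 0x66 0x90 is a two-byte nop. A minimal standalone sketch of the byte surgery the two functions below perform, using a mock buffer rather than real Code objects (retargeting the call's rel32 operand via Assembler::set_target_address_at is omitted):

    // Standalone sketch, not V8 code: patch and revert the count-based
    // interrupt check the way PatchStackCheckCodeAt/RevertStackCheckCodeAt do.
    #include <cassert>
    #include <cstdint>

    int main() {
      // Mock unoptimized code: jns ok (0x79 0x1f), call rel32 (0xe8 + 4 bytes).
      uint8_t code[] = {0x79, 0x1f, 0xe8, 0x00, 0x00, 0x00, 0x00};
      uint8_t* pc_after = code + sizeof(code);      // first byte after the call
      uint8_t* call_target_address = pc_after - 4;  // pc_after - kIntSize

      // Patch: overwrite the conditional branch with a two-byte nop so the
      // call to the on-stack replacement stub is taken unconditionally.
      assert(call_target_address[-3] == 0x79);      // kJnsInstruction
      assert(call_target_address[-2] == 0x1f);      // kJnsOffset
      assert(call_target_address[-1] == 0xe8);      // kCallInstruction
      call_target_address[-3] = 0x66;               // kNopByteOne
      call_target_address[-2] = 0x90;               // kNopByteTwo

      // Revert: put the jns back so the call is skipped again unless the
      // profiling counter went negative.
      call_target_address[-3] = 0x79;               // kJnsInstruction
      call_target_address[-2] = 0x1f;               // kJnsOffset
      assert(code[0] == 0x79 && code[1] == 0x1f && code[2] == 0xe8);
      return 0;
    }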
123 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, | 122 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, |
124 Address pc_after, | 123 Address pc_after, |
125 Code* check_code, | 124 Code* check_code, |
126 Code* replacement_code) { | 125 Code* replacement_code) { |
(...skipping 12 matching lines...)
139 // | 138 // |
140 // cmp rsp, <limit> ;; Not changed | 139 // cmp rsp, <limit> ;; Not changed |
141 // nop | 140 // nop |
142 // nop | 141 // nop |
143 // call <on-stack replacement> | 142 // call <on-stack replacement> |
144 // test rax, <loop nesting depth> | 143 // test rax, <loop nesting depth> |
145 // ok: | 144 // ok: |
146 // | 145 // |
147 if (FLAG_count_based_interrupts) { | 146 if (FLAG_count_based_interrupts) { |
148 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); | 147 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); |
149 if (FLAG_debug_code) { | 148 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); |
150 // FullCodeGenerator::EmitProfilingCounterReset() makes use of | |
151 // masm->Move(Operand&, Smi*), which generates additional code | |
152 // when FLAG_debug_code is set, so the jump offset is larger | |
153 // in that case. | |
154 ASSERT_EQ(kJnsOffsetDebugCode, *(call_target_address - 2)); | |
155 } else { | |
156 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); | |
157 } | |
158 } else { | 149 } else { |
159 ASSERT_EQ(kJaeInstruction, *(call_target_address - 3)); | 150 ASSERT_EQ(kJaeInstruction, *(call_target_address - 3)); |
160 ASSERT_EQ(kJaeOffset, *(call_target_address - 2)); | 151 ASSERT_EQ(kJaeOffset, *(call_target_address - 2)); |
161 } | 152 } |
162 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); | 153 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
163 *(call_target_address - 3) = kNopByteOne; | 154 *(call_target_address - 3) = kNopByteOne; |
164 *(call_target_address - 2) = kNopByteTwo; | 155 *(call_target_address - 2) = kNopByteTwo; |
165 Assembler::set_target_address_at(call_target_address, | 156 Assembler::set_target_address_at(call_target_address, |
166 replacement_code->entry()); | 157 replacement_code->entry()); |
167 | 158 |
168 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 159 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
169 unoptimized_code, call_target_address, replacement_code); | 160 unoptimized_code, call_target_address, replacement_code); |
170 } | 161 } |
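One detail worth making explicit: `pc_after` points just past the call instruction, and the call's 32-bit relative operand is its last `kIntSize` (4) bytes, so `pc_after - kIntSize` lands exactly on that operand, with the opcode bytes at negative offsets from it. A tiny sanity check of that arithmetic (the buffer and positions are illustrative):

    #include <cassert>
    #include <cstdint>

    int main() {
      const int kIntSize = 4;                          // matches V8's kIntSize
      uint8_t code[16] = {};
      uint8_t* call_opcode = code + 5;                 // where the 0xe8 byte sits
      uint8_t* pc_after = call_opcode + 1 + kIntSize;  // just past call rel32
      uint8_t* call_target_address = pc_after - kIntSize;
      assert(call_target_address == call_opcode + 1);  // points at the rel32
      assert(call_target_address - 1 == call_opcode);  // 0xe8 at offset -1
      // Offsets -2 and -3 reach back into the two-byte conditional jump.
      return 0;
    }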
171 | 162 |
172 | 163 |
173 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 164 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, |
174 Address pc_after, | 165 Address pc_after, |
175 Code* check_code, | 166 Code* check_code, |
176 Code* replacement_code) { | 167 Code* replacement_code) { |
177 Address call_target_address = pc_after - kIntSize; | 168 Address call_target_address = pc_after - kIntSize; |
178 ASSERT(replacement_code->entry() == | 169 ASSERT(replacement_code->entry() == |
179 Assembler::target_address_at(call_target_address)); | 170 Assembler::target_address_at(call_target_address)); |
180 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 171 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
181 // restore the conditional branch. | 172 // restore the conditional branch. |
182 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); | 173 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); |
183 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); | 174 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); |
184 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); | 175 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
185 if (FLAG_count_based_interrupts) { | 176 if (FLAG_count_based_interrupts) { |
186 *(call_target_address - 3) = kJnsInstruction; | 177 *(call_target_address - 3) = kJnsInstruction; |
187 if (FLAG_debug_code) { | 178 *(call_target_address - 2) = kJnsOffset; |
188 // See comment above: larger jump offset if debug code is generated. | |
189 *(call_target_address - 2) = kJnsOffsetDebugCode; | |
190 } else { | |
191 *(call_target_address - 2) = kJnsOffset; | |
192 } | |
193 } else { | 179 } else { |
194 *(call_target_address - 3) = kJaeInstruction; | 180 *(call_target_address - 3) = kJaeInstruction; |
195 *(call_target_address - 2) = kJaeOffset; | 181 *(call_target_address - 2) = kJaeOffset; |
196 } | 182 } |
197 Assembler::set_target_address_at(call_target_address, | 183 Assembler::set_target_address_at(call_target_address, |
198 check_code->entry()); | 184 check_code->entry()); |
199 | 185 |
200 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 186 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
201 unoptimized_code, call_target_address, check_code); | 187 unoptimized_code, call_target_address, check_code); |
202 } | 188 } |
(...skipping 776 matching lines...)
979 } | 965 } |
980 __ bind(&done); | 966 __ bind(&done); |
981 } | 967 } |
982 | 968 |
983 #undef __ | 969 #undef __ |
984 | 970 |
985 | 971 |
986 } } // namespace v8::internal | 972 } } // namespace v8::internal |
987 | 973 |
988 #endif // V8_TARGET_ARCH_X64 | 974 #endif // V8_TARGET_ARCH_X64 |