OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
104 function->ReplaceCode(function->shared()->code()); | 104 function->ReplaceCode(function->shared()->code()); |
105 | 105 |
106 if (FLAG_trace_deopt) { | 106 if (FLAG_trace_deopt) { |
107 PrintF("[forced deoptimization: "); | 107 PrintF("[forced deoptimization: "); |
108 function->PrintName(); | 108 function->PrintName(); |
109 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 109 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
110 } | 110 } |
111 } | 111 } |
112 | 112 |
113 | 113 |
114 static const byte kJnsInstruction = 0x79; | |
115 static const byte kJnsOffset = 0x1f; | |
116 static const byte kJnsOffsetDebugCode = 0x53; | |
117 static const byte kJaeInstruction = 0x73; | |
118 static const byte kJaeOffset = 0x07; | |
119 static const byte kCallInstruction = 0xe8; | |
120 static const byte kNopByteOne = 0x66; | |
121 static const byte kNopByteTwo = 0x90; | |
122 | |
114 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, | 123 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, |
115 Address pc_after, | 124 Address pc_after, |
116 Code* check_code, | 125 Code* check_code, |
117 Code* replacement_code) { | 126 Code* replacement_code) { |
118 Address call_target_address = pc_after - kIntSize; | 127 Address call_target_address = pc_after - kIntSize; |
119 ASSERT(check_code->entry() == | 128 ASSERT_EQ(check_code->entry(), |
120 Assembler::target_address_at(call_target_address)); | 129 Assembler::target_address_at(call_target_address)); |
121 // The stack check code matches the pattern: | 130 // The stack check code matches the pattern: |
122 // | 131 // |
123 // cmp rsp, <limit> | 132 // cmp rsp, <limit> |
124 // jae ok | 133 // jae ok |
125 // call <stack guard> | 134 // call <stack guard> |
126 // test rax, <loop nesting depth> | 135 // test rax, <loop nesting depth> |
127 // ok: ... | 136 // ok: ... |
128 // | 137 // |
129 // We will patch away the branch so the code is: | 138 // We will patch away the branch so the code is: |
130 // | 139 // |
131 // cmp rsp, <limit> ;; Not changed | 140 // cmp rsp, <limit> ;; Not changed |
132 // nop | 141 // nop |
133 // nop | 142 // nop |
134 // call <on-stack replacement> | 143 // call <on-stack replacement> |
135 // test rax, <loop nesting depth> | 144 // test rax, <loop nesting depth> |
136 // ok: | 145 // ok: |
137 // | 146 // |
138 ASSERT(*(call_target_address - 3) == 0x73 && // jae | 147 if (FLAG_count_based_interrupts) { |
139 *(call_target_address - 2) == 0x07 && // offset | 148 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); |
140 *(call_target_address - 1) == 0xe8); // call | 149 if (FLAG_debug_code) { |
141 *(call_target_address - 3) = 0x66; // 2 byte nop part 1 | 150 ASSERT_EQ(kJnsOffsetDebugCode, *(call_target_address - 2)); |
danno
2012/03/26 14:27:06
Whoa, this was totally non-obvious, perhaps a comm
Jakob Kummerow
2012/03/27 11:17:09
Done.
| |
142 *(call_target_address - 2) = 0x90; // 2 byte nop part 2 | 151 } else { |
152 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); | |
153 } | |
154 } else { | |
155 ASSERT_EQ(kJaeInstruction, *(call_target_address - 3)); | |
156 ASSERT_EQ(kJaeOffset, *(call_target_address - 2)); | |
157 } | |
158 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); | |
159 *(call_target_address - 3) = kNopByteOne; | |
160 *(call_target_address - 2) = kNopByteTwo; | |
143 Assembler::set_target_address_at(call_target_address, | 161 Assembler::set_target_address_at(call_target_address, |
144 replacement_code->entry()); | 162 replacement_code->entry()); |
145 | 163 |
146 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 164 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
147 unoptimized_code, call_target_address, replacement_code); | 165 unoptimized_code, call_target_address, replacement_code); |
148 } | 166 } |
149 | 167 |
150 | 168 |
151 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 169 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, |
152 Address pc_after, | 170 Address pc_after, |
153 Code* check_code, | 171 Code* check_code, |
154 Code* replacement_code) { | 172 Code* replacement_code) { |
155 Address call_target_address = pc_after - kIntSize; | 173 Address call_target_address = pc_after - kIntSize; |
156 ASSERT(replacement_code->entry() == | 174 ASSERT(replacement_code->entry() == |
157 Assembler::target_address_at(call_target_address)); | 175 Assembler::target_address_at(call_target_address)); |
158 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 176 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
159 // restore the conditional branch. | 177 // restore the conditional branch. |
160 ASSERT(*(call_target_address - 3) == 0x66 && // 2 byte nop part 1 | 178 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); |
161 *(call_target_address - 2) == 0x90 && // 2 byte nop part 2 | 179 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); |
162 *(call_target_address - 1) == 0xe8); // call | 180 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
163 *(call_target_address - 3) = 0x73; // jae | 181 if (FLAG_count_based_interrupts) { |
164 *(call_target_address - 2) = 0x07; // offset | 182 *(call_target_address - 3) = kJnsInstruction; |
183 if (FLAG_debug_code) { | |
184 *(call_target_address - 2) = kJnsOffsetDebugCode; | |
185 } else { | |
186 *(call_target_address - 2) = kJnsOffset; | |
187 } | |
188 } else { | |
189 *(call_target_address - 3) = kJaeInstruction; | |
190 *(call_target_address - 2) = kJaeOffset; | |
191 } | |
165 Assembler::set_target_address_at(call_target_address, | 192 Assembler::set_target_address_at(call_target_address, |
166 check_code->entry()); | 193 check_code->entry()); |
167 | 194 |
168 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 195 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
169 unoptimized_code, call_target_address, check_code); | 196 unoptimized_code, call_target_address, check_code); |
170 } | 197 } |
171 | 198 |
172 | 199 |
173 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) { | 200 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) { |
174 ByteArray* translations = data->TranslationByteArray(); | 201 ByteArray* translations = data->TranslationByteArray(); |
(...skipping 772 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
947 } | 974 } |
948 __ bind(&done); | 975 __ bind(&done); |
949 } | 976 } |
950 | 977 |
951 #undef __ | 978 #undef __ |
952 | 979 |
953 | 980 |
954 } } // namespace v8::internal | 981 } } // namespace v8::internal |
955 | 982 |
956 #endif // V8_TARGET_ARCH_X64 | 983 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |