OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
133 *ic_total_count = info->ic_total_count(); | 133 *ic_total_count = info->ic_total_count(); |
134 } | 134 } |
135 *percentage = *ic_total_count > 0 | 135 *percentage = *ic_total_count > 0 |
136 ? 100 * *ic_with_type_info_count / *ic_total_count | 136 ? 100 * *ic_with_type_info_count / *ic_total_count |
137 : 100; | 137 : 100; |
138 } | 138 } |
139 | 139 |
140 | 140 |
141 void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) { | 141 void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) { |
142 ASSERT(function->IsOptimizable()); | 142 ASSERT(function->IsOptimizable()); |
143 // If we are in manual mode, don't auto-optimize anything. | |
144 if (FLAG_manual_parallel_recompilation) return; | |
145 | 143 |
146 if (FLAG_trace_opt) { | 144 if (FLAG_trace_opt) { |
147 PrintF("[marking "); | 145 PrintF("[marking "); |
148 function->PrintName(); | 146 function->PrintName(); |
149 PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address())); | 147 PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address())); |
150 PrintF(" for recompilation, reason: %s", reason); | 148 PrintF(" for recompilation, reason: %s", reason); |
151 if (FLAG_type_info_threshold > 0) { | 149 if (FLAG_type_info_threshold > 0) { |
152 int typeinfo, total, percentage; | 150 int typeinfo, total, percentage; |
153 GetICCounts(function, &typeinfo, &total, &percentage); | 151 GetICCounts(function, &typeinfo, &total, &percentage); |
154 PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage); | 152 PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage); |
155 } | 153 } |
156 PrintF("]\n"); | 154 PrintF("]\n"); |
157 } | 155 } |
158 | 156 |
159 if (FLAG_parallel_recompilation) { | 157 if (FLAG_parallel_recompilation) { |
| 158 ASSERT(!function->IsMarkedForInstallingRecompiledCode()); |
| 159 ASSERT(!function->IsInRecompileQueue()); |
160 function->MarkForParallelRecompilation(); | 160 function->MarkForParallelRecompilation(); |
161 } else { | 161 } else { |
162 // The next call to the function will trigger optimization. | 162 // The next call to the function will trigger optimization. |
163 function->MarkForLazyRecompilation(); | 163 function->MarkForLazyRecompilation(); |
164 } | 164 } |
165 } | 165 } |
166 | 166 |
167 | 167 |
168 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { | 168 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { |
169 // See AlwaysFullCompiler (in compiler.cc) comment on why we need | 169 // See AlwaysFullCompiler (in compiler.cc) comment on why we need |
170 // Debug::has_break_points(). | 170 // Debug::has_break_points(). |
171 ASSERT(function->IsMarkedForLazyRecompilation() || | 171 ASSERT(function->IsMarkedForLazyRecompilation() || |
172 function->IsMarkedForParallelRecompilation()); | 172 function->IsMarkedForParallelRecompilation() || |
| 173 function->IsOptimized()); |
173 if (!FLAG_use_osr || | 174 if (!FLAG_use_osr || |
174 isolate_->DebuggerHasBreakPoints() || | 175 isolate_->DebuggerHasBreakPoints() || |
175 function->IsBuiltin()) { | 176 function->IsBuiltin()) { |
176 return; | 177 return; |
177 } | 178 } |
178 | 179 |
179 SharedFunctionInfo* shared = function->shared(); | 180 SharedFunctionInfo* shared = function->shared(); |
180 // If the code is not optimizable, don't try OSR. | 181 // If the code is not optimizable, don't try OSR. |
181 if (!shared->code()->optimizable()) return; | 182 if (!shared->code()->optimizable()) return; |
182 | 183 |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
238 sampler_window_[sampler_window_position_] = function; | 239 sampler_window_[sampler_window_position_] = function; |
239 sampler_window_weight_[sampler_window_position_] = weight; | 240 sampler_window_weight_[sampler_window_position_] = weight; |
240 sampler_window_position_ = (sampler_window_position_ + 1) & | 241 sampler_window_position_ = (sampler_window_position_ + 1) & |
241 (kSamplerWindowSize - 1); | 242 (kSamplerWindowSize - 1); |
242 } | 243 } |
243 | 244 |
244 | 245 |
245 void RuntimeProfiler::OptimizeNow() { | 246 void RuntimeProfiler::OptimizeNow() { |
246 HandleScope scope(isolate_); | 247 HandleScope scope(isolate_); |
247 | 248 |
| 249 if (FLAG_parallel_recompilation) { |
| 250 // Take this as opportunity to process the optimizing compiler thread's |
| 251 // output queue so that it does not unnecessarily keep objects alive. |
| 252 isolate_->optimizing_compiler_thread()->InstallOptimizedFunctions(); |
| 253 } |
| 254 |
248 // Run through the JavaScript frames and collect them. If we already | 255 // Run through the JavaScript frames and collect them. If we already |
249 // have a sample of the function, we mark it for optimizations | 256 // have a sample of the function, we mark it for optimizations |
250 // (eagerly or lazily). | 257 // (eagerly or lazily). |
251 JSFunction* samples[kSamplerFrameCount]; | 258 JSFunction* samples[kSamplerFrameCount]; |
252 int sample_count = 0; | 259 int sample_count = 0; |
253 int frame_count = 0; | 260 int frame_count = 0; |
254 int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count | 261 int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count |
255 : kSamplerFrameCount; | 262 : kSamplerFrameCount; |
256 for (JavaScriptFrameIterator it(isolate_); | 263 for (JavaScriptFrameIterator it(isolate_); |
257 frame_count++ < frame_count_limit && !it.done(); | 264 frame_count++ < frame_count_limit && !it.done(); |
(...skipping 15 matching lines...) Expand all Loading... |
273 kSamplerTicksBetweenThresholdAdjustment; | 280 kSamplerTicksBetweenThresholdAdjustment; |
274 } | 281 } |
275 } | 282 } |
276 } | 283 } |
277 } | 284 } |
278 | 285 |
279 SharedFunctionInfo* shared = function->shared(); | 286 SharedFunctionInfo* shared = function->shared(); |
280 Code* shared_code = shared->code(); | 287 Code* shared_code = shared->code(); |
281 | 288 |
282 if (shared_code->kind() != Code::FUNCTION) continue; | 289 if (shared_code->kind() != Code::FUNCTION) continue; |
| 290 if (function->IsInRecompileQueue()) continue; |
283 | 291 |
284 if (function->IsMarkedForLazyRecompilation() || | 292 // Attempt OSR if we are still running unoptimized code even though the |
285 function->IsMarkedForParallelRecompilation()) { | 293 // function has long been marked or even already been optimized. |
| 294 if (!frame->is_optimized() && |
| 295 (function->IsMarkedForLazyRecompilation() || |
| 296 function->IsMarkedForParallelRecompilation() || |
| 297 function->IsOptimized())) { |
286 int nesting = shared_code->allow_osr_at_loop_nesting_level(); | 298 int nesting = shared_code->allow_osr_at_loop_nesting_level(); |
287 if (nesting == 0) AttemptOnStackReplacement(function); | 299 if (nesting == 0) AttemptOnStackReplacement(function); |
288 int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker); | 300 int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker); |
289 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); | 301 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); |
290 } | 302 } |
291 | 303 |
292 // Only record top-level code on top of the execution stack and | 304 // Only record top-level code on top of the execution stack and |
293 // avoid optimizing excessively large scripts since top-level code | 305 // avoid optimizing excessively large scripts since top-level code |
294 // will be executed only once. | 306 // will be executed only once. |
295 const int kMaxToplevelSourceSize = 10 * 1024; | 307 const int kMaxToplevelSourceSize = 10 * 1024; |
(...skipping 177 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
473 | 485 |
474 | 486 |
475 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { | 487 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { |
476 for (int i = 0; i < kSamplerWindowSize; i++) { | 488 for (int i = 0; i < kSamplerWindowSize; i++) { |
477 visitor->VisitPointer(&sampler_window_[i]); | 489 visitor->VisitPointer(&sampler_window_[i]); |
478 } | 490 } |
479 } | 491 } |
480 | 492 |
481 | 493 |
482 } } // namespace v8::internal | 494 } } // namespace v8::internal |
OLD | NEW |