OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
140 // any back edge in any unoptimized frame will trigger on-stack | 140 // any back edge in any unoptimized frame will trigger on-stack |
141 // replacement for that frame. | 141 // replacement for that frame. |
142 if (FLAG_trace_osr) { | 142 if (FLAG_trace_osr) { |
143 PrintF("[patching stack checks in "); | 143 PrintF("[patching stack checks in "); |
144 function->PrintName(); | 144 function->PrintName(); |
145 PrintF(" for on-stack replacement]\n"); | 145 PrintF(" for on-stack replacement]\n"); |
146 } | 146 } |
147 | 147 |
148 // Get the stack check stub code object to match against. We aren't | 148 // Get the interrupt or stack check stub code object to match against. |
149 // prepared to generate it, but we don't expect to have to. | 149 // We aren't prepared to generate it, but we don't expect to have to. |
150 StackCheckStub check_stub; | 150 bool found_code = false; |
151 Code* stack_check_code = NULL; | 151 Code* stack_check_code = NULL; |
152 if (check_stub.FindCodeInCache(&stack_check_code)) { | 152 if (FLAG_count_based_interrupts) { |
| 153 InterruptStub interrupt_stub; |
| 154 found_code = interrupt_stub.FindCodeInCache(&stack_check_code); |
| 155 } else { |
| 156 StackCheckStub check_stub; |
| 157 found_code = check_stub.FindCodeInCache(&stack_check_code); |
| 158 } |
| 159 if (found_code) { |
153 Code* replacement_code = | 160 Code* replacement_code = |
154 isolate_->builtins()->builtin(Builtins::kOnStackReplacement); | 161 isolate_->builtins()->builtin(Builtins::kOnStackReplacement); |
155 Code* unoptimized_code = shared->code(); | 162 Code* unoptimized_code = shared->code(); |
156 Deoptimizer::PatchStackCheckCode(unoptimized_code, | 163 Deoptimizer::PatchStackCheckCode(unoptimized_code, |
157 stack_check_code, | 164 stack_check_code, |
158 replacement_code); | 165 replacement_code); |
159 } | 166 } |
160 } | 167 } |
161 | 168 |
162 | 169 |
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
248 !any_ic_changed_ && | 255 !any_ic_changed_ && |
249 total_code_generated_ > 0 && | 256 total_code_generated_ > 0 && |
250 total_code_generated_ < 2000) { | 257 total_code_generated_ < 2000) { |
251 // If no code was generated and no IC was patched since the last tick, | 258 // If no code was generated and no IC was patched since the last tick, |
252 // but a little code has already been generated since last Reset(), | 259 // but a little code has already been generated since last Reset(), |
253 // then type info might already be stable and we can optimize now. | 260 // then type info might already be stable and we can optimize now. |
254 Optimize(function, "stable on startup"); | 261 Optimize(function, "stable on startup"); |
255 } else { | 262 } else { |
256 function->shared()->set_profiler_ticks(ticks + 1); | 263 function->shared()->set_profiler_ticks(ticks + 1); |
257 } | 264 } |
258 } else { // !FLAG_counting_profiler | 265 } else { // !FLAG_watch_ic_patching |
259 samples[sample_count++] = function; | 266 samples[sample_count++] = function; |
260 | 267 |
261 int function_size = function->shared()->SourceSize(); | 268 int function_size = function->shared()->SourceSize(); |
262 int threshold_size_factor = (function_size > kSizeLimit) | 269 int threshold_size_factor = (function_size > kSizeLimit) |
263 ? sampler_threshold_size_factor_ | 270 ? sampler_threshold_size_factor_ |
264 : 1; | 271 : 1; |
265 | 272 |
266 int threshold = sampler_threshold_ * threshold_size_factor; | 273 int threshold = sampler_threshold_ * threshold_size_factor; |
267 | 274 |
268 if (LookupSample(function) >= threshold) { | 275 if (LookupSample(function) >= threshold) { |
269 Optimize(function, "sampler window lookup"); | 276 Optimize(function, "sampler window lookup"); |
270 } | 277 } |
271 } | 278 } |
272 } | 279 } |
273 if (FLAG_watch_ic_patching) { | 280 if (FLAG_watch_ic_patching) { |
274 any_ic_changed_ = false; | 281 any_ic_changed_ = false; |
275 code_generated_ = false; | 282 code_generated_ = false; |
276 } else { // !FLAG_counting_profiler | 283 } else { // !FLAG_watch_ic_patching |
277 // Add the collected functions as samples. It's important not to do | 284 // Add the collected functions as samples. It's important not to do |
278 // this as part of collecting them because this will interfere with | 285 // this as part of collecting them because this will interfere with |
279 // the sample lookup in case of recursive functions. | 286 // the sample lookup in case of recursive functions. |
280 for (int i = 0; i < sample_count; i++) { | 287 for (int i = 0; i < sample_count; i++) { |
281 AddSample(samples[i], kSamplerFrameWeight[i]); | 288 AddSample(samples[i], kSamplerFrameWeight[i]); |
282 } | 289 } |
283 } | 290 } |
284 } | 291 } |
285 | 292 |
286 | 293 |
287 void RuntimeProfiler::NotifyTick() { | 294 void RuntimeProfiler::NotifyTick() { |
| 295 if (FLAG_count_based_interrupts) return; |
288 isolate_->stack_guard()->RequestRuntimeProfilerTick(); | 296 isolate_->stack_guard()->RequestRuntimeProfilerTick(); |
289 } | 297 } |
290 | 298 |
291 | 299 |
292 void RuntimeProfiler::SetUp() { | 300 void RuntimeProfiler::SetUp() { |
293 ASSERT(has_been_globally_set_up_); | 301 ASSERT(has_been_globally_set_up_); |
294 if (!FLAG_watch_ic_patching) { | 302 if (!FLAG_watch_ic_patching) { |
295 ClearSampleBuffer(); | 303 ClearSampleBuffer(); |
296 } | 304 } |
297 // If the ticker hasn't already started, make sure to do so to get | 305 // If the ticker hasn't already started, make sure to do so to get |
298 // the ticks for the runtime profiler. | 306 // the ticks for the runtime profiler. |
299 if (IsEnabled()) isolate_->logger()->EnsureTickerStarted(); | 307 if (IsEnabled()) isolate_->logger()->EnsureTickerStarted(); |
300 } | 308 } |
301 | 309 |
302 | 310 |
303 void RuntimeProfiler::Reset() { | 311 void RuntimeProfiler::Reset() { |
304 if (FLAG_watch_ic_patching) { | 312 if (FLAG_watch_ic_patching) { |
305 total_code_generated_ = 0; | 313 total_code_generated_ = 0; |
306 } else { // !FLAG_counting_profiler | 314 } else { // !FLAG_watch_ic_patching |
307 sampler_threshold_ = kSamplerThresholdInit; | 315 sampler_threshold_ = kSamplerThresholdInit; |
308 sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit; | 316 sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit; |
309 sampler_ticks_until_threshold_adjustment_ = | 317 sampler_ticks_until_threshold_adjustment_ = |
310 kSamplerTicksBetweenThresholdAdjustment; | 318 kSamplerTicksBetweenThresholdAdjustment; |
311 } | 319 } |
312 } | 320 } |
313 | 321 |
314 | 322 |
315 void RuntimeProfiler::TearDown() { | 323 void RuntimeProfiler::TearDown() { |
316 // Nothing to do. | 324 // Nothing to do. |
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
405 | 413 |
406 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() { | 414 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() { |
407 if (!RuntimeProfiler::IsSomeIsolateInJS()) { | 415 if (!RuntimeProfiler::IsSomeIsolateInJS()) { |
408 return RuntimeProfiler::WaitForSomeIsolateToEnterJS(); | 416 return RuntimeProfiler::WaitForSomeIsolateToEnterJS(); |
409 } | 417 } |
410 return false; | 418 return false; |
411 } | 419 } |
412 | 420 |
413 | 421 |
414 } } // namespace v8::internal | 422 } } // namespace v8::internal |
OLD | NEW |