| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 172 matching lines...) |
| 183 | 183 |
| 184 // We are not prepared to do OSR for a function that already has an | 184 // We are not prepared to do OSR for a function that already has an |
| 185 // allocated arguments object. The optimized code would bypass it for | 185 // allocated arguments object. The optimized code would bypass it for |
| 186 // arguments accesses, which is unsound. Don't try OSR. | 186 // arguments accesses, which is unsound. Don't try OSR. |
| 187 if (shared->uses_arguments()) return; | 187 if (shared->uses_arguments()) return; |
| 188 | 188 |
| 189 // We're using on-stack replacement: patch the unoptimized code so that | 189 // We're using on-stack replacement: patch the unoptimized code so that |
| 190 // any back edge in any unoptimized frame will trigger on-stack | 190 // any back edge in any unoptimized frame will trigger on-stack |
| 191 // replacement for that frame. | 191 // replacement for that frame. |
| 192 if (FLAG_trace_osr) { | 192 if (FLAG_trace_osr) { |
| 193 PrintF("[patching stack checks in "); | 193 PrintF("[patching back edges in "); |
| 194 function->PrintName(); | 194 function->PrintName(); |
| 195 PrintF(" for on-stack replacement]\n"); | 195 PrintF(" for on-stack replacement]\n"); |
| 196 } | 196 } |
| 197 | 197 |
| 198 // Get the stack check stub code object to match against. We aren't | 198 // Get the interrupt stub code object to match against. We aren't |
| 199 // prepared to generate it, but we don't expect to have to. | 199 // prepared to generate it, but we don't expect to have to. |
| 200 Code* stack_check_code = NULL; | 200 Code* interrupt_code = NULL; |
| 201 InterruptStub interrupt_stub; | 201 InterruptStub interrupt_stub; |
| 202 bool found_code = interrupt_stub.FindCodeInCache(&stack_check_code, isolate_); | 202 bool found_code = interrupt_stub.FindCodeInCache(&interrupt_code, isolate_); |
| 203 if (found_code) { | 203 if (found_code) { |
| 204 Code* replacement_code = | 204 Code* replacement_code = |
| 205 isolate_->builtins()->builtin(Builtins::kOnStackReplacement); | 205 isolate_->builtins()->builtin(Builtins::kOnStackReplacement); |
| 206 Code* unoptimized_code = shared->code(); | 206 Code* unoptimized_code = shared->code(); |
| 207 Deoptimizer::PatchStackCheckCode(unoptimized_code, | 207 Deoptimizer::PatchInterruptCode( |
| 208 stack_check_code, | 208 unoptimized_code, interrupt_code, replacement_code); |
| 209 replacement_code); | |
| 210 } | 209 } |
| 211 } | 210 } |
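
For context on the rename above: the patch stops matching back edges against the old stack-check stub and instead matches the interrupt stub, rewriting each matched call to the kOnStackReplacement builtin. Below is a minimal, self-contained sketch of that patching idea; the types and names are hypothetical stand-ins, not V8's real Code objects or the actual Deoptimizer::PatchInterruptCode signature.

```cpp
// A toy model of back-edge patching, not V8's implementation: every back
// edge in the "unoptimized code" is modeled as a call slot, and patching
// swaps the interrupt-stub target for the OSR builtin. All names here
// are hypothetical.
#include <cstdio>
#include <vector>

enum class Target { kInterruptStub, kOnStackReplacement };

struct BackEdgeSlot {
  int loop_nesting;  // nesting depth of the loop this edge closes
  Target target;     // what the back edge currently calls into
};

// Analogous in spirit to Deoptimizer::PatchInterruptCode: rewrite each
// back-edge call that matches the interrupt stub so it enters the OSR
// builtin instead.
void PatchInterruptCode(std::vector<BackEdgeSlot>& code) {
  for (BackEdgeSlot& slot : code) {
    if (slot.target == Target::kInterruptStub) {
      slot.target = Target::kOnStackReplacement;
    }
  }
}

int main() {
  std::vector<BackEdgeSlot> unoptimized_code = {
      {1, Target::kInterruptStub}, {2, Target::kInterruptStub}};
  PatchInterruptCode(unoptimized_code);
  for (const BackEdgeSlot& slot : unoptimized_code) {
    std::printf("nesting %d -> %s\n", slot.loop_nesting,
                slot.target == Target::kOnStackReplacement ? "OSR"
                                                           : "interrupt");
  }
  return 0;
}
```
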
| 212 | 211 |
| 213 | 212 |
| 214 void RuntimeProfiler::ClearSampleBuffer() { | 213 void RuntimeProfiler::ClearSampleBuffer() { |
| 215 memset(sampler_window_, 0, sizeof(sampler_window_)); | 214 memset(sampler_window_, 0, sizeof(sampler_window_)); |
| 216 memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_)); | 215 memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_)); |
| 217 } | 216 } |
| 218 | 217 |
| 219 | 218 |
| (...skipping 69 matching lines...) |
| 289 if (shared_code->kind() != Code::FUNCTION) continue; | 288 if (shared_code->kind() != Code::FUNCTION) continue; |
| 290 if (function->IsInRecompileQueue()) continue; | 289 if (function->IsInRecompileQueue()) continue; |
| 291 | 290 |
| 292 // Attempt OSR if we are still running unoptimized code even though | 291 // Attempt OSR if we are still running unoptimized code even though |
| 293 // the function has long been marked or even already been optimized. | 292 // the function has long been marked or even already been optimized. |
| 294 if (!frame->is_optimized() && | 293 if (!frame->is_optimized() && |
| 295 (function->IsMarkedForLazyRecompilation() || | 294 (function->IsMarkedForLazyRecompilation() || |
| 296 function->IsMarkedForParallelRecompilation() || | 295 function->IsMarkedForParallelRecompilation() || |
| 297 function->IsOptimized())) { | 296 function->IsOptimized())) { |
| 298 int nesting = shared_code->allow_osr_at_loop_nesting_level(); | 297 int nesting = shared_code->allow_osr_at_loop_nesting_level(); |
| 299 if (nesting == 0) AttemptOnStackReplacement(function); | 298 if (nesting < Code::kMaxLoopNestingMarker) { |
| 300 int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker); | 299 int new_nesting = nesting + 1; |
| 301 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); | 300 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); |
| 301 AttemptOnStackReplacement(function); |
| 302 } |
| 302 } | 303 } |
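
The hunk above changes behavior, not just formatting: the old code attempted OSR only while the allowance was still zero and then bumped it with Min(nesting + 1, Code::kMaxLoopNestingMarker), whereas the new code widens the allowance by one level on every profiler tick below Code::kMaxLoopNestingMarker and retries OSR each time. A standalone sketch of the new ratchet, with the marker value assumed purely for the demo:

```cpp
// A hedged model of the revised ratchet, assuming kMaxLoopNestingMarker
// caps how deeply nested a loop may be for OSR: each profiler tick widens
// the allowance by one level and retries OSR, instead of trying only once
// at level zero as the old code did.
#include <cstdio>

const int kMaxLoopNestingMarker = 6;  // value is an assumption for the demo

int main() {
  int allow_osr_at_loop_nesting_level = 0;
  for (int tick = 0; tick < 10; ++tick) {
    if (allow_osr_at_loop_nesting_level < kMaxLoopNestingMarker) {
      ++allow_osr_at_loop_nesting_level;
      std::printf("tick %d: attempt OSR for loops up to nesting %d\n", tick,
                  allow_osr_at_loop_nesting_level);
    }
  }
  return 0;
}
```
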
| 303 | 304 |
| 304 // Only record top-level code on top of the execution stack and | 305 // Only record top-level code on top of the execution stack and |
| 305 // avoid optimizing excessively large scripts since top-level code | 306 // avoid optimizing excessively large scripts since top-level code |
| 306 // will be executed only once. | 307 // will be executed only once. |
| 307 const int kMaxToplevelSourceSize = 10 * 1024; | 308 const int kMaxToplevelSourceSize = 10 * 1024; |
| 308 if (shared->is_toplevel() && | 309 if (shared->is_toplevel() && |
| 309 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { | 310 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { |
| 310 continue; | 311 continue; |
| 311 } | 312 } |
| (...skipping 170 matching lines...) |
| 482 | 483 |
| 483 | 484 |
| 484 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { | 485 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { |
| 485 for (int i = 0; i < kSamplerWindowSize; i++) { | 486 for (int i = 0; i < kSamplerWindowSize; i++) { |
| 486 visitor->VisitPointer(&sampler_window_[i]); | 487 visitor->VisitPointer(&sampler_window_[i]); |
| 487 } | 488 } |
| 488 } | 489 } |
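
UpdateSamplesAfterCompact exists because the sampler window holds raw pointers into the heap, and a compacting GC can move the functions those slots point at; visiting each slot lets the collector rewrite stale addresses. A rough, self-contained model of that visit (the types here are simplified stand-ins for V8's ObjectVisitor protocol, not its real interface):

```cpp
// Sketch of the pointer-visiting idea, assuming the GC hands the profiler
// a visitor whose VisitPointer may rewrite a slot to an object's
// post-compaction address. Hypothetical, simplified types.
#include <cstdio>

struct Object { int id; };

struct ObjectVisitor {
  Object* relocated;  // stand-in for "where the object moved to"
  void VisitPointer(Object** slot) {
    if (*slot != nullptr) *slot = relocated;  // fix up the stale pointer
  }
};

int main() {
  const int kSamplerWindowSize = 4;
  Object old_location{1};
  Object new_location{1};
  Object* sampler_window[kSamplerWindowSize] = {&old_location, nullptr,
                                                &old_location, nullptr};
  ObjectVisitor visitor{&new_location};
  for (int i = 0; i < kSamplerWindowSize; i++) {
    visitor.VisitPointer(&sampler_window[i]);  // as in UpdateSamplesAfterCompact
  }
  std::printf("slot 0 now points at relocated object %d\n",
              sampler_window[0]->id);
  return 0;
}
```
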
| 489 | 490 |
| 490 | 491 |
| 491 } } // namespace v8::internal | 492 } } // namespace v8::internal |