OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 62 matching lines...)
73 static const int kProfilerTicksBeforeReenablingOptimization = 250; | 73 static const int kProfilerTicksBeforeReenablingOptimization = 250; |
74 // If a function does not have enough type info (according to | 74 // If a function does not have enough type info (according to |
75 // FLAG_type_info_threshold), but has seen a huge number of ticks, | 75 // FLAG_type_info_threshold), but has seen a huge number of ticks, |
76 // optimize it as it is. | 76 // optimize it as it is. |
77 static const int kTicksWhenNotEnoughTypeInfo = 100; | 77 static const int kTicksWhenNotEnoughTypeInfo = 100; |
78 // We only have one byte to store the number of ticks. | 78 // We only have one byte to store the number of ticks. |
79 STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256); | 79 STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256); |
80 STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256); | 80 STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256); |
81 STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256); | 81 STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256); |
82 | 82 |
| 83 // Maximum size in bytes of generated code for a function to allow OSR. |
| 84 static const int kOSRCodeSizeAllowanceBase = |
| 85 100 * FullCodeGenerator::kCodeSizeMultiplier; |
| 86 |
| 87 static const int kOSRCodeSizeAllowancePerTick = |
| 88 3 * FullCodeGenerator::kCodeSizeMultiplier; |
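Read together, these two new constants give OSR a code-size budget that grows with the number of profiler ticks a function has accumulated: small functions may OSR right away, larger ones only after they have stayed hot for a while. A minimal sketch of that check, assuming the two constants above are in scope (the helper name is illustrative only, not part of the patch):

    // Sketch: is code of `code_size` bytes still small enough for OSR
    // after `ticks` profiler ticks, given the allowance constants above?
    static inline bool WithinOSRSizeAllowance(int code_size, int ticks) {
      return code_size <=
             kOSRCodeSizeAllowanceBase + ticks * kOSRCodeSizeAllowancePerTick;
    }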
83 | 89 |
84 // Maximum size in bytes of generated code for a function to be optimized | 90 // Maximum size in bytes of generated code for a function to be optimized |
85 // the very first time it is seen on the stack. | 91 // the very first time it is seen on the stack. |
86 static const int kMaxSizeEarlyOpt = | 92 static const int kMaxSizeEarlyOpt = |
87 5 * FullCodeGenerator::kBackEdgeDistanceUnit; | 93 5 * FullCodeGenerator::kCodeSizeMultiplier; |
88 | 94 |
89 | 95 |
90 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) | 96 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) |
91 : isolate_(isolate), | 97 : isolate_(isolate), |
92 sampler_threshold_(kSamplerThresholdInit), | 98 sampler_threshold_(kSamplerThresholdInit), |
93 sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit), | 99 sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit), |
94 sampler_ticks_until_threshold_adjustment_( | 100 sampler_ticks_until_threshold_adjustment_( |
95 kSamplerTicksBetweenThresholdAdjustment), | 101 kSamplerTicksBetweenThresholdAdjustment), |
96 sampler_window_position_(0), | 102 sampler_window_position_(0), |
97 any_ic_changed_(false), | 103 any_ic_changed_(false), |
98 code_generated_(false) { | 104 code_generated_(false) { |
99 ClearSampleBuffer(); | 105 ClearSampleBuffer(); |
100 } | 106 } |
101 | 107 |
102 | 108 |
103 static void GetICCounts(JSFunction* function, | 109 static void GetICCounts(Code* shared_code, |
104 int* ic_with_type_info_count, | 110 int* ic_with_type_info_count, |
105 int* ic_total_count, | 111 int* ic_total_count, |
106 int* percentage) { | 112 int* percentage) { |
107 *ic_total_count = 0; | 113 *ic_total_count = 0; |
108 *ic_with_type_info_count = 0; | 114 *ic_with_type_info_count = 0; |
109 Object* raw_info = | 115 Object* raw_info = shared_code->type_feedback_info(); |
110 function->shared()->code()->type_feedback_info(); | |
111 if (raw_info->IsTypeFeedbackInfo()) { | 116 if (raw_info->IsTypeFeedbackInfo()) { |
112 TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info); | 117 TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info); |
113 *ic_with_type_info_count = info->ic_with_type_info_count(); | 118 *ic_with_type_info_count = info->ic_with_type_info_count(); |
114 *ic_total_count = info->ic_total_count(); | 119 *ic_total_count = info->ic_total_count(); |
115 } | 120 } |
116 *percentage = *ic_total_count > 0 | 121 *percentage = *ic_total_count > 0 |
117 ? 100 * *ic_with_type_info_count / *ic_total_count | 122 ? 100 * *ic_with_type_info_count / *ic_total_count |
118 : 100; | 123 : 100; |
119 } | 124 } |
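GetICCounts now reads the type feedback directly off the unoptimized Code object and reports what fraction of its inline caches have collected type information. A simplified sketch of how that percentage feeds the optimization decision further down; the threshold is normally FLAG_type_info_threshold (a fixed stand-in value is used here), and the tick constants are the ones declared near the top of the file:

    // Sketch of the decision made in the "hot and stable" branch below.
    static bool ShouldOptimizeNow(int ticks, int percentage) {
      const int kTypeInfoThresholdForSketch = 15;  // stand-in for the flag
      if (ticks < kProfilerTicksBeforeOptimization) return false;
      if (percentage >= kTypeInfoThresholdForSketch) return true;  // hot and stable
      return ticks >= kTicksWhenNotEnoughTypeInfo;  // very hot, little type info
    }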
120 | 125 |
121 | 126 |
122 void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) { | 127 void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) { |
123 ASSERT(function->IsOptimizable()); | 128 ASSERT(function->IsOptimizable()); |
124 | 129 |
125 if (FLAG_trace_opt && function->PassesHydrogenFilter()) { | 130 if (FLAG_trace_opt && function->PassesHydrogenFilter()) { |
126 PrintF("[marking "); | 131 PrintF("[marking "); |
127 function->ShortPrint(); | 132 function->ShortPrint(); |
128 PrintF(" for recompilation, reason: %s", reason); | 133 PrintF(" for recompilation, reason: %s", reason); |
129 if (FLAG_type_info_threshold > 0) { | 134 if (FLAG_type_info_threshold > 0) { |
130 int typeinfo, total, percentage; | 135 int typeinfo, total, percentage; |
131 GetICCounts(function, &typeinfo, &total, &percentage); | 136 GetICCounts(function->shared()->code(), &typeinfo, &total, &percentage); |
132 PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage); | 137 PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage); |
133 } | 138 } |
134 PrintF("]\n"); | 139 PrintF("]\n"); |
135 } | 140 } |
136 | 141 |
137 if (FLAG_parallel_recompilation && !isolate_->bootstrapper()->IsActive()) { | 142 if (FLAG_parallel_recompilation && !isolate_->bootstrapper()->IsActive()) { |
138 ASSERT(!function->IsMarkedForInstallingRecompiledCode()); | 143 ASSERT(!function->IsMarkedForInstallingRecompiledCode()); |
139 ASSERT(!function->IsInRecompileQueue()); | 144 ASSERT(!function->IsInRecompileQueue()); |
140 function->MarkForParallelRecompilation(); | 145 function->MarkForParallelRecompilation(); |
141 } else { | 146 } else { |
(...skipping 125 matching lines...)
267 | 272 |
268 if (shared_code->kind() != Code::FUNCTION) continue; | 273 if (shared_code->kind() != Code::FUNCTION) continue; |
269 if (function->IsInRecompileQueue()) continue; | 274 if (function->IsInRecompileQueue()) continue; |
270 | 275 |
271 // Attempt OSR if we are still running unoptimized code even though | 276 // Attempt OSR if we are still running unoptimized code even though |
272 // the function has long been marked or even already been optimized. | 277 // the function has long been marked or even already been optimized. |
273 if (!frame->is_optimized() && | 278 if (!frame->is_optimized() && |
274 (function->IsMarkedForLazyRecompilation() || | 279 (function->IsMarkedForLazyRecompilation() || |
275 function->IsMarkedForParallelRecompilation() || | 280 function->IsMarkedForParallelRecompilation() || |
276 function->IsOptimized())) { | 281 function->IsOptimized())) { |
277 int nesting = shared_code->allow_osr_at_loop_nesting_level(); | 282 int ticks = shared_code->profiler_ticks(); |
278 if (nesting < Code::kMaxLoopNestingMarker) { | 283 int allowance = kOSRCodeSizeAllowanceBase + |
279 int new_nesting = nesting + 1; | 284 ticks * kOSRCodeSizeAllowancePerTick; |
280 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); | 285 if (shared_code->CodeSize() > allowance) { |
281 AttemptOnStackReplacement(function); | 286 if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1); |
| 287 } else { |
| 288 int nesting = shared_code->allow_osr_at_loop_nesting_level(); |
| 289 if (nesting < Code::kMaxLoopNestingMarker) { |
| 290 int new_nesting = nesting + 1; |
| 291 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); |
| 292 AttemptOnStackReplacement(function); |
| 293 } |
282 } | 294 } |
| 295 continue; |
283 } | 296 } |
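The new branch only widens the OSR loop-nesting level while the unoptimized code stays inside its tick-scaled size allowance; oversized code merely has its tick counter bumped (capped at 255, since only one byte is available for ticks). A standalone restatement of that gate, with plain ints standing in for the relevant Code fields and an illustrative max-nesting parameter, not the real Code interface:

    // Sketch of the new OSR gate introduced in this hunk.
    struct OSRCounters {
      int code_size;
      int profiler_ticks;
      int allow_osr_at_loop_nesting_level;
    };

    // Returns true when the caller should attempt on-stack replacement.
    static bool MaybeAllowOSR(OSRCounters* c, int max_nesting_marker) {
      int allowance = kOSRCodeSizeAllowanceBase +
                      c->profiler_ticks * kOSRCodeSizeAllowancePerTick;
      if (c->code_size > allowance) {
        if (c->profiler_ticks < 255) c->profiler_ticks++;  // one-byte counter
        return false;  // too large for now; the allowance grows with ticks
      }
      if (c->allow_osr_at_loop_nesting_level < max_nesting_marker) {
        c->allow_osr_at_loop_nesting_level++;  // unlock one more loop depth
        return true;
      }
      return false;
    }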
284 | 297 |
285 // Only record top-level code on top of the execution stack and | 298 // Only record top-level code on top of the execution stack and |
286 // avoid optimizing excessively large scripts since top-level code | 299 // avoid optimizing excessively large scripts since top-level code |
287 // will be executed only once. | 300 // will be executed only once. |
288 const int kMaxToplevelSourceSize = 10 * 1024; | 301 const int kMaxToplevelSourceSize = 10 * 1024; |
289 if (shared->is_toplevel() && | 302 if (shared->is_toplevel() && |
290 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { | 303 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { |
291 continue; | 304 continue; |
292 } | 305 } |
(...skipping 13 matching lines...)
306 } | 319 } |
307 continue; | 320 continue; |
308 } | 321 } |
309 if (!function->IsOptimizable()) continue; | 322 if (!function->IsOptimizable()) continue; |
310 | 323 |
311 if (FLAG_watch_ic_patching) { | 324 if (FLAG_watch_ic_patching) { |
312 int ticks = shared_code->profiler_ticks(); | 325 int ticks = shared_code->profiler_ticks(); |
313 | 326 |
314 if (ticks >= kProfilerTicksBeforeOptimization) { | 327 if (ticks >= kProfilerTicksBeforeOptimization) { |
315 int typeinfo, total, percentage; | 328 int typeinfo, total, percentage; |
316 GetICCounts(function, &typeinfo, &total, &percentage); | 329 GetICCounts(shared_code, &typeinfo, &total, &percentage); |
317 if (percentage >= FLAG_type_info_threshold) { | 330 if (percentage >= FLAG_type_info_threshold) { |
318 // If this particular function hasn't had any ICs patched for enough | 331 // If this particular function hasn't had any ICs patched for enough |
319 // ticks, optimize it now. | 332 // ticks, optimize it now. |
320 Optimize(function, "hot and stable"); | 333 Optimize(function, "hot and stable"); |
321 } else if (ticks >= kTicksWhenNotEnoughTypeInfo) { | 334 } else if (ticks >= kTicksWhenNotEnoughTypeInfo) { |
322 Optimize(function, "not much type info but very hot"); | 335 Optimize(function, "not much type info but very hot"); |
323 } else { | 336 } else { |
324 shared_code->set_profiler_ticks(ticks + 1); | 337 shared_code->set_profiler_ticks(ticks + 1); |
325 if (FLAG_trace_opt_verbose) { | 338 if (FLAG_trace_opt_verbose) { |
326 PrintF("[not yet optimizing "); | 339 PrintF("[not yet optimizing "); |
(...skipping 93 matching lines...)
420 | 433 |
421 | 434 |
422 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { | 435 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { |
423 for (int i = 0; i < kSamplerWindowSize; i++) { | 436 for (int i = 0; i < kSamplerWindowSize; i++) { |
424 visitor->VisitPointer(&sampler_window_[i]); | 437 visitor->VisitPointer(&sampler_window_[i]); |
425 } | 438 } |
426 } | 439 } |
427 | 440 |
428 | 441 |
429 } } // namespace v8::internal | 442 } } // namespace v8::internal |