| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 7591 matching lines...) |
| 7602 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); | 7602 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0); |
| 7603 | 7603 |
| 7604 if (!function->IsOptimizable()) return isolate->heap()->undefined_value(); | 7604 if (!function->IsOptimizable()) return isolate->heap()->undefined_value(); |
| 7605 function->MarkForLazyRecompilation(); | 7605 function->MarkForLazyRecompilation(); |
| 7606 | 7606 |
| 7607 Code* unoptimized = function->shared()->code(); | 7607 Code* unoptimized = function->shared()->code(); |
| 7608 if (args.length() == 2 && | 7608 if (args.length() == 2 && |
| 7609 unoptimized->kind() == Code::FUNCTION) { | 7609 unoptimized->kind() == Code::FUNCTION) { |
| 7610 CONVERT_ARG_HANDLE_CHECKED(String, type, 1); | 7610 CONVERT_ARG_HANDLE_CHECKED(String, type, 1); |
| 7611 if (type->IsOneByteEqualTo(STATIC_ASCII_VECTOR("osr"))) { | 7611 if (type->IsOneByteEqualTo(STATIC_ASCII_VECTOR("osr"))) { |
| 7612 isolate->runtime_profiler()->AttemptOnStackReplacement(*function); | 7612 for (int i = 0; i <= Code::kMaxLoopNestingMarker; i++) { |
| 7613 unoptimized->set_allow_osr_at_loop_nesting_level( | 7613 unoptimized->set_allow_osr_at_loop_nesting_level(i); |
| 7614 Code::kMaxLoopNestingMarker); | 7614 isolate->runtime_profiler()->AttemptOnStackReplacement(*function); |
| | 7615 } |
| 7615 } else if (type->IsOneByteEqualTo(STATIC_ASCII_VECTOR("parallel"))) { | 7616 } else if (type->IsOneByteEqualTo(STATIC_ASCII_VECTOR("parallel"))) { |
| 7616 function->MarkForParallelRecompilation(); | 7617 function->MarkForParallelRecompilation(); |
| 7617 } | 7618 } |
| 7618 } | 7619 } |
| 7619 | 7620 |
| 7620 return isolate->heap()->undefined_value(); | 7621 return isolate->heap()->undefined_value(); |
| 7621 } | 7622 } |
| 7622 | 7623 |
| 7623 | 7624 |
| 7624 RUNTIME_FUNCTION(MaybeObject*, Runtime_WaitUntilOptimized) { | 7625 RUNTIME_FUNCTION(MaybeObject*, Runtime_WaitUntilOptimized) { |
| (...skipping 76 matching lines...) |
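Note on the hunk above: the "osr" branch of Runtime_OptimizeFunctionOnNextCall no longer makes a single OSR attempt with Code::kMaxLoopNestingMarker allowed; it now walks every loop nesting level and retries the on-stack replacement at each one. The standalone sketch below only mirrors that control flow with mocked stand-ins so it compiles outside V8; MockCode, the free AttemptOnStackReplacement function, and the value chosen for kMaxLoopNestingMarker are assumptions, not V8 API.

```cpp
#include <cstdio>

namespace sketch {

// Assumption: a small constant standing in for Code::kMaxLoopNestingMarker.
const int kMaxLoopNestingMarker = 6;

// Mocked stand-in for the unoptimized Code object.
struct MockCode {
  int allowed_level = 0;
  void set_allow_osr_at_loop_nesting_level(int level) { allowed_level = level; }
};

// Mocked stand-in for RuntimeProfiler::AttemptOnStackReplacement.
void AttemptOnStackReplacement(MockCode* unoptimized) {
  std::printf("attempt OSR with loop nesting level %d allowed\n",
              unoptimized->allowed_level);
}

// Old behavior: one attempt with the maximum nesting level allowed.
// New behavior in the patch: try every level, so a back edge at any
// loop depth can trigger the replacement.
void ForceOsrAtAllLoopDepths(MockCode* unoptimized) {
  for (int i = 0; i <= kMaxLoopNestingMarker; i++) {
    unoptimized->set_allow_osr_at_loop_nesting_level(i);
    AttemptOnStackReplacement(unoptimized);
  }
}

}  // namespace sketch

int main() {
  sketch::MockCode code;
  sketch::ForceOsrAtAllLoopDepths(&code);
  return 0;
}
```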
| 7701 JavaScriptFrameIterator it(isolate); | 7702 JavaScriptFrameIterator it(isolate); |
| 7702 JavaScriptFrame* frame = it.frame(); | 7703 JavaScriptFrame* frame = it.frame(); |
| 7703 ASSERT(frame->function() == *function); | 7704 ASSERT(frame->function() == *function); |
| 7704 ASSERT(frame->LookupCode() == *unoptimized); | 7705 ASSERT(frame->LookupCode() == *unoptimized); |
| 7705 ASSERT(unoptimized->contains(frame->pc())); | 7706 ASSERT(unoptimized->contains(frame->pc())); |
| 7706 | 7707 |
| 7707 // Use linear search of the unoptimized code's stack check table to find | 7708 // Use linear search of the unoptimized code's stack check table to find |
| 7708 // the AST id matching the PC. | 7709 // the AST id matching the PC. |
| 7709 Address start = unoptimized->instruction_start(); | 7710 Address start = unoptimized->instruction_start(); |
| 7710 unsigned target_pc_offset = static_cast<unsigned>(frame->pc() - start); | 7711 unsigned target_pc_offset = static_cast<unsigned>(frame->pc() - start); |
| 7711 Address table_cursor = start + unoptimized->stack_check_table_offset(); | 7712 Address table_cursor = start + unoptimized->back_edge_table_offset(); |
| 7712 uint32_t table_length = Memory::uint32_at(table_cursor); | 7713 uint32_t table_length = Memory::uint32_at(table_cursor); |
| 7713 table_cursor += kIntSize; | 7714 table_cursor += kIntSize; |
| | 7715 static const int kBackEdgeEntrySize = 2 * kIntSize + kOneByteSize; |
| | 7716 uint8_t loop_depth = 0; |
| 7714 for (unsigned i = 0; i < table_length; ++i) { | 7717 for (unsigned i = 0; i < table_length; ++i) { |
| 7715 // Table entries are (AST id, pc offset) pairs. | 7718 // Table entries are (AST id, pc offset) pairs. |
| 7716 uint32_t pc_offset = Memory::uint32_at(table_cursor + kIntSize); | 7719 uint32_t pc_offset = Memory::uint32_at(table_cursor + kIntSize); |
| 7717 if (pc_offset == target_pc_offset) { | 7720 if (pc_offset == target_pc_offset) { |
| 7718 ast_id = BailoutId(static_cast<int>(Memory::uint32_at(table_cursor))); | 7721 ast_id = BailoutId(static_cast<int>(Memory::uint32_at(table_cursor))); |
| | 7722 loop_depth = Memory::uint8_at(table_cursor + 2 * kIntSize); |
| 7719 break; | 7723 break; |
| 7720 } | 7724 } |
| 7721 table_cursor += 2 * kIntSize; | 7725 table_cursor += kBackEdgeEntrySize; |
| 7722 } | 7726 } |
| 7723 ASSERT(!ast_id.IsNone()); | 7727 ASSERT(!ast_id.IsNone()); |
| 7724 if (FLAG_trace_osr) { | 7728 if (FLAG_trace_osr) { |
| 7725 PrintF("[replacing on-stack at AST id %d in ", ast_id.ToInt()); | 7729 PrintF("[replacing on-stack at AST id %d, loop depth %d in ", |
| | 7730 ast_id.ToInt(), loop_depth); |
| 7726 function->PrintName(); | 7731 function->PrintName(); |
| 7727 PrintF("]\n"); | 7732 PrintF("]\n"); |
| 7728 } | 7733 } |
| 7729 | 7734 |
| 7730 // Try to compile the optimized code. A true return value from | 7735 // Try to compile the optimized code. A true return value from |
| 7731 // CompileOptimized means that compilation succeeded, not necessarily | 7736 // CompileOptimized means that compilation succeeded, not necessarily |
| 7732 // that optimization succeeded. | 7737 // that optimization succeeded. |
| 7733 if (JSFunction::CompileOptimized(function, ast_id, CLEAR_EXCEPTION) && | 7738 if (JSFunction::CompileOptimized(function, ast_id, CLEAR_EXCEPTION) && |
| 7734 function->IsOptimized()) { | 7739 function->IsOptimized()) { |
| 7735 DeoptimizationInputData* data = DeoptimizationInputData::cast( | 7740 DeoptimizationInputData* data = DeoptimizationInputData::cast( |
| (...skipping 14 matching lines...) |
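The back edge table walked in the hunk above now carries a loop depth byte per entry: a uint32 entry count followed by packed records of { uint32 ast_id, uint32 pc_offset, uint8 loop_depth }, giving kBackEdgeEntrySize = 2 * kIntSize + kOneByteSize = 9 bytes. The self-contained sketch below reproduces that linear search over a hand-built table; the helper names (BackEdgeEntry, ReadUint32, FindBackEdge) are illustrative assumptions, not V8 API.

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

namespace sketch {

const int kIntSize = 4;
const int kOneByteSize = 1;
// Entry layout from the patch: uint32 ast_id, uint32 pc_offset, uint8 loop_depth.
const int kBackEdgeEntrySize = 2 * kIntSize + kOneByteSize;  // 9 bytes

struct BackEdgeEntry {
  uint32_t ast_id;
  uint32_t pc_offset;
  uint8_t loop_depth;
};

uint32_t ReadUint32(const uint8_t* p) {
  uint32_t v;
  std::memcpy(&v, p, sizeof(v));
  return v;
}

// Linear search mirroring the runtime function's loop: find the entry whose
// pc offset matches the back edge the frame is standing on, then recover
// its AST id and loop depth.
bool FindBackEdge(const uint8_t* table, uint32_t target_pc_offset,
                  BackEdgeEntry* out) {
  uint32_t length = ReadUint32(table);       // table starts with the entry count
  const uint8_t* cursor = table + kIntSize;  // entries follow immediately
  for (uint32_t i = 0; i < length; ++i) {
    uint32_t pc_offset = ReadUint32(cursor + kIntSize);
    if (pc_offset == target_pc_offset) {
      out->ast_id = ReadUint32(cursor);
      out->pc_offset = pc_offset;
      out->loop_depth = *(cursor + 2 * kIntSize);
      return true;
    }
    cursor += kBackEdgeEntrySize;
  }
  return false;
}

}  // namespace sketch

int main() {
  // Build a tiny two-entry table by hand to exercise the search.
  std::vector<uint8_t> table(sketch::kIntSize + 2 * sketch::kBackEdgeEntrySize);
  uint32_t length = 2;
  std::memcpy(&table[0], &length, sizeof(length));
  uint32_t ids[2] = {7, 12}, pcs[2] = {0x40, 0x88};
  uint8_t depths[2] = {1, 3};
  for (int i = 0; i < 2; ++i) {
    uint8_t* entry = &table[sketch::kIntSize + i * sketch::kBackEdgeEntrySize];
    std::memcpy(entry, &ids[i], 4);
    std::memcpy(entry + 4, &pcs[i], 4);
    entry[8] = depths[i];
  }
  sketch::BackEdgeEntry hit;
  if (sketch::FindBackEdge(table.data(), 0x88, &hit)) {
    std::printf("AST id %u, loop depth %u\n",
                static_cast<unsigned>(hit.ast_id),
                static_cast<unsigned>(hit.loop_depth));
  }
  return 0;
}
```

Running the sketch resolves the entry at pc offset 0x88 to AST id 12 at loop depth 3, which is the same pair the runtime function recovers into ast_id and loop_depth before the --trace-osr message is printed.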
| 7750 } | 7755 } |
| 7751 } | 7756 } |
| 7752 | 7757 |
| 7753 // Revert to the original stack checks in the original unoptimized code. | 7758 // Revert to the original stack checks in the original unoptimized code. |
| 7754 if (FLAG_trace_osr) { | 7759 if (FLAG_trace_osr) { |
| 7755 PrintF("[restoring original stack checks in "); | 7760 PrintF("[restoring original stack checks in "); |
| 7756 function->PrintName(); | 7761 function->PrintName(); |
| 7757 PrintF("]\n"); | 7762 PrintF("]\n"); |
| 7758 } | 7763 } |
| 7759 InterruptStub interrupt_stub; | 7764 InterruptStub interrupt_stub; |
| 7760 Handle<Code> check_code = interrupt_stub.GetCode(isolate); | 7765 Handle<Code> interrupt_code = interrupt_stub.GetCode(isolate); |
| 7761 Handle<Code> replacement_code = isolate->builtins()->OnStackReplacement(); | 7766 Handle<Code> replacement_code = isolate->builtins()->OnStackReplacement(); |
| 7762 Deoptimizer::RevertStackCheckCode(*unoptimized, | 7767 Deoptimizer::RevertInterruptCode(*unoptimized, |
| 7763 *check_code, | 7768 *interrupt_code, |
| 7764 *replacement_code); | 7769 *replacement_code); |
| 7765 | 7770 |
| 7766 // Allow OSR only at nesting level zero again. | 7771 // Allow OSR only at nesting level zero again. |
| 7767 unoptimized->set_allow_osr_at_loop_nesting_level(0); | 7772 unoptimized->set_allow_osr_at_loop_nesting_level(0); |
| 7768 | 7773 |
| 7769 // If the optimization attempt succeeded, return the AST id tagged as a | 7774 // If the optimization attempt succeeded, return the AST id tagged as a |
| 7770 // smi. This tells the builtin that we need to translate the unoptimized | 7775 // smi. This tells the builtin that we need to translate the unoptimized |
| 7771 // frame to an optimized one. | 7776 // frame to an optimized one. |
| 7772 if (succeeded) { | 7777 if (succeeded) { |
| 7773 ASSERT(function->code()->kind() == Code::OPTIMIZED_FUNCTION); | 7778 ASSERT(function->code()->kind() == Code::OPTIMIZED_FUNCTION); |
| 7774 return Smi::FromInt(ast_id.ToInt()); | 7779 return Smi::FromInt(ast_id.ToInt()); |
| (...skipping 5276 matching lines...) |
| 13051 // Handle last resort GC and make sure to allow future allocations | 13056 // Handle last resort GC and make sure to allow future allocations |
| 13052 // to grow the heap without causing GCs (if possible). | 13057 // to grow the heap without causing GCs (if possible). |
| 13053 isolate->counters()->gc_last_resort_from_js()->Increment(); | 13058 isolate->counters()->gc_last_resort_from_js()->Increment(); |
| 13054 isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, | 13059 isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, |
| 13055 "Runtime::PerformGC"); | 13060 "Runtime::PerformGC"); |
| 13056 } | 13061 } |
| 13057 } | 13062 } |
| 13058 | 13063 |
| 13059 | 13064 |
| 13060 } } // namespace v8::internal | 13065 } } // namespace v8::internal |