OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1758 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1769 | 1769 |
1770 // Keep the list of activated functions in a handlified list as it | 1770 // Keep the list of activated functions in a handlified list as it |
1771 // is used both in GC and non-GC code. | 1771 // is used both in GC and non-GC code. |
1772 List<Handle<JSFunction> > active_functions(100); | 1772 List<Handle<JSFunction> > active_functions(100); |
1773 | 1773 |
1774 { | 1774 { |
1775 // We are going to iterate heap to find all functions without | 1775 // We are going to iterate heap to find all functions without |
1776 // debug break slots. | 1776 // debug break slots. |
1777 isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask); | 1777 isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
1778 | 1778 |
1779 // Ensure no GC in this scope as we are comparing raw pointer | 1779 // Ensure no GC in this scope as we are going to use gc_metadata |
1780 // values and performing a heap iteration. | 1780 // field in the Code object to mark active functions. |
1781 AssertNoAllocation no_allocation; | 1781 AssertNoAllocation no_allocation; |
1782 | 1782 |
1783 Object* active_code_marker = isolate_->heap()->the_hole_value(); | |
Erik Corry
2012/01/25 14:05:28
It seems we only go through the stack for the curr
| |
1784 | |
1783 // Find all non-optimized code functions with activation frames | 1785 // Find all non-optimized code functions with activation frames |
1784 // on the stack. This includes functions which have optimized | 1786 // on the stack. This includes functions which have optimized |
1785 // activations (including inlined functions) on the stack as the | 1787 // activations (including inlined functions) on the stack as the |
1786 // non-optimized code is needed for the lazy deoptimization. | 1788 // non-optimized code is needed for the lazy deoptimization. |
1787 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { | 1789 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { |
1788 JavaScriptFrame* frame = it.frame(); | 1790 JavaScriptFrame* frame = it.frame(); |
1789 if (frame->is_optimized()) { | 1791 if (frame->is_optimized()) { |
1790 List<JSFunction*> functions(Compiler::kMaxInliningLevels + 1); | 1792 List<JSFunction*> functions(Compiler::kMaxInliningLevels + 1); |
1791 frame->GetFunctions(&functions); | 1793 frame->GetFunctions(&functions); |
1792 for (int i = 0; i < functions.length(); i++) { | 1794 for (int i = 0; i < functions.length(); i++) { |
1793 if (!functions[i]->shared()->code()->has_debug_break_slots()) { | 1795 JSFunction* function = functions[i]; |
1794 active_functions.Add(Handle<JSFunction>(functions[i])); | 1796 active_functions.Add(Handle<JSFunction>(function)); |
1795 } | 1797 function->shared()->code()->set_gc_metadata(active_code_marker); |
1796 } | 1798 } |
1797 } else if (frame->function()->IsJSFunction()) { | 1799 } else if (frame->function()->IsJSFunction()) { |
1798 JSFunction* function = JSFunction::cast(frame->function()); | 1800 JSFunction* function = JSFunction::cast(frame->function()); |
1799 ASSERT(frame->LookupCode()->kind() == Code::FUNCTION); | 1801 ASSERT(frame->LookupCode()->kind() == Code::FUNCTION); |
1800 if (!frame->LookupCode()->has_debug_break_slots() || | 1802 active_functions.Add(Handle<JSFunction>(function)); |
1801 !function->shared()->code()->has_debug_break_slots()) { | 1803 function->shared()->code()->set_gc_metadata(active_code_marker); |
1802 active_functions.Add(Handle<JSFunction>(function)); | |
1803 } | |
1804 } | 1804 } |
1805 } | 1805 } |
1806 | 1806 |
1807 // Sort the functions on the object pointer value to prepare for | |
1808 // the binary search below. | |
1809 active_functions.Sort(HandleObjectPointerCompare<JSFunction>); | |
1810 | |
1811 // Scan the heap for all non-optimized functions which have no | 1807 // Scan the heap for all non-optimized functions which have no |
Erik Corry
2012/01/25 14:05:28
has -> have
| |
1812 // debug break slots. | 1808 // debug break slots and are not active or inlined into an active |
1809 // function and mark them for lazy compilation. | |
1813 HeapIterator iterator; | 1810 HeapIterator iterator; |
1814 HeapObject* obj = NULL; | 1811 HeapObject* obj = NULL; |
1815 while (((obj = iterator.next()) != NULL)) { | 1812 while (((obj = iterator.next()) != NULL)) { |
1816 if (obj->IsJSFunction()) { | 1813 if (obj->IsJSFunction()) { |
1817 JSFunction* function = JSFunction::cast(obj); | 1814 JSFunction* function = JSFunction::cast(obj); |
1818 if (function->shared()->allows_lazy_compilation() && | 1815 SharedFunctionInfo* shared = function->shared(); |
1819 function->shared()->script()->IsScript() && | 1816 if (shared->allows_lazy_compilation() && |
1817 shared->script()->IsScript() && | |
1820 function->code()->kind() == Code::FUNCTION && | 1818 function->code()->kind() == Code::FUNCTION && |
1821 !function->code()->has_debug_break_slots()) { | 1819 !function->code()->has_debug_break_slots() && |
1822 bool has_activation = | 1820 shared->code()->gc_metadata() != active_code_marker) { |
1823 SortedListBSearch<Handle<JSFunction> >( | 1821 function->set_code(*lazy_compile); |
1824 active_functions, | 1822 function->shared()->set_code(*lazy_compile); |
1825 Handle<JSFunction>(function), | |
1826 HandleObjectPointerCompare<JSFunction>) != -1; | |
1827 if (!has_activation) { | |
1828 function->set_code(*lazy_compile); | |
1829 function->shared()->set_code(*lazy_compile); | |
1830 } | |
1831 } | 1823 } |
1832 } | 1824 } |
1833 } | 1825 } |
1826 | |
1827 // Clear gc_metadata field. | |
1828 for (int i = 0; i < active_functions.length(); i++) { | |
1829 Handle<JSFunction> function = active_functions[i]; | |
1830 function->shared()->code()->set_gc_metadata(Smi::FromInt(0)); | |
1831 } | |
1834 } | 1832 } |
1835 | 1833 |
1836 // Now the non-GC scope is left, and the sorting of the functions | |
1837 // in active_function is not ensured any more. The code below does | |
1838 // not rely on it. | |
1839 | |
1840 // Now recompile all functions with activation frames and | 1834 // Now recompile all functions with activation frames and |
1841 // patch the return address to run in the new compiled code. | 1835 // patch the return address to run in the new compiled code. |
1842 for (int i = 0; i < active_functions.length(); i++) { | 1836 for (int i = 0; i < active_functions.length(); i++) { |
1843 Handle<JSFunction> function = active_functions[i]; | 1837 Handle<JSFunction> function = active_functions[i]; |
1838 | |
1839 if (function->code()->kind() == Code::FUNCTION && | |
1840 function->code()->has_debug_break_slots()) { | |
1841 // Nothing to do. Function code already had debug break slots. | |
1842 continue; | |
1843 } | |
1844 | |
1844 Handle<SharedFunctionInfo> shared(function->shared()); | 1845 Handle<SharedFunctionInfo> shared(function->shared()); |
1845 // If recompilation is not possible just skip it. | 1846 // If recompilation is not possible just skip it. |
1846 if (shared->is_toplevel() || | 1847 if (shared->is_toplevel() || |
1847 !shared->allows_lazy_compilation() || | 1848 !shared->allows_lazy_compilation() || |
1848 shared->code()->kind() == Code::BUILTIN) { | 1849 shared->code()->kind() == Code::BUILTIN) { |
1849 continue; | 1850 continue; |
1850 } | 1851 } |
1851 | 1852 |
1852 // Make sure that the shared full code is compiled with debug | 1853 // Make sure that the shared full code is compiled with debug |
1853 // break slots. | 1854 // break slots. |
1854 if (function->code() == *lazy_compile) { | |
1855 function->set_code(shared->code()); | |
1856 } | |
1857 if (!shared->code()->has_debug_break_slots()) { | 1855 if (!shared->code()->has_debug_break_slots()) { |
1858 // Try to compile the full code with debug break slots. If it | 1856 // Try to compile the full code with debug break slots. If it |
1859 // fails just keep the current code. | 1857 // fails just keep the current code. |
1860 Handle<Code> current_code(function->shared()->code()); | 1858 Handle<Code> current_code(function->shared()->code()); |
1861 ZoneScope zone_scope(isolate_, DELETE_ON_EXIT); | 1859 ZoneScope zone_scope(isolate_, DELETE_ON_EXIT); |
1862 shared->set_code(*lazy_compile); | 1860 shared->set_code(*lazy_compile); |
1863 bool prev_force_debugger_active = | 1861 bool prev_force_debugger_active = |
1864 isolate_->debugger()->force_debugger_active(); | 1862 isolate_->debugger()->force_debugger_active(); |
1865 isolate_->debugger()->set_force_debugger_active(true); | 1863 isolate_->debugger()->set_force_debugger_active(true); |
1866 ASSERT(current_code->kind() == Code::FUNCTION); | 1864 ASSERT(current_code->kind() == Code::FUNCTION); |
1867 CompileFullCodeForDebugging(shared, current_code); | 1865 CompileFullCodeForDebugging(shared, current_code); |
1868 isolate_->debugger()->set_force_debugger_active( | 1866 isolate_->debugger()->set_force_debugger_active( |
1869 prev_force_debugger_active); | 1867 prev_force_debugger_active); |
1870 if (!shared->is_compiled()) { | 1868 if (!shared->is_compiled()) { |
1871 shared->set_code(*current_code); | 1869 shared->set_code(*current_code); |
1872 continue; | 1870 continue; |
1873 } | 1871 } |
1874 } | 1872 } |
1873 | |
1874 // Keep function code in sync with shared function info. | |
1875 function->set_code(shared->code()); | |
1876 | |
1875 Handle<Code> new_code(shared->code()); | 1877 Handle<Code> new_code(shared->code()); |
1876 | 1878 |
1877 // Find the function and patch the return address. | 1879 // Find the function and patch the return address. |
1878 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { | 1880 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { |
1879 JavaScriptFrame* frame = it.frame(); | 1881 JavaScriptFrame* frame = it.frame(); |
1880 // If the current frame is for this function in its | 1882 // If the current frame is for this function in its |
1881 // non-optimized form rewrite the return address to continue | 1883 // non-optimized form rewrite the return address to continue |
1882 // in the newly compiled full code with debug break slots. | 1884 // in the newly compiled full code with debug break slots. |
1883 if (!frame->is_optimized() && | 1885 if (!frame->is_optimized() && |
1884 frame->function()->IsJSFunction() && | 1886 frame->function()->IsJSFunction() && |
(...skipping 1605 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3490 { | 3492 { |
3491 Locker locker; | 3493 Locker locker; |
3492 Isolate::Current()->debugger()->CallMessageDispatchHandler(); | 3494 Isolate::Current()->debugger()->CallMessageDispatchHandler(); |
3493 } | 3495 } |
3494 } | 3496 } |
3495 } | 3497 } |
3496 | 3498 |
3497 #endif // ENABLE_DEBUGGER_SUPPORT | 3499 #endif // ENABLE_DEBUGGER_SUPPORT |
3498 | 3500 |
3499 } } // namespace v8::internal | 3501 } } // namespace v8::internal |
OLD | NEW |