Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(71)

Side by Side Diff: src/debug.cc

Issue 9290013: When preparing heap for breakpoints make sure not to recompile inlined functions. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: collect active functions from archived threads Created 8 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | src/heap.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1740 matching lines...) Expand 10 before | Expand all | Expand 10 after
1751 ASSERT(new_code->has_debug_break_slots()); 1751 ASSERT(new_code->has_debug_break_slots());
1752 ASSERT(current_code->is_compiled_optimizable() == 1752 ASSERT(current_code->is_compiled_optimizable() ==
1753 new_code->is_compiled_optimizable()); 1753 new_code->is_compiled_optimizable());
1754 ASSERT(current_code->instruction_size() <= new_code->instruction_size()); 1754 ASSERT(current_code->instruction_size() <= new_code->instruction_size());
1755 } 1755 }
1756 #endif 1756 #endif
1757 return result; 1757 return result;
1758 } 1758 }
1759 1759
1760 1760
// Collects, for one thread (identified by |top|), every JSFunction that has
// an activation on that thread's JavaScript stack into |active_functions|.
// For optimized frames this includes every inlined function as well, because
// the non-optimized code is required for lazy deoptimization. Each collected
// function's full code object is tagged by writing |active_code_marker| into
// its gc_metadata field so the later heap scan can recognize active code.
// NOTE(review): raw Object* values are stored into heap objects here, so the
// caller is expected to hold an AssertNoAllocation scope — confirm at call
// sites.
1761 static void CollectActiveFunctionsFromThread(
1762     Isolate* isolate,
1763     ThreadLocalTop* top,
1764     List<Handle<JSFunction> >* active_functions,
1765     Object* active_code_marker) {
1766   // Find all non-optimized code functions with activation frames
1767   // on the stack. This includes functions which have optimized
1768   // activations (including inlined functions) on the stack as the
1769   // non-optimized code is needed for the lazy deoptimization.
1770   for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
1771     JavaScriptFrame* frame = it.frame();
1772     if (frame->is_optimized()) {
      // An optimized frame can cover several functions due to inlining;
      // the +1 accounts for the outermost (non-inlined) function itself.
1773       List<JSFunction*> functions(Compiler::kMaxInliningLevels + 1);
1774       frame->GetFunctions(&functions);
1775       for (int i = 0; i < functions.length(); i++) {
1776         JSFunction* function = functions[i];
1777         active_functions->Add(Handle<JSFunction>(function));
1778         function->shared()->code()->set_gc_metadata(active_code_marker);
1779       }
1780     } else if (frame->function()->IsJSFunction()) {
1781       JSFunction* function = JSFunction::cast(frame->function());
      // Non-optimized JavaScript frames always execute full code.
1782       ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
1783       active_functions->Add(Handle<JSFunction>(function));
1784       function->shared()->code()->set_gc_metadata(active_code_marker);
1785     }
1786   }
1787 }
1788
1789
// Walks the JavaScript stack of one thread (|top|) and, for every
// non-optimized activation whose function has since been recompiled with
// debug break slots, patches the frame's saved pc so execution resumes in
// the new code. The new pc equals the old pc offset plus the accumulated
// size of all debug break slots emitted before that point in the new code.
1790 static void RedirectActivationsToRecompiledCodeOnThread(
1791     Isolate* isolate,
1792     ThreadLocalTop* top) {
1793   for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
1794     JavaScriptFrame* frame = it.frame();
1795
    // Only non-optimized JSFunction frames can be redirected.
1796     if (frame->is_optimized() || !frame->function()->IsJSFunction()) continue;
1797
1798     JSFunction* function = JSFunction::cast(frame->function());
1799
1800     ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
1801
    // Skip frames already running code with debug break slots.
1802     Handle<Code> frame_code(frame->LookupCode());
1803     if (frame_code->has_debug_break_slots()) continue;
1804
    // Skip if the replacement code is not a full-code version with slots
    // (e.g. recompilation failed or the shared code is not Code::FUNCTION).
1805     Handle<Code> new_code(function->shared()->code());
1806     if (new_code->kind() != Code::FUNCTION ||
1807         !new_code->has_debug_break_slots()) {
1808       continue;
1809     }
1810
    // Offset of the saved pc within the code currently on the stack.
1811     intptr_t delta = frame->pc() - frame_code->instruction_start();
1812     int debug_break_slot_count = 0;
1813     int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT);
    // Count how many debug break slots precede the corresponding position
    // in the new code: a slot's position minus the slots already counted
    // maps it back onto the old code's offsets for comparison with delta.
1814     for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
1815       // Check if the pc in the new code with debug break
1816       // slots is before this slot.
1817       RelocInfo* info = it.rinfo();
1818       int debug_break_slot_bytes =
1819           debug_break_slot_count * Assembler::kDebugBreakSlotLength;
1820       intptr_t new_delta =
1821           info->pc() -
1822           new_code->instruction_start() -
1823           debug_break_slot_bytes;
1824       if (new_delta > delta) {
1825         break;
1826       }
1827
1828       // Passed a debug break slot in the full code with debug
1829       // break slots.
1830       debug_break_slot_count++;
1831     }
    // Total size of the debug break slots preceding the resume point.
1832     int debug_break_slot_bytes =
1833         debug_break_slot_count * Assembler::kDebugBreakSlotLength;
1834     if (FLAG_trace_deopt) {
1835       PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
1836              "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
1837              "for debugging, "
1838              "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
1839              reinterpret_cast<intptr_t>(
1840                  frame_code->instruction_start()),
1841              reinterpret_cast<intptr_t>(
1842                  frame_code->instruction_start()) +
1843                  frame_code->instruction_size(),
Erik Corry 2012/01/25 15:00:57 Can we indent lines like this so that we have only
1844              frame_code->instruction_size(),
1845              reinterpret_cast<intptr_t>(new_code->instruction_start()),
1846              reinterpret_cast<intptr_t>(new_code->instruction_start()) +
1847              new_code->instruction_size(),
1848              new_code->instruction_size(),
1849              reinterpret_cast<intptr_t>(frame->pc()),
1850              reinterpret_cast<intptr_t>(new_code->instruction_start()) +
1851              delta + debug_break_slot_bytes);
1852     }
1853
1854     // Patch the return address to return into the code with
1855     // debug break slots.
1856     frame->set_pc(
1857         new_code->instruction_start() + delta + debug_break_slot_bytes);
1858   }
1859 }
1860
1861
// ThreadVisitor adapter that applies CollectActiveFunctionsFromThread to
// each archived (non-current) thread, so functions active on any thread's
// stack are collected and their code tagged with the active-code marker.
1862 class ActiveFunctionsCollector : public ThreadVisitor {
1863  public:
1864   explicit ActiveFunctionsCollector(List<Handle<JSFunction> >* active_functions,
1865                                     Object* active_code_marker)
1866       : active_functions_(active_functions),
1867         active_code_marker_(active_code_marker) { }
1868
1869   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
1870     CollectActiveFunctionsFromThread(isolate,
1871                                      top,
1872                                      active_functions_,
1873                                      active_code_marker_);
1874   }
1875
1876  private:
  // Output list of collected functions; owned by the caller.
1877   List<Handle<JSFunction> >* active_functions_;
  // Marker object written into the gc_metadata of active code; not owned.
1878   Object* active_code_marker_;
1879 };
1880
1881
// ThreadVisitor adapter that applies
// RedirectActivationsToRecompiledCodeOnThread to each archived thread,
// patching their stacked activations into the recompiled debug code.
1882 class ActiveFunctionsRedirector : public ThreadVisitor {
1883  public:
1884   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
1885     RedirectActivationsToRecompiledCodeOnThread(isolate, top);
1886   }
1887 };
1888
1889
1761 void Debug::PrepareForBreakPoints() { 1890 void Debug::PrepareForBreakPoints() {
1762 // If preparing for the first break point make sure to deoptimize all 1891 // If preparing for the first break point make sure to deoptimize all
1763 // functions as debugging does not work with optimized code. 1892 // functions as debugging does not work with optimized code.
1764 if (!has_break_points_) { 1893 if (!has_break_points_) {
1765 Deoptimizer::DeoptimizeAll(); 1894 Deoptimizer::DeoptimizeAll();
1766 1895
1767 Handle<Code> lazy_compile = 1896 Handle<Code> lazy_compile =
1768 Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile)); 1897 Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile));
1769 1898
1770 // Keep the list of activated functions in a handlified list as it 1899 // Keep the list of activated functions in a handlified list as it
1771 // is used both in GC and non-GC code. 1900 // is used both in GC and non-GC code.
1772 List<Handle<JSFunction> > active_functions(100); 1901 List<Handle<JSFunction> > active_functions(100);
1773 1902
1774 { 1903 {
1904 Object* active_code_marker = isolate_->heap()->the_hole_value();
Erik Corry 2012/01/25 15:00:57 I know the hole never moves or dies, but it seems
1905
1775 // We are going to iterate heap to find all functions without 1906 // We are going to iterate heap to find all functions without
1776 // debug break slots. 1907 // debug break slots.
1777 isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask); 1908 isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
1778 1909
1779 // Ensure no GC in this scope as we are comparing raw pointer 1910 // Ensure no GC in this scope as we are going to use gc_metadata
1780 // values and performing a heap iteration. 1911 // field in the Code object to mark active functions.
1781 AssertNoAllocation no_allocation; 1912 AssertNoAllocation no_allocation;
1782 1913
1783 // Find all non-optimized code functions with activation frames 1914 CollectActiveFunctionsFromThread(isolate_,
1784 // on the stack. This includes functions which have optimized 1915 isolate_->thread_local_top(),
1785 // activations (including inlined functions) on the stack as the 1916 &active_functions,
1786 // non-optimized code is needed for the lazy deoptimization. 1917 active_code_marker);
1787 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { 1918 ActiveFunctionsCollector active_functions_collector(&active_functions,
1788 JavaScriptFrame* frame = it.frame(); 1919 active_code_marker);
1789 if (frame->is_optimized()) { 1920 isolate_->thread_manager()->IterateArchivedThreads(
1790 List<JSFunction*> functions(Compiler::kMaxInliningLevels + 1); 1921 &active_functions_collector);
1791 frame->GetFunctions(&functions);
1792 for (int i = 0; i < functions.length(); i++) {
1793 if (!functions[i]->shared()->code()->has_debug_break_slots()) {
1794 active_functions.Add(Handle<JSFunction>(functions[i]));
1795 }
1796 }
1797 } else if (frame->function()->IsJSFunction()) {
1798 JSFunction* function = JSFunction::cast(frame->function());
1799 ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
1800 if (!frame->LookupCode()->has_debug_break_slots() ||
1801 !function->shared()->code()->has_debug_break_slots()) {
1802 active_functions.Add(Handle<JSFunction>(function));
1803 }
1804 }
1805 }
1806
1807 // Sort the functions on the object pointer value to prepare for
1808 // the binary search below.
1809 active_functions.Sort(HandleObjectPointerCompare<JSFunction>);
1810 1922
1811 // Scan the heap for all non-optimized functions which have no 1923 // Scan the heap for all non-optimized functions which have no
1812 // debug break slots. 1924 // debug break slots and are not active or inlined into an active
1925 // function and mark them for lazy compilation.
1813 HeapIterator iterator; 1926 HeapIterator iterator;
1814 HeapObject* obj = NULL; 1927 HeapObject* obj = NULL;
1815 while (((obj = iterator.next()) != NULL)) { 1928 while (((obj = iterator.next()) != NULL)) {
1816 if (obj->IsJSFunction()) { 1929 if (obj->IsJSFunction()) {
1817 JSFunction* function = JSFunction::cast(obj); 1930 JSFunction* function = JSFunction::cast(obj);
1818 if (function->shared()->allows_lazy_compilation() && 1931 SharedFunctionInfo* shared = function->shared();
1819 function->shared()->script()->IsScript() && 1932 if (shared->allows_lazy_compilation() &&
1933 shared->script()->IsScript() &&
1820 function->code()->kind() == Code::FUNCTION && 1934 function->code()->kind() == Code::FUNCTION &&
1821 !function->code()->has_debug_break_slots()) { 1935 !function->code()->has_debug_break_slots() &&
1822 bool has_activation = 1936 shared->code()->gc_metadata() != active_code_marker) {
1823 SortedListBSearch<Handle<JSFunction> >( 1937 function->set_code(*lazy_compile);
1824 active_functions, 1938 function->shared()->set_code(*lazy_compile);
1825 Handle<JSFunction>(function),
1826 HandleObjectPointerCompare<JSFunction>) != -1;
1827 if (!has_activation) {
1828 function->set_code(*lazy_compile);
1829 function->shared()->set_code(*lazy_compile);
1830 }
1831 } 1939 }
1832 } 1940 }
1833 } 1941 }
1942
1943 // Clear gc_metadata field.
1944 for (int i = 0; i < active_functions.length(); i++) {
1945 Handle<JSFunction> function = active_functions[i];
1946 function->shared()->code()->set_gc_metadata(Smi::FromInt(0));
1947 }
1834 } 1948 }
1835 1949
1836 // Now the non-GC scope is left, and the sorting of the functions
1837 // in active_function is not ensured any more. The code below does
1838 // not rely on it.
1839
1840 // Now recompile all functions with activation frames and 1950 // Now recompile all functions with activation frames and
1841 // patch the return address to run in the new compiled code. 1951 // patch the return address to run in the new compiled code.
1842 for (int i = 0; i < active_functions.length(); i++) { 1952 for (int i = 0; i < active_functions.length(); i++) {
1843 Handle<JSFunction> function = active_functions[i]; 1953 Handle<JSFunction> function = active_functions[i];
1954
1955 if (function->code()->kind() == Code::FUNCTION &&
1956 function->code()->has_debug_break_slots()) {
1957 // Nothing to do. Function code already had debug break slots.
1958 continue;
1959 }
1960
1844 Handle<SharedFunctionInfo> shared(function->shared()); 1961 Handle<SharedFunctionInfo> shared(function->shared());
1845 // If recompilation is not possible just skip it. 1962 // If recompilation is not possible just skip it.
1846 if (shared->is_toplevel() || 1963 if (shared->is_toplevel() ||
1847 !shared->allows_lazy_compilation() || 1964 !shared->allows_lazy_compilation() ||
1848 shared->code()->kind() == Code::BUILTIN) { 1965 shared->code()->kind() == Code::BUILTIN) {
1849 continue; 1966 continue;
1850 } 1967 }
1851 1968
1852 // Make sure that the shared full code is compiled with debug 1969 // Make sure that the shared full code is compiled with debug
1853 // break slots. 1970 // break slots.
1854 if (function->code() == *lazy_compile) {
1855 function->set_code(shared->code());
1856 }
1857 if (!shared->code()->has_debug_break_slots()) { 1971 if (!shared->code()->has_debug_break_slots()) {
1858 // Try to compile the full code with debug break slots. If it 1972 // Try to compile the full code with debug break slots. If it
1859 // fails just keep the current code. 1973 // fails just keep the current code.
1860 Handle<Code> current_code(function->shared()->code()); 1974 Handle<Code> current_code(function->shared()->code());
1861 ZoneScope zone_scope(isolate_, DELETE_ON_EXIT); 1975 ZoneScope zone_scope(isolate_, DELETE_ON_EXIT);
1862 shared->set_code(*lazy_compile); 1976 shared->set_code(*lazy_compile);
1863 bool prev_force_debugger_active = 1977 bool prev_force_debugger_active =
1864 isolate_->debugger()->force_debugger_active(); 1978 isolate_->debugger()->force_debugger_active();
1865 isolate_->debugger()->set_force_debugger_active(true); 1979 isolate_->debugger()->set_force_debugger_active(true);
1866 ASSERT(current_code->kind() == Code::FUNCTION); 1980 ASSERT(current_code->kind() == Code::FUNCTION);
1867 CompileFullCodeForDebugging(shared, current_code); 1981 CompileFullCodeForDebugging(shared, current_code);
1868 isolate_->debugger()->set_force_debugger_active( 1982 isolate_->debugger()->set_force_debugger_active(
1869 prev_force_debugger_active); 1983 prev_force_debugger_active);
1870 if (!shared->is_compiled()) { 1984 if (!shared->is_compiled()) {
1871 shared->set_code(*current_code); 1985 shared->set_code(*current_code);
1872 continue; 1986 continue;
1873 } 1987 }
1874 } 1988 }
1875 Handle<Code> new_code(shared->code());
1876 1989
1877 // Find the function and patch the return address. 1990 // Keep function code in sync with shared function info.
1878 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { 1991 function->set_code(shared->code());
1879 JavaScriptFrame* frame = it.frame(); 1992 }
1880 // If the current frame is for this function in its
1881 // non-optimized form rewrite the return address to continue
1882 // in the newly compiled full code with debug break slots.
1883 if (!frame->is_optimized() &&
1884 frame->function()->IsJSFunction() &&
1885 frame->function() == *function) {
1886 ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
1887 Handle<Code> frame_code(frame->LookupCode());
1888 if (frame_code->has_debug_break_slots()) continue;
1889 intptr_t delta = frame->pc() - frame_code->instruction_start();
1890 int debug_break_slot_count = 0;
1891 int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT);
1892 for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
1893 // Check if the pc in the new code with debug break
1894 // slots is before this slot.
1895 RelocInfo* info = it.rinfo();
1896 int debug_break_slot_bytes =
1897 debug_break_slot_count * Assembler::kDebugBreakSlotLength;
1898 intptr_t new_delta =
1899 info->pc() -
1900 new_code->instruction_start() -
1901 debug_break_slot_bytes;
1902 if (new_delta > delta) {
1903 break;
1904 }
1905 1993
1906 // Passed a debug break slot in the full code with debug 1994 RedirectActivationsToRecompiledCodeOnThread(isolate_,
1907 // break slots. 1995 isolate_->thread_local_top());
1908 debug_break_slot_count++;
1909 }
1910 int debug_break_slot_bytes =
1911 debug_break_slot_count * Assembler::kDebugBreakSlotLength;
1912 if (FLAG_trace_deopt) {
1913 PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
1914 "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
1915 "for debugging, "
1916 "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
1917 reinterpret_cast<intptr_t>(
1918 frame_code->instruction_start()),
1919 reinterpret_cast<intptr_t>(
1920 frame_code->instruction_start()) +
1921 frame_code->instruction_size(),
1922 frame_code->instruction_size(),
1923 reinterpret_cast<intptr_t>(new_code->instruction_start()),
1924 reinterpret_cast<intptr_t>(new_code->instruction_start()) +
1925 new_code->instruction_size(),
1926 new_code->instruction_size(),
1927 reinterpret_cast<intptr_t>(frame->pc()),
1928 reinterpret_cast<intptr_t>(new_code->instruction_start()) +
1929 delta + debug_break_slot_bytes);
1930 }
1931 1996
1932 // Patch the return address to return into the code with 1997 ActiveFunctionsRedirector active_functions_redirector;
1933 // debug break slots. 1998 isolate_->thread_manager()->IterateArchivedThreads(
1934 frame->set_pc( 1999 &active_functions_redirector);
1935 new_code->instruction_start() + delta + debug_break_slot_bytes); 2000
1936 }
1937 }
1938 }
1939 } 2001 }
1940 } 2002 }
1941 2003
1942 2004
1943 // Ensures the debug information is present for shared. 2005 // Ensures the debug information is present for shared.
1944 bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) { 2006 bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) {
1945 // Return if we already have the debug info for shared. 2007 // Return if we already have the debug info for shared.
1946 if (HasDebugInfo(shared)) { 2008 if (HasDebugInfo(shared)) {
1947 ASSERT(shared->is_compiled()); 2009 ASSERT(shared->is_compiled());
1948 return true; 2010 return true;
(...skipping 1541 matching lines...) Expand 10 before | Expand all | Expand 10 after
3490 { 3552 {
3491 Locker locker; 3553 Locker locker;
3492 Isolate::Current()->debugger()->CallMessageDispatchHandler(); 3554 Isolate::Current()->debugger()->CallMessageDispatchHandler();
3493 } 3555 }
3494 } 3556 }
3495 } 3557 }
3496 3558
3497 #endif // ENABLE_DEBUGGER_SUPPORT 3559 #endif // ENABLE_DEBUGGER_SUPPORT
3498 3560
3499 } } // namespace v8::internal 3561 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « no previous file | src/heap.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698