| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1975 matching lines...) |
| 1986 return true; | 1986 return true; |
| 1987 } | 1987 } |
| 1988 | 1988 |
| 1989 | 1989 |
| 1990 void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code, | 1990 void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code, |
| 1991 Code* check_code, | 1991 Code* check_code, |
| 1992 Code* replacement_code) { | 1992 Code* replacement_code) { |
| 1993 // Iterate over the stack check table and patch every stack check | 1993 // Iterate over the stack check table and patch every stack check |
| 1994 // call to an unconditional call to the replacement code. | 1994 // call to an unconditional call to the replacement code. |
| 1995 ASSERT(unoptimized_code->kind() == Code::FUNCTION); | 1995 ASSERT(unoptimized_code->kind() == Code::FUNCTION); |
| 1996 ASSERT(!unoptimized_code->stack_check_patched_for_osr()); |
| 1996 Address stack_check_cursor = unoptimized_code->instruction_start() + | 1997 Address stack_check_cursor = unoptimized_code->instruction_start() + |
| 1997 unoptimized_code->stack_check_table_offset(); | 1998 unoptimized_code->stack_check_table_offset(); |
| 1998 uint32_t table_length = Memory::uint32_at(stack_check_cursor); | 1999 uint32_t table_length = Memory::uint32_at(stack_check_cursor); |
| 1999 stack_check_cursor += kIntSize; | 2000 stack_check_cursor += kIntSize; |
| 2000 for (uint32_t i = 0; i < table_length; ++i) { | 2001 for (uint32_t i = 0; i < table_length; ++i) { |
| 2001 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); | 2002 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); |
| 2002 Address pc_after = unoptimized_code->instruction_start() + pc_offset; | 2003 Address pc_after = unoptimized_code->instruction_start() + pc_offset; |
| 2003 PatchStackCheckCodeAt(unoptimized_code, | 2004 PatchStackCheckCodeAt(unoptimized_code, |
| 2004 pc_after, | 2005 pc_after, |
| 2005 check_code, | 2006 check_code, |
| 2006 replacement_code); | 2007 replacement_code); |
| 2007 stack_check_cursor += 2 * kIntSize; | 2008 stack_check_cursor += 2 * kIntSize; |
| 2008 } | 2009 } |
| 2010 unoptimized_code->set_stack_check_patched_for_osr(true); |
| 2009 } | 2011 } |
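
Note on the table walk above (the same traversal appears in the revert hunk below): the cursor arithmetic implies a layout of one uint32 entry count followed by two-word entries, where the loop reads at cursor + kIntSize and then advances by 2 * kIntSize, so the first word of each entry (presumably an AST id) is skipped and the second is the pc offset. A minimal standalone sketch of that traversal, with hypothetical names (StackCheckTableEntry, ForEachStackCheck) and plain memcpy reads standing in for V8's Memory::uint32_at:

#include <cstdint>
#include <cstring>

// Hypothetical two-word entry implied by the cursor arithmetic above.
struct StackCheckTableEntry {
  uint32_t ast_id;     // assumption: skipped by the patch/revert loops
  uint32_t pc_offset;  // offset from instruction_start() to just after
                       // the stack check call site
};

// Sketch of the traversal shared by PatchStackCheckCode and
// RevertStackCheckCode, with a callback in place of the per-site
// patch/revert helpers.
template <typename Visitor>
void ForEachStackCheck(const uint8_t* instruction_start,
                       uint32_t table_offset, Visitor visit) {
  const uint8_t* cursor = instruction_start + table_offset;
  uint32_t table_length;
  std::memcpy(&table_length, cursor, sizeof(table_length));
  cursor += sizeof(uint32_t);  // step over the length word (kIntSize)
  for (uint32_t i = 0; i < table_length; ++i) {
    StackCheckTableEntry entry;
    std::memcpy(&entry, cursor, sizeof(entry));
    visit(instruction_start + entry.pc_offset);  // pc_after in the diff
    cursor += sizeof(StackCheckTableEntry);      // 2 * kIntSize
  }
}
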
| 2010 | 2012 |
| 2011 | 2013 |
| 2012 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, | 2014 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, |
| 2013 Code* check_code, | 2015 Code* check_code, |
| 2014 Code* replacement_code) { | 2016 Code* replacement_code) { |
| 2015 // Iterate over the stack check table and revert the patched | 2017 // Iterate over the stack check table and revert the patched |
| 2016 // stack check calls. | 2018 // stack check calls. |
| 2017 ASSERT(unoptimized_code->kind() == Code::FUNCTION); | 2019 ASSERT(unoptimized_code->kind() == Code::FUNCTION); |
| 2020 ASSERT(unoptimized_code->stack_check_patched_for_osr()); |
| 2018 Address stack_check_cursor = unoptimized_code->instruction_start() + | 2021 Address stack_check_cursor = unoptimized_code->instruction_start() + |
| 2019 unoptimized_code->stack_check_table_offset(); | 2022 unoptimized_code->stack_check_table_offset(); |
| 2020 uint32_t table_length = Memory::uint32_at(stack_check_cursor); | 2023 uint32_t table_length = Memory::uint32_at(stack_check_cursor); |
| 2021 stack_check_cursor += kIntSize; | 2024 stack_check_cursor += kIntSize; |
| 2022 for (uint32_t i = 0; i < table_length; ++i) { | 2025 for (uint32_t i = 0; i < table_length; ++i) { |
| 2023 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); | 2026 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); |
| 2024 Address pc_after = unoptimized_code->instruction_start() + pc_offset; | 2027 Address pc_after = unoptimized_code->instruction_start() + pc_offset; |
| 2025 RevertStackCheckCodeAt(unoptimized_code, | 2028 RevertStackCheckCodeAt(unoptimized_code, |
| 2026 pc_after, | 2029 pc_after, |
| 2027 check_code, | 2030 check_code, |
| 2028 replacement_code); | 2031 replacement_code); |
| 2029 stack_check_cursor += 2 * kIntSize; | 2032 stack_check_cursor += 2 * kIntSize; |
| 2030 } | 2033 } |
| 2034 unoptimized_code->set_stack_check_patched_for_osr(false); |
| 2031 } | 2035 } |
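
The four lines added on the NEW side (1996/2010 and 2020/2034) pair up: PatchStackCheckCode asserts the flag is clear and then sets it, RevertStackCheckCode asserts it is set and then clears it, so double patching or a stray revert trips an ASSERT in debug builds. A toy illustration of that invariant, with hypothetical names:

#include <cassert>

// Toy model of the stack_check_patched_for_osr flag added in this change:
// patch and revert must strictly alternate on a given code object.
struct CodeState {
  bool patched_for_osr = false;
};

void PatchForOsr(CodeState* code) {
  assert(!code->patched_for_osr);  // mirrors the new ASSERT at 1996
  // ... patch every stack check call site ...
  code->patched_for_osr = true;    // mirrors the new line 2010
}

void RevertForOsr(CodeState* code) {
  assert(code->patched_for_osr);   // mirrors the new ASSERT at 2020
  // ... revert every stack check call site ...
  code->patched_for_osr = false;   // mirrors the new line 2034
}
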
| 2032 | 2036 |
| 2033 | 2037 |
| 2034 unsigned Deoptimizer::ComputeInputFrameSize() const { | 2038 unsigned Deoptimizer::ComputeInputFrameSize() const { |
| 2035 unsigned fixed_size = ComputeFixedSize(function_); | 2039 unsigned fixed_size = ComputeFixedSize(function_); |
| 2036 // The fp-to-sp delta already takes the context and the function | 2040 // The fp-to-sp delta already takes the context and the function |
| 2037 // into account so we have to avoid double counting them (-2). | 2041 // into account so we have to avoid double counting them (-2). |
| 2038 unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize); | 2042 unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize); |
| 2039 #ifdef DEBUG | 2043 #ifdef DEBUG |
| 2040 if (bailout_type_ == OSR) { | 2044 if (bailout_type_ == OSR) { |
| (...skipping 639 matching lines...) |
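
On the frame size arithmetic in ComputeInputFrameSize (new lines 2039-2042): the fixed part already counts the saved context and function slots, and the fp-to-sp delta spans them again, hence the -2 * kPointerSize correction. A worked example with assumed values (kPointerSize, fixed_size, and fp_to_sp_delta are made up for illustration, not taken from V8):

// result = fixed_size + fp_to_sp_delta - 2 * kPointerSize
unsigned ComputeInputFrameSizeExample() {
  const unsigned kPointerSize = 4;  // assumption: 32-bit target
  unsigned fixed_size = 28;         // hypothetical; includes context + function
  unsigned fp_to_sp_delta = 16;     // hypothetical; also spans those two slots
  return fixed_size + fp_to_sp_delta - 2 * kPointerSize;  // == 36,
                                                          // each slot counted once
}
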
| 2680 | 2684 |
| 2681 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 2685 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
| 2682 v->VisitPointer(BitCast<Object**>(&function_)); | 2686 v->VisitPointer(BitCast<Object**>(&function_)); |
| 2683 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 2687 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
| 2684 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 2688 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
| 2685 } | 2689 } |
| 2686 | 2690 |
| 2687 #endif // ENABLE_DEBUGGER_SUPPORT | 2691 #endif // ENABLE_DEBUGGER_SUPPORT |
| 2688 | 2692 |
| 2689 } } // namespace v8::internal | 2693 } } // namespace v8::internal |