OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2012 matching lines...)
2023 UNREACHABLE(); | 2023 UNREACHABLE(); |
2024 return false; | 2024 return false; |
2025 } | 2025 } |
2026 } | 2026 } |
2027 | 2027 |
2028 if (!duplicate) *input_offset -= kPointerSize; | 2028 if (!duplicate) *input_offset -= kPointerSize; |
2029 return true; | 2029 return true; |
2030 } | 2030 } |
2031 | 2031 |
2032 | 2032 |
2033 void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code, | 2033 void Deoptimizer::PatchInterruptCode(Code* unoptimized_code, |
2034 Code* check_code, | 2034 Code* interrupt_code, |
2035 Code* replacement_code) { | 2035 Code* replacement_code) { |
2036 // Iterate over the stack check table and patch every stack check | 2036 // Iterate over the back edge table and patch every interrupt |
2037 // call to an unconditional call to the replacement code. | 2037 // call to an unconditional call to the replacement code. |
2038 ASSERT(unoptimized_code->kind() == Code::FUNCTION); | 2038 ASSERT(unoptimized_code->kind() == Code::FUNCTION); |
2039 ASSERT(!unoptimized_code->stack_check_patched_for_osr()); | 2039 int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level(); |
2040 Address stack_check_cursor = unoptimized_code->instruction_start() + | 2040 Address back_edge_cursor = unoptimized_code->instruction_start() + |
2041 unoptimized_code->stack_check_table_offset(); | 2041 unoptimized_code->back_edge_table_offset(); |
2042 uint32_t table_length = Memory::uint32_at(stack_check_cursor); | 2042 uint32_t table_length = Memory::uint32_at(back_edge_cursor); |
2043 stack_check_cursor += kIntSize; | 2043 back_edge_cursor += kIntSize; |
2044 for (uint32_t i = 0; i < table_length; ++i) { | 2044 for (uint32_t i = 0; i < table_length; ++i) { |
2045 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); | 2045 uint8_t loop_depth = Memory::uint8_at(back_edge_cursor + 2 * kIntSize); |
2046 Address pc_after = unoptimized_code->instruction_start() + pc_offset; | 2046 if (loop_depth == loop_nesting_level) { |
2047 PatchStackCheckCodeAt(unoptimized_code, | 2047 // Loop back edge has the loop depth that we want to patch. |
2048 pc_after, | 2048 uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize); |
2049 check_code, | 2049 Address pc_after = unoptimized_code->instruction_start() + pc_offset; |
2050 replacement_code); | 2050 PatchInterruptCodeAt(unoptimized_code, |
2051 stack_check_cursor += 2 * kIntSize; | 2051 pc_after, |
| 2052 interrupt_code, |
| 2053 replacement_code); |
| 2054 } |
| 2055 back_edge_cursor += kBackEdgeEntrySize; |
2052 } | 2056 } |
2053 unoptimized_code->set_stack_check_patched_for_osr(true); | 2057 unoptimized_code->set_back_edges_patched_for_osr(true); |
| 2058 #ifdef DEBUG |
| 2059 Deoptimizer::VerifyInterruptCode( |
| 2060 unoptimized_code, interrupt_code, replacement_code, loop_nesting_level); |
| 2061 #endif // DEBUG |
2054 } | 2062 } |
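
The patched cursor walk above assumes a back edge table laid out as a uint32 entry count followed by fixed-size records: the pc offset is read at +kIntSize and the loop depth byte at +2 * kIntSize, which implies each record packs two ints and one trailing byte. Below is a minimal standalone sketch of that walk under those layout assumptions; the first-int AST-id slot, the kBackEdgeEntrySize value of 2 * kIntSize + 1, and names like EmitEntry are inferred for illustration, not taken from V8 headers.

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

namespace {

const int kIntSize = static_cast<int>(sizeof(int32_t));  // 4, as in V8
const int kBackEdgeEntrySize = 2 * kIntSize + 1;          // two ints + one byte

// Append one record to the raw table, mirroring the layout the cursor
// arithmetic above implies: [ast_id][pc_offset][loop_depth].
void EmitEntry(std::vector<uint8_t>* table, int32_t ast_id,
               uint32_t pc_offset, uint8_t loop_depth) {
  size_t pos = table->size();
  table->resize(pos + kBackEdgeEntrySize);
  std::memcpy(&(*table)[pos], &ast_id, kIntSize);
  std::memcpy(&(*table)[pos + kIntSize], &pc_offset, kIntSize);
  (*table)[pos + 2 * kIntSize] = loop_depth;
}

}  // namespace

int main() {
  // Build a table with a uint32 length prefix, like the code above reads.
  std::vector<uint8_t> table(kIntSize);  // reserve room for the length
  EmitEntry(&table, /*ast_id=*/1, /*pc_offset=*/0x10, /*loop_depth=*/1);
  EmitEntry(&table, /*ast_id=*/2, /*pc_offset=*/0x40, /*loop_depth=*/2);
  EmitEntry(&table, /*ast_id=*/3, /*pc_offset=*/0x80, /*loop_depth=*/1);
  uint32_t length = 3;
  std::memcpy(&table[0], &length, kIntSize);

  // Walk the table exactly as PatchInterruptCode does, "patching" only
  // the entries whose loop depth matches the requested nesting level.
  int loop_nesting_level = 1;
  const uint8_t* back_edge_cursor = table.data();
  uint32_t table_length;
  std::memcpy(&table_length, back_edge_cursor, kIntSize);
  back_edge_cursor += kIntSize;
  for (uint32_t i = 0; i < table_length; ++i) {
    uint8_t loop_depth = back_edge_cursor[2 * kIntSize];
    if (loop_depth == loop_nesting_level) {
      uint32_t pc_offset;
      std::memcpy(&pc_offset, back_edge_cursor + kIntSize, kIntSize);
      std::cout << "would patch back edge at pc_offset 0x" << std::hex
                << pc_offset << std::dec << "\n";
    }
    back_edge_cursor += kBackEdgeEntrySize;
  }
  return 0;
}

Note the equality test: because the OSR nesting level is raised one step at a time, only the newly allowed depth needs patching here; edges for shallower loops were patched on earlier steps, which is exactly the invariant VerifyInterruptCode checks below.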
2055 | 2063 |
2056 | 2064 |
2057 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, | 2065 void Deoptimizer::RevertInterruptCode(Code* unoptimized_code, |
2058 Code* check_code, | 2066 Code* interrupt_code, |
2059 Code* replacement_code) { | 2067 Code* replacement_code) { |
2060 // Iterate over the stack check table and revert the patched | 2068 // Iterate over the back edge table and revert the patched |
2061 // stack check calls. | 2069 // interrupt calls. |
2062 ASSERT(unoptimized_code->kind() == Code::FUNCTION); | 2070 ASSERT(unoptimized_code->kind() == Code::FUNCTION); |
2063 ASSERT(unoptimized_code->stack_check_patched_for_osr()); | 2071 ASSERT(unoptimized_code->back_edges_patched_for_osr()); |
2064 Address stack_check_cursor = unoptimized_code->instruction_start() + | 2072 int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level(); |
2065 unoptimized_code->stack_check_table_offset(); | 2073 Address back_edge_cursor = unoptimized_code->instruction_start() + |
2066 uint32_t table_length = Memory::uint32_at(stack_check_cursor); | 2074 unoptimized_code->back_edge_table_offset(); |
2067 stack_check_cursor += kIntSize; | 2075 uint32_t table_length = Memory::uint32_at(back_edge_cursor); |
| 2076 back_edge_cursor += kIntSize; |
2068 for (uint32_t i = 0; i < table_length; ++i) { | 2077 for (uint32_t i = 0; i < table_length; ++i) { |
2069 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); | 2078 uint8_t loop_depth = Memory::uint8_at(back_edge_cursor + 2 * kIntSize); |
2070 Address pc_after = unoptimized_code->instruction_start() + pc_offset; | 2079 if (loop_depth <= loop_nesting_level) { |
2071 RevertStackCheckCodeAt(unoptimized_code, | 2080 uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize); |
2072 pc_after, | 2081 Address pc_after = unoptimized_code->instruction_start() + pc_offset; |
2073 check_code, | 2082 RevertInterruptCodeAt(unoptimized_code, |
2074 replacement_code); | 2083 pc_after, |
2075 stack_check_cursor += 2 * kIntSize; | 2084 interrupt_code, |
| 2085 replacement_code); |
| 2086 } |
| 2087 back_edge_cursor += kBackEdgeEntrySize; |
2076 } | 2088 } |
2077 unoptimized_code->set_stack_check_patched_for_osr(false); | 2089 unoptimized_code->set_back_edges_patched_for_osr(false); |
| 2090 #ifdef DEBUG |
| 2091 // Assert that none of the back edges are patched anymore. |
| 2092 Deoptimizer::VerifyInterruptCode( |
| 2093 unoptimized_code, interrupt_code, replacement_code, -1); |
| 2094 #endif // DEBUG |
2078 } | 2095 } |
2079 | 2096 |
2080 | 2097 |
| 2098 #ifdef DEBUG |
| 2099 void Deoptimizer::VerifyInterruptCode(Code* unoptimized_code, |
| 2100 Code* interrupt_code, |
| 2101 Code* replacement_code, |
| 2102 int loop_nesting_level) { |
| 2103 CHECK(unoptimized_code->kind() == Code::FUNCTION); |
| 2104 Address back_edge_cursor = unoptimized_code->instruction_start() + |
| 2105 unoptimized_code->back_edge_table_offset(); |
| 2106 uint32_t table_length = Memory::uint32_at(back_edge_cursor); |
| 2107 back_edge_cursor += kIntSize; |
| 2108 for (uint32_t i = 0; i < table_length; ++i) { |
| 2109 uint8_t loop_depth = Memory::uint8_at(back_edge_cursor + 2 * kIntSize); |
| 2110 CHECK_LE(loop_depth, Code::kMaxLoopNestingMarker); |
| 2111 // Assert that all back edges for shallower loops (and only those) |
| 2112 // have already been patched. |
| 2113 uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize); |
| 2114 Address pc_after = unoptimized_code->instruction_start() + pc_offset; |
| 2115 CHECK_EQ((loop_depth <= loop_nesting_level), |
| 2116 InterruptCodeIsPatched(unoptimized_code, |
| 2117 pc_after, |
| 2118 interrupt_code, |
| 2119 replacement_code)); |
| 2120 back_edge_cursor += kBackEdgeEntrySize; |
| 2121 } |
| 2122 } |
| 2123 #endif // DEBUG |
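
Taken together, PatchInterruptCode (patches exactly the newly armed depth), RevertInterruptCode (reverts every depth up to the current level), and the verifier maintain a single invariant: a back edge is patched if and only if its loop depth is <= the current OSR nesting level, with level -1 after a full revert meaning nothing may remain patched. A toy model of that invariant follows; the names Arm, Revert, Verify, and BackEdge are illustrative stand-ins, not V8 API.

#include <cassert>
#include <vector>

struct BackEdge {
  int loop_depth;
  bool patched = false;
};

// Mirror of PatchInterruptCode's selection: arming level k patches the
// edges at exactly depth k; shallower ones were patched when the level
// was raised through them one step at a time.
void Arm(std::vector<BackEdge>* edges, int level) {
  for (BackEdge& e : *edges)
    if (e.loop_depth == level) e.patched = true;
}

// Mirror of RevertInterruptCode: unpatch every edge at depth <= level.
void Revert(std::vector<BackEdge>* edges, int level) {
  for (BackEdge& e : *edges)
    if (e.loop_depth <= level) e.patched = false;
}

// Mirror of VerifyInterruptCode's CHECK_EQ: patched iff shallow enough.
void Verify(const std::vector<BackEdge>& edges, int level) {
  for (const BackEdge& e : edges)
    assert((e.loop_depth <= level) == e.patched);
}

int main() {
  std::vector<BackEdge> edges = {{1}, {2}, {3}, {1}, {2}};
  for (int level = 1; level <= 3; ++level) {
    Arm(&edges, level);
    Verify(edges, level);  // invariant holds after each one-step raise
  }
  Revert(&edges, 3);
  Verify(edges, -1);  // level -1: no back edge may remain patched
  return 0;
}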
| 2124 |
| 2125 |
2081 unsigned Deoptimizer::ComputeInputFrameSize() const { | 2126 unsigned Deoptimizer::ComputeInputFrameSize() const { |
2082 unsigned fixed_size = ComputeFixedSize(function_); | 2127 unsigned fixed_size = ComputeFixedSize(function_); |
2083 // The fp-to-sp delta already takes the context and the function | 2128 // The fp-to-sp delta already takes the context and the function |
2084 // into account so we have to avoid double counting them (-2). | 2129 // into account so we have to avoid double counting them (-2). |
2085 unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize); | 2130 unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize); |
2086 #ifdef DEBUG | 2131 #ifdef DEBUG |
2087 if (bailout_type_ == OSR) { | 2132 if (bailout_type_ == OSR) { |
2088 // TODO(kasperl): It would be nice if we could verify that the | 2133 // TODO(kasperl): It would be nice if we could verify that the |
2089 // size matches with the stack height we can compute based on the | 2134 // size matches with the stack height we can compute based on the |
2090 // environment at the OSR entry. The code for that is built into | 2135 // environment at the OSR entry. The code for that is built into |
(...skipping 636 matching lines...)
2727 | 2772 |
2728 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 2773 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
2729 v->VisitPointer(BitCast<Object**>(&function_)); | 2774 v->VisitPointer(BitCast<Object**>(&function_)); |
2730 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 2775 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
2731 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 2776 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
2732 } | 2777 } |
2733 | 2778 |
2734 #endif // ENABLE_DEBUGGER_SUPPORT | 2779 #endif // ENABLE_DEBUGGER_SUPPORT |
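
DeoptimizedFrameInfo::Iterate above is the GC hook: it reports every heap pointer the frame info owns (the function, the parameters, the expression stack) so a collector can mark or relocate them; the BitCast is needed because function_ is stored as a typed pointer rather than Object*. A minimal sketch of that visitor contract, where ObjectVisitor is a stand-in interface and not V8's actual class:

#include <cstddef>
#include <iostream>

struct Object {};  // stand-in for a heap object

class ObjectVisitor {
 public:
  virtual ~ObjectVisitor() = default;
  // Single-pointer convenience wrapper, delegating to the range form.
  virtual void VisitPointer(Object** p) { VisitPointers(p, p + 1); }
  virtual void VisitPointers(Object** start, Object** end) = 0;
};

// A trivial visitor that just counts the slots it is handed; a real GC
// visitor would mark or update each pointer instead.
class CountingVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) override {
    count_ += end - start;
  }
  std::ptrdiff_t count() const { return count_; }

 private:
  std::ptrdiff_t count_ = 0;
};

int main() {
  Object fn, a, b, e0;
  Object* function = &fn;
  Object* parameters[] = {&a, &b};
  Object* expression_stack[] = {&e0};

  CountingVisitor v;
  // Same shape as DeoptimizedFrameInfo::Iterate above.
  v.VisitPointer(&function);
  v.VisitPointers(parameters, parameters + 2);
  v.VisitPointers(expression_stack, expression_stack + 1);
  std::cout << "visited " << v.count() << " slots\n";  // prints 4
  return 0;
}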
2735 | 2780 |
2736 } } // namespace v8::internal | 2781 } } // namespace v8::internal |