OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 26 matching lines...) Expand all Loading... | |
37 | 37 |
38 const int Deoptimizer::table_entry_size_ = 16; | 38 const int Deoptimizer::table_entry_size_ = 16; |
39 | 39 |
40 | 40 |
41 int Deoptimizer::patch_size() { | 41 int Deoptimizer::patch_size() { |
42 const int kCallInstructionSizeInWords = 3; | 42 const int kCallInstructionSizeInWords = 3; |
43 return kCallInstructionSizeInWords * Assembler::kInstrSize; | 43 return kCallInstructionSizeInWords * Assembler::kInstrSize; |
44 } | 44 } |
45 | 45 |
46 | 46 |
47 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { | 47 void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList( |
48 HandleScope scope; | 48 JSFunction* function) { |
49 Isolate* isolate = function->GetIsolate(); | |
ulan
2012/12/12 13:50:57
These changes are to sync code for different architectures
| |
50 HandleScope scope(isolate); | |
49 AssertNoAllocation no_allocation; | 51 AssertNoAllocation no_allocation; |
50 | 52 |
51 if (!function->IsOptimized()) return; | 53 ASSERT(function->IsOptimized()); |
54 ASSERT(function->FunctionsInFunctionListShareSameCode()); | |
52 | 55 |
53 // The optimized code is going to be patched, so we cannot use it | 56 // The optimized code is going to be patched, so we cannot use it |
54 // any more. Play safe and reset the whole cache. | 57 // any more. Play safe and reset the whole cache. |
55 function->shared()->ClearOptimizedCodeMap(); | 58 function->shared()->ClearOptimizedCodeMap(); |
56 | 59 |
57 // Get the optimized code. | 60 // Get the optimized code. |
58 Code* code = function->code(); | 61 Code* code = function->code(); |
59 Address code_start_address = code->instruction_start(); | 62 Address code_start_address = code->instruction_start(); |
60 | 63 |
61 // Invalidate the relocation information, as it will become invalid by the | 64 // Invalidate the relocation information, as it will become invalid by the |
(...skipping 22 matching lines...) Expand all Loading... | |
84 CodePatcher patcher(call_address, call_size_in_words); | 87 CodePatcher patcher(call_address, call_size_in_words); |
85 patcher.masm()->Call(deopt_entry, RelocInfo::NONE); | 88 patcher.masm()->Call(deopt_entry, RelocInfo::NONE); |
86 ASSERT(prev_call_address == NULL || | 89 ASSERT(prev_call_address == NULL || |
87 call_address >= prev_call_address + patch_size()); | 90 call_address >= prev_call_address + patch_size()); |
88 ASSERT(call_address + patch_size() <= code->instruction_end()); | 91 ASSERT(call_address + patch_size() <= code->instruction_end()); |
89 #ifdef DEBUG | 92 #ifdef DEBUG |
90 prev_call_address = call_address; | 93 prev_call_address = call_address; |
91 #endif | 94 #endif |
92 } | 95 } |
93 | 96 |
94 Isolate* isolate = code->GetIsolate(); | |
95 | |
96 // Add the deoptimizing code to the list. | 97 // Add the deoptimizing code to the list. |
97 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); | 98 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); |
98 DeoptimizerData* data = isolate->deoptimizer_data(); | 99 DeoptimizerData* data = isolate->deoptimizer_data(); |
99 node->set_next(data->deoptimizing_code_list_); | 100 node->set_next(data->deoptimizing_code_list_); |
100 data->deoptimizing_code_list_ = node; | 101 data->deoptimizing_code_list_ = node; |
101 | 102 |
102 // We might be in the middle of incremental marking with compaction. | 103 // We might be in the middle of incremental marking with compaction. |
103 // Tell collector to treat this code object in a special way and | 104 // Tell collector to treat this code object in a special way and |
104 // ignore all slots that might have been recorded on it. | 105 // ignore all slots that might have been recorded on it. |
105 isolate->heap()->mark_compact_collector()->InvalidateCode(code); | 106 isolate->heap()->mark_compact_collector()->InvalidateCode(code); |
(...skipping 994 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1100 __ push(ip); | 1101 __ push(ip); |
1101 __ b(&done); | 1102 __ b(&done); |
1102 ASSERT(masm()->pc_offset() - start == table_entry_size_); | 1103 ASSERT(masm()->pc_offset() - start == table_entry_size_); |
1103 } | 1104 } |
1104 __ bind(&done); | 1105 __ bind(&done); |
1105 } | 1106 } |
1106 | 1107 |
1107 #undef __ | 1108 #undef __ |
1108 | 1109 |
1109 } } // namespace v8::internal | 1110 } } // namespace v8::internal |
OLD | NEW |