OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 34 matching lines...)
45 return Assembler::kCallInstructionLength; | 45 return Assembler::kCallInstructionLength; |
46 } | 46 } |
47 | 47 |
48 | 48 |
49 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { | 49 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { |
50 HandleScope scope; | 50 HandleScope scope; |
51 AssertNoAllocation no_allocation; | 51 AssertNoAllocation no_allocation; |
52 | 52 |
53 if (!function->IsOptimized()) return; | 53 if (!function->IsOptimized()) return; |
54 | 54 |
| 55 // The optimized code is going to be patched, so we cannot use it |
| 56 // any more. Play safe and reset the whole cache. |
| 57 function->shared()->ClearOptimizedCodeMap(); |
| 58 |
55 // Get the optimized code. | 59 // Get the optimized code. |
56 Code* code = function->code(); | 60 Code* code = function->code(); |
57 | 61 |
58 // Invalidate the relocation information, as it will become invalid by the | 62 // Invalidate the relocation information, as it will become invalid by the |
59 // code patching below, and is not needed any more. | 63 // code patching below, and is not needed any more. |
60 code->InvalidateRelocation(); | 64 code->InvalidateRelocation(); |
61 | 65 |
62 // For each LLazyBailout instruction insert an absolute call to the | 66 // For each LLazyBailout instruction insert an absolute call to the |
63 // corresponding deoptimization entry, or a short call to an absolute | 67 // corresponding deoptimization entry, or a short call to an absolute |
64 // jump if space is short. The absolute jumps are put in a table just | 68 // jump if space is short. The absolute jumps are put in a table just |
(...skipping 28 matching lines...)
93 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); | 97 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); |
94 DeoptimizerData* data = isolate->deoptimizer_data(); | 98 DeoptimizerData* data = isolate->deoptimizer_data(); |
95 node->set_next(data->deoptimizing_code_list_); | 99 node->set_next(data->deoptimizing_code_list_); |
96 data->deoptimizing_code_list_ = node; | 100 data->deoptimizing_code_list_ = node; |
97 | 101 |
98 // We might be in the middle of incremental marking with compaction. | 102 // We might be in the middle of incremental marking with compaction. |
99 // Tell collector to treat this code object in a special way and | 103 // Tell collector to treat this code object in a special way and |
100 // ignore all slots that might have been recorded on it. | 104 // ignore all slots that might have been recorded on it. |
101 isolate->heap()->mark_compact_collector()->InvalidateCode(code); | 105 isolate->heap()->mark_compact_collector()->InvalidateCode(code); |
102 | 106 |
103 // Set the code for the function to non-optimized version. | 107 // Iterate over all the functions which share the same code object |
104 function->ReplaceCode(function->shared()->code()); | 108 // and make them use the unoptimized version. |
| 109 Context* context = function->context()->global_context(); |
| 110 Object* element = context->get(Context::OPTIMIZED_FUNCTIONS_LIST); |
| 111 SharedFunctionInfo* shared = function->shared(); |
| 112 while (!element->IsUndefined()) { |
| 113 JSFunction* func = JSFunction::cast(element); |
| 114 // Grab element before code replacement as ReplaceCode alters the list. |
| 115 element = func->next_function_link(); |
| 116 if (func->code() == code) { |
| 117 func->ReplaceCode(shared->code()); |
| 118 } |
| 119 } |
105 | 120 |
106 if (FLAG_trace_deopt) { | 121 if (FLAG_trace_deopt) { |
107 PrintF("[forced deoptimization: "); | 122 PrintF("[forced deoptimization: "); |
108 function->PrintName(); | 123 function->PrintName(); |
109 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 124 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
110 } | 125 } |
111 } | 126 } |
112 | 127 |
113 | 128 |
114 static const byte kJnsInstruction = 0x79; | 129 static const byte kJnsInstruction = 0x79; |
(...skipping 112 matching lines...)
227 iterator.Skip(1); // Drop JS frame count. | 242 iterator.Skip(1); // Drop JS frame count. |
228 ASSERT(count == 1); | 243 ASSERT(count == 1); |
229 USE(count); | 244 USE(count); |
230 | 245 |
231 opcode = static_cast<Translation::Opcode>(iterator.Next()); | 246 opcode = static_cast<Translation::Opcode>(iterator.Next()); |
232 USE(opcode); | 247 USE(opcode); |
233 ASSERT(Translation::JS_FRAME == opcode); | 248 ASSERT(Translation::JS_FRAME == opcode); |
234 unsigned node_id = iterator.Next(); | 249 unsigned node_id = iterator.Next(); |
235 USE(node_id); | 250 USE(node_id); |
236 ASSERT(node_id == ast_id); | 251 ASSERT(node_id == ast_id); |
237 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next())); | 252 int closure_id = iterator.Next(); |
238 USE(function); | 253 USE(closure_id); |
239 ASSERT(function == function_); | 254 ASSERT_EQ(Translation::kSelfLiteralId, closure_id); |
240 unsigned height = iterator.Next(); | 255 unsigned height = iterator.Next(); |
241 unsigned height_in_bytes = height * kPointerSize; | 256 unsigned height_in_bytes = height * kPointerSize; |
242 USE(height_in_bytes); | 257 USE(height_in_bytes); |
243 | 258 |
244 unsigned fixed_size = ComputeFixedSize(function_); | 259 unsigned fixed_size = ComputeFixedSize(function_); |
245 unsigned input_frame_size = input_->GetFrameSize(); | 260 unsigned input_frame_size = input_->GetFrameSize(); |
246 ASSERT(fixed_size + height_in_bytes == input_frame_size); | 261 ASSERT(fixed_size + height_in_bytes == input_frame_size); |
247 | 262 |
248 unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize; | 263 unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize; |
249 unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value(); | 264 unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value(); |
(...skipping 84 matching lines...)
334 // Set up the frame pointer and the context pointer. | 349 // Set up the frame pointer and the context pointer. |
335 output_[0]->SetRegister(rbp.code(), input_->GetRegister(rbp.code())); | 350 output_[0]->SetRegister(rbp.code(), input_->GetRegister(rbp.code())); |
336 output_[0]->SetRegister(rsi.code(), input_->GetRegister(rsi.code())); | 351 output_[0]->SetRegister(rsi.code(), input_->GetRegister(rsi.code())); |
337 | 352 |
338 unsigned pc_offset = data->OsrPcOffset()->value(); | 353 unsigned pc_offset = data->OsrPcOffset()->value(); |
339 intptr_t pc = reinterpret_cast<intptr_t>( | 354 intptr_t pc = reinterpret_cast<intptr_t>( |
340 optimized_code_->entry() + pc_offset); | 355 optimized_code_->entry() + pc_offset); |
341 output_[0]->SetPc(pc); | 356 output_[0]->SetPc(pc); |
342 } | 357 } |
343 Code* continuation = | 358 Code* continuation = |
344 function->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR); | 359 function_->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR); |
345 output_[0]->SetContinuation( | 360 output_[0]->SetContinuation( |
346 reinterpret_cast<intptr_t>(continuation->entry())); | 361 reinterpret_cast<intptr_t>(continuation->entry())); |
347 | 362 |
348 if (FLAG_trace_osr) { | 363 if (FLAG_trace_osr) { |
349 PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ", | 364 PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ", |
350 ok ? "finished" : "aborted", | 365 ok ? "finished" : "aborted", |
351 reinterpret_cast<intptr_t>(function)); | 366 reinterpret_cast<intptr_t>(function_)); |
352 function->PrintName(); | 367 function_->PrintName(); |
353 PrintF(" => pc=0x%0" V8PRIxPTR "]\n", output_[0]->GetPc()); | 368 PrintF(" => pc=0x%0" V8PRIxPTR "]\n", output_[0]->GetPc()); |
354 } | 369 } |
355 } | 370 } |
356 | 371 |
357 | 372 |
358 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, | 373 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, |
359 int frame_index) { | 374 int frame_index) { |
360 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); | 375 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); |
361 unsigned height = iterator->Next(); | 376 unsigned height = iterator->Next(); |
362 unsigned height_in_bytes = height * kPointerSize; | 377 unsigned height_in_bytes = height * kPointerSize; |
(...skipping 209 matching lines...)
572 intptr_t pc = reinterpret_cast<intptr_t>( | 587 intptr_t pc = reinterpret_cast<intptr_t>( |
573 construct_stub->instruction_start() + | 588 construct_stub->instruction_start() + |
574 isolate_->heap()->construct_stub_deopt_pc_offset()->value()); | 589 isolate_->heap()->construct_stub_deopt_pc_offset()->value()); |
575 output_frame->SetPc(pc); | 590 output_frame->SetPc(pc); |
576 } | 591 } |
577 | 592 |
578 | 593 |
579 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, | 594 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, |
580 int frame_index) { | 595 int frame_index) { |
581 int node_id = iterator->Next(); | 596 int node_id = iterator->Next(); |
582 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); | 597 JSFunction* function; |
| 598 if (frame_index != 0) { |
| 599 function = JSFunction::cast(ComputeLiteral(iterator->Next())); |
| 600 } else { |
| 601 int closure_id = iterator->Next(); |
| 602 USE(closure_id); |
| 603 ASSERT_EQ(Translation::kSelfLiteralId, closure_id); |
| 604 function = function_; |
| 605 } |
583 unsigned height = iterator->Next(); | 606 unsigned height = iterator->Next(); |
584 unsigned height_in_bytes = height * kPointerSize; | 607 unsigned height_in_bytes = height * kPointerSize; |
585 if (FLAG_trace_deopt) { | 608 if (FLAG_trace_deopt) { |
586 PrintF(" translating "); | 609 PrintF(" translating "); |
587 function->PrintName(); | 610 function->PrintName(); |
588 PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes); | 611 PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes); |
589 } | 612 } |
590 | 613 |
591 // The 'fixed' part of the frame consists of the incoming parameters and | 614 // The 'fixed' part of the frame consists of the incoming parameters and |
592 // the part described by JavaScriptFrameConstants. | 615 // the part described by JavaScriptFrameConstants. |
(...skipping 382 matching lines...)
975 } | 998 } |
976 __ bind(&done); | 999 __ bind(&done); |
977 } | 1000 } |
978 | 1001 |
979 #undef __ | 1002 #undef __ |
980 | 1003 |
981 | 1004 |
982 } } // namespace v8::internal | 1005 } } // namespace v8::internal |
983 | 1006 |
984 #endif // V8_TARGET_ARCH_X64 | 1007 #endif // V8_TARGET_ARCH_X64 |