| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 32 matching lines...) |
| 43 return kCallInstructionSizeInWords * Assembler::kInstrSize; | 43 return kCallInstructionSizeInWords * Assembler::kInstrSize; |
| 44 } | 44 } |
| 45 | 45 |
| 46 | 46 |
| 47 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { | 47 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { |
| 48 HandleScope scope; | 48 HandleScope scope; |
| 49 AssertNoAllocation no_allocation; | 49 AssertNoAllocation no_allocation; |
| 50 | 50 |
| 51 if (!function->IsOptimized()) return; | 51 if (!function->IsOptimized()) return; |
| 52 | 52 |
| 53 // The optimized code is going to be patched, so we cannot use it |
| 54 // any more. Play safe and reset the whole cache. |
| 55 function->shared()->ClearOptimizedCodeMap(); |
| 56 |
| 53 // Get the optimized code. | 57 // Get the optimized code. |
| 54 Code* code = function->code(); | 58 Code* code = function->code(); |
| 55 Address code_start_address = code->instruction_start(); | 59 Address code_start_address = code->instruction_start(); |
| 56 | 60 |
| 57 // Invalidate the relocation information, as it will become invalid by the | 61 // Invalidate the relocation information, as it will become invalid by the |
| 58 // code patching below, and is not needed any more. | 62 // code patching below, and is not needed any more. |
| 59 code->InvalidateRelocation(); | 63 code->InvalidateRelocation(); |
| 60 | 64 |
| 61 // For each LLazyBailout instruction insert a call to the corresponding | 65 // For each LLazyBailout instruction insert a call to the corresponding |
| 62 // deoptimization entry. | 66 // deoptimization entry. |
| (...skipping 27 matching lines...) |
| 90 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); | 94 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); |
| 91 DeoptimizerData* data = isolate->deoptimizer_data(); | 95 DeoptimizerData* data = isolate->deoptimizer_data(); |
| 92 node->set_next(data->deoptimizing_code_list_); | 96 node->set_next(data->deoptimizing_code_list_); |
| 93 data->deoptimizing_code_list_ = node; | 97 data->deoptimizing_code_list_ = node; |
| 94 | 98 |
| 95 // We might be in the middle of incremental marking with compaction. | 99 // We might be in the middle of incremental marking with compaction. |
| 96 // Tell collector to treat this code object in a special way and | 100 // Tell collector to treat this code object in a special way and |
| 97 // ignore all slots that might have been recorded on it. | 101 // ignore all slots that might have been recorded on it. |
| 98 isolate->heap()->mark_compact_collector()->InvalidateCode(code); | 102 isolate->heap()->mark_compact_collector()->InvalidateCode(code); |
| 99 | 103 |
| 100 // Set the code for the function to non-optimized version. | 104 // Iterate over all the functions which share the same code object |
| 101 function->ReplaceCode(function->shared()->code()); | 105 // and make them use unoptimized version. |
| 106 Context* context = function->context()->global_context(); |
| 107 Object* element = context->get(Context::OPTIMIZED_FUNCTIONS_LIST); |
| 108 SharedFunctionInfo* shared = function->shared(); |
| 109 while (!element->IsUndefined()) { |
| 110 JSFunction* func = JSFunction::cast(element); |
| 111 // Grab element before code replacement as ReplaceCode alters the list. |
| 112 element = func->next_function_link(); |
| 113 if (func->code() == code) { |
| 114 func->ReplaceCode(shared->code()); |
| 115 } |
| 116 } |
| 102 | 117 |
| 103 if (FLAG_trace_deopt) { | 118 if (FLAG_trace_deopt) { |
| 104 PrintF("[forced deoptimization: "); | 119 PrintF("[forced deoptimization: "); |
| 105 function->PrintName(); | 120 function->PrintName(); |
| 106 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); | 121 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); |
| 107 } | 122 } |
| 108 } | 123 } |
| 109 | 124 |
| 110 | 125 |
| 111 static const int32_t kBranchBeforeStackCheck = 0x2a000001; | 126 static const int32_t kBranchBeforeStackCheck = 0x2a000001; |
| (...skipping 120 matching lines...) |
| 232 iterator.Skip(1); // Drop JS frame count. | 247 iterator.Skip(1); // Drop JS frame count. |
| 233 ASSERT(count == 1); | 248 ASSERT(count == 1); |
| 234 USE(count); | 249 USE(count); |
| 235 | 250 |
| 236 opcode = static_cast<Translation::Opcode>(iterator.Next()); | 251 opcode = static_cast<Translation::Opcode>(iterator.Next()); |
| 237 USE(opcode); | 252 USE(opcode); |
| 238 ASSERT(Translation::JS_FRAME == opcode); | 253 ASSERT(Translation::JS_FRAME == opcode); |
| 239 unsigned node_id = iterator.Next(); | 254 unsigned node_id = iterator.Next(); |
| 240 USE(node_id); | 255 USE(node_id); |
| 241 ASSERT(node_id == ast_id); | 256 ASSERT(node_id == ast_id); |
| 242 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next())); | 257 int closure_id = iterator.Next(); |
| 243 USE(function); | 258 USE(closure_id); |
| 244 ASSERT(function == function_); | 259 ASSERT_EQ(Translation::kSelfLiteralId, closure_id); |
| 245 unsigned height = iterator.Next(); | 260 unsigned height = iterator.Next(); |
| 246 unsigned height_in_bytes = height * kPointerSize; | 261 unsigned height_in_bytes = height * kPointerSize; |
| 247 USE(height_in_bytes); | 262 USE(height_in_bytes); |
| 248 | 263 |
| 249 unsigned fixed_size = ComputeFixedSize(function_); | 264 unsigned fixed_size = ComputeFixedSize(function_); |
| 250 unsigned input_frame_size = input_->GetFrameSize(); | 265 unsigned input_frame_size = input_->GetFrameSize(); |
| 251 ASSERT(fixed_size + height_in_bytes == input_frame_size); | 266 ASSERT(fixed_size + height_in_bytes == input_frame_size); |
| 252 | 267 |
| 253 unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize; | 268 unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize; |
| 254 unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value(); | 269 unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value(); |
| (...skipping 90 matching lines...) |
| 345 optimized_code_->entry() + pc_offset); | 360 optimized_code_->entry() + pc_offset); |
| 346 output_[0]->SetPc(pc); | 361 output_[0]->SetPc(pc); |
| 347 } | 362 } |
| 348 Code* continuation = isolate_->builtins()->builtin(Builtins::kNotifyOSR); | 363 Code* continuation = isolate_->builtins()->builtin(Builtins::kNotifyOSR); |
| 349 output_[0]->SetContinuation( | 364 output_[0]->SetContinuation( |
| 350 reinterpret_cast<uint32_t>(continuation->entry())); | 365 reinterpret_cast<uint32_t>(continuation->entry())); |
| 351 | 366 |
| 352 if (FLAG_trace_osr) { | 367 if (FLAG_trace_osr) { |
| 353 PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ", | 368 PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ", |
| 354 ok ? "finished" : "aborted", | 369 ok ? "finished" : "aborted", |
| 355 reinterpret_cast<intptr_t>(function)); | 370 reinterpret_cast<intptr_t>(function_)); |
| 356 function->PrintName(); | 371 function_->PrintName(); |
| 357 PrintF(" => pc=0x%0x]\n", output_[0]->GetPc()); | 372 PrintF(" => pc=0x%0x]\n", output_[0]->GetPc()); |
| 358 } | 373 } |
| 359 } | 374 } |
| 360 | 375 |
| 361 | 376 |
| 362 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, | 377 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, |
| 363 int frame_index) { | 378 int frame_index) { |
| 364 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); | 379 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); |
| 365 unsigned height = iterator->Next(); | 380 unsigned height = iterator->Next(); |
| 366 unsigned height_in_bytes = height * kPointerSize; | 381 unsigned height_in_bytes = height * kPointerSize; |
| (...skipping 209 matching lines...) |
| 576 output_frame->SetPc(pc); | 591 output_frame->SetPc(pc); |
| 577 } | 592 } |
| 578 | 593 |
| 579 | 594 |
| 580 // This code is very similar to ia32 code, but relies on register names (fp, sp) | 595 // This code is very similar to ia32 code, but relies on register names (fp, sp) |
| 581 // and how the frame is laid out. | 596 // and how the frame is laid out. |
| 582 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, | 597 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, |
| 583 int frame_index) { | 598 int frame_index) { |
| 584 // Read the ast node id, function, and frame height for this output frame. | 599 // Read the ast node id, function, and frame height for this output frame. |
| 585 int node_id = iterator->Next(); | 600 int node_id = iterator->Next(); |
| 586 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); | 601 JSFunction* function; |
| 602 if (frame_index != 0) { |
| 603 function = JSFunction::cast(ComputeLiteral(iterator->Next())); |
| 604 } else { |
| 605 int closure_id = iterator->Next(); |
| 606 USE(closure_id); |
| 607 ASSERT_EQ(Translation::kSelfLiteralId, closure_id); |
| 608 function = function_; |
| 609 } |
| 587 unsigned height = iterator->Next(); | 610 unsigned height = iterator->Next(); |
| 588 unsigned height_in_bytes = height * kPointerSize; | 611 unsigned height_in_bytes = height * kPointerSize; |
| 589 if (FLAG_trace_deopt) { | 612 if (FLAG_trace_deopt) { |
| 590 PrintF(" translating "); | 613 PrintF(" translating "); |
| 591 function->PrintName(); | 614 function->PrintName(); |
| 592 PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes); | 615 PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes); |
| 593 } | 616 } |
| 594 | 617 |
| 595 // The 'fixed' part of the frame consists of the incoming parameters and | 618 // The 'fixed' part of the frame consists of the incoming parameters and |
| 596 // the part described by JavaScriptFrameConstants. | 619 // the part described by JavaScriptFrameConstants. |
| (...skipping 381 matching lines...) |
| 978 __ push(ip); | 1001 __ push(ip); |
| 979 __ b(&done); | 1002 __ b(&done); |
| 980 ASSERT(masm()->pc_offset() - start == table_entry_size_); | 1003 ASSERT(masm()->pc_offset() - start == table_entry_size_); |
| 981 } | 1004 } |
| 982 __ bind(&done); | 1005 __ bind(&done); |
| 983 } | 1006 } |
| 984 | 1007 |
| 985 #undef __ | 1008 #undef __ |
| 986 | 1009 |
| 987 } } // namespace v8::internal | 1010 } } // namespace v8::internal |
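A minimal standalone sketch of the list-walking pattern the new lines 104-116 of DeoptimizeFunction introduce: walk the context's optimized-functions list, revert every function still pointing at the deoptimized code object, and read the next link before calling ReplaceCode, since replacing the code alters the list. The types below are simplified stand-ins for illustration only, not V8's real JSFunction, Code, or Context classes.

```cpp
#include <cassert>
#include <cstdio>

// Hypothetical stand-in types; the real V8 objects carry far more state.
struct Code { int id; };

struct JSFunction {
  Code* code;                     // currently installed code
  Code* unoptimized_code;         // stands in for shared()->code()
  JSFunction* next_function_link; // link in the optimized-functions list

  void ReplaceCode(Code* new_code) {
    code = new_code;
    // Models the side effect that matters here: replacing the code
    // unlinks the function from the optimized-functions list.
    next_function_link = nullptr;
  }
};

// Revert every function on the list that shares the deoptimized code object.
void RevertSharedCode(JSFunction* optimized_list_head, Code* deoptimized_code) {
  JSFunction* element = optimized_list_head;
  while (element != nullptr) {
    JSFunction* func = element;
    // Grab the next element before ReplaceCode alters the list.
    element = func->next_function_link;
    if (func->code == deoptimized_code) {
      func->ReplaceCode(func->unoptimized_code);
    }
  }
}

int main() {
  Code optimized{1}, unoptimized{2};
  JSFunction b{&optimized, &unoptimized, nullptr};
  JSFunction a{&optimized, &unoptimized, &b};
  RevertSharedCode(&a, &optimized);
  assert(a.code == &unoptimized && b.code == &unoptimized);
  std::printf("both functions reverted to unoptimized code\n");
  return 0;
}
```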