Chromium Code Reviews

Side by Side Diff: src/ia32/deoptimizer-ia32.cc

Issue 10103035: Share optimized code for closures. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: added x64 and ARM ports | Created 8 years, 7 months ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 99 matching lines...)
110 } 110 }
111 // Replace relocation information on the code object. 111 // Replace relocation information on the code object.
112 code->set_relocation_info(*new_reloc); 112 code->set_relocation_info(*new_reloc);
113 } 113 }
114 } 114 }
115 115
116 116
117 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { 117 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
118 if (!function->IsOptimized()) return; 118 if (!function->IsOptimized()) return;
119 119
120 // The optimized code is going to be patched, so we cannot use it
121 // any more. Play safe and reset the whole cache.
122 function->shared()->set_optimized_code_map(Smi::FromInt(0));
Michael Starzinger 2012/05/23 11:16:29 I think it would make sense to have shared->ClearO
fschneider 2012/06/14 11:08:23 Done.
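(The suggested helper, presumably ClearOptimizedCodeMap(), would simply wrap the raw setter used above. A minimal sketch under that assumption; the method name and placement on SharedFunctionInfo are inferred from the comment, not from this patch set:

    // Hypothetical helper on SharedFunctionInfo: hides the Smi(0)
    // "empty map" sentinel behind a named method so call sites such as
    // Deoptimizer::DeoptimizeFunction stay readable.
    void SharedFunctionInfo::ClearOptimizedCodeMap() {
      set_optimized_code_map(Smi::FromInt(0));
    }

    // The call site above would then read:
    //   function->shared()->ClearOptimizedCodeMap();
)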
123
120 Isolate* isolate = function->GetIsolate(); 124 Isolate* isolate = function->GetIsolate();
121 HandleScope scope(isolate); 125 HandleScope scope(isolate);
122 AssertNoAllocation no_allocation; 126 AssertNoAllocation no_allocation;
123 127
124 // Get the optimized code. 128 // Get the optimized code.
125 Code* code = function->code(); 129 Code* code = function->code();
126 Address code_start_address = code->instruction_start(); 130 Address code_start_address = code->instruction_start();
127 131
128 // We will overwrite the code's relocation info in-place. Relocation info 132 // We will overwrite the code's relocation info in-place. Relocation info
129 // is written backward. The relocation info is the payload of a byte 133 // is written backward. The relocation info is the payload of a byte
(...skipping 57 matching lines...)
187 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); 191 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
188 DeoptimizerData* data = isolate->deoptimizer_data(); 192 DeoptimizerData* data = isolate->deoptimizer_data();
189 node->set_next(data->deoptimizing_code_list_); 193 node->set_next(data->deoptimizing_code_list_);
190 data->deoptimizing_code_list_ = node; 194 data->deoptimizing_code_list_ = node;
191 195
192 // We might be in the middle of incremental marking with compaction. 196 // We might be in the middle of incremental marking with compaction.
193 // Tell collector to treat this code object in a special way and 197 // Tell collector to treat this code object in a special way and
194 // ignore all slots that might have been recorded on it. 198 // ignore all slots that might have been recorded on it.
195 isolate->heap()->mark_compact_collector()->InvalidateCode(code); 199 isolate->heap()->mark_compact_collector()->InvalidateCode(code);
196 200
197 // Set the code for the function to non-optimized version. 201 // Iterate over all the functions which share the same code object
198 function->ReplaceCode(function->shared()->code()); 202 // and make them use unoptimized version.
203 Context* context = function->context()->global_context();
204 Object* element = context->get(Context::OPTIMIZED_FUNCTIONS_LIST);
205 SharedFunctionInfo* shared = function->shared();
206 while (!element->IsUndefined()) {
207 JSFunction* func = JSFunction::cast(element);
208 // Grab element before code replacement as ReplaceCode alters the list.
209 element = func->next_function_link();
210 if (func->code() == code) {
211 func->ReplaceCode(shared->code());
212 }
213 }
199 214
200 if (FLAG_trace_deopt) { 215 if (FLAG_trace_deopt) {
201 PrintF("[forced deoptimization: "); 216 PrintF("[forced deoptimization: ");
202 function->PrintName(); 217 function->PrintName();
203 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); 218 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
204 } 219 }
205 } 220 }
206 221
207 222
208 static const byte kJnsInstruction = 0x79; 223 static const byte kJnsInstruction = 0x79;
(...skipping 114 matching lines...)
323 iterator.Next(); // Drop JS frames count. 338 iterator.Next(); // Drop JS frames count.
324 ASSERT(count == 1); 339 ASSERT(count == 1);
325 USE(count); 340 USE(count);
326 341
327 opcode = static_cast<Translation::Opcode>(iterator.Next()); 342 opcode = static_cast<Translation::Opcode>(iterator.Next());
328 USE(opcode); 343 USE(opcode);
329 ASSERT(Translation::JS_FRAME == opcode); 344 ASSERT(Translation::JS_FRAME == opcode);
330 unsigned node_id = iterator.Next(); 345 unsigned node_id = iterator.Next();
331 USE(node_id); 346 USE(node_id);
332 ASSERT(node_id == ast_id); 347 ASSERT(node_id == ast_id);
333 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next())); 348 int closure_id = iterator.Next();
334 USE(function); 349 USE(closure_id);
335 ASSERT(function == function_); 350 ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
336 unsigned height = iterator.Next(); 351 unsigned height = iterator.Next();
337 unsigned height_in_bytes = height * kPointerSize; 352 unsigned height_in_bytes = height * kPointerSize;
338 USE(height_in_bytes); 353 USE(height_in_bytes);
339 354
340 unsigned fixed_size = ComputeFixedSize(function_); 355 unsigned fixed_size = ComputeFixedSize(function_);
341 unsigned input_frame_size = input_->GetFrameSize(); 356 unsigned input_frame_size = input_->GetFrameSize();
342 ASSERT(fixed_size + height_in_bytes == input_frame_size); 357 ASSERT(fixed_size + height_in_bytes == input_frame_size);
343 358
344 unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize; 359 unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
345 unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value(); 360 unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
(...skipping 83 matching lines...)
429 // Set up the frame pointer and the context pointer. 444 // Set up the frame pointer and the context pointer.
430 output_[0]->SetRegister(ebp.code(), input_->GetRegister(ebp.code())); 445 output_[0]->SetRegister(ebp.code(), input_->GetRegister(ebp.code()));
431 output_[0]->SetRegister(esi.code(), input_->GetRegister(esi.code())); 446 output_[0]->SetRegister(esi.code(), input_->GetRegister(esi.code()));
432 447
433 unsigned pc_offset = data->OsrPcOffset()->value(); 448 unsigned pc_offset = data->OsrPcOffset()->value();
434 uint32_t pc = reinterpret_cast<uint32_t>( 449 uint32_t pc = reinterpret_cast<uint32_t>(
435 optimized_code_->entry() + pc_offset); 450 optimized_code_->entry() + pc_offset);
436 output_[0]->SetPc(pc); 451 output_[0]->SetPc(pc);
437 } 452 }
438 Code* continuation = 453 Code* continuation =
439 function->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR); 454 function_->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
440 output_[0]->SetContinuation( 455 output_[0]->SetContinuation(
441 reinterpret_cast<uint32_t>(continuation->entry())); 456 reinterpret_cast<uint32_t>(continuation->entry()));
442 457
443 if (FLAG_trace_osr) { 458 if (FLAG_trace_osr) {
444 PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ", 459 PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
445 ok ? "finished" : "aborted", 460 ok ? "finished" : "aborted",
446 reinterpret_cast<intptr_t>(function)); 461 reinterpret_cast<intptr_t>(function_));
447 function->PrintName(); 462 function_->PrintName();
448 PrintF(" => pc=0x%0x]\n", output_[0]->GetPc()); 463 PrintF(" => pc=0x%0x]\n", output_[0]->GetPc());
449 } 464 }
450 } 465 }
451 466
452 467
453 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, 468 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
454 int frame_index) { 469 int frame_index) {
455 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); 470 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
456 unsigned height = iterator->Next(); 471 unsigned height = iterator->Next();
457 unsigned height_in_bytes = height * kPointerSize; 472 unsigned height_in_bytes = height * kPointerSize;
(...skipping 197 matching lines...)
655 uint32_t pc = reinterpret_cast<uint32_t>( 670 uint32_t pc = reinterpret_cast<uint32_t>(
656 construct_stub->instruction_start() + 671 construct_stub->instruction_start() +
657 isolate_->heap()->construct_stub_deopt_pc_offset()->value()); 672 isolate_->heap()->construct_stub_deopt_pc_offset()->value());
658 output_frame->SetPc(pc); 673 output_frame->SetPc(pc);
659 } 674 }
660 675
661 676
662 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, 677 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
663 int frame_index) { 678 int frame_index) {
664 int node_id = iterator->Next(); 679 int node_id = iterator->Next();
665 JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); 680 JSFunction* function;
681 if (frame_index != 0) {
682 function = JSFunction::cast(ComputeLiteral(iterator->Next()));
683 } else {
684 int closure_id = iterator->Next();
685 USE(closure_id);
686 ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
687 function = function_;
688 }
666 unsigned height = iterator->Next(); 689 unsigned height = iterator->Next();
667 unsigned height_in_bytes = height * kPointerSize; 690 unsigned height_in_bytes = height * kPointerSize;
668 if (FLAG_trace_deopt) { 691 if (FLAG_trace_deopt) {
669 PrintF(" translating "); 692 PrintF(" translating ");
670 function->PrintName(); 693 function->PrintName();
671 PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes); 694 PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
672 } 695 }
673 696
674 // The 'fixed' part of the frame consists of the incoming parameters and 697 // The 'fixed' part of the frame consists of the incoming parameters and
675 // the part described by JavaScriptFrameConstants. 698 // the part described by JavaScriptFrameConstants.
(...skipping 335 matching lines...)
1011 } 1034 }
1012 __ bind(&done); 1035 __ bind(&done);
1013 } 1036 }
1014 1037
1015 #undef __ 1038 #undef __
1016 1039
1017 1040
1018 } } // namespace v8::internal 1041 } } // namespace v8::internal
1019 1042
1020 #endif // V8_TARGET_ARCH_IA32 1043 #endif // V8_TARGET_ARCH_IA32
