OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 259 matching lines...)
270 if (FLAG_code_comments && instr->HasInterestingComment(this)) { | 270 if (FLAG_code_comments && instr->HasInterestingComment(this)) { |
271 Comment(";;; <@%d,#%d> %s", | 271 Comment(";;; <@%d,#%d> %s", |
272 current_instruction_, | 272 current_instruction_, |
273 instr->hydrogen_value()->id(), | 273 instr->hydrogen_value()->id(), |
274 instr->Mnemonic()); | 274 instr->Mnemonic()); |
275 } | 275 } |
276 | 276 |
277 instr->CompileToNative(this); | 277 instr->CompileToNative(this); |
278 } | 278 } |
279 EnsureSpaceForLazyDeopt(); | 279 EnsureSpaceForLazyDeopt(); |
| 280 last_lazy_deopt_pc_ = masm()->pc_offset(); |
280 return !is_aborted(); | 281 return !is_aborted(); |
281 } | 282 } |
282 | 283 |
283 | 284 |
284 bool LCodeGen::GenerateDeferredCode() { | 285 bool LCodeGen::GenerateDeferredCode() { |
285 ASSERT(is_generating()); | 286 ASSERT(is_generating()); |
286 if (deferred_.length() > 0) { | 287 if (deferred_.length() > 0) { |
287 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 288 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
288 LDeferredCode* code = deferred_[i]; | 289 LDeferredCode* code = deferred_[i]; |
289 Comment(";;; <@%d,#%d> " | 290 Comment(";;; <@%d,#%d> " |
(...skipping 379 matching lines...)
669 TargetAddressStorageMode storage_mode) { | 670 TargetAddressStorageMode storage_mode) { |
670 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode); | 671 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode); |
671 } | 672 } |
672 | 673 |
673 | 674 |
674 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 675 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
675 RelocInfo::Mode mode, | 676 RelocInfo::Mode mode, |
676 LInstruction* instr, | 677 LInstruction* instr, |
677 SafepointMode safepoint_mode, | 678 SafepointMode safepoint_mode, |
678 TargetAddressStorageMode storage_mode) { | 679 TargetAddressStorageMode storage_mode) { |
| 680 EnsureSpaceForLazyDeopt(); |
679 ASSERT(instr != NULL); | 681 ASSERT(instr != NULL); |
680 // Block literal pool emission to ensure nop indicating no inlined smi code | 682 // Block literal pool emission to ensure nop indicating no inlined smi code |
681 // is in the correct position. | 683 // is in the correct position. |
682 Assembler::BlockConstPoolScope block_const_pool(masm()); | 684 Assembler::BlockConstPoolScope block_const_pool(masm()); |
683 LPointerMap* pointers = instr->pointer_map(); | 685 LPointerMap* pointers = instr->pointer_map(); |
684 RecordPosition(pointers->position()); | 686 RecordPosition(pointers->position()); |
685 __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode); | 687 __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode); |
686 RecordSafepointWithLazyDeopt(instr, safepoint_mode); | 688 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
687 | 689 |
688 // Signal that we don't inline smi code before these stubs in the | 690 // Signal that we don't inline smi code before these stubs in the |
(...skipping 4933 matching lines...)
5622 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | 5624 if (current_pc < last_lazy_deopt_pc_ + patch_size) { |
5623 // Block literal pool emission for duration of padding. | 5625 // Block literal pool emission for duration of padding. |
5624 Assembler::BlockConstPoolScope block_const_pool(masm()); | 5626 Assembler::BlockConstPoolScope block_const_pool(masm()); |
5625 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; | 5627 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; |
5626 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | 5628 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); |
5627 while (padding_size > 0) { | 5629 while (padding_size > 0) { |
5628 __ nop(); | 5630 __ nop(); |
5629 padding_size -= Assembler::kInstrSize; | 5631 padding_size -= Assembler::kInstrSize; |
5630 } | 5632 } |
5631 } | 5633 } |
5632 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
5633 } | 5634 } |
5634 | 5635 |
5635 | 5636 |
5636 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5637 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
5637 EnsureSpaceForLazyDeopt(); | 5638 EnsureSpaceForLazyDeopt(); |
| 5639 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5638 ASSERT(instr->HasEnvironment()); | 5640 ASSERT(instr->HasEnvironment()); |
5639 LEnvironment* env = instr->environment(); | 5641 LEnvironment* env = instr->environment(); |
5640 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5642 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
5641 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5643 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
5642 } | 5644 } |
5643 | 5645 |
5644 | 5646 |
5645 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5647 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
5646 if (instr->hydrogen_value()->IsSoftDeoptimize()) { | 5648 if (instr->hydrogen_value()->IsSoftDeoptimize()) { |
5647 SoftDeoptimize(instr->environment()); | 5649 SoftDeoptimize(instr->environment()); |
(...skipping 37 matching lines...)
5685 if (instr->hydrogen()->is_function_entry()) { | 5687 if (instr->hydrogen()->is_function_entry()) { |
5686 // Perform stack overflow check. | 5688 // Perform stack overflow check. |
5687 Label done; | 5689 Label done; |
5688 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5690 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
5689 __ cmp(sp, Operand(ip)); | 5691 __ cmp(sp, Operand(ip)); |
5690 __ b(hs, &done); | 5692 __ b(hs, &done); |
5691 StackCheckStub stub; | 5693 StackCheckStub stub; |
5692 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); | 5694 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); |
5693 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5695 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
5694 EnsureSpaceForLazyDeopt(); | 5696 EnsureSpaceForLazyDeopt(); |
| 5697 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5695 __ bind(&done); | 5698 __ bind(&done); |
5696 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5699 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
5697 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5700 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
5698 } else { | 5701 } else { |
5699 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5702 ASSERT(instr->hydrogen()->is_backwards_branch()); |
5700 // Perform stack overflow check if this goto needs it before jumping. | 5703 // Perform stack overflow check if this goto needs it before jumping. |
5701 DeferredStackCheck* deferred_stack_check = | 5704 DeferredStackCheck* deferred_stack_check = |
5702 new(zone()) DeferredStackCheck(this, instr); | 5705 new(zone()) DeferredStackCheck(this, instr); |
5703 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5706 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
5704 __ cmp(sp, Operand(ip)); | 5707 __ cmp(sp, Operand(ip)); |
5705 __ b(lo, deferred_stack_check->entry()); | 5708 __ b(lo, deferred_stack_check->entry()); |
5706 EnsureSpaceForLazyDeopt(); | 5709 EnsureSpaceForLazyDeopt(); |
| 5710 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5707 __ bind(instr->done_label()); | 5711 __ bind(instr->done_label()); |
5708 deferred_stack_check->SetExit(instr->done_label()); | 5712 deferred_stack_check->SetExit(instr->done_label()); |
5709 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5713 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
5710 // Don't record a deoptimization index for the safepoint here. | 5714 // Don't record a deoptimization index for the safepoint here. |
5711 // This will be done explicitly when emitting call and the safepoint in | 5715 // This will be done explicitly when emitting call and the safepoint in |
5712 // the deferred code. | 5716 // the deferred code. |
5713 } | 5717 } |
5714 } | 5718 } |
5715 | 5719 |
5716 | 5720 |
(...skipping 104 matching lines...)
5821 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5825 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
5822 __ ldr(result, FieldMemOperand(scratch, | 5826 __ ldr(result, FieldMemOperand(scratch, |
5823 FixedArray::kHeaderSize - kPointerSize)); | 5827 FixedArray::kHeaderSize - kPointerSize)); |
5824 __ bind(&done); | 5828 __ bind(&done); |
5825 } | 5829 } |
5826 | 5830 |
5827 | 5831 |
5828 #undef __ | 5832 #undef __ |
5829 | 5833 |
5830 } } // namespace v8::internal | 5834 } } // namespace v8::internal |