| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 253 matching lines...) |
| 264 if (FLAG_code_comments && instr->HasInterestingComment(this)) { | 264 if (FLAG_code_comments && instr->HasInterestingComment(this)) { |
| 265 Comment(";;; <@%d,#%d> %s", | 265 Comment(";;; <@%d,#%d> %s", |
| 266 current_instruction_, | 266 current_instruction_, |
| 267 instr->hydrogen_value()->id(), | 267 instr->hydrogen_value()->id(), |
| 268 instr->Mnemonic()); | 268 instr->Mnemonic()); |
| 269 } | 269 } |
| 270 | 270 |
| 271 instr->CompileToNative(this); | 271 instr->CompileToNative(this); |
| 272 } | 272 } |
| 273 EnsureSpaceForLazyDeopt(); | 273 EnsureSpaceForLazyDeopt(); |
| | 274 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 274 return !is_aborted(); | 275 return !is_aborted(); |
| 275 } | 276 } |
| 276 | 277 |
| 277 | 278 |
| 278 bool LCodeGen::GenerateDeferredCode() { | 279 bool LCodeGen::GenerateDeferredCode() { |
| 279 ASSERT(is_generating()); | 280 ASSERT(is_generating()); |
| 280 if (deferred_.length() > 0) { | 281 if (deferred_.length() > 0) { |
| 281 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 282 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
| 282 LDeferredCode* code = deferred_[i]; | 283 LDeferredCode* code = deferred_[i]; |
| 283 Comment(";;; <@%d,#%d> " | 284 Comment(";;; <@%d,#%d> " |
| (...skipping 5324 matching lines...) |
| 5608 int current_pc = masm()->pc_offset(); | 5609 int current_pc = masm()->pc_offset(); |
| 5609 int patch_size = Deoptimizer::patch_size(); | 5610 int patch_size = Deoptimizer::patch_size(); |
| 5610 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | 5611 if (current_pc < last_lazy_deopt_pc_ + patch_size) { |
| 5611 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; | 5612 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; |
| 5612 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | 5613 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); |
| 5613 while (padding_size > 0) { | 5614 while (padding_size > 0) { |
| 5614 __ nop(); | 5615 __ nop(); |
| 5615 padding_size -= Assembler::kInstrSize; | 5616 padding_size -= Assembler::kInstrSize; |
| 5616 } | 5617 } |
| 5617 } | 5618 } |
| 5618 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5619 } | 5619 } |
| 5620 | 5620 |
| 5621 | 5621 |
| 5622 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5622 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 5623 EnsureSpaceForLazyDeopt(); | 5623 EnsureSpaceForLazyDeopt(); |
| | 5624 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5624 ASSERT(instr->HasEnvironment()); | 5625 ASSERT(instr->HasEnvironment()); |
| 5625 LEnvironment* env = instr->environment(); | 5626 LEnvironment* env = instr->environment(); |
| 5626 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5627 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5627 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5628 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5628 } | 5629 } |
| 5629 | 5630 |
| 5630 | 5631 |
| 5631 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5632 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 5632 if (instr->hydrogen_value()->IsSoftDeoptimize()) { | 5633 if (instr->hydrogen_value()->IsSoftDeoptimize()) { |
| 5633 SoftDeoptimize(instr->environment(), zero_reg, Operand(zero_reg)); | 5634 SoftDeoptimize(instr->environment(), zero_reg, Operand(zero_reg)); |
| (...skipping 35 matching lines...) |
| 5669 // There is no LLazyBailout instruction for stack-checks. We have to | 5670 // There is no LLazyBailout instruction for stack-checks. We have to |
| 5670 // prepare for lazy deoptimization explicitly here. | 5671 // prepare for lazy deoptimization explicitly here. |
| 5671 if (instr->hydrogen()->is_function_entry()) { | 5672 if (instr->hydrogen()->is_function_entry()) { |
| 5672 // Perform stack overflow check. | 5673 // Perform stack overflow check. |
| 5673 Label done; | 5674 Label done; |
| 5674 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 5675 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 5675 __ Branch(&done, hs, sp, Operand(at)); | 5676 __ Branch(&done, hs, sp, Operand(at)); |
| 5676 StackCheckStub stub; | 5677 StackCheckStub stub; |
| 5677 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5678 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5678 EnsureSpaceForLazyDeopt(); | 5679 EnsureSpaceForLazyDeopt(); |
| | 5680 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5679 __ bind(&done); | 5681 __ bind(&done); |
| 5680 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5682 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5681 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5683 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5682 } else { | 5684 } else { |
| 5683 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5685 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 5684 // Perform stack overflow check if this goto needs it before jumping. | 5686 // Perform stack overflow check if this goto needs it before jumping. |
| 5685 DeferredStackCheck* deferred_stack_check = | 5687 DeferredStackCheck* deferred_stack_check = |
| 5686 new(zone()) DeferredStackCheck(this, instr); | 5688 new(zone()) DeferredStackCheck(this, instr); |
| 5687 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 5689 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 5688 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at)); | 5690 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at)); |
| 5689 EnsureSpaceForLazyDeopt(); | 5691 EnsureSpaceForLazyDeopt(); |
| | 5692 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5690 __ bind(instr->done_label()); | 5693 __ bind(instr->done_label()); |
| 5691 deferred_stack_check->SetExit(instr->done_label()); | 5694 deferred_stack_check->SetExit(instr->done_label()); |
| 5692 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5695 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5693 // Don't record a deoptimization index for the safepoint here. | 5696 // Don't record a deoptimization index for the safepoint here. |
| 5694 // This will be done explicitly when emitting call and the safepoint in | 5697 // This will be done explicitly when emitting call and the safepoint in |
| 5695 // the deferred code. | 5698 // the deferred code. |
| 5696 } | 5699 } |
| 5697 } | 5700 } |
| 5698 | 5701 |
| 5699 | 5702 |
| (...skipping 102 matching lines...) |
| 5802 __ Subu(scratch, result, scratch); | 5805 __ Subu(scratch, result, scratch); |
| 5803 __ lw(result, FieldMemOperand(scratch, | 5806 __ lw(result, FieldMemOperand(scratch, |
| 5804 FixedArray::kHeaderSize - kPointerSize)); | 5807 FixedArray::kHeaderSize - kPointerSize)); |
| 5805 __ bind(&done); | 5808 __ bind(&done); |
| 5806 } | 5809 } |
| 5807 | 5810 |
| 5808 | 5811 |
| 5809 #undef __ | 5812 #undef __ |
| 5810 | 5813 |
| 5811 } } // namespace v8::internal | 5814 } } // namespace v8::internal |
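
For readers skimming the diff: EnsureSpaceForLazyDeopt() pads with nops until the code emitted since the last recorded lazy-deopt point is at least one deopt patch size long, and this patch moves the `last_lazy_deopt_pc_ = masm()->pc_offset();` bookkeeping out of that helper and into each call site. Below is a minimal standalone sketch of that arithmetic, assuming illustrative stand-in values: `FakeAssembler`, `kInstrSize`, and `kPatchSize` here are not the real V8/MIPS definitions.

```cpp
// Standalone sketch of the nop-padding logic shown in the diff above.
// kInstrSize and kPatchSize are assumed example values, not V8's actual ones,
// and the assembler is reduced to a simple byte counter.
#include <cassert>
#include <cstdio>

namespace {

const int kInstrSize = 4;               // assumed MIPS instruction width
const int kPatchSize = 4 * kInstrSize;  // assumed size of the lazy-deopt call patch

struct FakeAssembler {
  int pc = 0;
  void nop() { pc += kInstrSize; }      // each nop advances the pc by one instruction
  int pc_offset() const { return pc; }
};

struct CodeGen {
  FakeAssembler masm;
  int last_lazy_deopt_pc = 0;

  // Emits nops until at least kPatchSize bytes separate the current pc from the
  // last recorded lazy-deopt point. As in the patch, the caller is now
  // responsible for updating last_lazy_deopt_pc afterwards.
  void EnsureSpaceForLazyDeopt() {
    int current_pc = masm.pc_offset();
    if (current_pc < last_lazy_deopt_pc + kPatchSize) {
      int padding_size = last_lazy_deopt_pc + kPatchSize - current_pc;
      assert(padding_size % kInstrSize == 0);
      while (padding_size > 0) {
        masm.nop();
        padding_size -= kInstrSize;
      }
    }
  }
};

}  // namespace

int main() {
  CodeGen cgen;
  cgen.masm.nop();                                  // pretend one instruction was emitted
  cgen.EnsureSpaceForLazyDeopt();                   // pads up to kPatchSize bytes
  cgen.last_lazy_deopt_pc = cgen.masm.pc_offset();  // call-site bookkeeping, as in the patch
  std::printf("pc after padding: %d\n", cgen.masm.pc_offset());
  return 0;
}
```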