Chromium Code Reviews
Unified Diff: src/mips/lithium-codegen-mips.cc

Issue 78283002: Restore saved caller FP registers on stub failure (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Port to ia32, arm, and mips (created 7 years, 1 month ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 80 matching lines...)
   info()->CommitDependencies(code);
 }
 
 
 void LChunkBuilder::Abort(BailoutReason reason) {
   info()->set_bailout_reason(reason);
   status_ = ABORTED;
 }
 
 
+void LCodeGen::SaveCallerDoubles() {
+  ASSERT(info()->saves_caller_doubles());
+  ASSERT(NeedsEagerFrame());
+  Comment(";;; Save clobbered callee double registers");
+  int count = 0;
+  BitVector* doubles = chunk()->allocated_double_registers();
+  BitVector::Iterator save_iterator(doubles);
+  while (!save_iterator.Done()) {
+    __ sdc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
+            MemOperand(sp, count * kDoubleSize));
+    save_iterator.Advance();
+    count++;
+  }
+}
+
+
+void LCodeGen::RestoreCallerDoubles() {
+  ASSERT(info()->saves_caller_doubles());
+  ASSERT(NeedsEagerFrame());
+  Comment(";;; Restore clobbered callee double registers");
+  BitVector* doubles = chunk()->allocated_double_registers();
+  BitVector::Iterator save_iterator(doubles);
+  int count = 0;
+  while (!save_iterator.Done()) {
+    __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
+            MemOperand(sp, count * kDoubleSize));
+    save_iterator.Advance();
+    count++;
+  }
+}
+
+
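
The two helpers added above are the heart of the patch: the save loop in the prologue and the restore loops on the return and stub-failure paths must agree on where each double register was spilled. The slot is determined by the register's position in the allocation bit vector, not by its register number. A minimal standalone sketch of that layout rule (not V8 code; the register set here is made up for illustration):

    // Sketch: the n-th *allocated* double register (in bit-vector order)
    // lives at sp + n * kDoubleSize, so SaveCallerDoubles (sdc1) and
    // RestoreCallerDoubles (ldc1) stay in agreement as long as both walk
    // the same iterator.
    #include <cstdio>
    #include <vector>

    constexpr int kDoubleSize = 8;  // bytes per double-precision FPU register

    int main() {
      std::vector<int> allocated = {0, 4, 12};  // hypothetical: f0, f4, f12
      int count = 0;
      for (int reg : allocated) {
        // sdc1/ldc1 would address MemOperand(sp, count * kDoubleSize).
        std::printf("f%d <-> [sp + %d]\n", reg, count * kDoubleSize);
        count++;
      }
      return 0;
    }

Because the slot index comes from iteration order, any change to the allocation bit vector between save and restore would silently corrupt the reloaded values; keeping both loops in one pair of helpers avoids that drift.
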
 bool LCodeGen::GeneratePrologue() {
   ASSERT(is_generating());
 
   if (info()->IsOptimizing()) {
     ProfileEntryHookStub::MaybeCallEntryHook(masm_);
 
 #ifdef DEBUG
     if (strlen(FLAG_stop_at) > 0 &&
         info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
       __ stop("stop_at");
(...skipping 42 matching lines...)
       __ sw(a1, MemOperand(a0, 2 * kPointerSize));
       __ Branch(&loop, ne, a0, Operand(sp));
       __ pop(a1);
       __ pop(a0);
     } else {
       __ Subu(sp, sp, Operand(slots * kPointerSize));
     }
   }
 
   if (info()->saves_caller_doubles()) {
-    Comment(";;; Save clobbered callee double registers");
-    int count = 0;
-    BitVector* doubles = chunk()->allocated_double_registers();
-    BitVector::Iterator save_iterator(doubles);
-    while (!save_iterator.Done()) {
-      __ sdc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
-              MemOperand(sp, count * kDoubleSize));
-      save_iterator.Advance();
-      count++;
-    }
+    SaveCallerDoubles();
   }
 
   // Possibly allocate a local context.
   int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
   if (heap_slots > 0) {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is in a1.
     __ push(a1);
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
       FastNewContextStub stub(heap_slots);
(...skipping 108 matching lines...)
     Address entry = deopt_jump_table_[i].address;
     Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
     int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
     if (id == Deoptimizer::kNotDeoptimizationEntry) {
       Comment(";;; jump table entry %d.", i);
     } else {
       Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
     }
     __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry)));
     if (deopt_jump_table_[i].needs_frame) {
+      ASSERT(!info()->saves_caller_doubles());
       if (needs_frame.is_bound()) {
         __ Branch(&needs_frame);
       } else {
         __ bind(&needs_frame);
         __ MultiPush(cp.bit() | fp.bit() | ra.bit());
         // This variant of deopt can only be used with stubs. Since we don't
         // have a function pointer to install in the stack frame that we're
         // building, install a special marker there instead.
         ASSERT(info()->IsStub());
         __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
         __ push(scratch0());
         __ Addu(fp, sp, Operand(2 * kPointerSize));
         __ Call(t9);
       }
     } else {
+      if (info()->saves_caller_doubles()) {
+        ASSERT(info()->IsStub());
+        RestoreCallerDoubles();
+      }
       __ Call(t9);
     }
   }
   __ RecordComment("]");
 
   // The deoptimization jump table is the last part of the instruction
   // sequence. Mark the generated code as done unless we bailed out.
   if (!is_aborted()) status_ = DONE;
   return !is_aborted();
 }
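
A note on the two additions in this hunk: together they encode the invariant the patch relies on. A jump-table entry either still has to build a stub frame (needs_frame) or has caller doubles to restore, never both, because code that saves caller doubles always builds an eager frame in its prologue. A simplified, runnable model of the dispatch, with hypothetical names (EmitJumpTableEntry is not a V8 function):

    #include <cassert>
    #include <cstdio>

    // Hypothetical, simplified model of one deopt jump-table entry.
    void EmitJumpTableEntry(bool needs_frame, bool saves_caller_doubles) {
      if (needs_frame) {
        // New invariant from this patch: frame-building entries are only
        // reached by code that did not save caller doubles.
        assert(!saves_caller_doubles);
        std::puts("push cp/fp/ra + STUB marker; call deopt entry");
      } else {
        if (saves_caller_doubles) {
          // Stub-failure path: reload the spilled FPU registers before
          // the deoptimizer walks the frame.
          std::puts("RestoreCallerDoubles()");
        }
        std::puts("call deopt entry");
      }
    }

    int main() {
      EmitJumpTableEntry(true, false);   // frame-less code
      EmitJumpTableEntry(false, true);   // stub with saved caller doubles
      return 0;
    }
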
(...skipping 453 matching lines...)
   if (info()->ShouldTrapOnDeopt()) {
     Label skip;
     if (condition != al) {
       __ Branch(&skip, NegateCondition(condition), src1, src2);
     }
     __ stop("trap_on_deopt");
     __ bind(&skip);
   }
 
   ASSERT(info()->IsStub() || frame_is_built_);
-  if (condition == al && frame_is_built_) {
+  // Go through jump table if we need to handle condition, build frame, or
+  // restore caller doubles.
+  if (condition == al && frame_is_built_ &&
+      !info()->saves_caller_doubles()) {
     __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
   } else {
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
     if (deopt_jump_table_.is_empty() ||
         (deopt_jump_table_.last().address != entry) ||
         (deopt_jump_table_.last().bailout_type != bailout_type) ||
         (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
       Deoptimizer::JumpTableEntry table_entry(entry,
                                               bailout_type,
(...skipping 1999 matching lines...)
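
The rewritten guard above narrows the direct-call fast path. Spelled out as a predicate:

    direct_call = (condition == al) && frame_is_built_ && !info()->saves_caller_doubles();

Any deopt that is conditional, runs without a frame, or has caller FP registers spilled on the stack is now routed through the jump table, where the frame-building and RestoreCallerDoubles() paths shown in GenerateDeoptJumpTable() above can run before the deopt entry is called.
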
   if (FLAG_trace && info()->IsOptimizing()) {
     // Push the return value on the stack as the parameter.
     // Runtime::TraceExit returns its parameter in v0. We're leaving the code
     // managed by the register allocator and tearing down the frame, it's
     // safe to write to the context register.
     __ push(v0);
     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
     __ CallRuntime(Runtime::kTraceExit, 1);
   }
   if (info()->saves_caller_doubles()) {
-    ASSERT(NeedsEagerFrame());
-    BitVector* doubles = chunk()->allocated_double_registers();
-    BitVector::Iterator save_iterator(doubles);
-    int count = 0;
-    while (!save_iterator.Done()) {
-      __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
-              MemOperand(sp, count * kDoubleSize));
-      save_iterator.Advance();
-      count++;
-    }
+    RestoreCallerDoubles();
   }
   int no_frame_start = -1;
   if (NeedsEagerFrame()) {
     __ mov(sp, fp);
     no_frame_start = masm_->pc_offset();
     __ Pop(ra, fp);
   }
   if (instr->has_constant_parameter_count()) {
     int parameter_count = ToInteger32(instr->constant_parameter_count());
     int32_t sp_delta = (parameter_count + 1) * kPointerSize;
(...skipping 3085 matching lines...)
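
For the teardown arithmetic at the end of the hunk above: sp_delta pops the declared parameters plus one extra slot for the receiver. On MIPS32, kPointerSize is 4, so for example a function with two parameters pops

    sp_delta = (2 + 1) * kPointerSize = 3 * 4 = 12 bytes

when it returns.
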
   __ Subu(scratch, result, scratch);
   __ lw(result, FieldMemOperand(scratch,
                                 FixedArray::kHeaderSize - kPointerSize));
   __ bind(&done);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal