Chromium Code Reviews

Unified Diff: src/x64/lithium-codegen-x64.cc

Issue 78283002: Restore saved caller FP registers on stub failure (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Port to ia32, arm, and mips (created 7 years, 1 month ago)
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 93 matching lines...)
 #ifdef _MSC_VER
 void LCodeGen::MakeSureStackPagesMapped(int offset) {
   const int kPageSize = 4 * KB;
   for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
     __ movq(Operand(rsp, offset), rax);
   }
 }
 #endif


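As context for the hunk above: on Windows the stack grows one guard page at a time, so a single large `subq rsp` can jump past the guard page; `MakeSureStackPagesMapped` touches one word per 4 KB page of the reserved area so each page gets committed. A minimal standalone sketch of which offsets that loop writes (not part of the patch; `KB = 1024` and the 9000-byte reservation are illustrative assumptions):

#include <cstdio>

int main() {
  const int KB = 1024;        // assumed to match V8's size constant
  const int kPageSize = 4 * KB;
  int offset = 9000;          // example: bytes reserved by `subq rsp, ...`
  // Mirrors the loop in MakeSureStackPagesMapped: touch one word per page,
  // walking down from the top of the reserved area toward rsp.
  for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
    std::printf("touch [rsp + %d]\n", offset);  // __ movq(Operand(rsp, offset), rax)
  }
  return 0;
}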
+void LCodeGen::SaveCallerDoubles() {
+  ASSERT(info()->saves_caller_doubles());
+  ASSERT(NeedsEagerFrame());
+  Comment(";;; Save clobbered callee double registers");
+  int count = 0;
+  BitVector* doubles = chunk()->allocated_double_registers();
+  BitVector::Iterator save_iterator(doubles);
+  while (!save_iterator.Done()) {
+    __ movsd(MemOperand(rsp, count * kDoubleSize),
+             XMMRegister::FromAllocationIndex(save_iterator.Current()));
+    save_iterator.Advance();
+    count++;
+  }
+}
+
+
+void LCodeGen::RestoreCallerDoubles() {
+  ASSERT(info()->saves_caller_doubles());
+  ASSERT(NeedsEagerFrame());
+  Comment(";;; Restore clobbered callee double registers");
+  BitVector* doubles = chunk()->allocated_double_registers();
+  BitVector::Iterator save_iterator(doubles);
+  int count = 0;
+  while (!save_iterator.Done()) {
+    __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
+             MemOperand(rsp, count * kDoubleSize));
+    save_iterator.Advance();
+    count++;
+  }
+}
+
+
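The two new helpers are exact mirrors: both iterate `allocated_double_registers()` in the same order, so the register saved to the slot at `count * kDoubleSize` above `rsp` is the one restored from it. A standalone sketch of that slot assignment (not part of the patch; the `std::vector<int>` of allocation indices is a hypothetical stand-in for V8's `BitVector`):

#include <cstdio>
#include <vector>

int main() {
  const int kDoubleSize = 8;  // bytes per XMM spill slot, as in V8
  // Hypothetical stand-in for chunk()->allocated_double_registers():
  // the allocation indices of the double registers actually in use.
  std::vector<int> allocated = {0, 2, 5};
  int count = 0;
  for (int index : allocated) {
    // SaveCallerDoubles writes xmm<index> to [rsp + count * kDoubleSize];
    // RestoreCallerDoubles reads the same slot back, so both helpers must
    // walk the register set in the same order.
    std::printf("xmm%d <-> [rsp + %d]\n", index, count * kDoubleSize);
    count++;
  }
  return 0;
}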
 bool LCodeGen::GeneratePrologue() {
   ASSERT(is_generating());

   if (info()->IsOptimizing()) {
     ProfileEntryHookStub::MaybeCallEntryHook(masm_);

 #ifdef DEBUG
     if (strlen(FLAG_stop_at) > 0 &&
         info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
       __ int3();
(...skipping 42 matching lines...)
       __ j(not_zero, &loop);
       __ pop(rax);
     } else {
       __ subq(rsp, Immediate(slots * kPointerSize));
 #ifdef _MSC_VER
       MakeSureStackPagesMapped(slots * kPointerSize);
 #endif
     }

     if (info()->saves_caller_doubles()) {
-      Comment(";;; Save clobbered callee double registers");
-      int count = 0;
-      BitVector* doubles = chunk()->allocated_double_registers();
-      BitVector::Iterator save_iterator(doubles);
-      while (!save_iterator.Done()) {
-        __ movsd(MemOperand(rsp, count * kDoubleSize),
-                 XMMRegister::FromAllocationIndex(save_iterator.Current()));
-        save_iterator.Advance();
-        count++;
-      }
+      SaveCallerDoubles();
     }
   }

   // Possibly allocate a local context.
   int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
   if (heap_slots > 0) {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is still in rdi.
     __ push(rdi);
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
(...skipping 58 matching lines...)
     __ bind(&jump_table_[i].label);
     Address entry = jump_table_[i].address;
     Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
     int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
     if (id == Deoptimizer::kNotDeoptimizationEntry) {
       Comment(";;; jump table entry %d.", i);
     } else {
       Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
     }
     if (jump_table_[i].needs_frame) {
+      ASSERT(!info()->saves_caller_doubles());
       __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
       if (needs_frame.is_bound()) {
         __ jmp(&needs_frame);
       } else {
         __ bind(&needs_frame);
         __ movq(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset));
         __ push(rbp);
         __ movq(rbp, rsp);
         __ push(rsi);
         // This variant of deopt can only be used with stubs. Since we don't
         // have a function pointer to install in the stack frame that we're
         // building, install a special marker there instead.
         ASSERT(info()->IsStub());
         __ Move(rsi, Smi::FromInt(StackFrame::STUB));
         __ push(rsi);
         __ movq(rsi, MemOperand(rsp, kPointerSize));
         __ call(kScratchRegister);
       }
     } else {
+      if (info()->saves_caller_doubles()) {
+        ASSERT(info()->IsStub());
+        RestoreCallerDoubles();
+      }
       __ call(entry, RelocInfo::RUNTIME_ENTRY);
     }
   }
   return !is_aborted();
 }


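This hunk is the heart of the fix. On the `needs_frame` path the emitter hand-builds a minimal stub frame: it saves `rbp`, repoints `rbp`, pushes the context, and pushes a `StackFrame::STUB` marker where a JS frame would hold the function pointer; the new `ASSERT(!info()->saves_caller_doubles())` records that this path never runs with caller doubles on the stack. Those cases take the frame-less path instead, which now calls `RestoreCallerDoubles()` before jumping to the deopt entry, so the saved FP registers are no longer lost on stub failure. A toy simulation of the hand-built frame layout (not part of the patch; the `std::vector<std::string>` stack is purely illustrative):

#include <cstdio>
#include <string>
#include <vector>

int main() {
  // Simulates the needs_frame sequence above:
  // push rbp; movq rbp, rsp; push rsi; push Smi(StackFrame::STUB).
  std::vector<std::string> stack;
  stack.push_back("saved rbp");                // __ push(rbp)
  // __ movq(rbp, rsp): rbp now points at the saved-rbp slot.
  stack.push_back("context (rsi)");            // __ push(rsi)
  stack.push_back("StackFrame::STUB marker");  // stands in for the function slot
  for (std::size_t i = 0; i < stack.size(); ++i) {
    std::printf("[rbp - %zu * kPointerSize] = %s\n", i, stack[i].c_str());
  }
  return 0;
}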
 bool LCodeGen::GenerateDeferredCode() {
   ASSERT(is_generating());
   if (deferred_.length() > 0) {
(...skipping 414 matching lines...)
   if (info()->ShouldTrapOnDeopt()) {
     Label done;
     if (cc != no_condition) {
       __ j(NegateCondition(cc), &done, Label::kNear);
     }
     __ int3();
     __ bind(&done);
   }

   ASSERT(info()->IsStub() || frame_is_built_);
-  if (cc == no_condition && frame_is_built_) {
+  // Go through jump table if we need to handle condition, build frame, or
+  // restore caller doubles.
+  if (cc == no_condition && frame_is_built_ &&
+      !info()->saves_caller_doubles()) {
     __ call(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
     if (jump_table_.is_empty() ||
         jump_table_.last().address != entry ||
         jump_table_.last().needs_frame != !frame_is_built_ ||
         jump_table_.last().bailout_type != bailout_type) {
       Deoptimizer::JumpTableEntry table_entry(entry,
                                               bailout_type,
(...skipping 1952 matching lines...)
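The updated predicate in `DeoptimizeIf` means a direct call to the deopt entry is only possible when nothing needs to happen on the way out: no condition to branch on, a frame already built, and no caller doubles to restore; every other combination routes through the jump table. A small truth-table sketch of that condition (not part of the patch; names are illustrative):

#include <cstdio>

// Mirrors the updated condition in DeoptimizeIf: a direct call is possible
// only with no condition, an existing frame, and no saved caller doubles.
bool CanCallDirectly(bool has_condition, bool frame_is_built,
                     bool saves_caller_doubles) {
  return !has_condition && frame_is_built && !saves_caller_doubles;
}

int main() {
  for (int c = 0; c < 2; ++c)
    for (int f = 0; f < 2; ++f)
      for (int d = 0; d < 2; ++d)
        std::printf("cond=%d frame=%d doubles=%d -> %s\n", c, f, d,
                    CanCallDirectly(c, f, d) ? "direct call" : "jump table");
  return 0;
}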
   if (FLAG_trace && info()->IsOptimizing()) {
     // Preserve the return value on the stack and rely on the runtime call
     // to return the value in the same register. We're leaving the code
     // managed by the register allocator and tearing down the frame, it's
     // safe to write to the context register.
     __ push(rax);
     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
     __ CallRuntime(Runtime::kTraceExit, 1);
   }
   if (info()->saves_caller_doubles()) {
-    ASSERT(NeedsEagerFrame());
-    BitVector* doubles = chunk()->allocated_double_registers();
-    BitVector::Iterator save_iterator(doubles);
-    int count = 0;
-    while (!save_iterator.Done()) {
-      __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
-               MemOperand(rsp, count * kDoubleSize));
-      save_iterator.Advance();
-      count++;
-    }
+    RestoreCallerDoubles();
   }
   int no_frame_start = -1;
   if (NeedsEagerFrame()) {
     __ movq(rsp, rbp);
     __ pop(rbp);
     no_frame_start = masm_->pc_offset();
   }
   if (instr->has_constant_parameter_count()) {
     __ Ret((ToInteger32(instr->constant_parameter_count()) + 1) * kPointerSize,
            rcx);
(...skipping 2984 matching lines...)
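In this return sequence the ordering matters: `RestoreCallerDoubles()` runs while `rsp` still points below the spill area reserved in the prologue, and only afterwards does `movq rsp, rbp` tear the frame (and with it the spill area) down. A toy model of that ordering constraint (not part of the patch; the booleans are illustrative):

#include <cassert>

// Toy model of the epilogue order in the return hunk above: restore the
// spilled doubles first, then discard the frame that holds them.
int main() {
  bool spill_area_valid = true;   // rsp still points below the saved doubles
  bool doubles_restored = false;

  // Step 1: RestoreCallerDoubles() -- only legal while the area is live.
  assert(spill_area_valid);
  doubles_restored = true;

  // Step 2: movq rsp, rbp; pop rbp -- the spill area is gone after this.
  spill_area_valid = false;

  assert(doubles_restored && !spill_area_valid);
  return 0;
}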
                                    FixedArray::kHeaderSize - kPointerSize));
   __ bind(&done);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64