Index: runtime/vm/flow_graph_compiler_ia32.cc |
diff --git a/runtime/vm/flow_graph_compiler_ia32.cc b/runtime/vm/flow_graph_compiler_ia32.cc |
index 4141857d1071ac19e1c6f563f5b197abe3a9900f..0b17febc118f6e9dc8edaa2807e26cf241d6cb2b 100644 |
--- a/runtime/vm/flow_graph_compiler_ia32.cc |
+++ b/runtime/vm/flow_graph_compiler_ia32.cc |
@@ -1109,18 +1109,51 @@ void FlowGraphCompiler::LoadDoubleOrSmiToXmm(XmmRegister result, |
#define __ compiler_->assembler()-> |
+static Address ToAddress(Location loc) { |
srdjan
2012/07/23 16:18:59
The name is too generic: s/ToAddress/ToSpillAddress
|
+ ASSERT(loc.IsSpillSlot()); |
+ const intptr_t offset = |
+ (ParsedFunction::kFirstLocalSlotIndex - loc.spill_index()) * kWordSize; |
+ return Address(EBP, offset); |
+} |
+ |
+ |
void ParallelMoveResolver::EmitMove(int index) { |
MoveOperands* move = moves_[index]; |
const Location source = move->src(); |
const Location destination = move->dest(); |
- ASSERT(destination.IsRegister()); |
if (source.IsRegister()) { |
- __ movl(destination.reg(), source.reg()); |
+ if (destination.IsRegister()) { |
+ __ movl(destination.reg(), source.reg()); |
+ } else { |
+ ASSERT(destination.IsSpillSlot()); |
+ __ movl(ToAddress(destination), source.reg()); |
+ } |
+ } else if (source.IsSpillSlot()) { |
+ if (destination.IsRegister()) { |
+ __ movl(destination.reg(), ToAddress(source)); |
+ } else { |
+ ASSERT(destination.IsSpillSlot()); |
+ // TODO(vegorov): allocate temporary register for such moves. |
+ __ pushl(EAX); |
+ __ movl(EAX, ToAddress(source)); |
+ __ movl(ToAddress(destination), EAX); |
+ __ popl(EAX); |
+ } |
} else { |
ASSERT(source.IsConstant()); |
- __ LoadObject(destination.reg(), source.constant()); |
+ if (destination.IsRegister()) { |
+ __ LoadObject(destination.reg(), source.constant()); |
+ } else { |
+ ASSERT(destination.IsSpillSlot()); |
+ // TODO(vegorov): allocate temporary register for such moves. |
+ __ pushl(EAX); |
+ __ LoadObject(EAX, source.constant()); |
+ __ movl(ToAddress(destination), EAX); |
+ __ popl(EAX); |
+ } |
} |
+ |
move->Eliminate(); |
} |
@@ -1130,8 +1163,37 @@ void ParallelMoveResolver::EmitSwap(int index) { |
const Location source = move->src(); |
const Location destination = move->dest(); |
- ASSERT(source.IsRegister() && destination.IsRegister()); |
- __ xchgl(destination.reg(), source.reg()); |
+ if (source.IsRegister() && destination.IsRegister()) { |
+ __ xchgl(destination.reg(), source.reg()); |
+ } else if (source.IsRegister() && destination.IsSpillSlot()) { |
+ // TODO(vegorov): allocate temporary register for such moves. |
+ Register scratch = (source.reg() == EAX) ? ECX : EAX; |
+ __ pushl(scratch); |
+ __ movl(scratch, ToAddress(destination)); |
+ __ xchgl(scratch, source.reg()); |
+ __ movl(ToAddress(destination), scratch); |
+ __ popl(scratch); |
+ } else if (source.IsSpillSlot() && destination.IsRegister()) { |
+ // TODO(vegorov): allocate temporary register for such moves. |
+ Register scratch = (destination.reg() == EAX) ? ECX : EAX; |
+ __ pushl(scratch); |
+ __ movl(scratch, ToAddress(source)); |
+ __ xchgl(scratch, destination.reg()); |
+ __ movl(ToAddress(source), scratch); |
+ __ popl(scratch); |
+ } else if (source.IsSpillSlot() && destination.IsSpillSlot()) { |
+ // TODO(vegorov): allocate temporary registers for such moves. |
+ __ pushl(EAX); |
+ __ pushl(ECX); |
+ __ movl(EAX, ToAddress(source)); |
Kevin Millikin (Google)
2012/07/23 12:52:30
This code would be much nicer if there were helper functions for these spill-slot loads and stores.
Vyacheslav Egorov (Google)
2012/07/23 14:06:10
Done. But it looks a bit ugly because on x64 only a subset of these cases is needed.
|
+ __ movl(ECX, ToAddress(destination)); |
srdjan
2012/07/23 16:18:59
Please upload the latest CL once comments are addressed.
|
+ __ movl(ToAddress(source), ECX); |
+ __ movl(ToAddress(destination), EAX); |
+ __ popl(ECX); |
+ __ popl(EAX); |
+ } else { |
+ UNREACHABLE(); |
+ } |
// The swap of source and destination has executed a move from source to |
// destination. |