Chromium Code Reviews

Unified Diff: runtime/vm/intermediate_language_ia32.cc

Issue 10458050: Move ReturnInstr to new scheme (x64 and ia32) and implement more code in new ia32 compiler. (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 8 years, 7 months ago
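
Reviewer note: for readers new to the "new scheme" named in the issue title, the pattern this patch migrates instructions to is a pair of methods per instruction/computation: MakeLocationSummary() declares register constraints for inputs, temps, and the output, and EmitNativeCode() emits machine code against whatever registers the allocator assigned. The sketch below is illustrative only; ExampleComp and its addl body are invented for this note, while the LocationSummary, Location, locs(), and __ assembler-macro usages mirror the ones appearing in the diff.

LocationSummary* ExampleComp::MakeLocationSummary() const {
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new LocationSummary(kNumInputs, kNumTemps);
  summary->set_in(0, Location::RequiresRegister());  // Input may live in any register.
  summary->set_out(Location::SameAsFirstInput());    // Result reuses the input register.
  return summary;
}

void ExampleComp::EmitNativeCode(FlowGraphCompiler* compiler) {
  // By the time this runs, locs() reflects the constraints declared above.
  Register value = locs()->in(0).reg();
  ASSERT(locs()->out().reg() == value);
  __ addl(value, Immediate(1));  // Hypothetical operation on the assigned register.
}
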
Index: runtime/vm/intermediate_language_ia32.cc
===================================================================
--- runtime/vm/intermediate_language_ia32.cc (revision 8165)
+++ runtime/vm/intermediate_language_ia32.cc (working copy)
@@ -7,17 +7,112 @@
#include "vm/flow_graph_compiler.h"
#include "vm/locations.h"
+#include "vm/stub_code.h"
#define __ compiler->assembler()->
namespace dart {
+DECLARE_FLAG(int, optimization_counter_threshold);
+DECLARE_FLAG(bool, trace_functions);
+// Returns true iff the arguments to a call will be properly pushed and can
+// be popped after the call.
+template <typename T> static bool VerifyCallComputation(T* comp) {
+  // Argument values should be consecutive temps.
+  //
+  // TODO(kmillikin): implement stack height tracking so we can also assert
+  // they are on top of the stack.
+  intptr_t previous = -1;
+  for (int i = 0; i < comp->ArgumentCount(); ++i) {
+    Value* val = comp->ArgumentAt(i);
+    if (!val->IsUse()) return false;
+    intptr_t current = val->AsUse()->definition()->temp_index();
+    if (i != 0) {
+      if (current != (previous + 1)) return false;
+    }
+    previous = current;
+  }
+  return true;
+}
+
+
+// Generic summary for call instructions that have all arguments pushed
+// on the stack and return the result in a fixed register EAX.
+static LocationSummary* MakeCallSummary() {
+ LocationSummary* result = new LocationSummary(0, 0);
+ result->set_out(Location::RegisterLocation(EAX));
+ return result;
+}
+
+
void BindInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
- UNIMPLEMENTED();
+ computation()->EmitNativeCode(compiler);
+ __ pushl(locs()->out().reg());
}
+LocationSummary* ReturnInstr::MakeLocationSummary() const {
+ const intptr_t kNumInputs = 1;
+ const intptr_t kNumTemps = 1;
+ LocationSummary* locs = new LocationSummary(kNumInputs, kNumTemps);
+ locs->set_in(0, Location::RegisterLocation(EAX));
+ locs->set_temp(0, Location::RequiresRegister());
+ return locs;
+}
+
+
+void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+  Register result = locs()->in(0).reg();
+  Register temp = locs()->temp(0).reg();
+  ASSERT(result == EAX);
+  if (!compiler->is_optimizing()) {
+    // Count only in unoptimized code.
+    // TODO(srdjan): Replace the counting code with a type feedback
+    // collection and counting stub.
+    const Function& function =
+        Function::ZoneHandle(compiler->parsed_function().function().raw());
+    __ LoadObject(temp, function);
+    __ incl(FieldAddress(temp, Function::usage_counter_offset()));
+    if (CodeGenerator::CanOptimize()) {
+      // Do not optimize if usage count must be reported.
+      __ cmpl(FieldAddress(temp, Function::usage_counter_offset()),
+              Immediate(FLAG_optimization_counter_threshold));
+      Label not_yet_hot;
+      __ j(LESS_EQUAL, &not_yet_hot, Assembler::kNearJump);
+      __ pushl(result);  // Preserve result.
+      __ pushl(temp);  // Argument for runtime: function to optimize.
+      __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry);
+      __ popl(temp);  // Remove argument.
+      __ popl(result);  // Restore result.
+      __ Bind(&not_yet_hot);
+    }
+  }
+  if (FLAG_trace_functions) {
+    const Function& function =
+        Function::ZoneHandle(compiler->parsed_function().function().raw());
+    __ LoadObject(temp, function);
+    __ pushl(result);  // Preserve result.
+    __ pushl(temp);
+    compiler->GenerateCallRuntime(AstNode::kNoId,
+                                  0,
+                                  CatchClauseNode::kInvalidTryIndex,
+                                  kTraceFunctionExitRuntimeEntry);
+    __ popl(temp);  // Remove argument.
+    __ popl(result);  // Restore result.
+  }
+  __ LeaveFrame();
+  __ ret();
+  // Add a NOP to make return code pattern 5 bytes long for patching
+  // in breakpoints during debugging.
+  __ nop(1);
+  compiler->AddCurrentDescriptor(PcDescriptors::kReturn,
+                                 cid(),
+                                 token_index(),
+                                 CatchClauseNode::kInvalidTryIndex);
+}
+
+
LocationSummary* ThrowInstr::MakeLocationSummary() const {
return NULL;
}
@@ -53,7 +148,7 @@
}
-void CurrentContextComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void CurrentContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -63,7 +158,7 @@
}
-void StoreContextComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void StoreContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -73,7 +168,7 @@
}
-void StrictCompareComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void StrictCompareComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -83,58 +178,84 @@
}
-void ClosureCallComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void ClosureCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
LocationSummary* InstanceCallComp::MakeLocationSummary() const {
- return NULL;
+ return MakeCallSummary();
}
-void InstanceCallComp::EmitNativeCode(FlowGraphCompiler* compile) {
- UNIMPLEMENTED();
+void InstanceCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
+  ASSERT(VerifyCallComputation(this));
+  compiler->AddCurrentDescriptor(PcDescriptors::kDeopt,
+                                 cid(),
+                                 token_index(),
+                                 try_index());
+  compiler->EmitInstanceCall(cid(),
+                             token_index(),
+                             try_index(),
+                             function_name(),
+                             ArgumentCount(),
+                             argument_names(),
+                             checked_argument_count());
}
LocationSummary* StaticCallComp::MakeLocationSummary() const {
- return NULL;
+ return MakeCallSummary();
}
-void StaticCallComp::EmitNativeCode(FlowGraphCompiler* compile) {
- UNIMPLEMENTED();
+void StaticCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
+  ASSERT(VerifyCallComputation(this));
+  compiler->EmitStaticCall(token_index(),
+                           try_index(),
+                           function(),
+                           ArgumentCount(),
+                           argument_names());
}
LocationSummary* LoadLocalComp::MakeLocationSummary() const {
- return NULL;
+ return LocationSummary::Make(0, Location::RequiresRegister());
}
-void LoadLocalComp::EmitNativeCode(FlowGraphCompiler* compile) {
- UNIMPLEMENTED();
+void LoadLocalComp::EmitNativeCode(FlowGraphCompiler* compiler) {
+ Register result = locs()->out().reg();
+ __ movl(result, Address(EBP, local().index() * kWordSize));
}
LocationSummary* StoreLocalComp::MakeLocationSummary() const {
- return NULL;
+ return LocationSummary::Make(1, Location::SameAsFirstInput());
}
-void StoreLocalComp::EmitNativeCode(FlowGraphCompiler* compile) {
- UNIMPLEMENTED();
+void StoreLocalComp::EmitNativeCode(FlowGraphCompiler* compiler) {
+ Register value = locs()->in(0).reg();
+ Register result = locs()->out().reg();
+ ASSERT(result == value); // Assert that register assignment is correct.
+ __ movl(Address(EBP, local().index() * kWordSize), value);
}
LocationSummary* ConstantVal::MakeLocationSummary() const {
- return NULL;
+ return LocationSummary::Make(0, Location::RequiresRegister());
}
-void ConstantVal::EmitNativeCode(FlowGraphCompiler* compile) {
- UNIMPLEMENTED();
+void ConstantVal::EmitNativeCode(FlowGraphCompiler* compiler) {
+  Register result = locs()->out().reg();
+  if (value().IsSmi()) {
+    int32_t imm = reinterpret_cast<int32_t>(value().raw());
+    __ movl(result, Immediate(imm));
+  } else {
+    __ LoadObject(result, value());
+  }
}
@@ -143,7 +264,7 @@
}
-void UseVal::EmitNativeCode(FlowGraphCompiler* compile) {
+void UseVal::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -153,7 +274,7 @@
}
-void AssertAssignableComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void AssertAssignableComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -163,7 +284,7 @@
}
-void AssertBooleanComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void AssertBooleanComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -173,18 +294,42 @@
}
-void EqualityCompareComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void EqualityCompareComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
LocationSummary* NativeCallComp::MakeLocationSummary() const {
- return NULL;
+ LocationSummary* locs = new LocationSummary(0, 3);
+ locs->set_temp(0, Location::RegisterLocation(EAX));
+ locs->set_temp(1, Location::RegisterLocation(ECX));
+ locs->set_temp(2, Location::RegisterLocation(EDX));
+ locs->set_out(Location::RequiresRegister());
+ return locs;
}
-void NativeCallComp::EmitNativeCode(FlowGraphCompiler* compile) {
- UNIMPLEMENTED();
+void NativeCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
+  ASSERT(locs()->temp(0).reg() == EAX);
+  ASSERT(locs()->temp(1).reg() == ECX);
+  ASSERT(locs()->temp(2).reg() == EDX);
+  Register result = locs()->out().reg();
+  // Push the result placeholder initialized to NULL.
+  __ PushObject(Object::ZoneHandle());
+  // Pass a pointer to the first argument in EAX.
+  if (!has_optional_parameters()) {
+    __ leal(EAX, Address(EBP, (1 + argument_count()) * kWordSize));
+  } else {
+    __ leal(EAX,
+            Address(EBP, ParsedFunction::kFirstLocalSlotIndex * kWordSize));
+  }
+  __ movl(ECX, Immediate(reinterpret_cast<uword>(native_c_function())));
+  __ movl(EDX, Immediate(argument_count()));
+  compiler->GenerateCall(token_index(),
+                         try_index(),
+                         &StubCode::CallNativeCFunctionLabel(),
+                         PcDescriptors::kOther);
+  __ popl(result);
}
@@ -193,7 +338,7 @@
}
-void StoreIndexedComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void StoreIndexedComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -203,7 +348,7 @@
}
-void InstanceSetterComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void InstanceSetterComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -213,7 +358,7 @@
}
-void StaticSetterComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void StaticSetterComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -223,7 +368,7 @@
}
-void LoadInstanceFieldComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void LoadInstanceFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -233,7 +378,7 @@
}
-void StoreInstanceFieldComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void StoreInstanceFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -243,7 +388,7 @@
}
-void LoadStaticFieldComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void LoadStaticFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -253,7 +398,7 @@
}
-void StoreStaticFieldComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void StoreStaticFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -263,7 +408,7 @@
}
-void BooleanNegateComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void BooleanNegateComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -273,7 +418,7 @@
}
-void InstanceOfComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void InstanceOfComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -283,7 +428,7 @@
}
-void CreateArrayComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void CreateArrayComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -293,7 +438,7 @@
}
-void CreateClosureComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void CreateClosureComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -303,7 +448,7 @@
}
-void AllocateObjectComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void AllocateObjectComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -315,7 +460,7 @@
void AllocateObjectWithBoundsCheckComp::EmitNativeCode(
- FlowGraphCompiler* compile) {
+ FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -325,7 +470,7 @@
}
-void LoadVMFieldComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void LoadVMFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -335,7 +480,7 @@
}
-void StoreVMFieldComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void StoreVMFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -346,7 +491,7 @@
void InstantiateTypeArgumentsComp::EmitNativeCode(
- FlowGraphCompiler* compile) {
+ FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -358,7 +503,7 @@
void ExtractConstructorTypeArgumentsComp::EmitNativeCode(
- FlowGraphCompiler* compile) {
+ FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -370,7 +515,7 @@
void ExtractConstructorInstantiatorComp::EmitNativeCode(
- FlowGraphCompiler* compile) {
+ FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -380,7 +525,7 @@
}
-void AllocateContextComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void AllocateContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -390,7 +535,7 @@
}
-void ChainContextComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void ChainContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -400,7 +545,7 @@
}
-void CloneContextComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void CloneContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
@@ -410,18 +555,34 @@
}
-void CatchEntryComp::EmitNativeCode(FlowGraphCompiler* compile) {
+void CatchEntryComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
}
LocationSummary* BinaryOpComp::MakeLocationSummary() const {
- return NULL;
+ const intptr_t kNumInputs = 2;
+ const intptr_t kNumTemps = 0;
+ LocationSummary* summary = new LocationSummary(kNumInputs, kNumTemps);
+ summary->set_in(0, Location::RequiresRegister());
+ summary->set_in(1, Location::RequiresRegister());
+ summary->set_out(Location::SameAsFirstInput());
+ return summary;
}
-void BinaryOpComp::EmitNativeCode(FlowGraphCompiler* compile) {
- UNIMPLEMENTED();
+void BinaryOpComp::EmitNativeCode(FlowGraphCompiler* compiler) {
+  // TODO(srdjan): Remove this code once BinaryOpComp has been implemented
+  // for all intended operations.
+  Register left = locs()->in(0).reg();
+  Register right = locs()->in(1).reg();
+  __ pushl(left);
+  __ pushl(right);
+  InstanceCallComp* instance_call_comp = instance_call();
+  instance_call_comp->EmitNativeCode(compiler);
+  if (locs()->out().reg() != EAX) {
+    __ movl(locs()->out().reg(), EAX);
+  }
}
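
Reviewer note: as a standalone illustration of the invariant asserted by the VerifyCallComputation helper at the top of this patch (each pushed argument is a UseVal whose definition's temp_index() is exactly one greater than the previous argument's), here is a minimal self-contained sketch outside the VM. IndicesAreConsecutive and the sample index lists are invented for this note.

#include <cassert>
#include <vector>

// Mirrors the consecutive-temp-index check in VerifyCallComputation,
// stripped of the Value/UseVal plumbing.
static bool IndicesAreConsecutive(const std::vector<int>& temp_indices) {
  for (size_t i = 1; i < temp_indices.size(); ++i) {
    if (temp_indices[i] != temp_indices[i - 1] + 1) return false;
  }
  return true;
}

int main() {
  assert(IndicesAreConsecutive({4, 5, 6}));   // Arguments pushed as consecutive temps: accepted.
  assert(!IndicesAreConsecutive({4, 6, 7}));  // Gap between 4 and 6: the real helper returns false.
  return 0;
}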