| Index: runtime/vm/intermediate_language.cc
|
| diff --git a/runtime/vm/intermediate_language.cc b/runtime/vm/intermediate_language.cc
|
| index a4b15daba7f230441c606145fd88fdd7874a5b57..61b27886d5ff9fc9bcdda33b58c1b52b170ad1ee 100644
|
| --- a/runtime/vm/intermediate_language.cc
|
| +++ b/runtime/vm/intermediate_language.cc
|
| @@ -22,8 +22,8 @@ namespace dart {
|
| DECLARE_FLAG(bool, enable_type_checks);
|
|
|
|
|
| -intptr_t Computation::Hashcode() const {
|
| - intptr_t result = computation_kind();
|
| +intptr_t Definition::Hashcode() const {
|
| + intptr_t result = tag();
|
| for (intptr_t i = 0; i < InputCount(); ++i) {
|
| Value* value = InputAt(i);
|
| intptr_t j = value->definition()->ssa_temp_index();
|
| @@ -33,8 +33,8 @@ intptr_t Computation::Hashcode() const {
|
| }
|
|
|
|
|
| -bool Computation::Equals(Computation* other) const {
|
| - if (computation_kind() != other->computation_kind()) return false;
|
| +bool Definition::Equals(Definition* other) const {
|
| + if (tag() != other->tag()) return false;
|
| for (intptr_t i = 0; i < InputCount(); ++i) {
|
| if (!InputAt(i)->Equals(other->InputAt(i))) return false;
|
| }
|
| @@ -47,9 +47,9 @@ bool Value::Equals(Value* other) const {
|
| }
|
|
|
|
|
| -bool CheckClassComp::AttributesEqual(Computation* other) const {
|
| - CheckClassComp* other_check = other->AsCheckClass();
|
| - if (other_check == NULL) return false;
|
| +bool CheckClassInstr::AttributesEqual(Definition* other) const {
|
| + CheckClassInstr* other_check = other->AsCheckClass();
|
| + ASSERT(other_check != NULL);
|
| if (unary_checks().NumberOfChecks() !=
|
| other_check->unary_checks().NumberOfChecks()) {
|
| return false;
|
| @@ -65,45 +65,38 @@ bool CheckClassComp::AttributesEqual(Computation* other) const {
|
| }
|
|
|
|
|
| -bool CheckArrayBoundComp::AttributesEqual(Computation* other) const {
|
| - CheckArrayBoundComp* other_check = other->AsCheckArrayBound();
|
| - if (other_check == NULL) return false;
|
| +bool CheckArrayBoundInstr::AttributesEqual(Definition* other) const {
|
| + CheckArrayBoundInstr* other_check = other->AsCheckArrayBound();
|
| + ASSERT(other_check != NULL);
|
| return array_type() == other_check->array_type();
|
| }
|
|
|
|
|
| // Returns true if the value represents a constant.
|
| bool Value::BindsToConstant() const {
|
| - BindInstr* bind = definition()->AsBind();
|
| - return (bind != NULL) && (bind->computation()->AsConstant() != NULL);
|
| + return definition()->IsConstant();
|
| }
|
|
|
|
|
| // Returns true if the value represents constant null.
|
| bool Value::BindsToConstantNull() const {
|
| - BindInstr* bind = definition()->AsBind();
|
| - if (bind == NULL) {
|
| - return false;
|
| - }
|
| - ConstantComp* constant = bind->computation()->AsConstant();
|
| + ConstantInstr* constant = definition()->AsConstant();
|
| return (constant != NULL) && constant->value().IsNull();
|
| }
|
|
|
|
|
| const Object& Value::BoundConstant() const {
|
| ASSERT(BindsToConstant());
|
| - BindInstr* bind = definition()->AsBind();
|
| - ASSERT(bind != NULL);
|
| - ConstantComp* constant = bind->computation()->AsConstant();
|
| + ConstantInstr* constant = definition()->AsConstant();
|
| ASSERT(constant != NULL);
|
| return constant->value();
|
| }
|
|
|
|
|
| -bool ConstantComp::AttributesEqual(Computation* other) const {
|
| - ConstantComp* other_constant = other->AsConstant();
|
| - return (other_constant != NULL) &&
|
| - (value().raw() == other_constant->value().raw());
|
| +bool ConstantInstr::AttributesEqual(Definition* other) const {
|
| + ConstantInstr* other_constant = other->AsConstant();
|
| + ASSERT(other_constant != NULL);
|
| + return (value().raw() == other_constant->value().raw());
|
| }
|
|
|
|
|
| @@ -112,8 +105,7 @@ GraphEntryInstr::GraphEntryInstr(TargetEntryInstr* normal_entry)
|
| normal_entry_(normal_entry),
|
| catch_entries_(),
|
| start_env_(NULL),
|
| - constant_null_(new BindInstr(Definition::kValue,
|
| - new ConstantComp(Object::ZoneHandle()))),
|
| + constant_null_(new ConstantInstr(Object::ZoneHandle())),
|
| spill_slot_count_(0) {
|
| }
|
|
|
| @@ -157,16 +149,6 @@ RECOGNIZED_LIST(KIND_TO_STRING)
|
|
|
|
|
| // ==== Support for visiting flow graphs.
|
| -#define DEFINE_ACCEPT(ShortName, ClassName) \
|
| -void ClassName::Accept(FlowGraphVisitor* visitor, BindInstr* instr) { \
|
| - visitor->Visit##ShortName(this, instr); \
|
| -}
|
| -
|
| -FOR_EACH_COMPUTATION(DEFINE_ACCEPT)
|
| -
|
| -#undef DEFINE_ACCEPT
|
| -
|
| -
|
| #define DEFINE_ACCEPT(ShortName) \
|
| void ShortName##Instr::Accept(FlowGraphVisitor* visitor) { \
|
| visitor->Visit##ShortName(this); \
|
| @@ -219,14 +201,23 @@ void Definition::InsertAfter(Instruction* prev) {
|
| }
|
|
|
|
|
| -ConstantComp* Definition::AsConstant() {
|
| - BindInstr* bind = AsBind();
|
| - return (bind != NULL) ? bind->computation()->AsConstant() : NULL;
|
| +void ForwardInstructionIterator::RemoveCurrentFromGraph() {
|
| + current_ = current_->RemoveFromGraph(true); // Set current_ to previous.
|
| }
|
|
|
|
|
| -void ForwardInstructionIterator::RemoveCurrentFromGraph() {
|
| - current_ = current_->RemoveFromGraph(true); // Set current_ to previous.
|
| +void ForwardInstructionIterator::ReplaceCurrentWith(Definition* other) {
|
| + Definition* defn = current_->AsDefinition();
|
| + ASSERT(defn != NULL);
|
| + defn->ReplaceUsesWith(other);
|
| + ASSERT(other->env() == NULL);
|
| + other->set_env(defn->env());
|
| + defn->set_env(NULL);
|
| + ASSERT(!other->HasSSATemp());
|
| + if (defn->HasSSATemp()) other->set_ssa_temp_index(defn->ssa_temp_index());
|
| +
|
| + other->InsertBefore(current_); // So other will be current.
|
| + RemoveCurrentFromGraph();
|
| }
|
|
|
|
|
| @@ -378,14 +369,14 @@ intptr_t JoinEntryInstr::IndexOfPredecessor(BlockEntryInstr* pred) const {
|
|
|
|
|
| // ==== Recording assigned variables.
|
| -void Computation::RecordAssignedVars(BitVector* assigned_vars,
|
| - intptr_t fixed_parameter_count) {
|
| +void Definition::RecordAssignedVars(BitVector* assigned_vars,
|
| + intptr_t fixed_parameter_count) {
|
| // Nothing to do for the base class.
|
| }
|
|
|
|
|
| -void StoreLocalComp::RecordAssignedVars(BitVector* assigned_vars,
|
| - intptr_t fixed_parameter_count) {
|
| +void StoreLocalInstr::RecordAssignedVars(BitVector* assigned_vars,
|
| + intptr_t fixed_parameter_count) {
|
| if (!local().is_captured()) {
|
| assigned_vars->Add(local().BitIndexIn(fixed_parameter_count));
|
| }
|
| @@ -428,6 +419,29 @@ void Definition::ReplaceUsesWith(Definition* other) {
|
| }
|
|
|
|
|
| +void Definition::ReplaceWith(Definition* other,
|
| + ForwardInstructionIterator* iterator) {
|
| +   if ((iterator != NULL) && (this == iterator->Current())) {
|
| + iterator->ReplaceCurrentWith(other);
|
| + } else {
|
| + ReplaceUsesWith(other);
|
| + ASSERT(other->env() == NULL);
|
| + other->set_env(env());
|
| + set_env(NULL);
|
| + ASSERT(!other->HasSSATemp());
|
| + if (HasSSATemp()) other->set_ssa_temp_index(ssa_temp_index());
|
| +
|
| + other->set_previous(previous());
|
| + previous()->set_next(other);
|
| + set_previous(NULL);
|
| +
|
| + other->set_next(next());
|
| + next()->set_previous(other);
|
| + set_next(NULL);
|
| + }
|
| +}
|
| +
|
| +
|
| bool Definition::SetPropagatedCid(intptr_t cid) {
|
| if (cid == kIllegalCid) {
|
| return false;
|
| @@ -442,25 +456,23 @@ bool Definition::SetPropagatedCid(intptr_t cid) {
|
| return has_changed;
|
| }
|
|
|
| -RawAbstractType* BindInstr::CompileType() const {
|
| - ASSERT(!HasPropagatedType());
|
| - // The compile type may be requested when building the flow graph, i.e. before
|
| - // type propagation has occurred.
|
| - return computation()->CompileType();
|
| -}
|
| -
|
|
|
| -intptr_t BindInstr::GetPropagatedCid() {
|
| +intptr_t Definition::GetPropagatedCid() {
|
| if (has_propagated_cid()) return propagated_cid();
|
| - intptr_t cid = computation()->ResultCid();
|
| + intptr_t cid = ResultCid();
|
| ASSERT(cid != kIllegalCid);
|
| SetPropagatedCid(cid);
|
| return cid;
|
| }
|
|
|
| -void BindInstr::RecordAssignedVars(BitVector* assigned_vars,
|
| - intptr_t fixed_parameter_count) {
|
| - computation()->RecordAssignedVars(assigned_vars, fixed_parameter_count);
|
| +
|
| +intptr_t PhiInstr::GetPropagatedCid() {
|
| + return propagated_cid();
|
| +}
|
| +
|
| +
|
| +intptr_t ParameterInstr::GetPropagatedCid() {
|
| + return propagated_cid();
|
| }
|
|
|
|
|
| @@ -703,7 +715,7 @@ intptr_t Value::ResultCid() const {
|
|
|
|
|
|
|
| -RawAbstractType* ConstantComp::CompileType() const {
|
| +RawAbstractType* ConstantInstr::CompileType() const {
|
| if (value().IsNull()) {
|
| return Type::NullType();
|
| }
|
| @@ -716,7 +728,7 @@ RawAbstractType* ConstantComp::CompileType() const {
|
| }
|
|
|
|
|
| -intptr_t ConstantComp::ResultCid() const {
|
| +intptr_t ConstantInstr::ResultCid() const {
|
| if (value().IsNull()) {
|
| return kNullCid;
|
| }
|
| @@ -729,7 +741,7 @@ intptr_t ConstantComp::ResultCid() const {
|
| }
|
|
|
|
|
| -RawAbstractType* AssertAssignableComp::CompileType() const {
|
| +RawAbstractType* AssertAssignableInstr::CompileType() const {
|
| const AbstractType& value_compile_type =
|
| AbstractType::Handle(value()->CompileType());
|
| if (!value_compile_type.IsNull() &&
|
| @@ -740,27 +752,27 @@ RawAbstractType* AssertAssignableComp::CompileType() const {
|
| }
|
|
|
|
|
| -RawAbstractType* AssertBooleanComp::CompileType() const {
|
| +RawAbstractType* AssertBooleanInstr::CompileType() const {
|
| return Type::BoolType();
|
| }
|
|
|
|
|
| -RawAbstractType* ArgumentDefinitionTestComp::CompileType() const {
|
| +RawAbstractType* ArgumentDefinitionTestInstr::CompileType() const {
|
| return Type::BoolType();
|
| }
|
|
|
|
|
| -RawAbstractType* CurrentContextComp::CompileType() const {
|
| +RawAbstractType* CurrentContextInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* StoreContextComp::CompileType() const {
|
| +RawAbstractType* StoreContextInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* ClosureCallComp::CompileType() const {
|
| +RawAbstractType* ClosureCallInstr::CompileType() const {
|
| // Because of function subtyping rules, the declared return type of a closure
|
| // call cannot be relied upon for compile type analysis. For example, a
|
| // function returning Dynamic can be assigned to a closure variable declared
|
| @@ -769,19 +781,19 @@ RawAbstractType* ClosureCallComp::CompileType() const {
|
| }
|
|
|
|
|
| -RawAbstractType* InstanceCallComp::CompileType() const {
|
| +RawAbstractType* InstanceCallInstr::CompileType() const {
|
| // TODO(regis): Return a more specific type than Dynamic for recognized
|
| // combinations of receiver type and method name.
|
| return Type::DynamicType();
|
| }
|
|
|
|
|
| -RawAbstractType* PolymorphicInstanceCallComp::CompileType() const {
|
| +RawAbstractType* PolymorphicInstanceCallInstr::CompileType() const {
|
| return Type::DynamicType();
|
| }
|
|
|
|
|
| -RawAbstractType* StaticCallComp::CompileType() const {
|
| +RawAbstractType* StaticCallInstr::CompileType() const {
|
| if (FLAG_enable_type_checks) {
|
| return function().result_type();
|
| }
|
| @@ -789,7 +801,7 @@ RawAbstractType* StaticCallComp::CompileType() const {
|
| }
|
|
|
|
|
| -RawAbstractType* LoadLocalComp::CompileType() const {
|
| +RawAbstractType* LoadLocalInstr::CompileType() const {
|
| if (FLAG_enable_type_checks) {
|
| return local().type().raw();
|
| }
|
| @@ -797,18 +809,18 @@ RawAbstractType* LoadLocalComp::CompileType() const {
|
| }
|
|
|
|
|
| -RawAbstractType* StoreLocalComp::CompileType() const {
|
| +RawAbstractType* StoreLocalInstr::CompileType() const {
|
| return value()->CompileType();
|
| }
|
|
|
|
|
| -RawAbstractType* StrictCompareComp::CompileType() const {
|
| +RawAbstractType* StrictCompareInstr::CompileType() const {
|
| return Type::BoolType();
|
| }
|
|
|
|
|
| // Only known == targets return a Boolean.
|
| -RawAbstractType* EqualityCompareComp::CompileType() const {
|
| +RawAbstractType* EqualityCompareInstr::CompileType() const {
|
| if ((receiver_class_id() == kSmiCid) ||
|
| (receiver_class_id() == kDoubleCid) ||
|
| (receiver_class_id() == kNumberCid)) {
|
| @@ -818,7 +830,7 @@ RawAbstractType* EqualityCompareComp::CompileType() const {
|
| }
|
|
|
|
|
| -intptr_t EqualityCompareComp::ResultCid() const {
|
| +intptr_t EqualityCompareInstr::ResultCid() const {
|
| if ((receiver_class_id() == kSmiCid) ||
|
| (receiver_class_id() == kDoubleCid) ||
|
| (receiver_class_id() == kNumberCid)) {
|
| @@ -829,7 +841,7 @@ intptr_t EqualityCompareComp::ResultCid() const {
|
| }
|
|
|
|
|
| -RawAbstractType* RelationalOpComp::CompileType() const {
|
| +RawAbstractType* RelationalOpInstr::CompileType() const {
|
| if ((operands_class_id() == kSmiCid) ||
|
| (operands_class_id() == kDoubleCid) ||
|
| (operands_class_id() == kNumberCid)) {
|
| @@ -840,7 +852,7 @@ RawAbstractType* RelationalOpComp::CompileType() const {
|
| }
|
|
|
|
|
| -intptr_t RelationalOpComp::ResultCid() const {
|
| +intptr_t RelationalOpInstr::ResultCid() const {
|
| if ((operands_class_id() == kSmiCid) ||
|
| (operands_class_id() == kDoubleCid) ||
|
| (operands_class_id() == kNumberCid)) {
|
| @@ -851,7 +863,7 @@ intptr_t RelationalOpComp::ResultCid() const {
|
| }
|
|
|
|
|
| -RawAbstractType* NativeCallComp::CompileType() const {
|
| +RawAbstractType* NativeCallInstr::CompileType() const {
|
| // The result type of the native function is identical to the result type of
|
| // the enclosing native Dart function. However, we prefer to check the type
|
| // of the value returned from the native call.
|
| @@ -859,17 +871,17 @@ RawAbstractType* NativeCallComp::CompileType() const {
|
| }
|
|
|
|
|
| -RawAbstractType* LoadIndexedComp::CompileType() const {
|
| +RawAbstractType* LoadIndexedInstr::CompileType() const {
|
| return Type::DynamicType();
|
| }
|
|
|
|
|
| -RawAbstractType* StoreIndexedComp::CompileType() const {
|
| +RawAbstractType* StoreIndexedInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* LoadInstanceFieldComp::CompileType() const {
|
| +RawAbstractType* LoadInstanceFieldInstr::CompileType() const {
|
| if (FLAG_enable_type_checks) {
|
| return field().type();
|
| }
|
| @@ -877,12 +889,12 @@ RawAbstractType* LoadInstanceFieldComp::CompileType() const {
|
| }
|
|
|
|
|
| -RawAbstractType* StoreInstanceFieldComp::CompileType() const {
|
| +RawAbstractType* StoreInstanceFieldInstr::CompileType() const {
|
| return value()->CompileType();
|
| }
|
|
|
|
|
| -RawAbstractType* LoadStaticFieldComp::CompileType() const {
|
| +RawAbstractType* LoadStaticFieldInstr::CompileType() const {
|
| if (FLAG_enable_type_checks) {
|
| return field().type();
|
| }
|
| @@ -890,46 +902,46 @@ RawAbstractType* LoadStaticFieldComp::CompileType() const {
|
| }
|
|
|
|
|
| -RawAbstractType* StoreStaticFieldComp::CompileType() const {
|
| +RawAbstractType* StoreStaticFieldInstr::CompileType() const {
|
| return value()->CompileType();
|
| }
|
|
|
|
|
| -RawAbstractType* BooleanNegateComp::CompileType() const {
|
| +RawAbstractType* BooleanNegateInstr::CompileType() const {
|
| return Type::BoolType();
|
| }
|
|
|
|
|
| -RawAbstractType* InstanceOfComp::CompileType() const {
|
| +RawAbstractType* InstanceOfInstr::CompileType() const {
|
| return Type::BoolType();
|
| }
|
|
|
|
|
| -RawAbstractType* CreateArrayComp::CompileType() const {
|
| +RawAbstractType* CreateArrayInstr::CompileType() const {
|
| return type().raw();
|
| }
|
|
|
|
|
| -RawAbstractType* CreateClosureComp::CompileType() const {
|
| +RawAbstractType* CreateClosureInstr::CompileType() const {
|
| const Function& fun = function();
|
| const Class& signature_class = Class::Handle(fun.signature_class());
|
| return signature_class.SignatureType();
|
| }
|
|
|
|
|
| -RawAbstractType* AllocateObjectComp::CompileType() const {
|
| +RawAbstractType* AllocateObjectInstr::CompileType() const {
|
| // TODO(regis): Be more specific.
|
| return Type::DynamicType();
|
| }
|
|
|
|
|
| -RawAbstractType* AllocateObjectWithBoundsCheckComp::CompileType() const {
|
| +RawAbstractType* AllocateObjectWithBoundsCheckInstr::CompileType() const {
|
| // TODO(regis): Be more specific.
|
| return Type::DynamicType();
|
| }
|
|
|
|
|
| -RawAbstractType* LoadVMFieldComp::CompileType() const {
|
| +RawAbstractType* LoadVMFieldInstr::CompileType() const {
|
| // Type may be null if the field is a VM field, e.g. context parent.
|
| // Keep it as null for debug purposes and do not return Dynamic in production
|
| // mode, since misuse of the type would remain undetected.
|
| @@ -943,62 +955,62 @@ RawAbstractType* LoadVMFieldComp::CompileType() const {
|
| }
|
|
|
|
|
| -RawAbstractType* StoreVMFieldComp::CompileType() const {
|
| +RawAbstractType* StoreVMFieldInstr::CompileType() const {
|
| return value()->CompileType();
|
| }
|
|
|
|
|
| -RawAbstractType* InstantiateTypeArgumentsComp::CompileType() const {
|
| +RawAbstractType* InstantiateTypeArgumentsInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* ExtractConstructorTypeArgumentsComp::CompileType() const {
|
| +RawAbstractType* ExtractConstructorTypeArgumentsInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* ExtractConstructorInstantiatorComp::CompileType() const {
|
| +RawAbstractType* ExtractConstructorInstantiatorInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* AllocateContextComp::CompileType() const {
|
| +RawAbstractType* AllocateContextInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* ChainContextComp::CompileType() const {
|
| +RawAbstractType* ChainContextInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* CloneContextComp::CompileType() const {
|
| +RawAbstractType* CloneContextInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* CatchEntryComp::CompileType() const {
|
| +RawAbstractType* CatchEntryInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* CheckStackOverflowComp::CompileType() const {
|
| +RawAbstractType* CheckStackOverflowInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* BinarySmiOpComp::CompileType() const {
|
| +RawAbstractType* BinarySmiOpInstr::CompileType() const {
|
| return (op_kind() == Token::kSHL) ? Type::IntInterface() : Type::SmiType();
|
| }
|
|
|
|
|
| -intptr_t BinarySmiOpComp::ResultCid() const {
|
| +intptr_t BinarySmiOpInstr::ResultCid() const {
|
| return (op_kind() == Token::kSHL) ? kDynamicCid : kSmiCid;
|
| }
|
|
|
|
|
| -bool BinarySmiOpComp::CanDeoptimize() const {
|
| +bool BinarySmiOpInstr::CanDeoptimize() const {
|
| switch (op_kind()) {
|
| case Token::kBIT_AND:
|
| case Token::kBIT_OR:
|
| @@ -1010,85 +1022,85 @@ bool BinarySmiOpComp::CanDeoptimize() const {
|
| }
|
|
|
|
|
| -RawAbstractType* BinaryMintOpComp::CompileType() const {
|
| +RawAbstractType* BinaryMintOpInstr::CompileType() const {
|
| return Type::MintType();
|
| }
|
|
|
|
|
| -intptr_t BinaryMintOpComp::ResultCid() const {
|
| +intptr_t BinaryMintOpInstr::ResultCid() const {
|
| return kMintCid;
|
| }
|
|
|
|
|
| -RawAbstractType* UnboxedDoubleBinaryOpComp::CompileType() const {
|
| +RawAbstractType* UnboxedDoubleBinaryOpInstr::CompileType() const {
|
| return Type::Double();
|
| }
|
|
|
|
|
| -RawAbstractType* UnboxDoubleComp::CompileType() const {
|
| +RawAbstractType* UnboxDoubleInstr::CompileType() const {
|
| return Type::null();
|
| }
|
|
|
|
|
| -intptr_t BoxDoubleComp::ResultCid() const {
|
| +intptr_t BoxDoubleInstr::ResultCid() const {
|
| return kDoubleCid;
|
| }
|
|
|
|
|
| -RawAbstractType* BoxDoubleComp::CompileType() const {
|
| +RawAbstractType* BoxDoubleInstr::CompileType() const {
|
| return Type::Double();
|
| }
|
|
|
|
|
| -RawAbstractType* UnarySmiOpComp::CompileType() const {
|
| +RawAbstractType* UnarySmiOpInstr::CompileType() const {
|
| return Type::SmiType();
|
| }
|
|
|
|
|
| -RawAbstractType* NumberNegateComp::CompileType() const {
|
| +RawAbstractType* NumberNegateInstr::CompileType() const {
|
| // Implemented only for doubles.
|
| return Type::Double();
|
| }
|
|
|
|
|
| -RawAbstractType* DoubleToDoubleComp::CompileType() const {
|
| +RawAbstractType* DoubleToDoubleInstr::CompileType() const {
|
| return Type::Double();
|
| }
|
|
|
|
|
| -RawAbstractType* SmiToDoubleComp::CompileType() const {
|
| +RawAbstractType* SmiToDoubleInstr::CompileType() const {
|
| return Type::Double();
|
| }
|
|
|
|
|
| -RawAbstractType* CheckClassComp::CompileType() const {
|
| +RawAbstractType* CheckClassInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* CheckSmiComp::CompileType() const {
|
| +RawAbstractType* CheckSmiInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* CheckArrayBoundComp::CompileType() const {
|
| +RawAbstractType* CheckArrayBoundInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| -RawAbstractType* CheckEitherNonSmiComp::CompileType() const {
|
| +RawAbstractType* CheckEitherNonSmiInstr::CompileType() const {
|
| return AbstractType::null();
|
| }
|
|
|
|
|
| // Optimizations that eliminate or simplify individual computations.
|
| -Definition* Computation::TryReplace(BindInstr* instr) const {
|
| - return instr;
|
| +Definition* Definition::Canonicalize() {
|
| + return this;
|
| }
|
|
|
|
|
| -Definition* StrictCompareComp::TryReplace(BindInstr* instr) const {
|
| - if (!right()->BindsToConstant()) return instr;
|
| +Definition* StrictCompareInstr::Canonicalize() {
|
| + if (!right()->BindsToConstant()) return this;
|
| const Object& right_constant = right()->BoundConstant();
|
| Definition* left_defn = left()->definition();
|
| // TODO(fschneider): Handle other cases: e === false and e !== true/false.
|
| @@ -1097,18 +1109,16 @@ Definition* StrictCompareComp::TryReplace(BindInstr* instr) const {
|
| (right_constant.raw() == Bool::True()) &&
|
| (left()->ResultCid() == kBoolCid)) {
|
| // Remove the constant from the graph.
|
| - BindInstr* right_defn = right()->definition()->AsBind();
|
| - if (right_defn != NULL) {
|
| - right_defn->RemoveFromGraph();
|
| - }
|
| + Definition* right_defn = right()->definition();
|
| + right_defn->RemoveFromGraph();
|
| // Return left subexpression as the replacement for this instruction.
|
| return left_defn;
|
| }
|
| - return instr;
|
| + return this;
|
| }
|
|
|
|
|
| -Definition* CheckClassComp::TryReplace(BindInstr* instr) const {
|
| +Definition* CheckClassInstr::Canonicalize() {
|
| const intptr_t v_cid = value()->ResultCid();
|
| const intptr_t num_checks = unary_checks().NumberOfChecks();
|
| if ((num_checks == 1) &&
|
| @@ -1116,21 +1126,21 @@ Definition* CheckClassComp::TryReplace(BindInstr* instr) const {
|
| // No checks needed.
|
| return NULL;
|
| }
|
| - return instr;
|
| + return this;
|
| }
|
|
|
|
|
| -Definition* CheckSmiComp::TryReplace(BindInstr* instr) const {
|
| - return (value()->ResultCid() == kSmiCid) ? NULL : instr;
|
| +Definition* CheckSmiInstr::Canonicalize() {
|
| + return (value()->ResultCid() == kSmiCid) ? NULL : this;
|
| }
|
|
|
|
|
| -Definition* CheckEitherNonSmiComp::TryReplace(BindInstr* instr) const {
|
| +Definition* CheckEitherNonSmiInstr::Canonicalize() {
|
| if ((left()->ResultCid() == kDoubleCid) ||
|
| (right()->ResultCid() == kDoubleCid)) {
|
| return NULL; // Remove from the graph.
|
| }
|
| - return instr;
|
| + return this;
|
| }
|
|
|
|
|
| @@ -1166,6 +1176,71 @@ void TargetEntryInstr::PrepareEntry(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| +LocationSummary* GraphEntryInstr::MakeLocationSummary() const {
|
| + UNREACHABLE();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| +LocationSummary* JoinEntryInstr::MakeLocationSummary() const {
|
| + UNREACHABLE();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void JoinEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| +LocationSummary* TargetEntryInstr::MakeLocationSummary() const {
|
| + UNREACHABLE();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| +LocationSummary* PhiInstr::MakeLocationSummary() const {
|
| + UNREACHABLE();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void PhiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| +LocationSummary* ParameterInstr::MakeLocationSummary() const {
|
| + UNREACHABLE();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void ParameterInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| +LocationSummary* ParallelMoveInstr::MakeLocationSummary() const {
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void ParallelMoveInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| LocationSummary* ThrowInstr::MakeLocationSummary() const {
|
| return new LocationSummary(0, 0, LocationSummary::kCall);
|
| }
|
| @@ -1246,19 +1321,19 @@ void ControlInstruction::EmitBranchOnCondition(FlowGraphCompiler* compiler,
|
| }
|
|
|
|
|
| -LocationSummary* CurrentContextComp::MakeLocationSummary() const {
|
| +LocationSummary* CurrentContextInstr::MakeLocationSummary() const {
|
| return LocationSummary::Make(0,
|
| Location::RequiresRegister(),
|
| LocationSummary::kNoCall);
|
| }
|
|
|
|
|
| -void CurrentContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void CurrentContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| __ MoveRegister(locs()->out().reg(), CTX);
|
| }
|
|
|
|
|
| -LocationSummary* StoreContextComp::MakeLocationSummary() const {
|
| +LocationSummary* StoreContextInstr::MakeLocationSummary() const {
|
| const intptr_t kNumInputs = 1;
|
| const intptr_t kNumTemps = 0;
|
| LocationSummary* summary =
|
| @@ -1268,20 +1343,20 @@ LocationSummary* StoreContextComp::MakeLocationSummary() const {
|
| }
|
|
|
|
|
| -void StoreContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void StoreContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| // Nothing to do. Context register were loaded by register allocator.
|
| ASSERT(locs()->in(0).reg() == CTX);
|
| }
|
|
|
|
|
| -LocationSummary* StrictCompareComp::MakeLocationSummary() const {
|
| +LocationSummary* StrictCompareInstr::MakeLocationSummary() const {
|
| return LocationSummary::Make(2,
|
| Location::SameAsFirstInput(),
|
| LocationSummary::kNoCall);
|
| }
|
|
|
|
|
| -void StrictCompareComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| Register left = locs()->in(0).reg();
|
| Register right = locs()->in(1).reg();
|
|
|
| @@ -1300,7 +1375,7 @@ void StrictCompareComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -void StrictCompareComp::EmitBranchCode(FlowGraphCompiler* compiler,
|
| +void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
|
| BranchInstr* branch) {
|
| Register left = locs()->in(0).reg();
|
| Register right = locs()->in(1).reg();
|
| @@ -1311,7 +1386,7 @@ void StrictCompareComp::EmitBranchCode(FlowGraphCompiler* compiler,
|
| }
|
|
|
|
|
| -void ClosureCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| // The arguments to the stub include the closure. The arguments
|
| // descriptor describes the closure's arguments (and so does not include
|
| // the closure).
|
| @@ -1330,12 +1405,12 @@ void ClosureCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -LocationSummary* InstanceCallComp::MakeLocationSummary() const {
|
| +LocationSummary* InstanceCallInstr::MakeLocationSummary() const {
|
| return MakeCallSummary();
|
| }
|
|
|
|
|
| -void InstanceCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void InstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| compiler->AddCurrentDescriptor(PcDescriptors::kDeoptBefore,
|
| deopt_id(),
|
| token_pos());
|
| @@ -1349,12 +1424,12 @@ void InstanceCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -LocationSummary* StaticCallComp::MakeLocationSummary() const {
|
| +LocationSummary* StaticCallInstr::MakeLocationSummary() const {
|
| return MakeCallSummary();
|
| }
|
|
|
|
|
| -void StaticCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void StaticCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| Label done;
|
| if (recognized() == MethodRecognizer::kMathSqrt) {
|
| compiler->GenerateInlinedMathSqrt(&done);
|
| @@ -1370,7 +1445,7 @@ void StaticCallComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -void AssertAssignableComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void AssertAssignableInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| if (!is_eliminated()) {
|
| compiler->GenerateAssertAssignable(token_pos(),
|
| dst_type(),
|
| @@ -1381,14 +1456,14 @@ void AssertAssignableComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -LocationSummary* BooleanNegateComp::MakeLocationSummary() const {
|
| +LocationSummary* BooleanNegateInstr::MakeLocationSummary() const {
|
| return LocationSummary::Make(1,
|
| Location::RequiresRegister(),
|
| LocationSummary::kNoCall);
|
| }
|
|
|
|
|
| -void BooleanNegateComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| Register value = locs()->in(0).reg();
|
| Register result = locs()->out().reg();
|
|
|
| @@ -1401,14 +1476,14 @@ void BooleanNegateComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -LocationSummary* ChainContextComp::MakeLocationSummary() const {
|
| +LocationSummary* ChainContextInstr::MakeLocationSummary() const {
|
| return LocationSummary::Make(1,
|
| Location::NoLocation(),
|
| LocationSummary::kNoCall);
|
| }
|
|
|
|
|
| -void ChainContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void ChainContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| Register context_value = locs()->in(0).reg();
|
|
|
| // Chain the new context in context_value to its parent in CTX.
|
| @@ -1420,14 +1495,14 @@ void ChainContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -LocationSummary* StoreVMFieldComp::MakeLocationSummary() const {
|
| +LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const {
|
| return LocationSummary::Make(2,
|
| Location::SameAsFirstInput(),
|
| LocationSummary::kNoCall);
|
| }
|
|
|
|
|
| -void StoreVMFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void StoreVMFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| Register value_reg = locs()->in(0).reg();
|
| Register dest_reg = locs()->in(1).reg();
|
| ASSERT(value_reg == locs()->out().reg());
|
| @@ -1442,12 +1517,12 @@ void StoreVMFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -LocationSummary* AllocateObjectComp::MakeLocationSummary() const {
|
| +LocationSummary* AllocateObjectInstr::MakeLocationSummary() const {
|
| return MakeCallSummary();
|
| }
|
|
|
|
|
| -void AllocateObjectComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| const Class& cls = Class::ZoneHandle(constructor().Owner());
|
| const Code& stub = Code::Handle(StubCode::GetAllocationStubForClass(cls));
|
| const ExternalLabel label(cls.ToCString(), stub.EntryPoint());
|
| @@ -1459,12 +1534,12 @@ void AllocateObjectComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -LocationSummary* CreateClosureComp::MakeLocationSummary() const {
|
| +LocationSummary* CreateClosureInstr::MakeLocationSummary() const {
|
| return MakeCallSummary();
|
| }
|
|
|
|
|
| -void CreateClosureComp::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| +void CreateClosureInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| const Function& closure_function = function();
|
| const Code& stub = Code::Handle(
|
| StubCode::GetAllocationStubForClosure(closure_function));
|
|
|