Index: runtime/vm/flow_graph_compiler.cc |
diff --git a/runtime/vm/flow_graph_compiler.cc b/runtime/vm/flow_graph_compiler.cc |
index 665191594e575f62d16268796dc49709577d789d..3a7b22f7383344cadf7be96c72c4d9f16fa5b5d0 100644 |
--- a/runtime/vm/flow_graph_compiler.cc |
+++ b/runtime/vm/flow_graph_compiler.cc |
@@ -1025,8 +1025,9 @@ void FlowGraphCompiler::FinalizeStaticCallTargetsTable(const Code& code) { |
// Returns 'true' if code generation for this function is complete, i.e., |
-// no fall-through to regular code is needed. |
-void FlowGraphCompiler::TryIntrinsify() { |
+// no fall-through to regular code is needed and regular code contains no |
+// deopt ids. |
srdjan
2015/10/05 17:38:25
Please adjust the comment even more.
rmacnak
2015/10/05 23:43:22
Returns 'true' if regular code generation should be skipped.
|
+bool FlowGraphCompiler::TryIntrinsify() { |
// Intrinsification skips arguments checks, therefore disable if in checked |
// mode. |
if (FLAG_intrinsify && !isolate()->flags().type_checks()) { |
@@ -1043,8 +1044,9 @@ void FlowGraphCompiler::TryIntrinsify() { |
// Reading from a mutable double box requires allocating a fresh double. |
if (load_node.field().guarded_cid() == kDynamicCid) { |
GenerateInlinedGetter(load_node.field().Offset()); |
+ return true; |
} |
- return; |
+ return false; |
} |
if (parsed_function().function().kind() == RawFunction::kImplicitSetter) { |
// An implicit setter must have a specific AST structure. |
@@ -1057,7 +1059,7 @@ void FlowGraphCompiler::TryIntrinsify() { |
*sequence_node.NodeAt(0)->AsStoreInstanceFieldNode(); |
if (store_node.field().guarded_cid() == kDynamicCid) { |
GenerateInlinedSetter(store_node.field().Offset()); |
- return; |
+ return true; |
} |
} |
} |
@@ -1074,6 +1076,7 @@ void FlowGraphCompiler::TryIntrinsify() { |
// before any deoptimization point. |
ASSERT(!intrinsic_slow_path_label_.IsBound()); |
assembler()->Bind(&intrinsic_slow_path_label_); |
+ return false; |
} |