OLD | NEW |
(Empty) | |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. |
| 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_XXX. |
| 6 |
| 7 #include "vm/flow_graph_compiler.h" |
| 8 |
| 9 #include "vm/debugger.h" |
| 10 #include "vm/il_printer.h" |
| 11 #include "vm/intrinsifier.h" |
| 12 #include "vm/longjump.h" |
| 13 #include "vm/parser.h" |
| 14 #include "vm/stub_code.h" |
| 15 |
namespace dart {

// Flags declared (and defined) elsewhere that influence code generation
// in this file.
DECLARE_FLAG(bool, code_comments);       // Checked in VisitBlocks().
DECLARE_FLAG(bool, enable_type_checks);  // Checked mode; see TryIntrinsify().
DECLARE_FLAG(bool, intrinsify);          // See TryIntrinsify().
DECLARE_FLAG(bool, report_usage_count);  // See CanOptimize().
DECLARE_FLAG(bool, trace_functions);     // See TryIntrinsify().
DECLARE_FLAG(int, optimization_counter_threshold);  // See CanOptimize().
| 24 |
| 25 |
// Constructs a compiler for a single function. 'block_order' is the
// codegen order of basic blocks produced by the flow graph builder.
// This constructor only records state; the per-compilation structures
// (descriptor lists, block infos) are allocated in InitCompiler().
FlowGraphCompiler::FlowGraphCompiler(
    Assembler* assembler,
    const ParsedFunction& parsed_function,
    const GrowableArray<BlockEntryInstr*>& block_order,
    bool is_optimizing)
    : assembler_(assembler),
      parsed_function_(parsed_function),
      block_order_(block_order),
      current_block_(NULL),            // Set per block in VisitBlocks().
      exception_handlers_list_(NULL),  // Allocated in InitCompiler().
      pc_descriptors_list_(NULL),      // Allocated in InitCompiler().
      stackmap_builder_(NULL),         // NULL => no stack maps; see FinalizeStackmaps().
      block_info_(block_order.length()),  // Presumably a capacity hint; filled in InitCompiler().
      deopt_stubs_(),
      is_optimizing_(is_optimizing) {
  ASSERT(assembler != NULL);
}
| 43 |
| 44 |
| 45 FlowGraphCompiler::~FlowGraphCompiler() { |
| 46 // BlockInfos are zone-allocated, so their destructors are not called. |
| 47 // Verify the labels explicitly here. |
| 48 for (int i = 0; i < block_info_.length(); ++i) { |
| 49 ASSERT(!block_info_[i]->label.IsLinked()); |
| 50 ASSERT(!block_info_[i]->label.HasNear()); |
| 51 } |
| 52 } |
| 53 |
| 54 |
| 55 void FlowGraphCompiler::InitCompiler() { |
| 56 pc_descriptors_list_ = new DescriptorList(); |
| 57 exception_handlers_list_ = new ExceptionHandlerList(); |
| 58 block_info_.Clear(); |
| 59 for (int i = 0; i < block_order_.length(); ++i) { |
| 60 block_info_.Add(new BlockInfo()); |
| 61 } |
| 62 } |
| 63 |
| 64 |
| 65 void FlowGraphCompiler::VisitBlocks() { |
| 66 for (intptr_t i = 0; i < block_order().length(); ++i) { |
| 67 assembler()->Comment("B%d", i); |
| 68 // Compile the block entry. |
| 69 set_current_block(block_order()[i]); |
| 70 current_block()->PrepareEntry(this); |
| 71 Instruction* instr = current_block()->StraightLineSuccessor(); |
| 72 // Compile all successors until an exit, branch, or a block entry. |
| 73 while ((instr != NULL) && !instr->IsBlockEntry()) { |
| 74 if (FLAG_code_comments) EmitComment(instr); |
| 75 ASSERT(instr->locs() != NULL); |
| 76 EmitInstructionPrologue(instr); |
| 77 instr->EmitNativeCode(this); |
| 78 instr = instr->StraightLineSuccessor(); |
| 79 } |
| 80 BlockEntryInstr* successor = |
| 81 (instr == NULL) ? NULL : instr->AsBlockEntry(); |
| 82 if (successor != NULL) { |
| 83 // Block ended with a "goto". We can fall through if it is the |
| 84 // next block in the list. Otherwise, we need a jump. |
| 85 if ((i == block_order().length() - 1) || |
| 86 (block_order()[i + 1] != successor)) { |
| 87 assembler()->jmp(GetBlockLabel(successor)); |
| 88 } |
| 89 } |
| 90 } |
| 91 } |
| 92 |
| 93 |
// Aborts this compilation attempt: builds a LanguageError naming the
// function and 'reason', then long-jumps out of the compiler.
// Does not return.
void FlowGraphCompiler::Bailout(const char* reason) {
  const char* kFormat = "FlowGraphCompiler Bailout: %s %s.";
  const char* function_name = parsed_function().function().ToCString();
  // First SNPrint call (NULL buffer) measures the formatted length;
  // +1 reserves space for the terminating NUL.
  intptr_t len = OS::SNPrint(NULL, 0, kFormat, function_name, reason) + 1;
  // Zone allocation: freed wholesale with the zone, not individually.
  char* chars = reinterpret_cast<char*>(
      Isolate::Current()->current_zone()->Allocate(len));
  OS::SNPrint(chars, len, kFormat, function_name, reason);
  const Error& error = Error::Handle(
      LanguageError::New(String::Handle(String::New(chars))));
  Isolate::Current()->long_jump_base()->Jump(1, error);
}
| 105 |
| 106 |
| 107 intptr_t FlowGraphCompiler::StackSize() const { |
| 108 return parsed_function_.stack_local_count() + |
| 109 parsed_function_.copied_parameter_count(); |
| 110 } |
| 111 |
| 112 |
| 113 Label* FlowGraphCompiler::GetBlockLabel( |
| 114 BlockEntryInstr* block_entry) const { |
| 115 intptr_t block_index = block_entry->postorder_number(); |
| 116 return &block_info_[block_index]->label; |
| 117 } |
| 118 |
| 119 |
| 120 bool FlowGraphCompiler::IsNextBlock(TargetEntryInstr* block_entry) const { |
| 121 intptr_t current_index = reverse_index(current_block()->postorder_number()); |
| 122 return block_order_[current_index + 1] == block_entry; |
| 123 } |
| 124 |
| 125 |
| 126 void FlowGraphCompiler::GenerateDeferredCode() { |
| 127 for (intptr_t i = 0; i < deopt_stubs_.length(); i++) { |
| 128 deopt_stubs_[i]->GenerateCode(this); |
| 129 } |
| 130 } |
| 131 |
| 132 |
// Records an exception handler for 'try_index' whose code starts at
// 'pc_offset'; finalized into the Code object by
// FinalizeExceptionHandlers().
void FlowGraphCompiler::AddExceptionHandler(intptr_t try_index,
                                            intptr_t pc_offset) {
  exception_handlers_list_->AddHandler(try_index, pc_offset);
}
| 137 |
| 138 |
| 139 // Uses current pc position and try-index. |
| 140 void FlowGraphCompiler::AddCurrentDescriptor(PcDescriptors::Kind kind, |
| 141 intptr_t cid, |
| 142 intptr_t token_index, |
| 143 intptr_t try_index) { |
| 144 pc_descriptors_list()->AddDescriptor(kind, |
| 145 assembler()->CodeSize(), |
| 146 cid, |
| 147 token_index, |
| 148 try_index); |
| 149 } |
| 150 |
| 151 |
| 152 Label* FlowGraphCompiler::AddDeoptStub(intptr_t deopt_id, |
| 153 intptr_t deopt_token_index, |
| 154 intptr_t try_index, |
| 155 DeoptReasonId reason, |
| 156 Register reg1, |
| 157 Register reg2) { |
| 158 DeoptimizationStub* stub = |
| 159 new DeoptimizationStub(deopt_id, deopt_token_index, try_index, reason); |
| 160 stub->Push(reg1); |
| 161 stub->Push(reg2); |
| 162 deopt_stubs_.Add(stub); |
| 163 return stub->entry_label(); |
| 164 } |
| 165 |
| 166 |
| 167 void FlowGraphCompiler::FinalizeExceptionHandlers(const Code& code) { |
| 168 ASSERT(exception_handlers_list_ != NULL); |
| 169 const ExceptionHandlers& handlers = ExceptionHandlers::Handle( |
| 170 exception_handlers_list_->FinalizeExceptionHandlers(code.EntryPoint())); |
| 171 code.set_exception_handlers(handlers); |
| 172 } |
| 173 |
| 174 |
| 175 void FlowGraphCompiler::FinalizePcDescriptors(const Code& code) { |
| 176 ASSERT(pc_descriptors_list_ != NULL); |
| 177 const PcDescriptors& descriptors = PcDescriptors::Handle( |
| 178 pc_descriptors_list_->FinalizePcDescriptors(code.EntryPoint())); |
| 179 descriptors.Verify(parsed_function_.function().is_optimizable()); |
| 180 code.set_pc_descriptors(descriptors); |
| 181 } |
| 182 |
| 183 |
| 184 void FlowGraphCompiler::FinalizeStackmaps(const Code& code) { |
| 185 if (stackmap_builder_ == NULL) { |
| 186 // The unoptimizing compiler has no stack maps. |
| 187 code.set_stackmaps(Array::Handle()); |
| 188 } else { |
| 189 // Finalize the stack map array and add it to the code object. |
| 190 code.set_stackmaps( |
| 191 Array::Handle(stackmap_builder_->FinalizeStackmaps(code))); |
| 192 } |
| 193 } |
| 194 |
| 195 |
| 196 void FlowGraphCompiler::FinalizeVarDescriptors(const Code& code) { |
| 197 const LocalVarDescriptors& var_descs = LocalVarDescriptors::Handle( |
| 198 parsed_function_.node_sequence()->scope()->GetVarDescriptors()); |
| 199 code.set_var_descriptors(var_descs); |
| 200 } |
| 201 |
| 202 |
// Attaches the assembler's accumulated code comments to 'code'.
void FlowGraphCompiler::FinalizeComments(const Code& code) {
  code.set_comments(assembler()->GetCodeComments());
}
| 206 |
| 207 |
| 208 static bool CanOptimize() { |
| 209 return !FLAG_report_usage_count && |
| 210 (FLAG_optimization_counter_threshold >= 0) && |
| 211 !Isolate::Current()->debugger()->IsActive(); |
| 212 } |
| 213 |
| 214 |
// Returns 'true' if code generation for this function is complete, i.e.,
// no fall-through to regular code is needed.
bool FlowGraphCompiler::TryIntrinsify() {
  if (!CanOptimize()) return false;
  // Intrinsification skips arguments checks, therefore disable if in checked
  // mode.
  if (FLAG_intrinsify && !FLAG_trace_functions && !FLAG_enable_type_checks) {
    if ((parsed_function().function().kind() == RawFunction::kImplicitGetter)) {
      // An implicit getter must have a specific AST structure.
      // The asserts below pin the expected shape: a single
      // 'return this.field' statement (presumably guaranteed by the
      // parser for implicit getters — confirm against the parser).
      const SequenceNode& sequence_node = *parsed_function().node_sequence();
      ASSERT(sequence_node.length() == 1);
      ASSERT(sequence_node.NodeAt(0)->IsReturnNode());
      const ReturnNode& return_node = *sequence_node.NodeAt(0)->AsReturnNode();
      ASSERT(return_node.value()->IsLoadInstanceFieldNode());
      const LoadInstanceFieldNode& load_node =
          *return_node.value()->AsLoadInstanceFieldNode();
      // Emit the field load directly; no regular body follows.
      GenerateInlinedGetter(load_node.field().Offset());
      return true;
    }
    if ((parsed_function().function().kind() == RawFunction::kImplicitSetter)) {
      // An implicit setter must have a specific AST structure.
      // Sequence node has one store node and one return NULL node.
      const SequenceNode& sequence_node = *parsed_function().node_sequence();
      ASSERT(sequence_node.length() == 2);
      ASSERT(sequence_node.NodeAt(0)->IsStoreInstanceFieldNode());
      ASSERT(sequence_node.NodeAt(1)->IsReturnNode());
      const StoreInstanceFieldNode& store_node =
          *sequence_node.NodeAt(0)->AsStoreInstanceFieldNode();
      // Emit the field store directly; no regular body follows.
      GenerateInlinedSetter(store_node.field().Offset());
      return true;
    }
  }
  // Even if an intrinsified version of the function was successfully
  // generated, it may fall through to the non-intrinsified method body.
  if (!FLAG_trace_functions) {
    return Intrinsifier::Intrinsify(parsed_function().function(), assembler());
  }
  return false;
}
| 254 |
| 255 |
// Emits an instance (IC-dispatched) call: allocates a fresh ICData for
// the call site, selects the inline-cache stub matching
// 'checked_argument_count', emits the call, and records a kIcCall PC
// descriptor at the call's return address.
void FlowGraphCompiler::GenerateInstanceCall(
    intptr_t cid,
    intptr_t token_index,
    intptr_t try_index,
    const String& function_name,
    intptr_t argument_count,
    const Array& argument_names,
    intptr_t checked_argument_count) {
  // Zone handle: the ICData must outlive this frame (it is referenced by
  // the generated code).
  ICData& ic_data =
      ICData::ZoneHandle(ICData::New(parsed_function().function(),
                                     function_name,
                                     cid,
                                     checked_argument_count));
  const Array& arguments_descriptor =
      CodeGenerator::ArgumentsDescriptor(argument_count, argument_names);
  uword label_address = 0;
  // Only 1- and 2-argument class checks have IC stubs.
  switch (checked_argument_count) {
    case 1:
      label_address = StubCode::OneArgCheckInlineCacheEntryPoint();
      break;
    case 2:
      label_address = StubCode::TwoArgsCheckInlineCacheEntryPoint();
      break;
    default:
      UNIMPLEMENTED();
  }
  ExternalLabel target_label("InlineCache", label_address);

  const intptr_t descr_offset = EmitInstanceCall(&target_label,
                                                 ic_data,
                                                 arguments_descriptor,
                                                 argument_count);
  pc_descriptors_list()->AddDescriptor(PcDescriptors::kIcCall,
                                       descr_offset,
                                       cid,
                                       token_index,
                                       try_index);
}
| 294 |
| 295 |
| 296 void FlowGraphCompiler::GenerateStaticCall(intptr_t cid, |
| 297 intptr_t token_index, |
| 298 intptr_t try_index, |
| 299 const Function& function, |
| 300 intptr_t argument_count, |
| 301 const Array& argument_names) { |
| 302 const Array& arguments_descriptor = |
| 303 CodeGenerator::ArgumentsDescriptor(argument_count, argument_names); |
| 304 const intptr_t descr_offset = EmitStaticCall(function, |
| 305 arguments_descriptor, |
| 306 argument_count); |
| 307 pc_descriptors_list()->AddDescriptor(PcDescriptors::kFuncCall, |
| 308 descr_offset, |
| 309 cid, |
| 310 token_index, |
| 311 try_index); |
| 312 } |
| 313 |
| 314 |
| 315 void FlowGraphCompiler::GenerateNumberTypeCheck(Register kClassIdReg, |
| 316 const AbstractType& type, |
| 317 Label* is_instance_lbl, |
| 318 Label* is_not_instance_lbl) { |
| 319 GrowableArray<intptr_t> args; |
| 320 if (type.IsNumberInterface()) { |
| 321 args.Add(kDouble); |
| 322 args.Add(kMint); |
| 323 args.Add(kBigint); |
| 324 } else if (type.IsIntInterface()) { |
| 325 args.Add(kMint); |
| 326 args.Add(kBigint); |
| 327 } else if (type.IsDoubleInterface()) { |
| 328 args.Add(kDouble); |
| 329 } |
| 330 CheckClassIds(kClassIdReg, args, is_instance_lbl, is_not_instance_lbl); |
| 331 } |
| 332 |
| 333 |
| 334 void FlowGraphCompiler::GenerateStringTypeCheck(Register kClassIdReg, |
| 335 Label* is_instance_lbl, |
| 336 Label* is_not_instance_lbl) { |
| 337 GrowableArray<intptr_t> args; |
| 338 args.Add(kOneByteString); |
| 339 args.Add(kTwoByteString); |
| 340 args.Add(kFourByteString); |
| 341 args.Add(kExternalOneByteString); |
| 342 args.Add(kExternalTwoByteString); |
| 343 args.Add(kExternalFourByteString); |
| 344 CheckClassIds(kClassIdReg, args, is_instance_lbl, is_not_instance_lbl); |
| 345 } |
| 346 |
| 347 |
| 348 void FlowGraphCompiler::GenerateListTypeCheck(Register kClassIdReg, |
| 349 Label* is_instance_lbl) { |
| 350 Label unknown; |
| 351 GrowableArray<intptr_t> args; |
| 352 args.Add(kArray); |
| 353 args.Add(kGrowableObjectArray); |
| 354 args.Add(kImmutableArray); |
| 355 CheckClassIds(kClassIdReg, args, is_instance_lbl, &unknown); |
| 356 assembler()->Bind(&unknown); |
| 357 } |
| 358 |
| 359 |
| 360 void FlowGraphCompiler::EmitComment(Instruction* instr) { |
| 361 char buffer[80]; |
| 362 BufferFormatter f(buffer, sizeof(buffer)); |
| 363 instr->PrintTo(&f); |
| 364 assembler()->Comment("@%d: %s", instr->cid(), buffer); |
| 365 } |
| 366 |
| 367 } // namespace dart |
OLD | NEW |