OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
9 | 9 |
10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
(...skipping 249 matching lines...) |
260 } | 260 } |
261 | 261 |
262 | 262 |
263 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if | 263 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if |
264 // type test is conclusive, otherwise fallthrough if a type test could not | 264 // type test is conclusive, otherwise fallthrough if a type test could not |
265 // be completed. | 265 // be completed. |
266 // RAX: instance (must survive). | 266 // RAX: instance (must survive). |
267 // Clobbers R10. | 267 // Clobbers R10. |
268 RawSubtypeTestCache* | 268 RawSubtypeTestCache* |
269 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest( | 269 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest( |
270 intptr_t token_pos, | 270 TokenPosition token_pos, |
271 const AbstractType& type, | 271 const AbstractType& type, |
272 Label* is_instance_lbl, | 272 Label* is_instance_lbl, |
273 Label* is_not_instance_lbl) { | 273 Label* is_not_instance_lbl) { |
274 __ Comment("InstantiatedTypeWithArgumentsTest"); | 274 __ Comment("InstantiatedTypeWithArgumentsTest"); |
275 ASSERT(type.IsInstantiated()); | 275 ASSERT(type.IsInstantiated()); |
276 const Class& type_class = Class::ZoneHandle(zone(), type.type_class()); | 276 const Class& type_class = Class::ZoneHandle(zone(), type.type_class()); |
277 ASSERT(type.IsFunctionType() || (type_class.NumTypeArguments() > 0)); | 277 ASSERT(type.IsFunctionType() || (type_class.NumTypeArguments() > 0)); |
278 const Register kInstanceReg = RAX; | 278 const Register kInstanceReg = RAX; |
279 Error& bound_error = Error::Handle(zone()); | 279 Error& bound_error = Error::Handle(zone()); |
280 const Type& int_type = Type::Handle(zone(), Type::IntType()); | 280 const Type& int_type = Type::Handle(zone(), Type::IntType()); |
(...skipping 68 matching lines...) |
349 __ jmp(is_not_equal_lbl); | 349 __ jmp(is_not_equal_lbl); |
350 } | 350 } |
351 | 351 |
352 | 352 |
353 // Testing against an instantiated type with no arguments, without | 353 // Testing against an instantiated type with no arguments, without |
354 // SubtypeTestCache. | 354 // SubtypeTestCache. |
355 // RAX: instance to test against (preserved). | 355 // RAX: instance to test against (preserved). |
356 // Clobbers R10, R13. | 356 // Clobbers R10, R13. |
357 // Returns true if there is a fallthrough. | 357 // Returns true if there is a fallthrough. |
358 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( | 358 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( |
359 intptr_t token_pos, | 359 TokenPosition token_pos, |
360 const AbstractType& type, | 360 const AbstractType& type, |
361 Label* is_instance_lbl, | 361 Label* is_instance_lbl, |
362 Label* is_not_instance_lbl) { | 362 Label* is_not_instance_lbl) { |
363 __ Comment("InstantiatedTypeNoArgumentsTest"); | 363 __ Comment("InstantiatedTypeNoArgumentsTest"); |
364 ASSERT(type.IsInstantiated()); | 364 ASSERT(type.IsInstantiated()); |
365 if (type.IsFunctionType()) { | 365 if (type.IsFunctionType()) { |
366 // Fallthrough. | 366 // Fallthrough. |
367 return true; | 367 return true; |
368 } | 368 } |
369 const Class& type_class = Class::Handle(zone(), type.type_class()); | 369 const Class& type_class = Class::Handle(zone(), type.type_class()); |
(...skipping 49 matching lines...) |
419 | 419 |
420 | 420 |
421 // Uses SubtypeTestCache to store instance class and result. | 421 // Uses SubtypeTestCache to store instance class and result. |
422 // RAX: instance to test. | 422 // RAX: instance to test. |
423 // Clobbers R10, R13. | 423 // Clobbers R10, R13. |
424 // Immediate class test already done. | 424 // Immediate class test already done. |
425 // TODO(srdjan): Implement a quicker subtype check, as type test | 425 // TODO(srdjan): Implement a quicker subtype check, as type test |
426 // arrays can grow too high, but they may be useful when optimizing | 426 // arrays can grow too high, but they may be useful when optimizing |
427 // code (type-feedback). | 427 // code (type-feedback). |
428 RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup( | 428 RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup( |
429 intptr_t token_pos, | 429 TokenPosition token_pos, |
430 const Class& type_class, | 430 const Class& type_class, |
431 Label* is_instance_lbl, | 431 Label* is_instance_lbl, |
432 Label* is_not_instance_lbl) { | 432 Label* is_not_instance_lbl) { |
433 __ Comment("Subtype1TestCacheLookup"); | 433 __ Comment("Subtype1TestCacheLookup"); |
434 const Register kInstanceReg = RAX; | 434 const Register kInstanceReg = RAX; |
435 __ LoadClass(R10, kInstanceReg); | 435 __ LoadClass(R10, kInstanceReg); |
436 // R10: instance class. | 436 // R10: instance class. |
437 // Check immediate superclass equality. | 437 // Check immediate superclass equality. |
438 __ movq(R13, FieldAddress(R10, Class::super_type_offset())); | 438 __ movq(R13, FieldAddress(R10, Class::super_type_offset())); |
439 __ movq(R13, FieldAddress(R13, Type::type_class_offset())); | 439 __ movq(R13, FieldAddress(R13, Type::type_class_offset())); |
440 __ CompareObject(R13, type_class); | 440 __ CompareObject(R13, type_class); |
441 __ j(EQUAL, is_instance_lbl); | 441 __ j(EQUAL, is_instance_lbl); |
442 | 442 |
443 const Register kTypeArgumentsReg = kNoRegister; | 443 const Register kTypeArgumentsReg = kNoRegister; |
444 const Register kTempReg = R10; | 444 const Register kTempReg = R10; |
445 return GenerateCallSubtypeTestStub(kTestTypeOneArg, | 445 return GenerateCallSubtypeTestStub(kTestTypeOneArg, |
446 kInstanceReg, | 446 kInstanceReg, |
447 kTypeArgumentsReg, | 447 kTypeArgumentsReg, |
448 kTempReg, | 448 kTempReg, |
449 is_instance_lbl, | 449 is_instance_lbl, |
450 is_not_instance_lbl); | 450 is_not_instance_lbl); |
451 } | 451 } |
452 | 452 |
453 | 453 |
454 // Generates inlined check if 'type' is a type parameter or type itself | 454 // Generates inlined check if 'type' is a type parameter or type itself |
455 // RAX: instance (preserved). | 455 // RAX: instance (preserved). |
456 // Clobbers RDI, RDX, R10. | 456 // Clobbers RDI, RDX, R10. |
457 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( | 457 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( |
458 intptr_t token_pos, | 458 TokenPosition token_pos, |
459 const AbstractType& type, | 459 const AbstractType& type, |
460 Label* is_instance_lbl, | 460 Label* is_instance_lbl, |
461 Label* is_not_instance_lbl) { | 461 Label* is_not_instance_lbl) { |
462 __ Comment("UninstantiatedTypeTest"); | 462 __ Comment("UninstantiatedTypeTest"); |
463 ASSERT(!type.IsInstantiated()); | 463 ASSERT(!type.IsInstantiated()); |
464 // Skip check if destination is a dynamic type. | 464 // Skip check if destination is a dynamic type. |
465 if (type.IsTypeParameter()) { | 465 if (type.IsTypeParameter()) { |
466 const TypeParameter& type_param = TypeParameter::Cast(type); | 466 const TypeParameter& type_param = TypeParameter::Cast(type); |
467 // Load instantiator type arguments on stack. | 467 // Load instantiator type arguments on stack. |
468 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. | 468 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. |
(...skipping 63 matching lines...) |
532 // Inputs: | 532 // Inputs: |
533 // - RAX: instance to test against (preserved). | 533 // - RAX: instance to test against (preserved). |
534 // - RDX: optional instantiator type arguments (preserved). | 534 // - RDX: optional instantiator type arguments (preserved). |
535 // Clobbers R10, R13. | 535 // Clobbers R10, R13. |
536 // Returns: | 536 // Returns: |
537 // - preserved instance in RAX and optional instantiator type arguments in RDX. | 537 // - preserved instance in RAX and optional instantiator type arguments in RDX. |
538 // Note that this inlined code must be followed by the runtime_call code, as it | 538 // Note that this inlined code must be followed by the runtime_call code, as it |
539 // may fall through to it. Otherwise, this inline code will jump to the label | 539 // may fall through to it. Otherwise, this inline code will jump to the label |
540 // is_instance or to the label is_not_instance. | 540 // is_instance or to the label is_not_instance. |
541 RawSubtypeTestCache* FlowGraphCompiler::GenerateInlineInstanceof( | 541 RawSubtypeTestCache* FlowGraphCompiler::GenerateInlineInstanceof( |
542 intptr_t token_pos, | 542 TokenPosition token_pos, |
543 const AbstractType& type, | 543 const AbstractType& type, |
544 Label* is_instance_lbl, | 544 Label* is_instance_lbl, |
545 Label* is_not_instance_lbl) { | 545 Label* is_not_instance_lbl) { |
546 __ Comment("InlineInstanceof"); | 546 __ Comment("InlineInstanceof"); |
547 if (type.IsVoidType()) { | 547 if (type.IsVoidType()) { |
548 // A non-null value is returned from a void function, which will result in a | 548 // A non-null value is returned from a void function, which will result in a |
549 // type error. A null value is handled prior to executing this inline code. | 549 // type error. A null value is handled prior to executing this inline code. |
550 return SubtypeTestCache::null(); | 550 return SubtypeTestCache::null(); |
551 } | 551 } |
552 if (type.IsInstantiated()) { | 552 if (type.IsInstantiated()) { |
(...skipping 33 matching lines...) |
586 // therefore eliminated, optimize it by adding inlined tests for: | 586 // therefore eliminated, optimize it by adding inlined tests for: |
587 // - NULL -> return false. | 587 // - NULL -> return false. |
588 // - Smi -> compile time subtype check (only if dst class is not parameterized). | 588 // - Smi -> compile time subtype check (only if dst class is not parameterized). |
589 // - Class equality (only if class is not parameterized). | 589 // - Class equality (only if class is not parameterized). |
590 // Inputs: | 590 // Inputs: |
591 // - RAX: object. | 591 // - RAX: object. |
592 // - RDX: instantiator type arguments or raw_null. | 592 // - RDX: instantiator type arguments or raw_null. |
593 // Clobbers RDX. | 593 // Clobbers RDX. |
594 // Returns: | 594 // Returns: |
595 // - true or false in RAX. | 595 // - true or false in RAX. |
596 void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos, | 596 void FlowGraphCompiler::GenerateInstanceOf(TokenPosition token_pos, |
597 intptr_t deopt_id, | 597 intptr_t deopt_id, |
598 const AbstractType& type, | 598 const AbstractType& type, |
599 bool negate_result, | 599 bool negate_result, |
600 LocationSummary* locs) { | 600 LocationSummary* locs) { |
601 ASSERT(type.IsFinalized() && !type.IsMalformedOrMalbounded()); | 601 ASSERT(type.IsFinalized() && !type.IsMalformedOrMalbounded()); |
602 | 602 |
603 Label is_instance, is_not_instance; | 603 Label is_instance, is_not_instance; |
604 __ pushq(RDX); // Store instantiator type arguments. | 604 __ pushq(RDX); // Store instantiator type arguments. |
605 // If type is instantiated and non-parameterized, we can inline code | 605 // If type is instantiated and non-parameterized, we can inline code |
606 // checking whether the tested instance is a Smi. | 606 // checking whether the tested instance is a Smi. |
(...skipping 59 matching lines...) |
666 // - NULL -> return NULL. | 666 // - NULL -> return NULL. |
667 // - Smi -> compile time subtype check (only if dst class is not parameterized). | 667 // - Smi -> compile time subtype check (only if dst class is not parameterized). |
668 // - Class equality (only if class is not parameterized). | 668 // - Class equality (only if class is not parameterized). |
669 // Inputs: | 669 // Inputs: |
670 // - RAX: object. | 670 // - RAX: object. |
671 // - RDX: instantiator type arguments or raw_null. | 671 // - RDX: instantiator type arguments or raw_null. |
672 // Returns: | 672 // Returns: |
673 // - object in RAX for successful assignable check (or throws TypeError). | 673 // - object in RAX for successful assignable check (or throws TypeError). |
674 // Performance notes: positive checks must be quick, negative checks can be slow | 674 // Performance notes: positive checks must be quick, negative checks can be slow |
675 // as they throw an exception. | 675 // as they throw an exception. |
676 void FlowGraphCompiler::GenerateAssertAssignable(intptr_t token_pos, | 676 void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos, |
677 intptr_t deopt_id, | 677 intptr_t deopt_id, |
678 const AbstractType& dst_type, | 678 const AbstractType& dst_type, |
679 const String& dst_name, | 679 const String& dst_name, |
680 LocationSummary* locs) { | 680 LocationSummary* locs) { |
681 ASSERT(!Token::IsClassifying(token_pos)); | 681 ASSERT(!token_pos.IsClassifying()); |
682 ASSERT(!dst_type.IsNull()); | 682 ASSERT(!dst_type.IsNull()); |
683 ASSERT(dst_type.IsFinalized()); | 683 ASSERT(dst_type.IsFinalized()); |
684 // Assignable check is skipped in FlowGraphBuilder, not here. | 684 // Assignable check is skipped in FlowGraphBuilder, not here. |
685 ASSERT(dst_type.IsMalformedOrMalbounded() || | 685 ASSERT(dst_type.IsMalformedOrMalbounded() || |
686 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); | 686 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); |
687 __ pushq(RDX); // Store instantiator type arguments. | 687 __ pushq(RDX); // Store instantiator type arguments. |
688 // A null object is always assignable and is returned as result. | 688 // A null object is always assignable and is returned as result. |
689 Label is_assignable, runtime_call; | 689 Label is_assignable, runtime_call; |
690 __ CompareObject(RAX, Object::null_object()); | 690 __ CompareObject(RAX, Object::null_object()); |
691 __ j(EQUAL, &is_assignable); | 691 __ j(EQUAL, &is_assignable); |
(...skipping 450 matching lines...) |
1142 if (is_optimizing() && !FLAG_precompilation) { | 1142 if (is_optimizing() && !FLAG_precompilation) { |
1143 // Leave enough space for patching in case of lazy deoptimization from | 1143 // Leave enough space for patching in case of lazy deoptimization from |
1144 // deferred code. | 1144 // deferred code. |
1145 __ nop(ShortCallPattern::pattern_length_in_bytes()); | 1145 __ nop(ShortCallPattern::pattern_length_in_bytes()); |
1146 lazy_deopt_pc_offset_ = assembler()->CodeSize(); | 1146 lazy_deopt_pc_offset_ = assembler()->CodeSize(); |
1147 __ Jmp(*StubCode::DeoptimizeLazy_entry(), PP); | 1147 __ Jmp(*StubCode::DeoptimizeLazy_entry(), PP); |
1148 } | 1148 } |
1149 } | 1149 } |
1150 | 1150 |
1151 | 1151 |
1152 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, | 1152 void FlowGraphCompiler::GenerateCall(TokenPosition token_pos, |
1153 const StubEntry& stub_entry, | 1153 const StubEntry& stub_entry, |
1154 RawPcDescriptors::Kind kind, | 1154 RawPcDescriptors::Kind kind, |
1155 LocationSummary* locs) { | 1155 LocationSummary* locs) { |
1156 __ Call(stub_entry); | 1156 __ Call(stub_entry); |
1157 AddCurrentDescriptor(kind, Thread::kNoDeoptId, token_pos); | 1157 AddCurrentDescriptor(kind, Thread::kNoDeoptId, token_pos); |
1158 RecordSafepoint(locs); | 1158 RecordSafepoint(locs); |
1159 } | 1159 } |
1160 | 1160 |
1161 | 1161 |
1162 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, | 1162 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, |
1163 intptr_t token_pos, | 1163 TokenPosition token_pos, |
1164 const StubEntry& stub_entry, | 1164 const StubEntry& stub_entry, |
1165 RawPcDescriptors::Kind kind, | 1165 RawPcDescriptors::Kind kind, |
1166 LocationSummary* locs) { | 1166 LocationSummary* locs) { |
1167 __ CallPatchable(stub_entry); | 1167 __ CallPatchable(stub_entry); |
1168 AddCurrentDescriptor(kind, deopt_id, token_pos); | 1168 AddCurrentDescriptor(kind, deopt_id, token_pos); |
1169 RecordSafepoint(locs); | 1169 RecordSafepoint(locs); |
1170 // Marks either the continuation point in unoptimized code or the | 1170 // Marks either the continuation point in unoptimized code or the |
1171 // deoptimization point in optimized code, after call. | 1171 // deoptimization point in optimized code, after call. |
1172 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); | 1172 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); |
1173 if (is_optimizing()) { | 1173 if (is_optimizing()) { |
1174 AddDeoptIndexAtCall(deopt_id_after, token_pos); | 1174 AddDeoptIndexAtCall(deopt_id_after, token_pos); |
1175 } else { | 1175 } else { |
1176 // Add deoptimization continuation point after the call and before the | 1176 // Add deoptimization continuation point after the call and before the |
1177 // arguments are removed. | 1177 // arguments are removed. |
1178 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1178 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
1179 } | 1179 } |
1180 } | 1180 } |
1181 | 1181 |
1182 | 1182 |
1183 void FlowGraphCompiler::GenerateRuntimeCall(intptr_t token_pos, | 1183 void FlowGraphCompiler::GenerateRuntimeCall(TokenPosition token_pos, |
1184 intptr_t deopt_id, | 1184 intptr_t deopt_id, |
1185 const RuntimeEntry& entry, | 1185 const RuntimeEntry& entry, |
1186 intptr_t argument_count, | 1186 intptr_t argument_count, |
1187 LocationSummary* locs) { | 1187 LocationSummary* locs) { |
1188 __ CallRuntime(entry, argument_count); | 1188 __ CallRuntime(entry, argument_count); |
1189 AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id, token_pos); | 1189 AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id, token_pos); |
1190 RecordSafepoint(locs); | 1190 RecordSafepoint(locs); |
1191 if (deopt_id != Thread::kNoDeoptId) { | 1191 if (deopt_id != Thread::kNoDeoptId) { |
1192 // Marks either the continuation point in unoptimized code or the | 1192 // Marks either the continuation point in unoptimized code or the |
1193 // deoptimization point in optimized code, after call. | 1193 // deoptimization point in optimized code, after call. |
1194 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); | 1194 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); |
1195 if (is_optimizing()) { | 1195 if (is_optimizing()) { |
1196 AddDeoptIndexAtCall(deopt_id_after, token_pos); | 1196 AddDeoptIndexAtCall(deopt_id_after, token_pos); |
1197 } else { | 1197 } else { |
1198 // Add deoptimization continuation point after the call and before the | 1198 // Add deoptimization continuation point after the call and before the |
1199 // arguments are removed. | 1199 // arguments are removed. |
1200 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1200 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
1201 } | 1201 } |
1202 } | 1202 } |
1203 } | 1203 } |
1204 | 1204 |
1205 | 1205 |
1206 void FlowGraphCompiler::EmitUnoptimizedStaticCall( | 1206 void FlowGraphCompiler::EmitUnoptimizedStaticCall( |
1207 intptr_t argument_count, | 1207 intptr_t argument_count, |
1208 intptr_t deopt_id, | 1208 intptr_t deopt_id, |
1209 intptr_t token_pos, | 1209 TokenPosition token_pos, |
1210 LocationSummary* locs, | 1210 LocationSummary* locs, |
1211 const ICData& ic_data) { | 1211 const ICData& ic_data) { |
1212 const StubEntry* stub_entry = | 1212 const StubEntry* stub_entry = |
1213 StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested()); | 1213 StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested()); |
1214 __ LoadObject(RBX, ic_data); | 1214 __ LoadObject(RBX, ic_data); |
1215 GenerateDartCall(deopt_id, | 1215 GenerateDartCall(deopt_id, |
1216 token_pos, | 1216 token_pos, |
1217 *stub_entry, | 1217 *stub_entry, |
1218 RawPcDescriptors::kUnoptStaticCall, | 1218 RawPcDescriptors::kUnoptStaticCall, |
1219 locs); | 1219 locs); |
(...skipping 13 matching lines...) |
1233 __ LoadObject(RAX, edge_counters_array_); | 1233 __ LoadObject(RAX, edge_counters_array_); |
1234 __ IncrementSmiField(FieldAddress(RAX, Array::element_offset(edge_id)), 1); | 1234 __ IncrementSmiField(FieldAddress(RAX, Array::element_offset(edge_id)), 1); |
1235 } | 1235 } |
1236 | 1236 |
1237 | 1237 |
1238 void FlowGraphCompiler::EmitOptimizedInstanceCall( | 1238 void FlowGraphCompiler::EmitOptimizedInstanceCall( |
1239 const StubEntry& stub_entry, | 1239 const StubEntry& stub_entry, |
1240 const ICData& ic_data, | 1240 const ICData& ic_data, |
1241 intptr_t argument_count, | 1241 intptr_t argument_count, |
1242 intptr_t deopt_id, | 1242 intptr_t deopt_id, |
1243 intptr_t token_pos, | 1243 TokenPosition token_pos, |
1244 LocationSummary* locs) { | 1244 LocationSummary* locs) { |
1245 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); | 1245 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); |
1246 // Each ICData propagated from unoptimized to optimized code contains the | 1246 // Each ICData propagated from unoptimized to optimized code contains the |
1247 // function that corresponds to the Dart function of that IC call. Due | 1247 // function that corresponds to the Dart function of that IC call. Due |
1248 // to inlining in optimized code, that function may not correspond to the | 1248 // to inlining in optimized code, that function may not correspond to the |
1249 // top-level function (parsed_function().function()) which could be | 1249 // top-level function (parsed_function().function()) which could be |
1250 // reoptimized and which counter needs to be incremented. | 1250 // reoptimized and which counter needs to be incremented. |
1251 // Pass the function explicitly, it is used in IC stub. | 1251 // Pass the function explicitly, it is used in IC stub. |
1252 __ LoadObject(RDI, parsed_function().function()); | 1252 __ LoadObject(RDI, parsed_function().function()); |
1253 __ LoadUniqueObject(RBX, ic_data); | 1253 __ LoadUniqueObject(RBX, ic_data); |
1254 GenerateDartCall(deopt_id, | 1254 GenerateDartCall(deopt_id, |
1255 token_pos, | 1255 token_pos, |
1256 stub_entry, | 1256 stub_entry, |
1257 RawPcDescriptors::kIcCall, | 1257 RawPcDescriptors::kIcCall, |
1258 locs); | 1258 locs); |
1259 __ Drop(argument_count, RCX); | 1259 __ Drop(argument_count, RCX); |
1260 } | 1260 } |
1261 | 1261 |
1262 | 1262 |
1263 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry, | 1263 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry, |
1264 const ICData& ic_data, | 1264 const ICData& ic_data, |
1265 intptr_t argument_count, | 1265 intptr_t argument_count, |
1266 intptr_t deopt_id, | 1266 intptr_t deopt_id, |
1267 intptr_t token_pos, | 1267 TokenPosition token_pos, |
1268 LocationSummary* locs) { | 1268 LocationSummary* locs) { |
1269 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); | 1269 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); |
1270 __ LoadUniqueObject(RBX, ic_data); | 1270 __ LoadUniqueObject(RBX, ic_data); |
1271 GenerateDartCall(deopt_id, | 1271 GenerateDartCall(deopt_id, |
1272 token_pos, | 1272 token_pos, |
1273 stub_entry, | 1273 stub_entry, |
1274 RawPcDescriptors::kIcCall, | 1274 RawPcDescriptors::kIcCall, |
1275 locs); | 1275 locs); |
1276 __ Drop(argument_count, RCX); | 1276 __ Drop(argument_count, RCX); |
1277 } | 1277 } |
1278 | 1278 |
1279 | 1279 |
1280 void FlowGraphCompiler::EmitMegamorphicInstanceCall( | 1280 void FlowGraphCompiler::EmitMegamorphicInstanceCall( |
1281 const ICData& ic_data, | 1281 const ICData& ic_data, |
1282 intptr_t argument_count, | 1282 intptr_t argument_count, |
1283 intptr_t deopt_id, | 1283 intptr_t deopt_id, |
1284 intptr_t token_pos, | 1284 TokenPosition token_pos, |
1285 LocationSummary* locs, | 1285 LocationSummary* locs, |
1286 intptr_t try_index) { | 1286 intptr_t try_index) { |
1287 const String& name = String::Handle(zone(), ic_data.target_name()); | 1287 const String& name = String::Handle(zone(), ic_data.target_name()); |
1288 const Array& arguments_descriptor = | 1288 const Array& arguments_descriptor = |
1289 Array::ZoneHandle(zone(), ic_data.arguments_descriptor()); | 1289 Array::ZoneHandle(zone(), ic_data.arguments_descriptor()); |
1290 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); | 1290 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); |
1291 const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(), | 1291 const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(), |
1292 MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor)); | 1292 MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor)); |
1293 | 1293 |
1294 __ Comment("MegamorphicCall"); | 1294 __ Comment("MegamorphicCall"); |
(...skipping 31 matching lines...) |
1326 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1326 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
1327 } | 1327 } |
1328 __ Drop(argument_count, RCX); | 1328 __ Drop(argument_count, RCX); |
1329 } | 1329 } |
1330 | 1330 |
1331 | 1331 |
1332 void FlowGraphCompiler::EmitSwitchableInstanceCall( | 1332 void FlowGraphCompiler::EmitSwitchableInstanceCall( |
1333 const ICData& ic_data, | 1333 const ICData& ic_data, |
1334 intptr_t argument_count, | 1334 intptr_t argument_count, |
1335 intptr_t deopt_id, | 1335 intptr_t deopt_id, |
1336 intptr_t token_pos, | 1336 TokenPosition token_pos, |
1337 LocationSummary* locs) { | 1337 LocationSummary* locs) { |
1338 __ Comment("SwitchableCall"); | 1338 __ Comment("SwitchableCall"); |
1339 __ movq(RDI, Address(RSP, (argument_count - 1) * kWordSize)); | 1339 __ movq(RDI, Address(RSP, (argument_count - 1) * kWordSize)); |
1340 if (ic_data.NumArgsTested() == 1) { | 1340 if (ic_data.NumArgsTested() == 1) { |
1341 __ LoadUniqueObject(RBX, ic_data); | 1341 __ LoadUniqueObject(RBX, ic_data); |
1342 __ CallPatchable(*StubCode::ICLookup_entry()); | 1342 __ CallPatchable(*StubCode::ICLookup_entry()); |
1343 } else { | 1343 } else { |
1344 const String& name = String::Handle(zone(), ic_data.target_name()); | 1344 const String& name = String::Handle(zone(), ic_data.target_name()); |
1345 const Array& arguments_descriptor = | 1345 const Array& arguments_descriptor = |
1346 Array::ZoneHandle(zone(), ic_data.arguments_descriptor()); | 1346 Array::ZoneHandle(zone(), ic_data.arguments_descriptor()); |
(...skipping 20 matching lines...) |
1367 } | 1367 } |
1368 __ Drop(argument_count, RCX); | 1368 __ Drop(argument_count, RCX); |
1369 } | 1369 } |
1370 | 1370 |
1371 | 1371 |
1372 void FlowGraphCompiler::EmitOptimizedStaticCall( | 1372 void FlowGraphCompiler::EmitOptimizedStaticCall( |
1373 const Function& function, | 1373 const Function& function, |
1374 const Array& arguments_descriptor, | 1374 const Array& arguments_descriptor, |
1375 intptr_t argument_count, | 1375 intptr_t argument_count, |
1376 intptr_t deopt_id, | 1376 intptr_t deopt_id, |
1377 intptr_t token_pos, | 1377 TokenPosition token_pos, |
1378 LocationSummary* locs) { | 1378 LocationSummary* locs) { |
1379 __ LoadObject(R10, arguments_descriptor); | 1379 __ LoadObject(R10, arguments_descriptor); |
1380 // Do not use the code from the function, but let the code be patched so that | 1380 // Do not use the code from the function, but let the code be patched so that |
1381 // we can record the outgoing edges to other code. | 1381 // we can record the outgoing edges to other code. |
1382 GenerateDartCall(deopt_id, | 1382 GenerateDartCall(deopt_id, |
1383 token_pos, | 1383 token_pos, |
1384 *StubCode::CallStaticFunction_entry(), | 1384 *StubCode::CallStaticFunction_entry(), |
1385 RawPcDescriptors::kOther, | 1385 RawPcDescriptors::kOther, |
1386 locs); | 1386 locs); |
1387 AddStaticCallTarget(function); | 1387 AddStaticCallTarget(function); |
1388 __ Drop(argument_count, RCX); | 1388 __ Drop(argument_count, RCX); |
1389 } | 1389 } |
1390 | 1390 |
1391 | 1391 |
1392 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( | 1392 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( |
1393 Register reg, | 1393 Register reg, |
1394 const Object& obj, | 1394 const Object& obj, |
1395 bool needs_number_check, | 1395 bool needs_number_check, |
1396 intptr_t token_pos) { | 1396 TokenPosition token_pos) { |
1397 ASSERT(!needs_number_check || | 1397 ASSERT(!needs_number_check || |
1398 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); | 1398 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); |
1399 | 1399 |
1400 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { | 1400 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { |
1401 ASSERT(!needs_number_check); | 1401 ASSERT(!needs_number_check); |
1402 __ testq(reg, reg); | 1402 __ testq(reg, reg); |
1403 return EQUAL; | 1403 return EQUAL; |
1404 } | 1404 } |
1405 | 1405 |
1406 if (needs_number_check) { | 1406 if (needs_number_check) { |
1407 __ pushq(reg); | 1407 __ pushq(reg); |
1408 __ PushObject(obj); | 1408 __ PushObject(obj); |
1409 if (is_optimizing()) { | 1409 if (is_optimizing()) { |
1410 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); | 1410 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); |
1411 } else { | 1411 } else { |
1412 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); | 1412 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); |
1413 } | 1413 } |
1414 if (token_pos >= 0) { | 1414 if (token_pos.IsReal()) { |
1415 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, | 1415 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, |
1416 Thread::kNoDeoptId, | 1416 Thread::kNoDeoptId, |
1417 token_pos); | 1417 token_pos); |
1418 } | 1418 } |
1419 // Stub returns result in flags (result of a cmpq, we need ZF computed). | 1419 // Stub returns result in flags (result of a cmpq, we need ZF computed). |
1420 __ popq(reg); // Discard constant. | 1420 __ popq(reg); // Discard constant. |
1421 __ popq(reg); // Restore 'reg'. | 1421 __ popq(reg); // Restore 'reg'. |
1422 } else { | 1422 } else { |
1423 __ CompareObject(reg, obj); | 1423 __ CompareObject(reg, obj); |
1424 } | 1424 } |
1425 return EQUAL; | 1425 return EQUAL; |
1426 } | 1426 } |
1427 | 1427 |
1428 | 1428 |
1429 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, | 1429 Condition FlowGraphCompiler::EmitEqualityRegRegCompare( |
1430 Register right, | 1430 Register left, |
1431 bool needs_number_check, | 1431 Register right, |
1432 intptr_t token_pos) { | 1432 bool needs_number_check, |
| 1433 TokenPosition token_pos) { |
1433 if (needs_number_check) { | 1434 if (needs_number_check) { |
1434 __ pushq(left); | 1435 __ pushq(left); |
1435 __ pushq(right); | 1436 __ pushq(right); |
1436 if (is_optimizing()) { | 1437 if (is_optimizing()) { |
1437 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); | 1438 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); |
1438 } else { | 1439 } else { |
1439 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); | 1440 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); |
1440 } | 1441 } |
1441 if (token_pos >= 0) { | 1442 if (token_pos.IsReal()) { |
1442 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, | 1443 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, |
1443 Thread::kNoDeoptId, | 1444 Thread::kNoDeoptId, |
1444 token_pos); | 1445 token_pos); |
1445 } | 1446 } |
1446 // Stub returns result in flags (result of a cmpq, we need ZF computed). | 1447 // Stub returns result in flags (result of a cmpq, we need ZF computed). |
1447 __ popq(right); | 1448 __ popq(right); |
1448 __ popq(left); | 1449 __ popq(left); |
1449 } else { | 1450 } else { |
1450 __ cmpl(left, right); | 1451 __ cmpl(left, right); |
1451 } | 1452 } |
(...skipping 35 matching lines...) |
1487 } | 1488 } |
1488 #endif | 1489 #endif |
1489 | 1490 |
1490 | 1491 |
1491 void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data, | 1492 void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data, |
1492 intptr_t argument_count, | 1493 intptr_t argument_count, |
1493 const Array& argument_names, | 1494 const Array& argument_names, |
1494 Label* failed, | 1495 Label* failed, |
1495 Label* match_found, | 1496 Label* match_found, |
1496 intptr_t deopt_id, | 1497 intptr_t deopt_id, |
1497 intptr_t token_index, | 1498 TokenPosition token_index, |
1498 LocationSummary* locs) { | 1499 LocationSummary* locs) { |
1499 ASSERT(is_optimizing()); | 1500 ASSERT(is_optimizing()); |
1500 | 1501 |
1501 __ Comment("EmitTestAndCall"); | 1502 __ Comment("EmitTestAndCall"); |
1502 const Array& arguments_descriptor = | 1503 const Array& arguments_descriptor = |
1503 Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count, | 1504 Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count, |
1504 argument_names)); | 1505 argument_names)); |
1505 // Load receiver into RAX. | 1506 // Load receiver into RAX. |
1506 __ movq(RAX, | 1507 __ movq(RAX, |
1507 Address(RSP, (argument_count - 1) * kWordSize)); | 1508 Address(RSP, (argument_count - 1) * kWordSize)); |
(...skipping 304 matching lines...) |
1812 __ movups(reg, Address(RSP, 0)); | 1813 __ movups(reg, Address(RSP, 0)); |
1813 __ AddImmediate(RSP, Immediate(kFpuRegisterSize)); | 1814 __ AddImmediate(RSP, Immediate(kFpuRegisterSize)); |
1814 } | 1815 } |
1815 | 1816 |
1816 | 1817 |
1817 #undef __ | 1818 #undef __ |
1818 | 1819 |
1819 } // namespace dart | 1820 } // namespace dart |
1820 | 1821 |
1821 #endif // defined TARGET_ARCH_X64 | 1822 #endif // defined TARGET_ARCH_X64 |
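
The change throughout this file is mechanical: raw intptr_t token positions become the TokenPosition value type, so sentinel comparisons such as token_pos >= 0 turn into token_pos.IsReal(), and Token::IsClassifying(token_pos) turns into token_pos.IsClassifying(). The minimal sketch below illustrates that idea only, assuming negative values encode missing/classifying positions; the constant values and layout here are illustrative assumptions, not the VM's actual TokenPosition implementation.

// Illustrative sketch only -- the sentinel constants below are assumed,
// not taken from the Dart VM sources.
#include <stdint.h>

class TokenPosition {
 public:
  // Assumed encoding: non-negative values are real source offsets,
  // kNoSourcePosValue marks a missing position, and anything at or below
  // kMinClassifyingValue is a synthetic "classifying" position.
  static constexpr intptr_t kNoSourcePosValue = -1;
  static constexpr intptr_t kMinClassifyingValue = -2;

  explicit TokenPosition(intptr_t value) : value_(value) {}

  bool IsReal() const { return value_ >= 0; }
  bool IsClassifying() const { return value_ <= kMinClassifyingValue; }
  intptr_t value() const { return value_; }

 private:
  intptr_t value_;
};

// Usage mirroring the descriptor emission above:
//   if (token_pos.IsReal()) {
//     AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
//                          Thread::kNoDeoptId,
//                          token_pos);
//   }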