OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
9 | 9 |
10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
(...skipping 169 matching lines...)
180 Assembler* assem = compiler->assembler(); | 180 Assembler* assem = compiler->assembler(); |
181 #define __ assem-> | 181 #define __ assem-> |
182 __ Comment("%s", Name()); | 182 __ Comment("%s", Name()); |
183 __ Bind(entry_label()); | 183 __ Bind(entry_label()); |
184 if (FLAG_trap_on_deoptimization) { | 184 if (FLAG_trap_on_deoptimization) { |
185 __ int3(); | 185 __ int3(); |
186 } | 186 } |
187 | 187 |
188 ASSERT(deopt_env() != NULL); | 188 ASSERT(deopt_env() != NULL); |
189 | 189 |
| 190 __ pushq(CODE_REG); |
190 __ Call(*StubCode::Deoptimize_entry()); | 191 __ Call(*StubCode::Deoptimize_entry()); |
191 set_pc_offset(assem->CodeSize()); | 192 set_pc_offset(assem->CodeSize()); |
192 __ int3(); | 193 __ int3(); |
193 #undef __ | 194 #undef __ |
194 } | 195 } |
195 | 196 |
196 | 197 |
197 #define __ assembler()-> | 198 #define __ assembler()-> |
198 | 199 |
199 | 200 |
(...skipping 718 matching lines...)
918 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 919 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
919 __ SmiUntag(RBX); | 920 __ SmiUntag(RBX); |
920 // Check that RCX equals RBX, i.e. no named arguments passed. | 921 // Check that RCX equals RBX, i.e. no named arguments passed. |
921 __ cmpq(RCX, RBX); | 922 __ cmpq(RCX, RBX); |
922 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); | 923 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); |
923 } | 924 } |
924 } | 925 } |
925 | 926 |
926 __ Bind(&wrong_num_arguments); | 927 __ Bind(&wrong_num_arguments); |
927 if (function.IsClosureFunction()) { | 928 if (function.IsClosureFunction()) { |
928 ASSERT(assembler()->constant_pool_allowed()); | 929 __ LeaveDartFrame(kKeepCalleePP); // The arguments are still on the stack. |
929 __ LeaveDartFrame(); // The arguments are still on the stack. | 930 __ Jmp(*StubCode::CallClosureNoSuchMethod_entry()); |
930 ASSERT(!assembler()->constant_pool_allowed()); | |
931 __ jmp(*StubCode::CallClosureNoSuchMethod_entry()); | |
932 __ set_constant_pool_allowed(true); | |
933 // The noSuchMethod call may return to the caller, but not here. | 931 // The noSuchMethod call may return to the caller, but not here. |
934 } else if (check_correct_named_args) { | 932 } else if (check_correct_named_args) { |
935 __ Stop("Wrong arguments"); | 933 __ Stop("Wrong arguments"); |
936 } | 934 } |
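A note on the closure fallback in this hunk: when the argument shape is wrong, a closure tears down its Dart frame (keeping the callee pool pointer) and tail-calls the CallClosureNoSuchMethod stub, which may return to the caller but never here; non-closures either stop or fall through. The self-contained C++ sketch below restates that policy; the enum and function names are illustrative only, not VM APIs.

  // Illustrative sketch, not VM code: the outcome selected at &wrong_num_arguments.
  enum class WrongArgsOutcome { kTailCallNoSuchMethod, kStop, kFallThrough };

  WrongArgsOutcome OnWrongNamedArguments(bool is_closure_function,
                                         bool check_correct_named_args) {
    if (is_closure_function) {
      // LeaveDartFrame(kKeepCalleePP) + Jmp(CallClosureNoSuchMethod_entry()):
      // the arguments stay on the stack for the noSuchMethod dispatch.
      return WrongArgsOutcome::kTailCallNoSuchMethod;
    }
    if (check_correct_named_args) {
      return WrongArgsOutcome::kStop;  // __ Stop("Wrong arguments")
    }
    return WrongArgsOutcome::kFallThrough;  // continue at &all_arguments_processed
  }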
937 | 935 |
938 __ Bind(&all_arguments_processed); | 936 __ Bind(&all_arguments_processed); |
939 // Nullify originally passed arguments only after they have been copied and | 937 // Nullify originally passed arguments only after they have been copied and |
940 // checked, otherwise noSuchMethod would not see their original values. | 938 // checked, otherwise noSuchMethod would not see their original values. |
941 // This step can be skipped in case we decide that formal parameters are | 939 // This step can be skipped in case we decide that formal parameters are |
942 // implicitly final, since garbage collecting the unmodified value is not | 940 // implicitly final, since garbage collecting the unmodified value is not |
(...skipping 33 matching lines...)
976 // Sequence node has one store node and one return NULL node. | 974 // Sequence node has one store node and one return NULL node. |
977 __ Comment("Inlined Setter"); | 975 __ Comment("Inlined Setter"); |
978 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. | 976 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. |
979 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. | 977 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. |
980 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); | 978 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); |
981 __ LoadObject(RAX, Object::null_object()); | 979 __ LoadObject(RAX, Object::null_object()); |
982 __ ret(); | 980 __ ret(); |
983 } | 981 } |
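For context, the inlined setter above is the fast path for a trivial instance setter: load the receiver and value from the stack, store the value into the receiver's field through the GC write barrier, and return null. A placeholder C++ rendering of that effect (types and names here are illustrative, not VM declarations):

  // Sketch of the inlined setter's effect; the real code emits x64 and uses
  // StoreIntoObject for the write barrier.
  struct Object;  // opaque Dart object

  struct Receiver {
    Object* field;  // the slot at `offset` in the generated code
  };

  Object* InlinedSetter(Receiver* receiver, Object* value) {
    receiver->field = value;  // StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX)
    return nullptr;           // LoadObject(RAX, Object::null_object()); ret
  }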
984 | 982 |
985 | 983 |
| 984 static const Register new_pp = R13; |
| 985 |
| 986 |
986 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc | 987 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc |
987 // needs to be updated to match. | 988 // needs to be updated to match. |
988 void FlowGraphCompiler::EmitFrameEntry() { | 989 void FlowGraphCompiler::EmitFrameEntry() { |
989 ASSERT(Assembler::EntryPointToPcMarkerOffset() == 0); | |
990 | |
991 const Function& function = parsed_function().function(); | 990 const Function& function = parsed_function().function(); |
992 const Register new_pp = R13; | |
993 const Register new_pc = R12; | |
994 | |
995 // Load PC marker. | |
996 const intptr_t kRIPRelativeLeaqSize = 7; | |
997 const intptr_t entry_to_rip_offset = __ CodeSize() + kRIPRelativeLeaqSize; | |
998 __ leaq(new_pc, Address::AddressRIPRelative(-entry_to_rip_offset)); | |
999 ASSERT(__ CodeSize() == entry_to_rip_offset); | |
1000 | |
1001 // Load pool pointer. | 991 // Load pool pointer. |
1002 const intptr_t object_pool_pc_dist = | |
1003 Instructions::HeaderSize() - Instructions::object_pool_offset(); | |
1004 __ movq(new_pp, Address(new_pc, -object_pool_pc_dist)); | |
1005 | 992 |
1006 if (flow_graph().IsCompiledForOsr()) { | 993 if (flow_graph().IsCompiledForOsr()) { |
1007 intptr_t extra_slots = StackSize() | 994 intptr_t extra_slots = StackSize() |
1008 - flow_graph().num_stack_locals() | 995 - flow_graph().num_stack_locals() |
1009 - flow_graph().num_copied_params(); | 996 - flow_graph().num_copied_params(); |
1010 ASSERT(extra_slots >= 0); | 997 ASSERT(extra_slots >= 0); |
1011 __ EnterOsrFrame(extra_slots * kWordSize, new_pp, new_pc); | 998 __ EnterOsrFrame(extra_slots * kWordSize); |
1012 } else { | 999 } else { |
| 1000 __ LoadPoolPointer(new_pp); |
| 1001 |
1013 if (CanOptimizeFunction() && | 1002 if (CanOptimizeFunction() && |
1014 function.IsOptimizable() && | 1003 function.IsOptimizable() && |
1015 (!is_optimizing() || may_reoptimize())) { | 1004 (!is_optimizing() || may_reoptimize())) { |
1016 const Register function_reg = RDI; | 1005 const Register function_reg = RDI; |
1017 // Load function object using the callee's pool pointer. | 1006 // Load function object using the callee's pool pointer. |
1018 __ LoadFunctionFromCalleePool(function_reg, function, new_pp); | 1007 __ LoadFunctionFromCalleePool(function_reg, function, new_pp); |
1019 | 1008 |
1020 // Patch point is after the eventually inlined function object. | |
1021 entry_patch_pc_offset_ = assembler()->CodeSize(); | |
1022 | |
1023 // Reoptimization of an optimized function is triggered by counting in | 1009 // Reoptimization of an optimized function is triggered by counting in |
1024 // IC stubs, but not at the entry of the function. | 1010 // IC stubs, but not at the entry of the function. |
1025 if (!is_optimizing()) { | 1011 if (!is_optimizing()) { |
1026 __ incl(FieldAddress(function_reg, Function::usage_counter_offset())); | 1012 __ incl(FieldAddress(function_reg, Function::usage_counter_offset())); |
1027 } | 1013 } |
1028 __ cmpl( | 1014 __ cmpl( |
1029 FieldAddress(function_reg, Function::usage_counter_offset()), | 1015 FieldAddress(function_reg, Function::usage_counter_offset()), |
1030 Immediate(GetOptimizationThreshold())); | 1016 Immediate(GetOptimizationThreshold())); |
1031 ASSERT(function_reg == RDI); | 1017 ASSERT(function_reg == RDI); |
1032 __ J(GREATER_EQUAL, | 1018 __ J(GREATER_EQUAL, |
1033 *StubCode::OptimizeFunction_entry(), | 1019 *StubCode::OptimizeFunction_entry(), |
1034 new_pp); | 1020 new_pp); |
1035 } else { | |
1036 entry_patch_pc_offset_ = assembler()->CodeSize(); | |
1037 } | 1021 } |
1038 ASSERT(StackSize() >= 0); | 1022 ASSERT(StackSize() >= 0); |
1039 __ Comment("Enter frame"); | 1023 __ Comment("Enter frame"); |
1040 __ EnterDartFrame(StackSize() * kWordSize, new_pp, new_pc); | 1024 __ EnterDartFrame(StackSize() * kWordSize, new_pp); |
1041 } | 1025 } |
1042 } | 1026 } |
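The counter check in the prologue above is easier to see in source form: when the function can be (re)optimized, the prologue bumps the usage counter (for unoptimized code only) and jumps to the OptimizeFunction stub once the counter reaches the threshold; otherwise it falls through and enters the Dart frame. The following self-contained C++ sketch restates just that decision, with illustrative stand-ins for Function::usage_counter_offset() and GetOptimizationThreshold():

  #include <cstdint>

  struct CounterSketch {
    int32_t usage_counter = 0;  // stand-in for the Function's usage counter field
  };

  // True when the emitted prologue would jump to the OptimizeFunction stub
  // instead of falling through to EnterDartFrame.
  bool PrologueTriggersOptimization(CounterSketch* function,
                                    bool is_optimizing,
                                    int32_t threshold) {
    if (!is_optimizing) {
      ++function->usage_counter;  // __ incl(FieldAddress(..., usage_counter_offset()))
    }
    // __ cmpl(...) then __ J(GREATER_EQUAL, *StubCode::OptimizeFunction_entry(), new_pp)
    return function->usage_counter >= threshold;
  }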
1043 | 1027 |
1044 | 1028 |
1045 void FlowGraphCompiler::CompileGraph() { | 1029 void FlowGraphCompiler::CompileGraph() { |
1046 InitCompiler(); | 1030 InitCompiler(); |
1047 | 1031 |
1048 TryIntrinsify(); | 1032 TryIntrinsify(); |
1049 | 1033 |
1050 EmitFrameEntry(); | 1034 EmitFrameEntry(); |
(...skipping 24 matching lines...)
1075 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1059 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
1076 __ CompareImmediate(RAX, Immediate(Smi::RawValue(num_fixed_params))); | 1060 __ CompareImmediate(RAX, Immediate(Smi::RawValue(num_fixed_params))); |
1077 __ j(NOT_EQUAL, &wrong_num_arguments, Assembler::kNearJump); | 1061 __ j(NOT_EQUAL, &wrong_num_arguments, Assembler::kNearJump); |
1078 __ cmpq(RAX, | 1062 __ cmpq(RAX, |
1079 FieldAddress(R10, | 1063 FieldAddress(R10, |
1080 ArgumentsDescriptor::positional_count_offset())); | 1064 ArgumentsDescriptor::positional_count_offset())); |
1081 __ j(EQUAL, &correct_num_arguments, Assembler::kNearJump); | 1065 __ j(EQUAL, &correct_num_arguments, Assembler::kNearJump); |
1082 | 1066 |
1083 __ Bind(&wrong_num_arguments); | 1067 __ Bind(&wrong_num_arguments); |
1084 if (function.IsClosureFunction()) { | 1068 if (function.IsClosureFunction()) { |
1085 ASSERT(assembler()->constant_pool_allowed()); | 1069 __ LeaveDartFrame(kKeepCalleePP); // Leave arguments on the stack. |
1086 __ LeaveDartFrame(); // The arguments are still on the stack. | 1070 __ Jmp(*StubCode::CallClosureNoSuchMethod_entry()); |
1087 ASSERT(!assembler()->constant_pool_allowed()); | |
1088 __ jmp(*StubCode::CallClosureNoSuchMethod_entry()); | |
1089 __ set_constant_pool_allowed(true); | |
1090 // The noSuchMethod call may return to the caller, but not here. | 1071 // The noSuchMethod call may return to the caller, but not here. |
1091 } else { | 1072 } else { |
1092 __ Stop("Wrong number of arguments"); | 1073 __ Stop("Wrong number of arguments"); |
1093 } | 1074 } |
1094 __ Bind(&correct_num_arguments); | 1075 __ Bind(&correct_num_arguments); |
1095 } | 1076 } |
1096 } else if (!flow_graph().IsCompiledForOsr()) { | 1077 } else if (!flow_graph().IsCompiledForOsr()) { |
1097 CopyParameters(); | 1078 CopyParameters(); |
1098 } | 1079 } |
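The fixed-argument check above compares the total argument count in the arguments descriptor (R10) against the expected number of fixed parameters, and then against the positional count to rule out named arguments. A compact, self-contained C++ restatement (descriptor field names are illustrative; Smi tagging is omitted):

  #include <cstdint>

  struct ArgsDescriptorSketch {
    intptr_t count;             // total number of arguments passed
    intptr_t positional_count;  // how many of those were positional
  };

  // Mirrors the CompareImmediate/cmpq sequence above: any mismatch branches
  // to &wrong_num_arguments.
  bool HasCorrectFixedArguments(const ArgsDescriptorSketch& desc,
                                intptr_t num_fixed_params) {
    return desc.count == num_fixed_params &&
           desc.count == desc.positional_count;
  }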
1099 | 1080 |
(...skipping 43 matching lines...)
1143 } | 1124 } |
1144 | 1125 |
1145 ASSERT(!block_order().is_empty()); | 1126 ASSERT(!block_order().is_empty()); |
1146 VisitBlocks(); | 1127 VisitBlocks(); |
1147 | 1128 |
1148 __ int3(); | 1129 __ int3(); |
1149 ASSERT(assembler()->constant_pool_allowed()); | 1130 ASSERT(assembler()->constant_pool_allowed()); |
1150 GenerateDeferredCode(); | 1131 GenerateDeferredCode(); |
1151 // Emit function patching code. This will be swapped with the first 13 bytes | 1132 // Emit function patching code. This will be swapped with the first 13 bytes |
1152 // at entry point. | 1133 // at entry point. |
1153 patch_code_pc_offset_ = assembler()->CodeSize(); | |
1154 // This is patched up to a point in FrameEntry where the PP for the | |
1155 // current function is in R13 instead of PP. | |
1156 __ JmpPatchable(*StubCode::FixCallersTarget_entry(), R13); | |
1157 | 1134 |
1158 if (is_optimizing()) { | 1135 if (is_optimizing()) { |
1159 lazy_deopt_pc_offset_ = assembler()->CodeSize(); | 1136 lazy_deopt_pc_offset_ = assembler()->CodeSize(); |
1160 __ Jmp(*StubCode::DeoptimizeLazy_entry(), PP); | 1137 __ Jmp(*StubCode::DeoptimizeLazy_entry(), PP); |
1161 } | 1138 } |
1162 } | 1139 } |
1163 | 1140 |
1164 | 1141 |
1165 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, | 1142 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, |
1166 const StubEntry& stub_entry, | 1143 const StubEntry& stub_entry, |
(...skipping 623 matching lines...)
1790 __ movups(reg, Address(RSP, 0)); | 1767 __ movups(reg, Address(RSP, 0)); |
1791 __ AddImmediate(RSP, Immediate(kFpuRegisterSize)); | 1768 __ AddImmediate(RSP, Immediate(kFpuRegisterSize)); |
1792 } | 1769 } |
1793 | 1770 |
1794 | 1771 |
1795 #undef __ | 1772 #undef __ |
1796 | 1773 |
1797 } // namespace dart | 1774 } // namespace dart |
1798 | 1775 |
1799 #endif // defined TARGET_ARCH_X64 | 1776 #endif // defined TARGET_ARCH_X64 |