Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1022)

Side by Side Diff: runtime/vm/flow_graph_compiler_x64.cc

Issue 1192103004: VM: New calling convention for generated code. (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: ia32 port, addressed comments — Created 5 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64.
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/flow_graph_compiler.h" 8 #include "vm/flow_graph_compiler.h"
9 9
10 #include "vm/ast_printer.h" 10 #include "vm/ast_printer.h"
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after
180 Assembler* assem = compiler->assembler(); 180 Assembler* assem = compiler->assembler();
181 #define __ assem-> 181 #define __ assem->
182 __ Comment("%s", Name()); 182 __ Comment("%s", Name());
183 __ Bind(entry_label()); 183 __ Bind(entry_label());
184 if (FLAG_trap_on_deoptimization) { 184 if (FLAG_trap_on_deoptimization) {
185 __ int3(); 185 __ int3();
186 } 186 }
187 187
188 ASSERT(deopt_env() != NULL); 188 ASSERT(deopt_env() != NULL);
189 189
190 __ pushq(CODE_REG);
190 __ Call(*StubCode::Deoptimize_entry()); 191 __ Call(*StubCode::Deoptimize_entry());
191 set_pc_offset(assem->CodeSize()); 192 set_pc_offset(assem->CodeSize());
192 __ int3(); 193 __ int3();
193 #undef __ 194 #undef __
194 } 195 }
195 196
196 197
197 #define __ assembler()-> 198 #define __ assembler()->
198 199
199 200
(...skipping 718 matching lines...) Expand 10 before | Expand all | Expand 10 after
918 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 919 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
919 __ SmiUntag(RBX); 920 __ SmiUntag(RBX);
920 // Check that RCX equals RBX, i.e. no named arguments passed. 921 // Check that RCX equals RBX, i.e. no named arguments passed.
921 __ cmpq(RCX, RBX); 922 __ cmpq(RCX, RBX);
922 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); 923 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump);
923 } 924 }
924 } 925 }
925 926
926 __ Bind(&wrong_num_arguments); 927 __ Bind(&wrong_num_arguments);
927 if (function.IsClosureFunction()) { 928 if (function.IsClosureFunction()) {
928 ASSERT(assembler()->constant_pool_allowed()); 929 __ LeaveDartFrame(kKeepCalleePP); // The arguments are still on the stack.
929 __ LeaveDartFrame(); // The arguments are still on the stack. 930 __ Jmp(*StubCode::CallClosureNoSuchMethod_entry());
930 ASSERT(!assembler()->constant_pool_allowed());
931 __ jmp(*StubCode::CallClosureNoSuchMethod_entry());
932 __ set_constant_pool_allowed(true);
933 // The noSuchMethod call may return to the caller, but not here. 931 // The noSuchMethod call may return to the caller, but not here.
934 } else if (check_correct_named_args) { 932 } else if (check_correct_named_args) {
935 __ Stop("Wrong arguments"); 933 __ Stop("Wrong arguments");
936 } 934 }
937 935
938 __ Bind(&all_arguments_processed); 936 __ Bind(&all_arguments_processed);
939 // Nullify originally passed arguments only after they have been copied and 937 // Nullify originally passed arguments only after they have been copied and
940 // checked, otherwise noSuchMethod would not see their original values. 938 // checked, otherwise noSuchMethod would not see their original values.
941 // This step can be skipped in case we decide that formal parameters are 939 // This step can be skipped in case we decide that formal parameters are
942 // implicitly final, since garbage collecting the unmodified value is not 940 // implicitly final, since garbage collecting the unmodified value is not
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
979 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. 977 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value.
980 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); 978 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX);
981 __ LoadObject(RAX, Object::null_object()); 979 __ LoadObject(RAX, Object::null_object());
982 __ ret(); 980 __ ret();
983 } 981 }
984 982
985 983
986 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc 984 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc
987 // needs to be updated to match. 985 // needs to be updated to match.
988 void FlowGraphCompiler::EmitFrameEntry() { 986 void FlowGraphCompiler::EmitFrameEntry() {
989 ASSERT(Assembler::EntryPointToPcMarkerOffset() == 0);
990
991 const Function& function = parsed_function().function(); 987 const Function& function = parsed_function().function();
992 const Register new_pp = R13;
993 const Register new_pc = R12;
994
995 // Load PC marker.
996 const intptr_t kRIPRelativeLeaqSize = 7;
997 const intptr_t entry_to_rip_offset = __ CodeSize() + kRIPRelativeLeaqSize;
998 __ leaq(new_pc, Address::AddressRIPRelative(-entry_to_rip_offset));
999 ASSERT(__ CodeSize() == entry_to_rip_offset);
1000
1001 // Load pool pointer. 988 // Load pool pointer.
1002 const intptr_t object_pool_pc_dist =
1003 Instructions::HeaderSize() - Instructions::object_pool_offset();
1004 __ movq(new_pp, Address(new_pc, -object_pool_pc_dist));
1005 989
1006 if (flow_graph().IsCompiledForOsr()) { 990 if (flow_graph().IsCompiledForOsr()) {
1007 intptr_t extra_slots = StackSize() 991 intptr_t extra_slots = StackSize()
1008 - flow_graph().num_stack_locals() 992 - flow_graph().num_stack_locals()
1009 - flow_graph().num_copied_params(); 993 - flow_graph().num_copied_params();
1010 ASSERT(extra_slots >= 0); 994 ASSERT(extra_slots >= 0);
1011 __ EnterOsrFrame(extra_slots * kWordSize, new_pp, new_pc); 995 __ EnterOsrFrame(extra_slots * kWordSize);
1012 } else { 996 } else {
997 const Register new_pp = R13;
998 __ LoadPoolPointer(new_pp);
999
1013 if (CanOptimizeFunction() && 1000 if (CanOptimizeFunction() &&
1014 function.IsOptimizable() && 1001 function.IsOptimizable() &&
1015 (!is_optimizing() || may_reoptimize())) { 1002 (!is_optimizing() || may_reoptimize())) {
1016 const Register function_reg = RDI; 1003 const Register function_reg = RDI;
1017 // Load function object using the callee's pool pointer. 1004 // Load function object using the callee's pool pointer.
1018 __ LoadFunctionFromCalleePool(function_reg, function, new_pp); 1005 __ LoadFunctionFromCalleePool(function_reg, function, new_pp);
1019 1006
1020 // Patch point is after the eventually inlined function object.
1021 entry_patch_pc_offset_ = assembler()->CodeSize();
1022
1023 // Reoptimization of an optimized function is triggered by counting in 1007 // Reoptimization of an optimized function is triggered by counting in
1024 // IC stubs, but not at the entry of the function. 1008 // IC stubs, but not at the entry of the function.
1025 if (!is_optimizing()) { 1009 if (!is_optimizing()) {
1026 __ incl(FieldAddress(function_reg, Function::usage_counter_offset())); 1010 __ incl(FieldAddress(function_reg, Function::usage_counter_offset()));
1027 } 1011 }
1028 __ cmpl( 1012 __ cmpl(
1029 FieldAddress(function_reg, Function::usage_counter_offset()), 1013 FieldAddress(function_reg, Function::usage_counter_offset()),
1030 Immediate(GetOptimizationThreshold())); 1014 Immediate(GetOptimizationThreshold()));
1031 ASSERT(function_reg == RDI); 1015 ASSERT(function_reg == RDI);
1032 __ J(GREATER_EQUAL, 1016 __ J(GREATER_EQUAL,
1033 *StubCode::OptimizeFunction_entry(), 1017 *StubCode::OptimizeFunction_entry(),
1034 new_pp); 1018 new_pp);
1035 } else {
1036 entry_patch_pc_offset_ = assembler()->CodeSize();
1037 } 1019 }
1038 ASSERT(StackSize() >= 0); 1020 ASSERT(StackSize() >= 0);
1039 __ Comment("Enter frame"); 1021 __ Comment("Enter frame");
1040 __ EnterDartFrame(StackSize() * kWordSize, new_pp, new_pc); 1022 __ EnterDartFrame(StackSize() * kWordSize, new_pp);
1041 } 1023 }
1042 } 1024 }
1043 1025
1044 1026
1045 void FlowGraphCompiler::CompileGraph() { 1027 void FlowGraphCompiler::CompileGraph() {
1046 InitCompiler(); 1028 InitCompiler();
1047 1029
1048 TryIntrinsify(); 1030 TryIntrinsify();
1049 1031
1050 EmitFrameEntry(); 1032 EmitFrameEntry();
(...skipping 24 matching lines...) Expand all
1075 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 1057 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
1076 __ CompareImmediate(RAX, Immediate(Smi::RawValue(num_fixed_params))); 1058 __ CompareImmediate(RAX, Immediate(Smi::RawValue(num_fixed_params)));
1077 __ j(NOT_EQUAL, &wrong_num_arguments, Assembler::kNearJump); 1059 __ j(NOT_EQUAL, &wrong_num_arguments, Assembler::kNearJump);
1078 __ cmpq(RAX, 1060 __ cmpq(RAX,
1079 FieldAddress(R10, 1061 FieldAddress(R10,
1080 ArgumentsDescriptor::positional_count_offset())); 1062 ArgumentsDescriptor::positional_count_offset()));
1081 __ j(EQUAL, &correct_num_arguments, Assembler::kNearJump); 1063 __ j(EQUAL, &correct_num_arguments, Assembler::kNearJump);
1082 1064
1083 __ Bind(&wrong_num_arguments); 1065 __ Bind(&wrong_num_arguments);
1084 if (function.IsClosureFunction()) { 1066 if (function.IsClosureFunction()) {
1085 ASSERT(assembler()->constant_pool_allowed()); 1067 __ LeaveDartFrame(kKeepCalleePP); // Leave arguments on the stack.
1086 __ LeaveDartFrame(); // The arguments are still on the stack. 1068 __ Jmp(*StubCode::CallClosureNoSuchMethod_entry());
1087 ASSERT(!assembler()->constant_pool_allowed());
1088 __ jmp(*StubCode::CallClosureNoSuchMethod_entry());
1089 __ set_constant_pool_allowed(true);
1090 // The noSuchMethod call may return to the caller, but not here. 1069 // The noSuchMethod call may return to the caller, but not here.
1091 } else { 1070 } else {
1092 __ Stop("Wrong number of arguments"); 1071 __ Stop("Wrong number of arguments");
1093 } 1072 }
1094 __ Bind(&correct_num_arguments); 1073 __ Bind(&correct_num_arguments);
1095 } 1074 }
1096 } else if (!flow_graph().IsCompiledForOsr()) { 1075 } else if (!flow_graph().IsCompiledForOsr()) {
1097 CopyParameters(); 1076 CopyParameters();
1098 } 1077 }
1099 1078
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
1143 } 1122 }
1144 1123
1145 ASSERT(!block_order().is_empty()); 1124 ASSERT(!block_order().is_empty());
1146 VisitBlocks(); 1125 VisitBlocks();
1147 1126
1148 __ int3(); 1127 __ int3();
1149 ASSERT(assembler()->constant_pool_allowed()); 1128 ASSERT(assembler()->constant_pool_allowed());
1150 GenerateDeferredCode(); 1129 GenerateDeferredCode();
1151 // Emit function patching code. This will be swapped with the first 13 bytes 1130 // Emit function patching code. This will be swapped with the first 13 bytes
1152 // at entry point. 1131 // at entry point.
1153 patch_code_pc_offset_ = assembler()->CodeSize();
1154 // This is patched up to a point in FrameEntry where the PP for the
1155 // current function is in R13 instead of PP.
1156 __ JmpPatchable(*StubCode::FixCallersTarget_entry(), R13);
1157 1132
1158 if (is_optimizing()) { 1133 if (is_optimizing()) {
1159 lazy_deopt_pc_offset_ = assembler()->CodeSize(); 1134 lazy_deopt_pc_offset_ = assembler()->CodeSize();
1160 __ Jmp(*StubCode::DeoptimizeLazy_entry(), PP); 1135 __ Jmp(*StubCode::DeoptimizeLazy_entry(), PP);
1161 } 1136 }
1162 } 1137 }
1163 1138
1164 1139
1165 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, 1140 void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
1166 const StubEntry& stub_entry, 1141 const StubEntry& stub_entry,
(...skipping 623 matching lines...) Expand 10 before | Expand all | Expand 10 after
1790 __ movups(reg, Address(RSP, 0)); 1765 __ movups(reg, Address(RSP, 0));
1791 __ AddImmediate(RSP, Immediate(kFpuRegisterSize)); 1766 __ AddImmediate(RSP, Immediate(kFpuRegisterSize));
1792 } 1767 }
1793 1768
1794 1769
1795 #undef __ 1770 #undef __
1796 1771
1797 } // namespace dart 1772 } // namespace dart
1798 1773
1799 #endif // defined TARGET_ARCH_X64 1774 #endif // defined TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698