OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
9 | 9 |
10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
(...skipping 165 matching lines...)
176 ASSERT(reason() != ICData::kDeoptAtCall); | 176 ASSERT(reason() != ICData::kDeoptAtCall); |
177 Assembler* assem = compiler->assembler(); | 177 Assembler* assem = compiler->assembler(); |
178 #define __ assem-> | 178 #define __ assem-> |
179 __ Comment("%s", Name()); | 179 __ Comment("%s", Name()); |
180 __ Bind(entry_label()); | 180 __ Bind(entry_label()); |
181 if (FLAG_trap_on_deoptimization) { | 181 if (FLAG_trap_on_deoptimization) { |
182 __ brk(0); | 182 __ brk(0); |
183 } | 183 } |
184 | 184 |
185 ASSERT(deopt_env() != NULL); | 185 ASSERT(deopt_env() != NULL); |
186 | 186 __ Push(CODE_REG); |
187 __ BranchLink(*StubCode::Deoptimize_entry()); | 187 __ BranchLink(*StubCode::Deoptimize_entry()); |
188 set_pc_offset(assem->CodeSize()); | 188 set_pc_offset(assem->CodeSize()); |
189 #undef __ | 189 #undef __ |
190 } | 190 } |
191 | 191 |
192 | 192 |
193 #define __ assembler()-> | 193 #define __ assembler()-> |
194 | 194 |
195 | 195 |
196 // Fall through if bool_register contains null. | 196 // Fall through if bool_register contains null. |
(...skipping 707 matching lines...)
904 __ LoadFieldFromOffset(R7, R4, ArgumentsDescriptor::count_offset()); | 904 __ LoadFieldFromOffset(R7, R4, ArgumentsDescriptor::count_offset()); |
905 __ SmiUntag(R7); | 905 __ SmiUntag(R7); |
906 // Check that R8 equals R7, i.e. no named arguments passed. | 906 // Check that R8 equals R7, i.e. no named arguments passed. |
907 __ CompareRegisters(R8, R7); | 907 __ CompareRegisters(R8, R7); |
908 __ b(&all_arguments_processed, EQ); | 908 __ b(&all_arguments_processed, EQ); |
909 } | 909 } |
910 } | 910 } |
911 | 911 |
912 __ Bind(&wrong_num_arguments); | 912 __ Bind(&wrong_num_arguments); |
913 if (function.IsClosureFunction()) { | 913 if (function.IsClosureFunction()) { |
914 ASSERT(assembler()->constant_pool_allowed()); | 914 __ LeaveDartFrame(kKeepCalleePP); // The arguments are still on the stack. |
915 __ LeaveDartFrame(); // The arguments are still on the stack. | |
916 // Do not use caller's pool ptr in branch. | |
917 ASSERT(!assembler()->constant_pool_allowed()); | |
918 __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry()); | 915 __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry()); |
919 __ set_constant_pool_allowed(true); | |
920 // The noSuchMethod call may return to the caller, but not here. | 916 // The noSuchMethod call may return to the caller, but not here. |
921 } else if (check_correct_named_args) { | 917 } else if (check_correct_named_args) { |
922 __ Stop("Wrong arguments"); | 918 __ Stop("Wrong arguments"); |
923 } | 919 } |
924 | 920 |
925 __ Bind(&all_arguments_processed); | 921 __ Bind(&all_arguments_processed); |
926 // Nullify originally passed arguments only after they have been copied and | 922 // Nullify originally passed arguments only after they have been copied and |
927 // checked, otherwise noSuchMethod would not see their original values. | 923 // checked, otherwise noSuchMethod would not see their original values. |
928 // This step can be skipped in case we decide that formal parameters are | 924 // This step can be skipped in case we decide that formal parameters are |
929 // implicitly final, since garbage collecting the unmodified value is not | 925 // implicitly final, since garbage collecting the unmodified value is not |
(...skipping 48 matching lines...)
978 (!is_optimizing() || may_reoptimize())) { | 974 (!is_optimizing() || may_reoptimize())) { |
979 const Register function_reg = R6; | 975 const Register function_reg = R6; |
980 new_pp = R13; | 976 new_pp = R13; |
981 // The pool pointer is not setup before entering the Dart frame. | 977 // The pool pointer is not setup before entering the Dart frame. |
982 // Temporarily setup pool pointer for this dart function. | 978 // Temporarily setup pool pointer for this dart function. |
983 __ LoadPoolPointer(new_pp); | 979 __ LoadPoolPointer(new_pp); |
984 | 980 |
985 // Load function object using the callee's pool pointer. | 981 // Load function object using the callee's pool pointer. |
986 __ LoadFunctionFromCalleePool(function_reg, function, new_pp); | 982 __ LoadFunctionFromCalleePool(function_reg, function, new_pp); |
987 | 983 |
988 // Patch point is after the eventually inlined function object. | |
989 entry_patch_pc_offset_ = assembler()->CodeSize(); | |
990 | |
991 __ LoadFieldFromOffset( | 984 __ LoadFieldFromOffset( |
992 R7, function_reg, Function::usage_counter_offset(), kWord); | 985 R7, function_reg, Function::usage_counter_offset(), kWord); |
993 // Reoptimization of an optimized function is triggered by counting in | 986 // Reoptimization of an optimized function is triggered by counting in |
994 // IC stubs, but not at the entry of the function. | 987 // IC stubs, but not at the entry of the function. |
995 if (!is_optimizing()) { | 988 if (!is_optimizing()) { |
996 __ add(R7, R7, Operand(1)); | 989 __ add(R7, R7, Operand(1)); |
997 __ StoreFieldToOffset( | 990 __ StoreFieldToOffset( |
998 R7, function_reg, Function::usage_counter_offset(), kWord); | 991 R7, function_reg, Function::usage_counter_offset(), kWord); |
999 } | 992 } |
1000 __ CompareImmediate(R7, GetOptimizationThreshold()); | 993 __ CompareImmediate(R7, GetOptimizationThreshold()); |
1001 ASSERT(function_reg == R6); | 994 ASSERT(function_reg == R6); |
1002 Label dont_optimize; | 995 Label dont_optimize; |
1003 __ b(&dont_optimize, LT); | 996 __ b(&dont_optimize, LT); |
1004 __ Branch(*StubCode::OptimizeFunction_entry()); | 997 __ Branch(*StubCode::OptimizeFunction_entry(), new_pp); |
1005 __ Bind(&dont_optimize); | 998 __ Bind(&dont_optimize); |
1006 } else if (!flow_graph().IsCompiledForOsr()) { | |
1007 entry_patch_pc_offset_ = assembler()->CodeSize(); | |
1008 } | 999 } |
1009 __ Comment("Enter frame"); | 1000 __ Comment("Enter frame"); |
1010 if (flow_graph().IsCompiledForOsr()) { | 1001 if (flow_graph().IsCompiledForOsr()) { |
1011 intptr_t extra_slots = StackSize() | 1002 intptr_t extra_slots = StackSize() |
1012 - flow_graph().num_stack_locals() | 1003 - flow_graph().num_stack_locals() |
1013 - flow_graph().num_copied_params(); | 1004 - flow_graph().num_copied_params(); |
1014 ASSERT(extra_slots >= 0); | 1005 ASSERT(extra_slots >= 0); |
1015 __ EnterOsrFrame(extra_slots * kWordSize, new_pp); | 1006 __ EnterOsrFrame(extra_slots * kWordSize, new_pp); |
1016 } else { | 1007 } else { |
1017 ASSERT(StackSize() >= 0); | 1008 ASSERT(StackSize() >= 0); |
(...skipping 41 matching lines...)
1059 Label correct_num_arguments, wrong_num_arguments; | 1050 Label correct_num_arguments, wrong_num_arguments; |
1060 __ LoadFieldFromOffset(R0, R4, ArgumentsDescriptor::count_offset()); | 1051 __ LoadFieldFromOffset(R0, R4, ArgumentsDescriptor::count_offset()); |
1061 __ CompareImmediate(R0, Smi::RawValue(num_fixed_params)); | 1052 __ CompareImmediate(R0, Smi::RawValue(num_fixed_params)); |
1062 __ b(&wrong_num_arguments, NE); | 1053 __ b(&wrong_num_arguments, NE); |
1063 __ LoadFieldFromOffset(R1, R4, | 1054 __ LoadFieldFromOffset(R1, R4, |
1064 ArgumentsDescriptor::positional_count_offset()); | 1055 ArgumentsDescriptor::positional_count_offset()); |
1065 __ CompareRegisters(R0, R1); | 1056 __ CompareRegisters(R0, R1); |
1066 __ b(&correct_num_arguments, EQ); | 1057 __ b(&correct_num_arguments, EQ); |
1067 __ Bind(&wrong_num_arguments); | 1058 __ Bind(&wrong_num_arguments); |
1068 if (function.IsClosureFunction()) { | 1059 if (function.IsClosureFunction()) { |
1069 ASSERT(assembler()->constant_pool_allowed()); | 1060 __ LeaveDartFrame(kKeepCalleePP); // Arguments are still on the stack. |
1070 __ LeaveDartFrame(); // The arguments are still on the stack. | |
1071 // Do not use caller's pool ptr in branch. | |
1072 ASSERT(!assembler()->constant_pool_allowed()); | |
1073 __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry()); | 1061 __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry()); |
1074 __ set_constant_pool_allowed(true); | |
1075 // The noSuchMethod call may return to the caller, but not here. | 1062 // The noSuchMethod call may return to the caller, but not here. |
1076 } else { | 1063 } else { |
1077 __ Stop("Wrong number of arguments"); | 1064 __ Stop("Wrong number of arguments"); |
1078 } | 1065 } |
1079 __ Bind(&correct_num_arguments); | 1066 __ Bind(&correct_num_arguments); |
1080 } | 1067 } |
1081 } else if (!flow_graph().IsCompiledForOsr()) { | 1068 } else if (!flow_graph().IsCompiledForOsr()) { |
1082 CopyParameters(); | 1069 CopyParameters(); |
1083 } | 1070 } |
1084 | 1071 |
(...skipping 33 matching lines...)
1118 } | 1105 } |
1119 } | 1106 } |
1120 } | 1107 } |
1121 | 1108 |
1122 VisitBlocks(); | 1109 VisitBlocks(); |
1123 | 1110 |
1124 __ brk(0); | 1111 __ brk(0); |
1125 ASSERT(assembler()->constant_pool_allowed()); | 1112 ASSERT(assembler()->constant_pool_allowed()); |
1126 GenerateDeferredCode(); | 1113 GenerateDeferredCode(); |
1127 | 1114 |
1128 // Emit function patching code. This will be swapped with the first 3 | |
1129 // instructions at entry point. | |
1130 patch_code_pc_offset_ = assembler()->CodeSize(); | |
1131 __ BranchPatchable(*StubCode::FixCallersTarget_entry()); | |
1132 | |
1133 if (is_optimizing()) { | 1115 if (is_optimizing()) { |
1134 lazy_deopt_pc_offset_ = assembler()->CodeSize(); | 1116 lazy_deopt_pc_offset_ = assembler()->CodeSize(); |
1135 __ BranchPatchable(*StubCode::DeoptimizeLazy_entry()); | 1117 __ BranchPatchable(*StubCode::DeoptimizeLazy_entry()); |
1136 } | 1118 } |
1137 } | 1119 } |
1138 | 1120 |
1139 | 1121 |
1140 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, | 1122 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, |
1141 const StubEntry& stub_entry, | 1123 const StubEntry& stub_entry, |
1142 RawPcDescriptors::Kind kind, | 1124 RawPcDescriptors::Kind kind, |
(...skipping 690 matching lines...)
1833 void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) { | 1815 void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) { |
1834 __ PopDouble(reg); | 1816 __ PopDouble(reg); |
1835 } | 1817 } |
1836 | 1818 |
1837 | 1819 |
1838 #undef __ | 1820 #undef __ |
1839 | 1821 |
1840 } // namespace dart | 1822 } // namespace dart |
1841 | 1823 |
1842 #endif // defined TARGET_ARCH_ARM64 | 1824 #endif // defined TARGET_ARCH_ARM64 |