Chromium Code Reviews

Side by Side Diff: runtime/vm/flow_graph_compiler_shared.cc

Issue 10538024: Implemented missing instructions in ia32, more sharing, removed bailouts, enable optimizations on … (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 8 years, 6 months ago
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/flow_graph_compiler_shared.h"

#include "vm/debugger.h"
#include "vm/intermediate_language.h"
#include "vm/intrinsifier.h"
#include "vm/longjump.h"
#include "vm/parser.h"
#include "vm/stub_code.h"

namespace dart {

DECLARE_FLAG(bool, enable_type_checks);
DECLARE_FLAG(bool, intrinsify);
DECLARE_FLAG(int, optimization_counter_threshold);
DECLARE_FLAG(bool, trace_functions);
DECLARE_FLAG(bool, report_usage_count);

FlowGraphCompilerShared::FlowGraphCompilerShared(
    Assembler* assembler,
    const ParsedFunction& parsed_function,
    const GrowableArray<BlockEntryInstr*>& block_order,
    bool is_optimizing)
    : assembler_(assembler),
      parsed_function_(parsed_function),
      block_order_(block_order),
      current_block_(NULL),
      exception_handlers_list_(NULL),
      pc_descriptors_list_(NULL),
      stackmap_builder_(NULL),
      block_info_(block_order.length()),
      deopt_stubs_(),
      is_optimizing_(is_optimizing) {
  ASSERT(assembler != NULL);
}


FlowGraphCompilerShared::~FlowGraphCompilerShared() {
  // BlockInfos are zone-allocated, so their destructors are not called.
  // Verify the labels explicitly here.
  for (int i = 0; i < block_info_.length(); ++i) {
    ASSERT(!block_info_[i]->label.IsLinked());
    ASSERT(!block_info_[i]->label.HasNear());
  }
}


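// Resets the per-compilation state: fresh descriptor and exception-handler
// lists, and one BlockInfo per block in the block order.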
void FlowGraphCompilerShared::InitCompiler() {
  pc_descriptors_list_ = new DescriptorList();
  exception_handlers_list_ = new ExceptionHandlerList();
  block_info_.Clear();
  for (int i = 0; i < block_order_.length(); ++i) {
    block_info_.Add(new BlockInfo());
  }
}


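// Returns the number of stack slots used by the frame's variables:
// stack-allocated locals plus parameters copied into the frame.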
intptr_t FlowGraphCompilerShared::StackSize() const {
  return parsed_function_.stack_local_count() +
      parsed_function_.copied_parameter_count();
}


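// Returns the assembler label for the given block entry, indexed by the
// block's postorder number.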
Label* FlowGraphCompilerShared::GetBlockLabel(
    BlockEntryInstr* block_entry) const {
  intptr_t block_index = block_entry->postorder_number();
  return &block_info_[block_index]->label;
}


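// Returns true if 'block_entry' is emitted directly after the current block
// in the block order, so that a jump to it can be elided in favor of a
// fall-through.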
bool FlowGraphCompilerShared::IsNextBlock(TargetEntryInstr* block_entry) const {
  intptr_t current_index = reverse_index(current_block()->postorder_number());
  return block_order_[current_index + 1] == block_entry;
}


void FlowGraphCompilerShared::AddExceptionHandler(intptr_t try_index,
                                                  intptr_t pc_offset) {
  exception_handlers_list_->AddHandler(try_index, pc_offset);
}


// Records a descriptor of the given kind at the current PC position,
// together with the provided class id, token index, and try-index.
void FlowGraphCompilerShared::AddCurrentDescriptor(PcDescriptors::Kind kind,
                                                   intptr_t cid,
                                                   intptr_t token_index,
                                                   intptr_t try_index) {
  pc_descriptors_list()->AddDescriptor(kind,
                                       assembler()->CodeSize(),
                                       cid,
                                       token_index,
                                       try_index);
}


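// Creates a deoptimization stub for the given deopt point and queues it for
// deferred code generation; 'reg1' and 'reg2' are recorded with the stub so
// it can push them on entry. Returns the stub's entry label for use as a
// branch target.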
Label* FlowGraphCompilerShared::AddDeoptStub(intptr_t deopt_id,
                                             intptr_t deopt_token_index,
                                             intptr_t try_index,
                                             DeoptReasonId reason,
                                             Register reg1,
                                             Register reg2) {
  DeoptimizationStub* stub =
      new DeoptimizationStub(deopt_id, deopt_token_index, try_index, reason);
  stub->Push(reg1);
  stub->Push(reg2);
  deopt_stubs_.Add(stub);
  return stub->entry_label();
}


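// The Finalize* methods below install the metadata accumulated during
// compilation (exception handlers, PC descriptors, stack maps, local
// variable descriptors, and code comments) into the finished Code object.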
void FlowGraphCompilerShared::FinalizeExceptionHandlers(const Code& code) {
  ASSERT(exception_handlers_list_ != NULL);
  const ExceptionHandlers& handlers = ExceptionHandlers::Handle(
      exception_handlers_list_->FinalizeExceptionHandlers(code.EntryPoint()));
  code.set_exception_handlers(handlers);
}


void FlowGraphCompilerShared::FinalizePcDescriptors(const Code& code) {
  ASSERT(pc_descriptors_list_ != NULL);
  const PcDescriptors& descriptors = PcDescriptors::Handle(
      pc_descriptors_list_->FinalizePcDescriptors(code.EntryPoint()));
  descriptors.Verify(parsed_function_.function().is_optimizable());
  code.set_pc_descriptors(descriptors);
}


void FlowGraphCompilerShared::FinalizeStackmaps(const Code& code) {
  if (stackmap_builder_ == NULL) {
    // The unoptimizing compiler has no stack maps.
    code.set_stackmaps(Array::Handle());
  } else {
    // Finalize the stack map array and add it to the code object.
    code.set_stackmaps(
        Array::Handle(stackmap_builder_->FinalizeStackmaps(code)));
  }
}


void FlowGraphCompilerShared::FinalizeVarDescriptors(const Code& code) {
  const LocalVarDescriptors& var_descs = LocalVarDescriptors::Handle(
      parsed_function_.node_sequence()->scope()->GetVarDescriptors());
  code.set_var_descriptors(var_descs);
}


void FlowGraphCompilerShared::FinalizeComments(const Code& code) {
  code.set_comments(assembler()->GetCodeComments());
}


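// Emits the out-of-line code for all deoptimization stubs collected via
// AddDeoptStub, after the regular function body.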
void FlowGraphCompilerShared::GenerateDeferredCode() {
  for (intptr_t i = 0; i < deopt_stubs_.length(); i++) {
    deopt_stubs_[i]->GenerateCode(this);
  }
}


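// Emits an instance call through an inline-cache stub selected by the number
// of checked arguments (only one or two are handled here), creating the
// call's ICData and recording an IC-call PC descriptor at the call site.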
void FlowGraphCompilerShared::GenerateInstanceCall(
    intptr_t cid,
    intptr_t token_index,
    intptr_t try_index,
    const String& function_name,
    intptr_t argument_count,
    const Array& argument_names,
    intptr_t checked_argument_count) {
  ICData& ic_data =
      ICData::ZoneHandle(ICData::New(parsed_function().function(),
                                     function_name,
                                     cid,
                                     checked_argument_count));
  const Array& arguments_descriptor =
      CodeGenerator::ArgumentsDescriptor(argument_count, argument_names);
  uword label_address = 0;
  switch (checked_argument_count) {
    case 1:
      label_address = StubCode::OneArgCheckInlineCacheEntryPoint();
      break;
    case 2:
      label_address = StubCode::TwoArgsCheckInlineCacheEntryPoint();
      break;
    default:
      UNIMPLEMENTED();
  }
  ExternalLabel target_label("InlineCache", label_address);

  const intptr_t descr_offset = EmitInstanceCall(&target_label,
                                                 ic_data,
                                                 arguments_descriptor,
                                                 argument_count);
  pc_descriptors_list()->AddDescriptor(PcDescriptors::kIcCall,
                                       descr_offset,
                                       cid,
                                       token_index,
                                       try_index);
}


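// Emits a direct call to a statically known target function and records a
// function-call PC descriptor at the call site.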
void FlowGraphCompilerShared::GenerateStaticCall(intptr_t cid,
                                                 intptr_t token_index,
                                                 intptr_t try_index,
                                                 const Function& function,
                                                 intptr_t argument_count,
                                                 const Array& argument_names) {
  const Array& arguments_descriptor =
      CodeGenerator::ArgumentsDescriptor(argument_count, argument_names);
  const intptr_t descr_offset = EmitStaticCall(function,
                                               arguments_descriptor,
                                               argument_count);
  pc_descriptors_list()->AddDescriptor(PcDescriptors::kFuncCall,
                                       descr_offset,
                                       cid,
                                       token_index,
                                       try_index);
}


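// Aborts compilation of the current function: formats a message naming the
// function and the bailout reason, wraps it in a LanguageError, and
// long-jumps out of the compiler.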
void FlowGraphCompilerShared::Bailout(const char* reason) {
  const char* kFormat = "FlowGraphCompiler Bailout: %s %s.";
  const char* function_name = parsed_function().function().ToCString();
  intptr_t len = OS::SNPrint(NULL, 0, kFormat, function_name, reason) + 1;
  char* chars = reinterpret_cast<char*>(
      Isolate::Current()->current_zone()->Allocate(len));
  OS::SNPrint(chars, len, kFormat, function_name, reason);
  const Error& error = Error::Handle(
      LanguageError::New(String::Handle(String::New(chars))));
  Isolate::Current()->long_jump_base()->Jump(1, error);
}


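// Optimization is disabled while usage counts are being reported, when the
// optimization counter threshold is negative, and while the debugger is
// active.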
static bool CanOptimize() {
  return !FLAG_report_usage_count &&
      (FLAG_optimization_counter_threshold >= 0) &&
      !Isolate::Current()->debugger()->IsActive();
}


// Returns 'true' if code generation for this function is complete, i.e.,
// no fall-through to regular code is needed.
bool FlowGraphCompilerShared::TryIntrinsify() {
  if (!CanOptimize()) return false;
  // Intrinsification skips argument checks, so it is disabled in checked
  // mode and when tracing functions.
  if (FLAG_intrinsify && !FLAG_trace_functions && !FLAG_enable_type_checks) {
    if (parsed_function().function().kind() == RawFunction::kImplicitGetter) {
      // An implicit getter must have a specific AST structure:
      // a single return node that loads an instance field.
      const SequenceNode& sequence_node = *parsed_function().node_sequence();
      ASSERT(sequence_node.length() == 1);
      ASSERT(sequence_node.NodeAt(0)->IsReturnNode());
      const ReturnNode& return_node = *sequence_node.NodeAt(0)->AsReturnNode();
      ASSERT(return_node.value()->IsLoadInstanceFieldNode());
      const LoadInstanceFieldNode& load_node =
          *return_node.value()->AsLoadInstanceFieldNode();
      GenerateInlinedGetter(load_node.field().Offset());
      return true;
    }
    if (parsed_function().function().kind() == RawFunction::kImplicitSetter) {
      // An implicit setter must have a specific AST structure:
      // a sequence with one store node followed by one return-null node.
      const SequenceNode& sequence_node = *parsed_function().node_sequence();
      ASSERT(sequence_node.length() == 2);
      ASSERT(sequence_node.NodeAt(0)->IsStoreInstanceFieldNode());
      ASSERT(sequence_node.NodeAt(1)->IsReturnNode());
      const StoreInstanceFieldNode& store_node =
          *sequence_node.NodeAt(0)->AsStoreInstanceFieldNode();
      GenerateInlinedSetter(store_node.field().Offset());
      return true;
    }
  }
  // Even if an intrinsified version of the function was successfully
  // generated, it may fall through to the non-intrinsified method body.
  if (!FLAG_trace_functions) {
    return Intrinsifier::Intrinsify(parsed_function().function(), assembler());
  }
  return false;
}


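// Compares the class id in 'kClassIdReg' against the concrete classes that
// implement the given numeric interface (Number, int, or double) and
// dispatches to 'is_instance_lbl' or 'is_not_instance_lbl' accordingly.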
void FlowGraphCompilerShared::GenerateNumberTypeCheck(
    Register kClassIdReg,
    const AbstractType& type,
    Label* is_instance_lbl,
    Label* is_not_instance_lbl) {
  GrowableArray<intptr_t> args;
  if (type.IsNumberInterface()) {
    args.Add(kDouble);
    args.Add(kMint);
    args.Add(kBigint);
  } else if (type.IsIntInterface()) {
    args.Add(kMint);
    args.Add(kBigint);
  } else if (type.IsDoubleInterface()) {
    args.Add(kDouble);
  }
  CheckClassIds(kClassIdReg, args, is_instance_lbl, is_not_instance_lbl);
}


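// Compares the class id against all internal and external string classes.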
void FlowGraphCompilerShared::GenerateStringTypeCheck(
    Register kClassIdReg,
    Label* is_instance_lbl,
    Label* is_not_instance_lbl) {
  GrowableArray<intptr_t> args;
  args.Add(kOneByteString);
  args.Add(kTwoByteString);
  args.Add(kFourByteString);
  args.Add(kExternalOneByteString);
  args.Add(kExternalTwoByteString);
  args.Add(kExternalFourByteString);
  CheckClassIds(kClassIdReg, args, is_instance_lbl, is_not_instance_lbl);
}


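// Compares the class id against the built-in array classes. A non-match
// falls through as unknown rather than jumping to a definite non-instance
// label, presumably because other classes may still implement List.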
void FlowGraphCompilerShared::GenerateListTypeCheck(
    Register kClassIdReg,
    Label* is_instance_lbl) {
  Label unknown;
  GrowableArray<intptr_t> args;
  args.Add(kArray);
  args.Add(kGrowableObjectArray);
  args.Add(kImmutableArray);
  CheckClassIds(kClassIdReg, args, is_instance_lbl, &unknown);
  assembler()->Bind(&unknown);
}

}  // namespace dart