Chromium Code Reviews

Side by Side Diff: runtime/vm/intrinsifier_ia32.cc

Issue 10035006: Share intrinsification framework across architectures, started on intrinsification in the new comp… (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 8 years, 8 months ago
1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 // 4 //
5 // The intrinsic code below is executed before a method has built its frame. 5 // The intrinsic code below is executed before a method has built its frame.
6 // The return address is on the stack and the arguments below it. 6 // The return address is on the stack and the arguments below it.
7 // Registers EDX (arguments descriptor) and ECX (function) must be preserved. 7 // Registers EDX (arguments descriptor) and ECX (function) must be preserved.
8 // Each intrinsification method returns true if the corresponding 8 // Each intrinsification method returns true if the corresponding
9 // Dart method was intrinsified. 9 // Dart method was intrinsified.
10 10
11 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. 11 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32.
12 #if defined(TARGET_ARCH_IA32) 12 #if defined(TARGET_ARCH_IA32)
13 13
14 #include "vm/intrinsifier.h" 14 #include "vm/intrinsifier.h"
15 15
16 #include "vm/assembler.h" 16 #include "vm/assembler.h"
17 #include "vm/assembler_macros.h" 17 #include "vm/assembler_macros.h"
18 #include "vm/object.h" 18 #include "vm/object.h"
19 #include "vm/object_store.h" 19 #include "vm/object_store.h"
20 #include "vm/os.h" 20 #include "vm/os.h"
21 #include "vm/stub_code.h" 21 #include "vm/stub_code.h"
22 22
23 namespace dart { 23 namespace dart {
24 24
25 DEFINE_FLAG(bool, intrinsify, true, "Intrinsify when possible");
26 DECLARE_FLAG(bool, enable_type_checks); 25 DECLARE_FLAG(bool, enable_type_checks);
27 26
28 // List of intrinsics: (class-name, function-name, intrinsification method).
29 #define INTRINSIC_LIST(V) \
30 V(IntegerImplementation, addFromInteger, Integer_addFromInteger) \
31 V(IntegerImplementation, +, Integer_addFromInteger) \
32 V(IntegerImplementation, subFromInteger, Integer_subFromInteger) \
33 V(IntegerImplementation, -, Integer_sub) \
34 V(IntegerImplementation, mulFromInteger, Integer_mulFromInteger) \
35 V(IntegerImplementation, *, Integer_mulFromInteger) \
36 V(IntegerImplementation, %, Integer_modulo) \
37 V(IntegerImplementation, ~/, Integer_truncDivide) \
38 V(IntegerImplementation, negate, Integer_negate) \
39 V(IntegerImplementation, bitAndFromInteger, Integer_bitAndFromInteger) \
40 V(IntegerImplementation, &, Integer_bitAndFromInteger) \
41 V(IntegerImplementation, bitOrFromInteger, Integer_bitOrFromInteger) \
42 V(IntegerImplementation, |, Integer_bitOrFromInteger) \
43 V(IntegerImplementation, bitXorFromInteger, Integer_bitXorFromInteger) \
44 V(IntegerImplementation, ^, Integer_bitXorFromInteger) \
45 V(IntegerImplementation, greaterThanFromInteger, Integer_lessThan) \
46 V(IntegerImplementation, >, Integer_greaterThan) \
47 V(IntegerImplementation, ==, Integer_equalToInteger) \
48 V(IntegerImplementation, equalToInteger, Integer_equalToInteger) \
49 V(IntegerImplementation, <, Integer_lessThan) \
50 V(IntegerImplementation, <=, Integer_lessEqualThan) \
51 V(IntegerImplementation, >=, Integer_greaterEqualThan) \
52 V(IntegerImplementation, <<, Integer_shl) \
53 V(IntegerImplementation, >>, Integer_sar) \
54 V(Smi, ~, Smi_bitNegate) \
55 V(Double, >, Double_greaterThan) \
56 V(Double, >=, Double_greaterEqualThan) \
57 V(Double, <, Double_lessThan) \
58 V(Double, <=, Double_lessEqualThan) \
59 V(Double, ==, Double_equal) \
60 V(Double, +, Double_add) \
61 V(Double, -, Double_sub) \
62 V(Double, *, Double_mul) \
63 V(Double, /, Double_div) \
64 V(Double, toDouble, Double_toDouble) \
65 V(Double, mulFromInteger, Double_mulFromInteger) \
66 V(Double, Double.fromInteger, Double_fromInteger) \
67 V(Double, isNaN, Double_isNaN) \
68 V(Double, isNegative, Double_isNegative) \
69 V(ObjectArray, ObjectArray., ObjectArray_Allocate) \
70 V(ObjectArray, get:length, Array_getLength) \
71 V(ObjectArray, [], Array_getIndexed) \
72 V(ObjectArray, []=, Array_setIndexed) \
73 V(GrowableObjectArray, GrowableObjectArray.fromObjectArray, GArray_Allocate) \
74 V(GrowableObjectArray, get:length, GrowableArray_getLength) \
75 V(GrowableObjectArray, get:capacity, GrowableArray_getCapacity) \
76 V(GrowableObjectArray, [], GrowableArray_getIndexed) \
77 V(GrowableObjectArray, []=, GrowableArray_setIndexed) \
78 V(GrowableObjectArray, _setLength, GrowableArray_setLength) \
79 V(GrowableObjectArray, set:data, GrowableArray_setData) \
80 V(_ByteArrayBase, get:length, ByteArrayBase_getLength) \
81 V(_ByteArrayBase, [], ByteArrayBase_getIndexed) \
82 V(ImmutableArray, [], Array_getIndexed) \
83 V(ImmutableArray, get:length, Array_getLength) \
84 V(Math, sqrt, Math_sqrt) \
85 V(Math, sin, Math_sin) \
86 V(Math, cos, Math_cos) \
87 V(Object, ==, Object_equal) \
88 V(FixedSizeArrayIterator, next, FixedSizeArrayIterator_next) \
89 V(FixedSizeArrayIterator, hasNext, FixedSizeArrayIterator_hasNext) \
90 V(StringBase, get:length, String_getLength) \
91 V(StringBase, charCodeAt, String_charCodeAt) \
92 V(StringBase, hashCode, String_hashCode) \
93 V(StringBase, isEmpty, String_isEmpty) \
94 27
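Note on the removed INTRINSIC_LIST table: with this change the list is declared once and shared across architectures, and each architecture only supplies the Intrinsifier::* bodies. The snippet below is a minimal, hypothetical sketch of the X-macro technique involved (DEMO_INTRINSIC_LIST, IntrinsicDesc and the two entries are illustrative names, not the actual shared header): one expansion of the list produces the method declarations, another produces a class-name/function-name table for lookup.

  // Illustrative sketch of an X-macro intrinsic list; not the real header.
  class Assembler;  // Stand-in for the VM assembler type.

  #define DEMO_INTRINSIC_LIST(V)                                             \
    V(ObjectArray, getLength, Array_getLength)                               \
    V(Smi, bitNegate, Smi_bitNegate)

  class Intrinsifier {
   public:
    // Expansion 1: every table row becomes a static method declaration.
  #define DECLARE_FUNCTION(class_name, function_name, destination)           \
    static bool destination(Assembler* assembler);
    DEMO_INTRINSIC_LIST(DECLARE_FUNCTION)
  #undef DECLARE_FUNCTION
  };

  // Expansion 2: the same rows become a lookup table of names.
  struct IntrinsicDesc {
    const char* class_name;
    const char* function_name;
  };
  static const IntrinsicDesc kIntrinsics[] = {
  #define TABLE_ENTRY(class_name, function_name, destination)                \
    { #class_name, #function_name },
    DEMO_INTRINSIC_LIST(TABLE_ENTRY)
  #undef TABLE_ENTRY
  };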
95 #define __ assembler-> 28 #define __ assembler->
96 29
97 static bool ObjectArray_Allocate(Assembler* assembler) { 30 bool Intrinsifier::ObjectArray_Allocate(Assembler* assembler) {
98 // This snippet of inlined code uses the following registers: 31 // This snippet of inlined code uses the following registers:
99 // EAX, EBX, EDI 32 // EAX, EBX, EDI
100 // and the newly allocated object is returned in EAX. 33 // and the newly allocated object is returned in EAX.
101 const intptr_t kTypeArgumentsOffset = 2 * kWordSize; 34 const intptr_t kTypeArgumentsOffset = 2 * kWordSize;
102 const intptr_t kArrayLengthOffset = 1 * kWordSize; 35 const intptr_t kArrayLengthOffset = 1 * kWordSize;
103 Label fall_through; 36 Label fall_through;
104 37
105 // Compute the size to be allocated, it is based on the array length 38 // Compute the size to be allocated, it is based on the array length
106 // and it is computed as: 39 // and it is computed as:
107 // RoundedAllocationSize((array_length * kWordSize) + sizeof(RawArray)). 40 // RoundedAllocationSize((array_length * kWordSize) + sizeof(RawArray)).
(...skipping 78 matching lines...)
186 __ addl(EDI, Immediate(kWordSize)); 119 __ addl(EDI, Immediate(kWordSize));
187 __ jmp(&init_loop, Assembler::kNearJump); 120 __ jmp(&init_loop, Assembler::kNearJump);
188 __ Bind(&done); 121 __ Bind(&done);
189 __ ret(); // returns the newly allocated object in EAX. 122 __ ret(); // returns the newly allocated object in EAX.
190 123
191 __ Bind(&fall_through); 124 __ Bind(&fall_through);
192 return false; 125 return false;
193 } 126 }
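The size computation described at the top of ObjectArray_Allocate rounds the requested byte count up to the heap's allocation granularity. A minimal sketch of that rounding, assuming a power-of-two granularity (kObjectAlignment and the 12-byte header in the example are illustrative values, not the VM's actual constants):

  #include <cstdint>

  // Round an allocation size up to a power-of-two granularity.
  static const intptr_t kObjectAlignment = 8;  // Illustrative value.

  static intptr_t RoundedAllocationSize(intptr_t size) {
    return (size + kObjectAlignment - 1) & ~(kObjectAlignment - 1);
  }

  // Example: a 3-element array with 4-byte words and an assumed 12-byte
  // RawArray header requests 3 * 4 + 12 = 24 bytes; 24 is already a multiple
  // of 8, so RoundedAllocationSize(24) == 24 and no padding is added.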
194 127
195 128
196 static bool Array_getLength(Assembler* assembler) { 129 bool Intrinsifier::Array_getLength(Assembler* assembler) {
197 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 130 __ movl(EAX, Address(ESP, + 1 * kWordSize));
198 __ movl(EAX, FieldAddress(EAX, Array::length_offset())); 131 __ movl(EAX, FieldAddress(EAX, Array::length_offset()));
199 __ ret(); 132 __ ret();
200 return true; 133 return true;
201 } 134 }
202 135
203 136
204 static bool Array_getIndexed(Assembler* assembler) { 137 bool Intrinsifier::ImmutableArray_getLength(Assembler* assembler) {
138 return Array_getLength(assembler);
139 }
140
141
142 bool Intrinsifier::Array_getIndexed(Assembler* assembler) {
205 Label fall_through; 143 Label fall_through;
206 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index. 144 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index.
207 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Array. 145 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Array.
208 __ testl(EBX, Immediate(kSmiTagMask)); 146 __ testl(EBX, Immediate(kSmiTagMask));
209 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index. 147 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index.
210 // Range check. 148 // Range check.
211 __ cmpl(EBX, FieldAddress(EAX, Array::length_offset())); 149 __ cmpl(EBX, FieldAddress(EAX, Array::length_offset()));
212 // Runtime throws exception. 150 // Runtime throws exception.
213 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); 151 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump);
214 // Note that EBX is Smi, i.e., times 2. 152 // Note that EBX is Smi, i.e., times 2.
215 ASSERT(kSmiTagShift == 1); 153 ASSERT(kSmiTagShift == 1);
216 __ movl(EAX, FieldAddress(EAX, EBX, TIMES_2, sizeof(RawArray))); 154 __ movl(EAX, FieldAddress(EAX, EBX, TIMES_2, sizeof(RawArray)));
217 __ ret(); 155 __ ret();
218 __ Bind(&fall_through); 156 __ Bind(&fall_through);
219 return false; 157 return false;
220 } 158 }
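The TIMES_2 addressing used above (and in the other indexed intrinsics) works because of the Smi representation this file asserts: kSmiTag == 0 and kSmiTagShift == 1, so a tagged Smi is the value shifted left by one. A tagged index is therefore already index * 2, and scaling it by 2 yields index * kWordSize on ia32 without untagging. A small self-contained check of that arithmetic:

  #include <cassert>
  #include <cstdint>

  const intptr_t kSmiTagMask = 1;   // Bit 0 is the tag; 0 means Smi.

  inline intptr_t SmiTag(intptr_t value) { return value << 1; }
  inline intptr_t SmiUntag(intptr_t raw) { return raw >> 1; }
  inline bool IsSmi(intptr_t raw) { return (raw & kSmiTagMask) == 0; }

  int main() {
    const intptr_t kWordSize = 4;        // ia32 word size.
    intptr_t index = 3;
    intptr_t tagged = SmiTag(index);     // 6, i.e. index * 2.
    // FieldAddress(EAX, EBX, TIMES_2, offset) computes base + tagged * 2 +
    // offset, which equals base + index * kWordSize + offset.
    assert(tagged * 2 == index * kWordSize);
    assert(IsSmi(tagged) && SmiUntag(tagged) == index);
    return 0;
  }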
221 159
222 160
161 bool Intrinsifier::ImmutableArray_getIndexed(Assembler* assembler) {
162 return Array_getIndexed(assembler);
163 }
164
165
223 static intptr_t ComputeObjectArrayTypeArgumentsOffset() { 166 static intptr_t ComputeObjectArrayTypeArgumentsOffset() {
224 const String& class_name = String::Handle(String::NewSymbol("ObjectArray")); 167 const String& class_name = String::Handle(String::NewSymbol("ObjectArray"));
225 const Class& cls = Class::Handle( 168 const Class& cls = Class::Handle(
226 Library::Handle(Library::CoreImplLibrary()).LookupClass(class_name)); 169 Library::Handle(Library::CoreImplLibrary()).LookupClass(class_name));
227 ASSERT(!cls.IsNull()); 170 ASSERT(!cls.IsNull());
228 ASSERT(cls.HasTypeArguments()); 171 ASSERT(cls.HasTypeArguments());
229 ASSERT(cls.NumTypeArguments() == 1); 172 ASSERT(cls.NumTypeArguments() == 1);
230 const intptr_t field_offset = cls.type_arguments_instance_field_offset(); 173 const intptr_t field_offset = cls.type_arguments_instance_field_offset();
231 ASSERT(field_offset != Class::kNoTypeArguments); 174 ASSERT(field_offset != Class::kNoTypeArguments);
232 return field_offset; 175 return field_offset;
233 } 176 }
234 177
235 178
236 // Intrinsify only for Smi value and index. Non-smi values need a store buffer 179 // Intrinsify only for Smi value and index. Non-smi values need a store buffer
237 // update. Array length is always a Smi. 180 // update. Array length is always a Smi.
238 static bool Array_setIndexed(Assembler* assembler) { 181 bool Intrinsifier::Array_setIndexed(Assembler* assembler) {
239 Label fall_through; 182 Label fall_through;
240 if (FLAG_enable_type_checks) { 183 if (FLAG_enable_type_checks) {
241 const intptr_t type_args_field_offset = 184 const intptr_t type_args_field_offset =
242 ComputeObjectArrayTypeArgumentsOffset(); 185 ComputeObjectArrayTypeArgumentsOffset();
243 // Inline simple tests (Smi, null), fallthrough if not positive. 186 // Inline simple tests (Smi, null), fallthrough if not positive.
244 const Immediate raw_null = 187 const Immediate raw_null =
245 Immediate(reinterpret_cast<intptr_t>(Object::null())); 188 Immediate(reinterpret_cast<intptr_t>(Object::null()));
246 Label checked_ok; 189 Label checked_ok;
247 __ movl(EDI, Address(ESP, + 1 * kWordSize)); // Value. 190 __ movl(EDI, Address(ESP, + 1 * kWordSize)); // Value.
248 // Null value is valid for any type. 191 // Null value is valid for any type.
(...skipping 54 matching lines...)
303 Library::CoreImplLibrary()).LookupClass(class_name)); 246 Library::CoreImplLibrary()).LookupClass(class_name));
304 ASSERT(!cls.IsNull()); 247 ASSERT(!cls.IsNull());
305 const Field& field = Field::ZoneHandle(cls.LookupInstanceField(field_name)); 248 const Field& field = Field::ZoneHandle(cls.LookupInstanceField(field_name));
306 ASSERT(!field.IsNull()); 249 ASSERT(!field.IsNull());
307 return field.Offset(); 250 return field.Offset();
308 } 251 }
309 252
310 253
311 // Allocate a GrowableObjectArray using the backing array specified. 254 // Allocate a GrowableObjectArray using the backing array specified.
312 // On stack: type argument (+2), data (+1), return-address (+0). 255 // On stack: type argument (+2), data (+1), return-address (+0).
313 static bool GArray_Allocate(Assembler* assembler) { 256 bool Intrinsifier::GArray_Allocate(Assembler* assembler) {
314 // This snippet of inlined code uses the following registers: 257 // This snippet of inlined code uses the following registers:
315 // EAX, EBX 258 // EAX, EBX
316 // and the newly allocated object is returned in EAX. 259 // and the newly allocated object is returned in EAX.
317 const intptr_t kTypeArgumentsOffset = 2 * kWordSize; 260 const intptr_t kTypeArgumentsOffset = 2 * kWordSize;
318 const intptr_t kArrayOffset = 1 * kWordSize; 261 const intptr_t kArrayOffset = 1 * kWordSize;
319 Label fall_through; 262 Label fall_through;
320 263
321 // Compute the size to be allocated, it is based on the array length 264 // Compute the size to be allocated, it is based on the array length
322 // and it is computed as: 265 // and it is computed as:
323 // RoundedAllocationSize(sizeof(RawGrowableObjectArray)) + 266 // RoundedAllocationSize(sizeof(RawGrowableObjectArray)) +
(...skipping 47 matching lines...)
371 Immediate(0)); 314 Immediate(0));
372 __ ret(); // returns the newly allocated object in EAX. 315 __ ret(); // returns the newly allocated object in EAX.
373 316
374 __ Bind(&fall_through); 317 __ Bind(&fall_through);
375 return false; 318 return false;
376 } 319 }
377 320
378 321
379 // Get length of growable object array. 322 // Get length of growable object array.
380 // On stack: growable array (+1), return-address (+0). 323 // On stack: growable array (+1), return-address (+0).
381 static bool GrowableArray_getLength(Assembler* assembler) { 324 bool Intrinsifier::GrowableArray_getLength(Assembler* assembler) {
382 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 325 __ movl(EAX, Address(ESP, + 1 * kWordSize));
383 __ movl(EAX, FieldAddress(EAX, GrowableObjectArray::length_offset())); 326 __ movl(EAX, FieldAddress(EAX, GrowableObjectArray::length_offset()));
384 __ ret(); 327 __ ret();
385 return true; 328 return true;
386 } 329 }
387 330
388 331
389 // Get capacity of growable object array. 332 // Get capacity of growable object array.
390 // On stack: growable array (+1), return-address (+0). 333 // On stack: growable array (+1), return-address (+0).
391 static bool GrowableArray_getCapacity(Assembler* assembler) { 334 bool Intrinsifier::GrowableArray_getCapacity(Assembler* assembler) {
392 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 335 __ movl(EAX, Address(ESP, + 1 * kWordSize));
393 __ movl(EAX, FieldAddress(EAX, GrowableObjectArray::data_offset())); 336 __ movl(EAX, FieldAddress(EAX, GrowableObjectArray::data_offset()));
394 __ movl(EAX, FieldAddress(EAX, Array::length_offset())); 337 __ movl(EAX, FieldAddress(EAX, Array::length_offset()));
395 __ ret(); 338 __ ret();
396 return true; 339 return true;
397 } 340 }
398 341
399 342
400 // Access growable object array at specified index. 343 // Access growable object array at specified index.
401 // On stack: growable array (+2), index (+1), return-address (+0). 344 // On stack: growable array (+2), index (+1), return-address (+0).
402 static bool GrowableArray_getIndexed(Assembler* assembler) { 345 bool Intrinsifier::GrowableArray_getIndexed(Assembler* assembler) {
403 Label fall_through; 346 Label fall_through;
404 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index. 347 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index.
405 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // GrowableArray. 348 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // GrowableArray.
406 __ testl(EBX, Immediate(kSmiTagMask)); 349 __ testl(EBX, Immediate(kSmiTagMask));
407 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index. 350 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index.
408 // Range check using _length field. 351 // Range check using _length field.
409 __ cmpl(EBX, FieldAddress(EAX, GrowableObjectArray::length_offset())); 352 __ cmpl(EBX, FieldAddress(EAX, GrowableObjectArray::length_offset()));
410 // Runtime throws exception. 353 // Runtime throws exception.
411 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); 354 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump);
412 __ movl(EAX, FieldAddress(EAX, GrowableObjectArray::data_offset())); // data. 355 __ movl(EAX, FieldAddress(EAX, GrowableObjectArray::data_offset())); // data.
413 356
414 // Note that EBX is Smi, i.e., times 2. 357 // Note that EBX is Smi, i.e., times 2.
415 ASSERT(kSmiTagShift == 1); 358 ASSERT(kSmiTagShift == 1);
416 __ movl(EAX, FieldAddress(EAX, EBX, TIMES_2, sizeof(RawArray))); 359 __ movl(EAX, FieldAddress(EAX, EBX, TIMES_2, sizeof(RawArray)));
417 __ ret(); 360 __ ret();
418 __ Bind(&fall_through); 361 __ Bind(&fall_through);
419 return false; 362 return false;
420 } 363 }
421 364
422 365
423 // Set value into growable object array at specified index. 366 // Set value into growable object array at specified index.
424 // On stack: growable array (+3), index (+2), value (+1), return-address (+0). 367 // On stack: growable array (+3), index (+2), value (+1), return-address (+0).
425 static bool GrowableArray_setIndexed(Assembler* assembler) { 368 bool Intrinsifier::GrowableArray_setIndexed(Assembler* assembler) {
426 if (FLAG_enable_type_checks) { 369 if (FLAG_enable_type_checks) {
427 return false; 370 return false;
428 } 371 }
429 Label fall_through; 372 Label fall_through;
430 __ movl(EBX, Address(ESP, + 2 * kWordSize)); // Index. 373 __ movl(EBX, Address(ESP, + 2 * kWordSize)); // Index.
431 __ movl(EAX, Address(ESP, + 3 * kWordSize)); // GrowableArray. 374 __ movl(EAX, Address(ESP, + 3 * kWordSize)); // GrowableArray.
432 __ testl(EBX, Immediate(kSmiTagMask)); 375 __ testl(EBX, Immediate(kSmiTagMask));
433 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index. 376 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index.
434 // Range check using _length field. 377 // Range check using _length field.
435 __ cmpl(EBX, FieldAddress(EAX, GrowableObjectArray::length_offset())); 378 __ cmpl(EBX, FieldAddress(EAX, GrowableObjectArray::length_offset()));
436 // Runtime throws exception. 379 // Runtime throws exception.
437 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); 380 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump);
438 __ movl(EAX, FieldAddress(EAX, GrowableObjectArray::data_offset())); // data. 381 __ movl(EAX, FieldAddress(EAX, GrowableObjectArray::data_offset())); // data.
439 __ movl(EDI, Address(ESP, + 1 * kWordSize)); // Value. 382 __ movl(EDI, Address(ESP, + 1 * kWordSize)); // Value.
440 // Note that EBX is Smi, i.e., times 2. 383 // Note that EBX is Smi, i.e., times 2.
441 ASSERT(kSmiTagShift == 1); 384 ASSERT(kSmiTagShift == 1);
442 __ StoreIntoObject(EAX, 385 __ StoreIntoObject(EAX,
443 FieldAddress(EAX, EBX, TIMES_2, sizeof(RawArray)), 386 FieldAddress(EAX, EBX, TIMES_2, sizeof(RawArray)),
444 EDI); 387 EDI);
445 __ ret(); 388 __ ret();
446 __ Bind(&fall_through); 389 __ Bind(&fall_through);
447 return false; 390 return false;
448 } 391 }
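GrowableArray_setIndexed stores an arbitrary object, so it goes through StoreIntoObject rather than a plain movl: as the comment on Array_setIndexed notes, non-Smi stores need a store buffer update so the GC can find pointers written into older objects. The sketch below is a generic generational write-barrier shape with illustrative names only; it is not the VM's actual StoreIntoObject.

  #include <unordered_set>

  // Generic store-buffer sketch: record old-space objects that received a
  // pointer to new space, so a scavenge can treat them as roots.
  struct Obj {
    bool in_new_space = false;
    Obj* field = nullptr;
  };

  static std::unordered_set<Obj*> store_buffer;

  void StoreIntoObjectSketch(Obj* container, Obj* value) {
    container->field = value;
    if (!container->in_new_space && value != nullptr && value->in_new_space) {
      store_buffer.insert(container);  // Old -> new store must be remembered.
    }
  }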
449 392
450 393
451 // Set length of growable object array. 394 // Set length of growable object array.
452 // On stack: growable array (+2), length (+1), return-address (+0). 395 // On stack: growable array (+2), length (+1), return-address (+0).
453 static bool GrowableArray_setLength(Assembler* assembler) { 396 bool Intrinsifier::GrowableArray_setLength(Assembler* assembler) {
454 Label fall_through; 397 Label fall_through;
455 __ movl(EAX, Address(ESP, + 2 * kWordSize)); 398 __ movl(EAX, Address(ESP, + 2 * kWordSize));
456 __ movl(EBX, Address(ESP, + 1 * kWordSize)); 399 __ movl(EBX, Address(ESP, + 1 * kWordSize));
457 __ movl(EDI, FieldAddress(EAX, GrowableObjectArray::data_offset())); 400 __ movl(EDI, FieldAddress(EAX, GrowableObjectArray::data_offset()));
458 __ cmpl(EBX, FieldAddress(EDI, Array::length_offset())); 401 __ cmpl(EBX, FieldAddress(EDI, Array::length_offset()));
459 __ j(ABOVE, &fall_through, Assembler::kNearJump); 402 __ j(ABOVE, &fall_through, Assembler::kNearJump);
460 __ movl(FieldAddress(EAX, GrowableObjectArray::length_offset()), EBX); 403 __ movl(FieldAddress(EAX, GrowableObjectArray::length_offset()), EBX);
461 __ ret(); 404 __ ret();
462 __ Bind(&fall_through); 405 __ Bind(&fall_through);
463 return true; 406 return true;
464 } 407 }
465 408
466 409
467 // Set data of growable object array. 410 // Set data of growable object array.
468 // On stack: growable array (+2), data (+1), return-address (+0). 411 // On stack: growable array (+2), data (+1), return-address (+0).
469 static bool GrowableArray_setData(Assembler* assembler) { 412 bool Intrinsifier::GrowableArray_setData(Assembler* assembler) {
470 if (FLAG_enable_type_checks) { 413 if (FLAG_enable_type_checks) {
471 return false; 414 return false;
472 } 415 }
473 __ movl(EAX, Address(ESP, + 2 * kWordSize)); 416 __ movl(EAX, Address(ESP, + 2 * kWordSize));
474 __ movl(EBX, Address(ESP, + 1 * kWordSize)); 417 __ movl(EBX, Address(ESP, + 1 * kWordSize));
475 __ movl(FieldAddress(EAX, GrowableObjectArray::data_offset()), EBX); 418 __ movl(FieldAddress(EAX, GrowableObjectArray::data_offset()), EBX);
476 __ ret(); 419 __ ret();
477 return true; 420 return true;
478 } 421 }
479 422
480 423
481 // Handles only class InternalByteArray. 424 // Handles only class InternalByteArray.
482 static bool ByteArrayBase_getLength(Assembler* assembler) { 425 bool Intrinsifier::ByteArrayBase_getLength(Assembler* assembler) {
483 ObjectStore* object_store = Isolate::Current()->object_store(); 426 ObjectStore* object_store = Isolate::Current()->object_store();
484 Label fall_through; 427 Label fall_through;
485 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 428 __ movl(EAX, Address(ESP, + 1 * kWordSize));
486 __ movl(EBX, FieldAddress(EAX, Object::class_offset())); 429 __ movl(EBX, FieldAddress(EAX, Object::class_offset()));
487 __ CompareObject(EBX, 430 __ CompareObject(EBX,
488 Class::ZoneHandle(object_store->internal_byte_array_class())); 431 Class::ZoneHandle(object_store->internal_byte_array_class()));
489 __ j(NOT_EQUAL, &fall_through); 432 __ j(NOT_EQUAL, &fall_through);
490 __ movl(EAX, FieldAddress(EAX, InternalByteArray::length_offset())); 433 __ movl(EAX, FieldAddress(EAX, InternalByteArray::length_offset()));
491 __ ret(); 434 __ ret();
492 __ Bind(&fall_through); 435 __ Bind(&fall_through);
493 return false; 436 return false;
494 } 437 }
495 438
496 439
497 // Handles only class InternalByteArray. 440 // Handles only class InternalByteArray.
498 static bool ByteArrayBase_getIndexed(Assembler* assembler) { 441 bool Intrinsifier::ByteArrayBase_getIndexed(Assembler* assembler) {
499 ObjectStore* object_store = Isolate::Current()->object_store(); 442 ObjectStore* object_store = Isolate::Current()->object_store();
500 Label fall_through; 443 Label fall_through;
501 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Array. 444 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Array.
502 __ movl(EBX, FieldAddress(EAX, Object::class_offset())); 445 __ movl(EBX, FieldAddress(EAX, Object::class_offset()));
503 __ CompareObject(EBX, 446 __ CompareObject(EBX,
504 Class::ZoneHandle(object_store->internal_byte_array_class())); 447 Class::ZoneHandle(object_store->internal_byte_array_class()));
505 __ j(NOT_EQUAL, &fall_through); 448 __ j(NOT_EQUAL, &fall_through);
506 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index. 449 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index.
507 __ testl(EBX, Immediate(kSmiTagMask)); 450 __ testl(EBX, Immediate(kSmiTagMask));
508 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index. 451 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index.
(...skipping 17 matching lines...)
526 // Topmost argument is in EAX. 469 // Topmost argument is in EAX.
527 static void TestBothArgumentsSmis(Assembler* assembler, Label* not_smi) { 470 static void TestBothArgumentsSmis(Assembler* assembler, Label* not_smi) {
528 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 471 __ movl(EAX, Address(ESP, + 1 * kWordSize));
529 __ movl(EBX, Address(ESP, + 2 * kWordSize)); 472 __ movl(EBX, Address(ESP, + 2 * kWordSize));
530 __ orl(EBX, EAX); 473 __ orl(EBX, EAX);
531 __ testl(EBX, Immediate(kSmiTagMask)); 474 __ testl(EBX, Immediate(kSmiTagMask));
532 __ j(NOT_ZERO, not_smi, Assembler::kNearJump); 475 __ j(NOT_ZERO, not_smi, Assembler::kNearJump);
533 } 476 }
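TestBothArgumentsSmis leans on the same tag layout: a Smi has a zero low bit, so OR-ing the two raw words and testing kSmiTagMask proves both are Smis with a single test. A quick sketch of that check under those assumptions:

  #include <cassert>
  #include <cstdint>

  const intptr_t kSmiTagMask = 1;  // Bit 0 clear means Smi.

  inline bool BothSmis(intptr_t a, intptr_t b) {
    // If either operand has its tag bit set, the OR has it set as well.
    return ((a | b) & kSmiTagMask) == 0;
  }

  int main() {
    assert(BothSmis(4, 6));    // Two tagged Smis (values 2 and 3).
    assert(!BothSmis(4, 7));   // 7 has bit 0 set: not a Smi.
    return 0;
  }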
534 477
535 478
536 static bool Integer_addFromInteger(Assembler* assembler) { 479 bool Intrinsifier::Integer_addFromInteger(Assembler* assembler) {
537 Label fall_through; 480 Label fall_through;
538 TestBothArgumentsSmis(assembler, &fall_through); 481 TestBothArgumentsSmis(assembler, &fall_through);
539 __ addl(EAX, Address(ESP, + 2 * kWordSize)); 482 __ addl(EAX, Address(ESP, + 2 * kWordSize));
540 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); 483 __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
541 // Result is in EAX. 484 // Result is in EAX.
542 __ ret(); 485 __ ret();
543 __ Bind(&fall_through); 486 __ Bind(&fall_through);
544 return false; 487 return false;
545 } 488 }
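The add above operates directly on the tagged operands: both are value * 2, so their machine sum is (a + b) * 2, already a correctly tagged Smi, and the OVERFLOW jump rejects results outside the 31-bit Smi range. An equivalent check in C++ (the __builtin_add_overflow intrinsic is a GCC/Clang assumption):

  #include <cstdint>
  #include <optional>

  // Add two tagged Smis; the tagged sum is the tagged result. A 32-bit signed
  // overflow means the value no longer fits in a Smi and the compiled method
  // (Mint/Bigint path) must handle it instead.
  std::optional<int32_t> SmiAddTagged(int32_t a_tagged, int32_t b_tagged) {
    int32_t sum;
    if (__builtin_add_overflow(a_tagged, b_tagged, &sum)) {
      return std::nullopt;  // Mirrors the OVERFLOW fall-through.
    }
    return sum;
  }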
546 489
547 490
548 static bool Integer_subFromInteger(Assembler* assembler) { 491 bool Intrinsifier::Integer_add(Assembler* assembler) {
492 return Integer_addFromInteger(assembler);
493 }
494
495
496 bool Intrinsifier::Integer_subFromInteger(Assembler* assembler) {
549 Label fall_through; 497 Label fall_through;
550 TestBothArgumentsSmis(assembler, &fall_through); 498 TestBothArgumentsSmis(assembler, &fall_through);
551 __ subl(EAX, Address(ESP, + 2 * kWordSize)); 499 __ subl(EAX, Address(ESP, + 2 * kWordSize));
552 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); 500 __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
553 // Result is in EAX. 501 // Result is in EAX.
554 __ ret(); 502 __ ret();
555 __ Bind(&fall_through); 503 __ Bind(&fall_through);
556 return false; 504 return false;
557 } 505 }
558 506
559 507
560 static bool Integer_sub(Assembler* assembler) { 508 bool Intrinsifier::Integer_sub(Assembler* assembler) {
561 Label fall_through; 509 Label fall_through;
562 TestBothArgumentsSmis(assembler, &fall_through); 510 TestBothArgumentsSmis(assembler, &fall_through);
563 __ movl(EBX, EAX); 511 __ movl(EBX, EAX);
564 __ movl(EAX, Address(ESP, + 2 * kWordSize)); 512 __ movl(EAX, Address(ESP, + 2 * kWordSize));
565 __ subl(EAX, EBX); 513 __ subl(EAX, EBX);
566 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); 514 __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
567 // Result is in EAX. 515 // Result is in EAX.
568 __ ret(); 516 __ ret();
569 __ Bind(&fall_through); 517 __ Bind(&fall_through);
570 return false; 518 return false;
571 } 519 }
572 520
573 521
574 522
575 static bool Integer_mulFromInteger(Assembler* assembler) { 523 bool Intrinsifier::Integer_mulFromInteger(Assembler* assembler) {
576 Label fall_through; 524 Label fall_through;
577 TestBothArgumentsSmis(assembler, &fall_through); 525 TestBothArgumentsSmis(assembler, &fall_through);
578 ASSERT(kSmiTag == 0); // Adjust code below if not the case. 526 ASSERT(kSmiTag == 0); // Adjust code below if not the case.
579 __ SmiUntag(EAX); 527 __ SmiUntag(EAX);
580 __ imull(EAX, Address(ESP, + 2 * kWordSize)); 528 __ imull(EAX, Address(ESP, + 2 * kWordSize));
581 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); 529 __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
582 // Result is in EAX. 530 // Result is in EAX.
583 __ ret(); 531 __ ret();
584 __ Bind(&fall_through); 532 __ Bind(&fall_through);
585 return false; 533 return false;
586 } 534 }
587 535
588 536
537 bool Intrinsifier::Integer_mul(Assembler* assembler) {
538 return Integer_mulFromInteger(assembler);
539 }
540
541
589 // Simple implementation: for positive dividend values smaller than divisor, 542 // Simple implementation: for positive dividend values smaller than divisor,
590 // return dividend. 543 // return dividend.
591 static bool Integer_modulo(Assembler* assembler) { 544 bool Intrinsifier::Integer_modulo(Assembler* assembler) {
592 Label fall_through, return_zero; 545 Label fall_through, return_zero;
593 TestBothArgumentsSmis(assembler, &fall_through); 546 TestBothArgumentsSmis(assembler, &fall_through);
594 // EAX: right argument (divisor) 547 // EAX: right argument (divisor)
595 // Check if modulo by zero -> exception thrown in main function. 548 // Check if modulo by zero -> exception thrown in main function.
596 __ cmpl(EAX, Immediate(0)); 549 __ cmpl(EAX, Immediate(0));
597 __ j(EQUAL, &fall_through, Assembler::kNearJump); 550 __ j(EQUAL, &fall_through, Assembler::kNearJump);
598 __ movl(EBX, Address(ESP, + 2 * kWordSize)); // Left argument (dividend). 551 __ movl(EBX, Address(ESP, + 2 * kWordSize)); // Left argument (dividend).
599 __ cmpl(EBX, Immediate(0)); 552 __ cmpl(EBX, Immediate(0));
600 __ j(LESS, &fall_through, Assembler::kNearJump); 553 __ j(LESS, &fall_through, Assembler::kNearJump);
601 __ cmpl(EBX, EAX); 554 __ cmpl(EBX, EAX);
602 __ j(EQUAL, &return_zero, Assembler::kNearJump); 555 __ j(EQUAL, &return_zero, Assembler::kNearJump);
603 __ j(GREATER, &fall_through, Assembler::kNearJump); 556 __ j(GREATER, &fall_through, Assembler::kNearJump);
604 __ movl(EAX, EBX); // Return dividend. 557 __ movl(EAX, EBX); // Return dividend.
605 __ ret(); 558 __ ret();
606 __ Bind(&return_zero); 559 __ Bind(&return_zero);
607 __ xorl(EAX, EAX); // Return zero. 560 __ xorl(EAX, EAX); // Return zero.
608 __ ret(); 561 __ ret();
609 __ Bind(&fall_through); 562 __ Bind(&fall_through);
610 return false; 563 return false;
611 } 564 }
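Integer_modulo only intrinsifies the cheap cases: a non-negative dividend no larger than the divisor. Equal operands return zero, a smaller dividend is returned unchanged, and everything else (zero divisor, negative dividend, dividend greater than divisor) falls through to the compiled method. The same decision logic in plain C++, as a sketch:

  #include <cstdint>
  #include <optional>

  // Fast path mirroring the intrinsic; nullopt means "fall through".
  std::optional<int32_t> SmiModuloFastPath(int32_t dividend, int32_t divisor) {
    if (divisor == 0) return std::nullopt;   // Runtime throws.
    if (dividend < 0) return std::nullopt;   // Needs full modulo semantics.
    if (dividend == divisor) return 0;
    if (dividend > divisor) return std::nullopt;
    return dividend;                         // 0 <= dividend < divisor.
  }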
612 565
613 566
614 static bool Integer_truncDivide(Assembler* assembler) { 567 bool Intrinsifier::Integer_truncDivide(Assembler* assembler) {
615 Label fall_through; 568 Label fall_through;
616 TestBothArgumentsSmis(assembler, &fall_through); 569 TestBothArgumentsSmis(assembler, &fall_through);
617 // EAX: right argument (divisor) 570 // EAX: right argument (divisor)
618 __ cmpl(EAX, Immediate(0)); 571 __ cmpl(EAX, Immediate(0));
619 __ j(EQUAL, &fall_through, Assembler::kNearJump); 572 __ j(EQUAL, &fall_through, Assembler::kNearJump);
620 __ movl(EBX, EAX); 573 __ movl(EBX, EAX);
621 __ SmiUntag(EBX); 574 __ SmiUntag(EBX);
622 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Left argument (dividend). 575 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Left argument (dividend).
623 __ SmiUntag(EAX); 576 __ SmiUntag(EAX);
624 __ pushl(EDX); // Preserve EDX in case of 'fall_through'. 577 __ pushl(EDX); // Preserve EDX in case of 'fall_through'.
625 __ cdq(); 578 __ cdq();
626 __ idivl(EBX); 579 __ idivl(EBX);
627 __ popl(EDX); 580 __ popl(EDX);
628 // Check the corner case of dividing the 'MIN_SMI' with -1, in which case we 581 // Check the corner case of dividing the 'MIN_SMI' with -1, in which case we
629 // cannot tag the result. 582 // cannot tag the result.
630 __ cmpl(EAX, Immediate(0x40000000)); 583 __ cmpl(EAX, Immediate(0x40000000));
631 __ j(EQUAL, &fall_through); 584 __ j(EQUAL, &fall_through);
632 __ SmiTag(EAX); 585 __ SmiTag(EAX);
633 __ ret(); 586 __ ret();
634 __ Bind(&fall_through); 587 __ Bind(&fall_through);
635 return false; 588 return false;
636 } 589 }
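The cmpl(EAX, Immediate(0x40000000)) above catches the single quotient that cannot be re-tagged: on ia32 a Smi holds 31 bits, so MIN_SMI is -2^30, and MIN_SMI ~/ -1 is +2^30 = 0x40000000, one past the largest Smi. A small demonstration of that corner case:

  #include <cassert>
  #include <cstdint>

  int main() {
    // 31-bit Smi range on ia32 (one bit is reserved for the tag).
    const int32_t kSmiMin = -(1 << 30);      // -0x40000000
    const int32_t kSmiMax = (1 << 30) - 1;   //  0x3FFFFFFF

    int32_t quotient = kSmiMin / -1;         // Truncating division.
    assert(quotient == 0x40000000);          // One past kSmiMax, so it cannot
    assert(quotient > kSmiMax);              // be tagged: fall through instead.
    return 0;
  }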
637 590
638 591
639 static bool Integer_negate(Assembler* assembler) { 592 bool Intrinsifier::Integer_negate(Assembler* assembler) {
640 Label fall_through; 593 Label fall_through;
641 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 594 __ movl(EAX, Address(ESP, + 1 * kWordSize));
642 __ testl(EAX, Immediate(kSmiTagMask)); 595 __ testl(EAX, Immediate(kSmiTagMask));
643 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi value. 596 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi value.
644 __ negl(EAX); 597 __ negl(EAX);
645 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); 598 __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
646 // Result is in EAX. 599 // Result is in EAX.
647 __ ret(); 600 __ ret();
648 __ Bind(&fall_through); 601 __ Bind(&fall_through);
649 return false; 602 return false;
650 } 603 }
651 604
652 605
653 static bool Integer_bitAndFromInteger(Assembler* assembler) { 606 bool Intrinsifier::Integer_bitAndFromInteger(Assembler* assembler) {
654 Label fall_through; 607 Label fall_through;
655 TestBothArgumentsSmis(assembler, &fall_through); 608 TestBothArgumentsSmis(assembler, &fall_through);
656 __ movl(EBX, Address(ESP, + 2 * kWordSize)); 609 __ movl(EBX, Address(ESP, + 2 * kWordSize));
657 __ andl(EAX, EBX); 610 __ andl(EAX, EBX);
658 // Result is in EAX. 611 // Result is in EAX.
659 __ ret(); 612 __ ret();
660 __ Bind(&fall_through); 613 __ Bind(&fall_through);
661 return false; 614 return false;
662 } 615 }
663 616
664 617
665 static bool Integer_bitOrFromInteger(Assembler* assembler) { 618 bool Intrinsifier::Integer_bitAnd(Assembler* assembler) {
619 return Integer_bitAndFromInteger(assembler);
620 }
621
622
623 bool Intrinsifier::Integer_bitOrFromInteger(Assembler* assembler) {
666 Label fall_through; 624 Label fall_through;
667 TestBothArgumentsSmis(assembler, &fall_through); 625 TestBothArgumentsSmis(assembler, &fall_through);
668 __ movl(EBX, Address(ESP, + 2 * kWordSize)); 626 __ movl(EBX, Address(ESP, + 2 * kWordSize));
669 __ orl(EAX, EBX); 627 __ orl(EAX, EBX);
670 // Result is in EAX. 628 // Result is in EAX.
671 __ ret(); 629 __ ret();
672 __ Bind(&fall_through); 630 __ Bind(&fall_through);
673 return false; 631 return false;
674 } 632 }
675 633
676 634
677 static bool Integer_bitXorFromInteger(Assembler* assembler) { 635 bool Intrinsifier::Integer_bitOr(Assembler* assembler) {
636 return Integer_bitOrFromInteger(assembler);
637 }
638
639
640 bool Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) {
678 Label fall_through; 641 Label fall_through;
679 TestBothArgumentsSmis(assembler, &fall_through); 642 TestBothArgumentsSmis(assembler, &fall_through);
680 __ movl(EBX, Address(ESP, + 2 * kWordSize)); 643 __ movl(EBX, Address(ESP, + 2 * kWordSize));
681 __ xorl(EAX, EBX); 644 __ xorl(EAX, EBX);
682 // Result is in EAX. 645 // Result is in EAX.
683 __ ret(); 646 __ ret();
684 __ Bind(&fall_through); 647 __ Bind(&fall_through);
685 return false; 648 return false;
686 } 649 }
687 650
688 651
689 static bool Integer_shl(Assembler* assembler) { 652 bool Intrinsifier::Integer_bitXor(Assembler* assembler) {
653 return Integer_bitXorFromInteger(assembler);
654 }
655
656
657 bool Intrinsifier::Integer_shl(Assembler* assembler) {
690 ASSERT(kSmiTagShift == 1); 658 ASSERT(kSmiTagShift == 1);
691 ASSERT(kSmiTag == 0); 659 ASSERT(kSmiTag == 0);
692 Label fall_through, overflow; 660 Label fall_through, overflow;
693 TestBothArgumentsSmis(assembler, &fall_through); 661 TestBothArgumentsSmis(assembler, &fall_through);
694 // Shift value is in EAX. Compare with tagged Smi. 662 // Shift value is in EAX. Compare with tagged Smi.
695 __ cmpl(EAX, Immediate(Smi::RawValue(Smi::kBits))); 663 __ cmpl(EAX, Immediate(Smi::RawValue(Smi::kBits)));
696 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); 664 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump);
697 665
698 __ SmiUntag(EAX); 666 __ SmiUntag(EAX);
699 __ movl(ECX, EAX); // Shift amount must be in ECX. 667 __ movl(ECX, EAX); // Shift amount must be in ECX.
(...skipping 50 matching lines...)
750 __ LoadObject(EAX, bool_false); 718 __ LoadObject(EAX, bool_false);
751 __ ret(); 719 __ ret();
752 __ Bind(&true_label); 720 __ Bind(&true_label);
753 __ LoadObject(EAX, bool_true); 721 __ LoadObject(EAX, bool_true);
754 __ ret(); 722 __ ret();
755 __ Bind(&fall_through); 723 __ Bind(&fall_through);
756 return false; 724 return false;
757 } 725 }
758 726
759 727
760 static bool Integer_lessThan(Assembler* assembler) { 728 bool Intrinsifier::Integer_lessThan(Assembler* assembler) {
761 return CompareIntegers(assembler, LESS); 729 return CompareIntegers(assembler, LESS);
762 } 730 }
763 731
764 732
765 static bool Integer_greaterThan(Assembler* assembler) { 733 bool Intrinsifier::Integer_greaterThanFromInt(Assembler* assembler) {
734 return CompareIntegers(assembler, LESS);
735 }
736
737
738 bool Intrinsifier::Integer_greaterThan(Assembler* assembler) {
766 return CompareIntegers(assembler, GREATER); 739 return CompareIntegers(assembler, GREATER);
767 } 740 }
768 741
769 742
770 static bool Integer_lessEqualThan(Assembler* assembler) { 743 bool Intrinsifier::Integer_lessEqualThan(Assembler* assembler) {
771 return CompareIntegers(assembler, LESS_EQUAL); 744 return CompareIntegers(assembler, LESS_EQUAL);
772 } 745 }
773 746
774 747
775 static bool Integer_greaterEqualThan(Assembler* assembler) { 748 bool Intrinsifier::Integer_greaterEqualThan(Assembler* assembler) {
776 return CompareIntegers(assembler, GREATER_EQUAL); 749 return CompareIntegers(assembler, GREATER_EQUAL);
777 } 750 }
778 751
779 752
780 // This is called for Smi, Mint and Bigint receivers. Bigints are not handled. 753 // This is called for Smi, Mint and Bigint receivers. Bigints are not handled.
781 static bool Integer_equalToInteger(Assembler* assembler) { 754 bool Intrinsifier::Integer_equalToInteger(Assembler* assembler) {
782 Label fall_through, true_label, check_for_mint; 755 Label fall_through, true_label, check_for_mint;
783 const Bool& bool_true = Bool::ZoneHandle(Bool::True()); 756 const Bool& bool_true = Bool::ZoneHandle(Bool::True());
784 const Bool& bool_false = Bool::ZoneHandle(Bool::False()); 757 const Bool& bool_false = Bool::ZoneHandle(Bool::False());
785 // For integer receiver '===' check first. 758 // For integer receiver '===' check first.
786 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 759 __ movl(EAX, Address(ESP, + 1 * kWordSize));
787 __ cmpl(EAX, Address(ESP, + 2 * kWordSize)); 760 __ cmpl(EAX, Address(ESP, + 2 * kWordSize));
788 __ j(EQUAL, &true_label, Assembler::kNearJump); 761 __ j(EQUAL, &true_label, Assembler::kNearJump);
789 __ movl(EBX, Address(ESP, + 2 * kWordSize)); 762 __ movl(EBX, Address(ESP, + 2 * kWordSize));
790 __ orl(EAX, EBX); 763 __ orl(EAX, EBX);
791 __ testl(EAX, Immediate(kSmiTagMask)); 764 __ testl(EAX, Immediate(kSmiTagMask));
(...skipping 35 matching lines...)
827 __ j(NOT_ZERO, &fall_through); 800 __ j(NOT_ZERO, &fall_through);
828 __ LoadObject(EAX, bool_false); // Smi == Mint -> false. 801 __ LoadObject(EAX, bool_false); // Smi == Mint -> false.
829 __ ret(); 802 __ ret();
830 // TODO(srdjan): Implement Mint == Mint comparison. 803 // TODO(srdjan): Implement Mint == Mint comparison.
831 804
832 __ Bind(&fall_through); 805 __ Bind(&fall_through);
833 return false; 806 return false;
834 } 807 }
835 808
836 809
837 static bool Integer_sar(Assembler* assembler) { 810 bool Intrinsifier::Integer_equal(Assembler* assembler) {
811 return Integer_equalToInteger(assembler);
812 }
813
814
815 bool Intrinsifier::Integer_sar(Assembler* assembler) {
838 Label fall_through, shift_count_ok; 816 Label fall_through, shift_count_ok;
839 TestBothArgumentsSmis(assembler, &fall_through); 817 TestBothArgumentsSmis(assembler, &fall_through);
840 // Can destroy ECX since we are not falling through. 818 // Can destroy ECX since we are not falling through.
841 Immediate count_limit = Immediate(0x1F); 819 Immediate count_limit = Immediate(0x1F);
842 // Check that the count is not larger than what the hardware can handle. 820 // Check that the count is not larger than what the hardware can handle.
843 // For shifting right a Smi the result is the same for all numbers 821 // For shifting right a Smi the result is the same for all numbers
844 // >= count_limit. 822 // >= count_limit.
845 __ SmiUntag(EAX); 823 __ SmiUntag(EAX);
846 // Negative counts throw exception. 824 // Negative counts throw exception.
847 __ cmpl(EAX, Immediate(0)); 825 __ cmpl(EAX, Immediate(0));
848 __ j(LESS, &fall_through, Assembler::kNearJump); 826 __ j(LESS, &fall_through, Assembler::kNearJump);
849 __ cmpl(EAX, count_limit); 827 __ cmpl(EAX, count_limit);
850 __ j(LESS_EQUAL, &shift_count_ok, Assembler::kNearJump); 828 __ j(LESS_EQUAL, &shift_count_ok, Assembler::kNearJump);
851 __ movl(EAX, count_limit); 829 __ movl(EAX, count_limit);
852 __ Bind(&shift_count_ok); 830 __ Bind(&shift_count_ok);
853 __ movl(ECX, EAX); // Shift amount must be in ECX. 831 __ movl(ECX, EAX); // Shift amount must be in ECX.
854 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Value. 832 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Value.
855 __ SmiUntag(EAX); // Value. 833 __ SmiUntag(EAX); // Value.
856 __ sarl(EAX, ECX); 834 __ sarl(EAX, ECX);
857 __ SmiTag(EAX); 835 __ SmiTag(EAX);
858 __ ret(); 836 __ ret();
859 __ Bind(&fall_through); 837 __ Bind(&fall_through);
860 return false; 838 return false;
861 } 839 }
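Clamping the shift count to 0x1F is safe for an arithmetic right shift: once all 31 value bits have been shifted out the result is just the sign (0 or -1), identical to shifting by 31, and the clamp also sidesteps the hardware masking the count to five bits. A short check of that claim:

  #include <cassert>
  #include <cstdint>

  // Arithmetic shift right with the same clamp as the intrinsic: any count
  // >= 31 produces the same result as a shift by 31 for a 32-bit value.
  int32_t SarClamped(int32_t value, int32_t count) {
    if (count > 31) count = 31;
    return value >> count;  // Arithmetic shift for signed operands.
  }

  int main() {
    assert(SarClamped(-1000, 40) == -1);   // Same as -1000 >> 31.
    assert(SarClamped(1000, 100) == 0);    // Same as 1000 >> 31.
    return 0;
  }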
862 840
863 841
864 static bool Smi_bitNegate(Assembler* assembler) { 842 bool Intrinsifier::Smi_bitNegate(Assembler* assembler) {
865 Label fall_through; 843 Label fall_through;
866 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Value. 844 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Value.
867 __ testl(EAX, Immediate(kSmiTagMask)); 845 __ testl(EAX, Immediate(kSmiTagMask));
868 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi. 846 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi.
869 __ notl(EAX); 847 __ notl(EAX);
870 __ andl(EAX, Immediate(~kSmiTagMask)); // Remove inverted smi-tag. 848 __ andl(EAX, Immediate(~kSmiTagMask)); // Remove inverted smi-tag.
871 __ ret(); 849 __ ret();
872 __ Bind(&fall_through); 850 __ Bind(&fall_through);
873 return false; 851 return false;
874 } 852 }
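The notl/andl pair above computes the tagged bitwise-not without untagging: for a tagged Smi 2v, ~(2v) = -2v - 1, and clearing the inverted tag bit gives -2v - 2 = 2 * (~v), which is exactly the tagged result. Verified in miniature:

  #include <cassert>
  #include <cstdint>

  const intptr_t kSmiTagMask = 1;

  inline intptr_t SmiTag(intptr_t v) { return v << 1; }

  int main() {
    for (intptr_t v = -5; v <= 5; ++v) {
      intptr_t tagged = SmiTag(v);
      // notl, then andl(~kSmiTagMask): flip all bits, clear the inverted tag.
      intptr_t result = (~tagged) & ~kSmiTagMask;
      assert(result == SmiTag(~v));  // Tagged bitwise-not of the value.
    }
    return 0;
  }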
(...skipping 43 matching lines...)
918 __ Bind(&is_smi); 896 __ Bind(&is_smi);
919 __ SmiUntag(EAX); 897 __ SmiUntag(EAX);
920 __ cvtsi2sd(XMM1, EAX); 898 __ cvtsi2sd(XMM1, EAX);
921 __ jmp(&double_op); 899 __ jmp(&double_op);
922 __ Bind(&fall_through); 900 __ Bind(&fall_through);
923 return false; 901 return false;
924 } 902 }
925 903
926 904
927 // arg0 is Double, arg1 is unknown. 905 // arg0 is Double, arg1 is unknown.
928 static bool Double_greaterThan(Assembler* assembler) { 906 bool Intrinsifier::Double_greaterThan(Assembler* assembler) {
929 return CompareDoubles(assembler, ABOVE); 907 return CompareDoubles(assembler, ABOVE);
930 } 908 }
931 909
932 910
933 // arg0 is Double, arg1 is unknown. 911 // arg0 is Double, arg1 is unknown.
934 static bool Double_greaterEqualThan(Assembler* assembler) { 912 bool Intrinsifier::Double_greaterEqualThan(Assembler* assembler) {
935 return CompareDoubles(assembler, ABOVE_EQUAL); 913 return CompareDoubles(assembler, ABOVE_EQUAL);
936 } 914 }
937 915
938 916
939 // arg0 is Double, arg1 is unknown. 917 // arg0 is Double, arg1 is unknown.
940 static bool Double_lessThan(Assembler* assembler) { 918 bool Intrinsifier::Double_lessThan(Assembler* assembler) {
941 return CompareDoubles(assembler, BELOW); 919 return CompareDoubles(assembler, BELOW);
942 } 920 }
943 921
944 922
945 // arg0 is Double, arg1 is unknown. 923 // arg0 is Double, arg1 is unknown.
946 static bool Double_equal(Assembler* assembler) { 924 bool Intrinsifier::Double_equal(Assembler* assembler) {
947 return CompareDoubles(assembler, EQUAL); 925 return CompareDoubles(assembler, EQUAL);
948 } 926 }
949 927
950 928
951 // arg0 is Double, arg1 is unknown. 929 // arg0 is Double, arg1 is unknown.
952 static bool Double_lessEqualThan(Assembler* assembler) { 930 bool Intrinsifier::Double_lessEqualThan(Assembler* assembler) {
953 return CompareDoubles(assembler, BELOW_EQUAL); 931 return CompareDoubles(assembler, BELOW_EQUAL);
954 } 932 }
955 933
956 934
957 static bool Double_toDouble(Assembler* assembler) { 935 bool Intrinsifier::Double_toDouble(Assembler* assembler) {
958 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 936 __ movl(EAX, Address(ESP, + 1 * kWordSize));
959 __ ret(); 937 __ ret();
960 return true; 938 return true;
961 } 939 }
962 940
963 941
964 // Expects EAX to contain right argument, left argument is on stack. Left 942 // Expects EAX to contain right argument, left argument is on stack. Left
965 // argument is double, right argument is of unknown type. 943 // argument is double, right argument is of unknown type.
966 static bool DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) { 944 static bool DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) {
967 Label fall_through; 945 Label fall_through;
(...skipping 17 matching lines...)
985 EBX, // Class register. 963 EBX, // Class register.
986 &fall_through, 964 &fall_through,
987 EAX); // Result register. 965 EAX); // Result register.
988 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); 966 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0);
989 __ ret(); 967 __ ret();
990 __ Bind(&fall_through); 968 __ Bind(&fall_through);
991 return false; 969 return false;
992 } 970 }
993 971
994 972
995 static bool Double_add(Assembler* assembler) { 973 bool Intrinsifier::Double_add(Assembler* assembler) {
996 return DoubleArithmeticOperations(assembler, Token::kADD); 974 return DoubleArithmeticOperations(assembler, Token::kADD);
997 } 975 }
998 976
999 977
1000 static bool Double_mul(Assembler* assembler) { 978 bool Intrinsifier::Double_mul(Assembler* assembler) {
1001 return DoubleArithmeticOperations(assembler, Token::kMUL); 979 return DoubleArithmeticOperations(assembler, Token::kMUL);
1002 } 980 }
1003 981
1004 982
1005 static bool Double_sub(Assembler* assembler) { 983 bool Intrinsifier::Double_sub(Assembler* assembler) {
1006 return DoubleArithmeticOperations(assembler, Token::kSUB); 984 return DoubleArithmeticOperations(assembler, Token::kSUB);
1007 } 985 }
1008 986
1009 987
1010 static bool Double_div(Assembler* assembler) { 988 bool Intrinsifier::Double_div(Assembler* assembler) {
1011 return DoubleArithmeticOperations(assembler, Token::kDIV); 989 return DoubleArithmeticOperations(assembler, Token::kDIV);
1012 } 990 }
1013 991
1014 992
1015 // Left is double, right is integer (Bigint or Smi). 993 // Left is double, right is integer (Bigint or Smi).
1016 static bool Double_mulFromInteger(Assembler* assembler) { 994 bool Intrinsifier::Double_mulFromInteger(Assembler* assembler) {
1017 Label fall_through; 995 Label fall_through;
1018 // Only Smi-s allowed. 996 // Only Smi-s allowed.
1019 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 997 __ movl(EAX, Address(ESP, + 1 * kWordSize));
1020 __ testl(EAX, Immediate(kSmiTagMask)); 998 __ testl(EAX, Immediate(kSmiTagMask));
1021 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); 999 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump);
1022 // Is Smi. 1000 // Is Smi.
1023 __ SmiUntag(EAX); 1001 __ SmiUntag(EAX);
1024 __ cvtsi2sd(XMM1, EAX); 1002 __ cvtsi2sd(XMM1, EAX);
1025 __ movl(EAX, Address(ESP, + 2 * kWordSize)); 1003 __ movl(EAX, Address(ESP, + 2 * kWordSize));
1026 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); 1004 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
1027 __ mulsd(XMM0, XMM1); 1005 __ mulsd(XMM0, XMM1);
1028 const Class& double_class = Class::ZoneHandle( 1006 const Class& double_class = Class::ZoneHandle(
1029 Isolate::Current()->object_store()->double_class()); 1007 Isolate::Current()->object_store()->double_class());
1030 __ LoadObject(EBX, double_class); 1008 __ LoadObject(EBX, double_class);
1031 AssemblerMacros::TryAllocate(assembler, 1009 AssemblerMacros::TryAllocate(assembler,
1032 double_class, 1010 double_class,
1033 EBX, // Class register. 1011 EBX, // Class register.
1034 &fall_through, 1012 &fall_through,
1035 EAX); // Result register. 1013 EAX); // Result register.
1036 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); 1014 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0);
1037 __ ret(); 1015 __ ret();
1038 __ Bind(&fall_through); 1016 __ Bind(&fall_through);
1039 return false; 1017 return false;
1040 } 1018 }
1041 1019
1042 1020
1043 static bool Double_fromInteger(Assembler* assembler) { 1021 bool Intrinsifier::Double_fromInteger(Assembler* assembler) {
1044 Label fall_through; 1022 Label fall_through;
1045 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1023 __ movl(EAX, Address(ESP, +1 * kWordSize));
1046 __ testl(EAX, Immediate(kSmiTagMask)); 1024 __ testl(EAX, Immediate(kSmiTagMask));
1047 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); 1025 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump);
1048 // Is Smi. 1026 // Is Smi.
1049 __ SmiUntag(EAX); 1027 __ SmiUntag(EAX);
1050 __ cvtsi2sd(XMM0, EAX); 1028 __ cvtsi2sd(XMM0, EAX);
1051 const Class& double_class = Class::ZoneHandle( 1029 const Class& double_class = Class::ZoneHandle(
1052 Isolate::Current()->object_store()->double_class()); 1030 Isolate::Current()->object_store()->double_class());
1053 __ LoadObject(EBX, double_class); 1031 __ LoadObject(EBX, double_class);
1054 AssemblerMacros::TryAllocate(assembler, 1032 AssemblerMacros::TryAllocate(assembler,
1055 double_class, 1033 double_class,
1056 EBX, // Class register. 1034 EBX, // Class register.
1057 &fall_through, 1035 &fall_through,
1058 EAX); // Result register. 1036 EAX); // Result register.
1059 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); 1037 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0);
1060 __ ret(); 1038 __ ret();
1061 __ Bind(&fall_through); 1039 __ Bind(&fall_through);
1062 return false; 1040 return false;
1063 } 1041 }
1064 1042
1065 1043
1066 static bool Double_isNaN(Assembler* assembler) { 1044 bool Intrinsifier::Double_isNaN(Assembler* assembler) {
1067 const Bool& bool_true = Bool::ZoneHandle(Bool::True()); 1045 const Bool& bool_true = Bool::ZoneHandle(Bool::True());
1068 const Bool& bool_false = Bool::ZoneHandle(Bool::False()); 1046 const Bool& bool_false = Bool::ZoneHandle(Bool::False());
1069 Label is_true; 1047 Label is_true;
1070 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1048 __ movl(EAX, Address(ESP, +1 * kWordSize));
1071 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); 1049 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
1072 __ comisd(XMM0, XMM0); 1050 __ comisd(XMM0, XMM0);
1073 __ j(PARITY_EVEN, &is_true, Assembler::kNearJump); // NaN -> true; 1051 __ j(PARITY_EVEN, &is_true, Assembler::kNearJump); // NaN -> true;
1074 __ LoadObject(EAX, bool_false); 1052 __ LoadObject(EAX, bool_false);
1075 __ ret(); 1053 __ ret();
1076 __ Bind(&is_true); 1054 __ Bind(&is_true);
1077 __ LoadObject(EAX, bool_true); 1055 __ LoadObject(EAX, bool_true);
1078 __ ret(); 1056 __ ret();
1079 return true; // Method is complete, no slow case. 1057 return true; // Method is complete, no slow case.
1080 } 1058 }
1081 1059
1082 1060
1083 static bool Double_isNegative(Assembler* assembler) { 1061 bool Intrinsifier::Double_isNegative(Assembler* assembler) {
1084 const Bool& bool_true = Bool::ZoneHandle(Bool::True()); 1062 const Bool& bool_true = Bool::ZoneHandle(Bool::True());
1085 const Bool& bool_false = Bool::ZoneHandle(Bool::False()); 1063 const Bool& bool_false = Bool::ZoneHandle(Bool::False());
1086 Label is_false, is_true, is_zero; 1064 Label is_false, is_true, is_zero;
1087 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1065 __ movl(EAX, Address(ESP, +1 * kWordSize));
1088 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); 1066 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
1089 __ xorpd(XMM1, XMM1); // 0.0 -> XMM1. 1067 __ xorpd(XMM1, XMM1); // 0.0 -> XMM1.
1090 __ comisd(XMM0, XMM1); 1068 __ comisd(XMM0, XMM1);
1091 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false. 1069 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false.
1092 __ j(EQUAL, &is_zero, Assembler::kNearJump); // Check for negative zero. 1070 __ j(EQUAL, &is_zero, Assembler::kNearJump); // Check for negative zero.
1093 __ j(ABOVE_EQUAL, &is_false, Assembler::kNearJump); // >= 0 -> false. 1071 __ j(ABOVE_EQUAL, &is_false, Assembler::kNearJump); // >= 0 -> false.
1094 __ Bind(&is_true); 1072 __ Bind(&is_true);
1095 __ LoadObject(EAX, bool_true); 1073 __ LoadObject(EAX, bool_true);
1096 __ ret(); 1074 __ ret();
1097 __ Bind(&is_false); 1075 __ Bind(&is_false);
1098 __ LoadObject(EAX, bool_false); 1076 __ LoadObject(EAX, bool_false);
1099 __ ret(); 1077 __ ret();
1100 __ Bind(&is_zero); 1078 __ Bind(&is_zero);
1101 // Check for negative zero (get the sign bit). 1079 // Check for negative zero (get the sign bit).
1102 __ movmskpd(EAX, XMM0); 1080 __ movmskpd(EAX, XMM0);
1103 __ testl(EAX, Immediate(1)); 1081 __ testl(EAX, Immediate(1));
1104 __ j(NOT_ZERO, &is_true, Assembler::kNearJump); 1082 __ j(NOT_ZERO, &is_true, Assembler::kNearJump);
1105 __ jmp(&is_false, Assembler::kNearJump); 1083 __ jmp(&is_false, Assembler::kNearJump);
1106 return true; // Method is complete, no slow case. 1084 return true; // Method is complete, no slow case.
1107 } 1085 }
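The is_zero path exists because comisd reports -0.0 equal to 0.0, so the comparison alone cannot tell the sign of a zero; movmskpd pulls out the sign bit, making -0.0 report as negative while NaN reports as not negative. A sketch of the same decision using the standard library:

  #include <cassert>
  #include <cmath>

  // Mirrors the intrinsic's behaviour: NaN is not negative, values below zero
  // are, and for a zero the sign bit decides (so -0.0 counts as negative).
  bool DoubleIsNegative(double value) {
    if (std::isnan(value)) return false;
    if (value < 0.0) return true;
    if (value == 0.0) return std::signbit(value);
    return false;
  }

  int main() {
    assert(!DoubleIsNegative(1.5));
    assert(DoubleIsNegative(-1.5));
    assert(DoubleIsNegative(-0.0));   // Sign bit set, as movmskpd would see.
    assert(!DoubleIsNegative(0.0));
    return 0;
  }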
1108 1086
1109 1087
1110 // Argument type is not known 1088 // Argument type is not known
1111 static bool Math_sqrt(Assembler* assembler) { 1089 bool Intrinsifier::Math_sqrt(Assembler* assembler) {
1112 Label fall_through, is_smi, double_op; 1090 Label fall_through, is_smi, double_op;
1113 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); 1091 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through);
1114 // Argument is double and is in EAX, class in EBX. 1092 // Argument is double and is in EAX, class in EBX.
1115 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset())); 1093 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset()));
1116 __ Bind(&double_op); 1094 __ Bind(&double_op);
1117 __ sqrtsd(XMM0, XMM1); 1095 __ sqrtsd(XMM0, XMM1);
1118 const Class& double_class = Class::ZoneHandle( 1096 const Class& double_class = Class::ZoneHandle(
1119 Isolate::Current()->object_store()->double_class()); 1097 Isolate::Current()->object_store()->double_class());
1120 __ LoadObject(EBX, double_class); 1098 __ LoadObject(EBX, double_class);
1121 AssemblerMacros::TryAllocate(assembler, 1099 AssemblerMacros::TryAllocate(assembler,
(...skipping 51 matching lines...)
1173 __ jmp(&double_op); 1151 __ jmp(&double_op);
1174 1152
1175 __ Bind(&alloc_failed); 1153 __ Bind(&alloc_failed);
1176 __ ffree(0); 1154 __ ffree(0);
1177 __ fincstp(); 1155 __ fincstp();
1178 1156
1179 __ Bind(&fall_through); 1157 __ Bind(&fall_through);
1180 } 1158 }
1181 1159
1182 1160
1183 static bool Math_sin(Assembler* assembler) { 1161 bool Intrinsifier::Math_sin(Assembler* assembler) {
1184 EmitTrigonometric(assembler, kSine); 1162 EmitTrigonometric(assembler, kSine);
1185 return false; // Compile method for slow case. 1163 return false; // Compile method for slow case.
1186 } 1164 }
1187 1165
1188 1166
1189 static bool Math_cos(Assembler* assembler) { 1167 bool Intrinsifier::Math_cos(Assembler* assembler) {
1190 EmitTrigonometric(assembler, kCosine); 1168 EmitTrigonometric(assembler, kCosine);
1191 return false; // Compile method for slow case. 1169 return false; // Compile method for slow case.
1192 } 1170 }
1193 1171
1194 1172
1195 // Identity comparison. 1173 // Identity comparison.
1196 static bool Object_equal(Assembler* assembler) { 1174 bool Intrinsifier::Object_equal(Assembler* assembler) {
1197 Label is_true; 1175 Label is_true;
1198 const Bool& bool_true = Bool::ZoneHandle(Bool::True()); 1176 const Bool& bool_true = Bool::ZoneHandle(Bool::True());
1199 const Bool& bool_false = Bool::ZoneHandle(Bool::False()); 1177 const Bool& bool_false = Bool::ZoneHandle(Bool::False());
1200 __ movl(EAX, Address(ESP, + 1 * kWordSize)); 1178 __ movl(EAX, Address(ESP, + 1 * kWordSize));
1201 __ cmpl(EAX, Address(ESP, + 2 * kWordSize)); 1179 __ cmpl(EAX, Address(ESP, + 2 * kWordSize));
1202 __ j(EQUAL, &is_true, Assembler::kNearJump); 1180 __ j(EQUAL, &is_true, Assembler::kNearJump);
1203 __ LoadObject(EAX, bool_false); 1181 __ LoadObject(EAX, bool_false);
1204 __ ret(); 1182 __ ret();
1205 __ Bind(&is_true); 1183 __ Bind(&is_true);
1206 __ LoadObject(EAX, bool_true); 1184 __ LoadObject(EAX, bool_true);
1207 __ ret(); 1185 __ ret();
1208 return true; 1186 return true;
1209 } 1187 }
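// Editor's note (illustrative sketch, not part of this CL): Object_equal above
// compares the two raw argument words directly. That is identity for heap
// objects and value equality for Smis, whose value is encoded in the word
// itself. Plain C++ shape, with a made-up helper name:
#include <cstdint>
static bool RawWordsIdentical(uintptr_t receiver_word, uintptr_t other_word) {
  return receiver_word == other_word;  // Same heap object or same Smi encoding.
}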
1210 1188
1211 1189
1212 static const char* kFixedSizeArrayIteratorClassName = "FixedSizeArrayIterator"; 1190 static const char* kFixedSizeArrayIteratorClassName = "FixedSizeArrayIterator";
1213 1191
1214 1192
1215 // Class 'FixedSizeArrayIterator': 1193 // Class 'FixedSizeArrayIterator':
1216 // T next() { 1194 // T next() {
1217 // return _array[_pos++]; 1195 // return _array[_pos++];
1218 // } 1196 // }
1219 // Intrinsify: return _array[_pos++]; 1197 // Intrinsify: return _array[_pos++];
1220 // TODO(srdjan): Throw a 'NoMoreElementsException' exception if the iterator 1198 // TODO(srdjan): Throw a 'NoMoreElementsException' exception if the iterator
1221 // has no more elements. 1199 // has no more elements.
1222 static bool FixedSizeArrayIterator_next(Assembler* assembler) { 1200 bool Intrinsifier::FixedSizeArrayIterator_next(Assembler* assembler) {
1223 Label fall_through; 1201 Label fall_through;
1224 intptr_t array_offset = 1202 intptr_t array_offset =
1225 GetOffsetForField(kFixedSizeArrayIteratorClassName, "_array"); 1203 GetOffsetForField(kFixedSizeArrayIteratorClassName, "_array");
1226 intptr_t pos_offset = 1204 intptr_t pos_offset =
1227 GetOffsetForField(kFixedSizeArrayIteratorClassName, "_pos"); 1205 GetOffsetForField(kFixedSizeArrayIteratorClassName, "_pos");
1228 ASSERT(array_offset >= 0 && pos_offset >= 0); 1206 ASSERT(array_offset >= 0 && pos_offset >= 0);
1229 // Receiver is not NULL. 1207 // Receiver is not NULL.
1230 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Receiver. 1208 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Receiver.
1231 __ movl(EBX, FieldAddress(EAX, pos_offset)); // Field _pos. 1209 __ movl(EBX, FieldAddress(EAX, pos_offset)); // Field _pos.
1232 // '_pos' cannot be greater than array length and therefore is always Smi. 1210 // '_pos' cannot be greater than array length and therefore is always Smi.
(...skipping 21 matching lines...)
1254 __ ret(); 1232 __ ret();
1255 __ Bind(&fall_through); 1233 __ Bind(&fall_through);
1256 return false; 1234 return false;
1257 } 1235 }
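// Editor's note (illustrative sketch, not part of this CL): _pos is handled as
// a tagged Smi. A Smi keeps its value shifted left by one with a zero low
// (tag) bit, so untagging is a right shift and tagging a left shift. The
// made-up helper names below mirror the SmiUntag/SmiTag macros used above:
#include <cstdint>
static const intptr_t kSmiTagSizeSketch = 1;  // One tag bit, tag value 0.
static intptr_t SmiUntagSketch(intptr_t tagged) {
  return tagged >> kSmiTagSizeSketch;  // Recover the integer value.
}
static intptr_t SmiTagSketch(intptr_t value) {
  return value << kSmiTagSizeSketch;   // Low bit becomes the zero tag.
}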
1258 1236
1259 1237
1260 // Class 'FixedSizeArrayIterator': 1238 // Class 'FixedSizeArrayIterator':
1261 // bool hasNext() { 1239 // bool hasNext() {
1262 // return _length > _pos; 1240 // return _length > _pos;
1263 // } 1241 // }
1264 static bool FixedSizeArrayIterator_hasNext(Assembler* assembler) { 1242 bool Intrinsifier::FixedSizeArrayIterator_hasNext(Assembler* assembler) {
1265 Label fall_through, is_true; 1243 Label fall_through, is_true;
1266 const Bool& bool_true = Bool::ZoneHandle(Bool::True()); 1244 const Bool& bool_true = Bool::ZoneHandle(Bool::True());
1267 const Bool& bool_false = Bool::ZoneHandle(Bool::False()); 1245 const Bool& bool_false = Bool::ZoneHandle(Bool::False());
1268 intptr_t length_offset = 1246 intptr_t length_offset =
1269 GetOffsetForField(kFixedSizeArrayIteratorClassName, "_length"); 1247 GetOffsetForField(kFixedSizeArrayIteratorClassName, "_length");
1270 intptr_t pos_offset = 1248 intptr_t pos_offset =
1271 GetOffsetForField(kFixedSizeArrayIteratorClassName, "_pos"); 1249 GetOffsetForField(kFixedSizeArrayIteratorClassName, "_pos");
1272 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Receiver. 1250 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Receiver.
1273 __ movl(EBX, FieldAddress(EAX, length_offset)); // Field _length. 1251 __ movl(EBX, FieldAddress(EAX, length_offset)); // Field _length.
1274 __ movl(EAX, FieldAddress(EAX, pos_offset)); // Field _pos. 1252 __ movl(EAX, FieldAddress(EAX, pos_offset)); // Field _pos.
1275 __ movl(EDI, EAX); 1253 __ movl(EDI, EAX);
1276 __ orl(EDI, EBX); 1254 __ orl(EDI, EBX);
1277 __ testl(EDI, Immediate(kSmiTagMask)); 1255 __ testl(EDI, Immediate(kSmiTagMask));
1278 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi _length. 1256 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi _length.
1279 __ cmpl(EBX, EAX); // _length > _pos. 1257 __ cmpl(EBX, EAX); // _length > _pos.
1280 __ j(GREATER, &is_true, Assembler::kNearJump); 1258 __ j(GREATER, &is_true, Assembler::kNearJump);
1281 __ LoadObject(EAX, bool_false); 1259 __ LoadObject(EAX, bool_false);
1282 __ ret(); 1260 __ ret();
1283 __ Bind(&is_true); 1261 __ Bind(&is_true);
1284 __ LoadObject(EAX, bool_true); 1262 __ LoadObject(EAX, bool_true);
1285 __ ret(); 1263 __ ret();
1286 __ Bind(&fall_through); 1264 __ Bind(&fall_through);
1287 return false; 1265 return false;
1288 } 1266 }
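// Editor's note (illustrative sketch, not part of this CL): the movl/orl/testl
// sequence above checks that _length and _pos are both Smis with one branch:
// a Smi has a zero low bit, so the OR of two Smis keeps a zero low bit. Plain
// C++ shape, with made-up names:
#include <cstdint>
static bool BothAreSmis(uintptr_t length_word, uintptr_t pos_word) {
  const uintptr_t kSmiTagMaskSketch = 1;  // The low tag bit.
  return ((length_word | pos_word) & kSmiTagMaskSketch) == 0;
}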
1289 1267
1290 1268
1291 static bool String_getLength(Assembler* assembler) { 1269 bool Intrinsifier::String_getLength(Assembler* assembler) {
1292 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // String object. 1270 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // String object.
1293 __ movl(EAX, FieldAddress(EAX, String::length_offset())); 1271 __ movl(EAX, FieldAddress(EAX, String::length_offset()));
1294 __ ret(); 1272 __ ret();
1295 return true; 1273 return true;
1296 } 1274 }
1297 1275
1298 1276
1299 // TODO(srdjan): Implement for two and four byte strings as well. 1277 // TODO(srdjan): Implement for two and four byte strings as well.
1300 static bool String_charCodeAt(Assembler* assembler) { 1278 bool Intrinsifier::String_charCodeAt(Assembler* assembler) {
1301 ObjectStore* object_store = Isolate::Current()->object_store(); 1279 ObjectStore* object_store = Isolate::Current()->object_store();
1302 Label fall_through; 1280 Label fall_through;
1303 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index. 1281 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index.
1304 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // String. 1282 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // String.
1305 __ testl(EBX, Immediate(kSmiTagMask)); 1283 __ testl(EBX, Immediate(kSmiTagMask));
1306 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index. 1284 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index.
1307 // Range check. 1285 // Range check.
1308 __ cmpl(EBX, FieldAddress(EAX, String::length_offset())); 1286 __ cmpl(EBX, FieldAddress(EAX, String::length_offset()));
1309 // Runtime throws exception. 1287 // Runtime throws exception.
1310 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); 1288 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump);
1311 __ movl(EDI, FieldAddress(EAX, Instance::class_offset())); 1289 __ movl(EDI, FieldAddress(EAX, Instance::class_offset()));
1312 __ CompareObject(EDI, 1290 __ CompareObject(EDI,
1313 Class::ZoneHandle(object_store->one_byte_string_class())); 1291 Class::ZoneHandle(object_store->one_byte_string_class()));
1314 __ j(NOT_EQUAL, &fall_through); 1292 __ j(NOT_EQUAL, &fall_through);
1315 __ SmiUntag(EBX); 1293 __ SmiUntag(EBX);
1316 __ movzxb(EAX, FieldAddress(EAX, EBX, TIMES_1, OneByteString::data_offset())); 1294 __ movzxb(EAX, FieldAddress(EAX, EBX, TIMES_1, OneByteString::data_offset()));
1317 __ SmiTag(EAX); 1295 __ SmiTag(EAX);
1318 __ ret(); 1296 __ ret();
1319 __ Bind(&fall_through); 1297 __ Bind(&fall_through);
1320 return false; 1298 return false;
1321 } 1299 }
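// Editor's note (illustrative sketch, not part of this CL): the fast path
// above handles only one-byte strings: a Smi, in-range index and a receiver of
// the one-byte string class; everything else falls through to the runtime.
// Plain C++ shape of the in-range byte load, with made-up names:
#include <cstddef>
#include <cstdint>
static int OneByteCharCodeAt(const uint8_t* data, size_t length, size_t index,
                             bool* handled) {
  if (index >= length) {   // Out of range: let the runtime throw.
    *handled = false;
    return -1;
  }
  *handled = true;
  return data[index];      // movzxb above: zero-extended one-byte load.
}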
1322 1300
1323 1301
1324 static bool String_hashCode(Assembler* assembler) { 1302 bool Intrinsifier::String_hashCode(Assembler* assembler) {
1325 Label fall_through; 1303 Label fall_through;
1326 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // String object. 1304 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // String object.
1327 __ movl(EAX, FieldAddress(EAX, String::hash_offset())); 1305 __ movl(EAX, FieldAddress(EAX, String::hash_offset()));
1328 __ cmpl(EAX, Immediate(0)); 1306 __ cmpl(EAX, Immediate(0));
1329 __ j(EQUAL, &fall_through, Assembler::kNearJump); 1307 __ j(EQUAL, &fall_through, Assembler::kNearJump);
1330 __ ret(); 1308 __ ret();
1331 __ Bind(&fall_through); 1309 __ Bind(&fall_through);
1332 // Hash not yet computed. 1310 // Hash not yet computed.
1333 return false; 1311 return false;
1334 } 1312 }
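// Editor's note (illustrative sketch, not part of this CL): the intrinsic
// above only serves an already-cached hash; a zero hash field means "not yet
// computed" and execution falls through to the compiled Dart method. Plain C++
// shape, with made-up names:
#include <cstdint>
static bool TryGetCachedHash(intptr_t hash_field, intptr_t* hash_out) {
  if (hash_field == 0) {
    return false;          // Not cached: take the slow path.
  }
  *hash_out = hash_field;  // Cached: return it directly.
  return true;
}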
1335 1313
1336 1314
1337 static bool String_isEmpty(Assembler* assembler) { 1315 bool Intrinsifier::String_isEmpty(Assembler* assembler) {
1338 Label is_true; 1316 Label is_true;
1339 const Bool& bool_true = Bool::ZoneHandle(Bool::True()); 1317 const Bool& bool_true = Bool::ZoneHandle(Bool::True());
1340 const Bool& bool_false = Bool::ZoneHandle(Bool::False()); 1318 const Bool& bool_false = Bool::ZoneHandle(Bool::False());
1341 // Get length. 1319 // Get length.
1342 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // String object. 1320 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // String object.
1343 __ movl(EAX, FieldAddress(EAX, String::length_offset())); 1321 __ movl(EAX, FieldAddress(EAX, String::length_offset()));
1344 __ cmpl(EAX, Immediate(Smi::RawValue(0))); 1322 __ cmpl(EAX, Immediate(Smi::RawValue(0)));
1345 __ j(EQUAL, &is_true, Assembler::kNearJump); 1323 __ j(EQUAL, &is_true, Assembler::kNearJump);
1346 __ LoadObject(EAX, bool_false); 1324 __ LoadObject(EAX, bool_false);
1347 __ ret(); 1325 __ ret();
1348 __ Bind(&is_true); 1326 __ Bind(&is_true);
1349 __ LoadObject(EAX, bool_true); 1327 __ LoadObject(EAX, bool_true);
1350 __ ret(); 1328 __ ret();
1351 return true; 1329 return true;
1352 } 1330 }
1353 1331
1354 #undef __ 1332 #undef __
1355
1356
1357 static bool CompareNames(const char* test_name, const char* name) {
1358 if (strcmp(test_name, name) == 0) {
1359 return true;
1360 }
1361 if ((name[0] == '_') && (test_name[0] == '_')) {
1362 // Check whether the private name belongs to one of the core libraries and
1363 // matches test_name after appending that library's private key.
1364 const Library& core_lib = Library::Handle(Library::CoreLibrary());
1365 const Library& core_impl_lib = Library::Handle(Library::CoreImplLibrary());
1366 String& test_str = String::Handle(String::New(test_name));
1367 String& test_str_with_key = String::Handle();
1368 test_str_with_key =
1369 String::Concat(test_str, String::Handle(core_lib.private_key()));
1370 if (strcmp(test_str_with_key.ToCString(), name) == 0) {
1371 return true;
1372 }
1373 test_str_with_key =
1374 String::Concat(test_str, String::Handle(core_impl_lib.private_key()));
1375 if (strcmp(test_str_with_key.ToCString(), name) == 0) {
1376 return true;
1377 }
1378 }
1379 return false;
1380 }
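// Editor's note (illustrative sketch, not part of this CL): private
// ('_'-prefixed) names are stored with a library-specific private key
// appended, so CompareNames accepts either an exact match or a match after
// appending the corelib or corelib-impl key. Stand-alone C++ shape, with the
// key passed in instead of fetched from a Library handle:
#include <cstring>
#include <string>
static bool PrivateNameMatches(const char* test_name, const char* name,
                               const char* library_private_key) {
  if (std::strcmp(test_name, name) == 0) {
    return true;
  }
  if (test_name[0] != '_' || name[0] != '_') {
    return false;          // Only private names get the key treatment.
  }
  return std::string(test_name) + library_private_key == name;
}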
1381
1382
1383 // Returns true if the function's class and function names match the given
1384 // test names, with special recognition of corelib private names.
1385 static bool TestFunction(const Function& function,
1386 const char* function_class_name,
1387 const char* function_name,
1388 const char* test_class_name,
1389 const char* test_function_name) {
1390 return CompareNames(test_class_name, function_class_name) &&
1391 CompareNames(test_function_name, function_name);
1392 }
1393
1394
1395 bool Intrinsifier::Intrinsify(const Function& function, Assembler* assembler) {
1396 if (!FLAG_intrinsify) return false;
1397 const char* function_name = String::Handle(function.name()).ToCString();
1398 const Class& function_class = Class::Handle(function.owner());
1399 const char* class_name = String::Handle(function_class.Name()).ToCString();
1400 // Only core library methods can be intrinsified.
1401 const Library& core_lib = Library::Handle(Library::CoreLibrary());
1402 const Library& core_impl_lib = Library::Handle(Library::CoreImplLibrary());
1403 if ((function_class.library() != core_lib.raw()) &&
1404 (function_class.library() != core_impl_lib.raw())) {
1405 return false;
1406 }
1407 #define FIND_INTRINSICS(test_class_name, test_function_name, destination) \
1408 if (TestFunction(function, \
1409 class_name, function_name, \
1410 #test_class_name, #test_function_name)) { \
1411 return destination(assembler); \
1412 } \
1413
1414 INTRINSIC_LIST(FIND_INTRINSICS);
1415 #undef FIND_INTRINSICS
1416 return false;
1417 }
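// Editor's note (illustrative sketch, not part of this CL): for a hypothetical
// INTRINSIC_LIST entry V(SomeClass, someMethod, SomeClass_someMethod), the
// FIND_INTRINSICS macro above expands to roughly the following (all three
// names are made up for the illustration):
//
//   if (TestFunction(function,
//                    class_name, function_name,
//                    "SomeClass", "someMethod")) {
//     return SomeClass_someMethod(assembler);
//   }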
1418
1419 } // namespace dart 1333 } // namespace dart
1420 1334
1421 #endif // defined TARGET_ARCH_IA32 1335 #endif // defined TARGET_ARCH_IA32