Chromium Code Reviews

Side by Side Diff: src/arm/builtins-arm.cc

Issue 15085026: ARM: Smi refactoring and improvements. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 7 months ago
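
What the patch does, in brief (a sketch, not part of the CL description): it replaces open-coded smi shift arithmetic (LSL/ASR/LSR by kSmiTagSize, explicit kSmiTagMask tests) with the named MacroAssembler helpers SmiTag, SmiUntag, SmiTst, Operand::SmiUntag and Operand::PointerOffsetFromSmiKey. The standalone C++ sketch below models the 32-bit smi encoding those helpers rely on (kSmiTag == 0, kSmiTagSize == 1, kPointerSizeLog2 == 2, per the STATIC_ASSERTs in the old code); the free functions are hypothetical stand-ins for illustration, not the V8 API.

    #include <cassert>
    #include <cstdint>

    // Assumed 32-bit ARM layout, matching the STATIC_ASSERTs in the old code.
    static const int kSmiTag = 0;
    static const int kSmiTagSize = 1;
    static const int kSmiTagMask = (1 << kSmiTagSize) - 1;
    static const int kPointerSizeLog2 = 2;

    // value -> smi: shift the payload left past the (zero) tag bit.
    static int32_t SmiTagValue(int32_t value) {
      return static_cast<int32_t>(static_cast<uint32_t>(value) << kSmiTagSize);
    }

    // smi -> value: arithmetic shift right (as ARM ASR does), so negative
    // smis untag correctly on mainstream compilers.
    static int32_t SmiUntagValue(int32_t smi) { return smi >> kSmiTagSize; }

    // SmiTst sets flags from (word & kSmiTagMask); "eq" then means "is a smi".
    static bool IsSmiValue(int32_t word) { return (word & kSmiTagMask) == kSmiTag; }

    // Operand::PointerOffsetFromSmiKey(key): the byte offset of element 'key',
    // i.e. the untagged index scaled by the pointer size, done in one shift.
    static int32_t PointerOffsetFromSmiKey(int32_t smi_key) {
      return smi_key << (kPointerSizeLog2 - kSmiTagSize);
    }

    int main() {
      assert(SmiUntagValue(SmiTagValue(-42)) == -42);
      assert(IsSmiValue(SmiTagValue(7)) && !IsSmiValue(SmiTagValue(7) | 1));
      assert(PointerOffsetFromSmiKey(SmiTagValue(3)) == 3 * (1 << kPointerSizeLog2));
      return 0;
    }
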
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 197 matching lines...)
208 __ LoadInitialArrayMap(array_function, scratch2, 208 __ LoadInitialArrayMap(array_function, scratch2,
209 elements_array_storage, fill_with_hole); 209 elements_array_storage, fill_with_hole);
210 210
211 if (FLAG_debug_code) { // Assert that array size is not zero. 211 if (FLAG_debug_code) { // Assert that array size is not zero.
212 __ tst(array_size, array_size); 212 __ tst(array_size, array_size);
213 __ Assert(ne, "array size is unexpectedly 0"); 213 __ Assert(ne, "array size is unexpectedly 0");
214 } 214 }
215 215
216 // Allocate the JSArray object together with space for a FixedArray with the 216 // Allocate the JSArray object together with space for a FixedArray with the
217 // requested number of elements. 217 // requested number of elements.
218 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
219 __ mov(elements_array_end, 218 __ mov(elements_array_end,
220 Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize)); 219 Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
221 __ add(elements_array_end, 220 __ add(elements_array_end, elements_array_end, Operand::SmiUntag(array_size));
222 elements_array_end,
223 Operand(array_size, ASR, kSmiTagSize));
224 __ Allocate(elements_array_end, 221 __ Allocate(elements_array_end,
225 result, 222 result,
226 scratch1, 223 scratch1,
227 scratch2, 224 scratch2,
228 gc_required, 225 gc_required,
229 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); 226 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
230 227
231 // Allocated the JSArray. Now initialize the fields except for the elements 228 // Allocated the JSArray. Now initialize the fields except for the elements
232 // array. 229 // array.
233 // result: JSObject 230 // result: JSObject
234 // elements_array_storage: initial map 231 // elements_array_storage: initial map
235 // array_size: size of array (smi) 232 // array_size: size of array (smi)
236 __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset)); 233 __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
237 __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex); 234 __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
238 __ str(elements_array_storage, 235 __ str(elements_array_storage,
239 FieldMemOperand(result, JSArray::kPropertiesOffset)); 236 FieldMemOperand(result, JSArray::kPropertiesOffset));
240 // Field JSArray::kElementsOffset is initialized later. 237 // Field JSArray::kElementsOffset is initialized later.
241 __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset)); 238 __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));
242 239
243 // Calculate the location of the elements array and set elements array member 240 // Calculate the location of the elements array and set elements array member
244 // of the JSArray. 241 // of the JSArray.
245 // result: JSObject 242 // result: JSObject
246 // array_size: size of array (smi) 243 // array_size: size of array (smi)
247 __ add(elements_array_storage, result, Operand(JSArray::kSize)); 244 __ add(elements_array_storage, result, Operand(JSArray::kSize));
248 __ str(elements_array_storage, 245 __ str(elements_array_storage,
249 FieldMemOperand(result, JSArray::kElementsOffset)); 246 FieldMemOperand(result, JSArray::kElementsOffset));
250 247
251 // Clear the heap tag on the elements array. 248 // Clear the heap tag on the elements array.
252 STATIC_ASSERT(kSmiTag == 0);
253 __ sub(elements_array_storage, 249 __ sub(elements_array_storage,
254 elements_array_storage, 250 elements_array_storage,
255 Operand(kHeapObjectTag)); 251 Operand(kHeapObjectTag));
256 // Initialize the fixed array and fill it with holes. FixedArray length is 252 // Initialize the fixed array and fill it with holes. FixedArray length is
257 // stored as a smi. 253 // stored as a smi.
258 // result: JSObject 254 // result: JSObject
259 // elements_array_storage: elements array (untagged) 255 // elements_array_storage: elements array (untagged)
260 // array_size: size of array (smi) 256 // array_size: size of array (smi)
261 __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex); 257 __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
262 ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset); 258 ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
263 __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex)); 259 __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
264 STATIC_ASSERT(kSmiTag == 0);
265 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); 260 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
266 __ str(array_size, 261 __ str(array_size,
267 MemOperand(elements_array_storage, kPointerSize, PostIndex)); 262 MemOperand(elements_array_storage, kPointerSize, PostIndex));
268 263
269 // Calculate elements array and elements array end. 264 // Calculate elements array and elements array end.
270 // result: JSObject 265 // result: JSObject
271 // elements_array_storage: elements array element storage 266 // elements_array_storage: elements array element storage
272 // array_size: smi-tagged size of elements array 267 // array_size: smi-tagged size of elements array
273 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
274 __ add(elements_array_end, 268 __ add(elements_array_end,
275 elements_array_storage, 269 elements_array_storage,
276 Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize)); 270 Operand::PointerOffsetFromSmiKey(array_size));
277 271
278 // Fill the allocated FixedArray with the hole value if requested. 272 // Fill the allocated FixedArray with the hole value if requested.
279 // result: JSObject 273 // result: JSObject
280 // elements_array_storage: elements array element storage 274 // elements_array_storage: elements array element storage
281 // elements_array_end: start of next object 275 // elements_array_end: start of next object
282 if (fill_with_hole) { 276 if (fill_with_hole) {
283 Label loop, entry; 277 Label loop, entry;
284 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex); 278 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
285 __ jmp(&entry); 279 __ jmp(&entry);
286 __ bind(&loop); 280 __ bind(&loop);
(...skipping 41 matching lines...)
328 // Set up return value, remove receiver from stack and return. 322 // Set up return value, remove receiver from stack and return.
329 __ mov(r0, r2); 323 __ mov(r0, r2);
330 __ add(sp, sp, Operand(kPointerSize)); 324 __ add(sp, sp, Operand(kPointerSize));
331 __ Jump(lr); 325 __ Jump(lr);
332 326
333 // Check for one argument. Bail out if argument is not smi or if it is 327 // Check for one argument. Bail out if argument is not smi or if it is
334 // negative. 328 // negative.
335 __ bind(&argc_one_or_more); 329 __ bind(&argc_one_or_more);
336 __ cmp(r0, Operand(1)); 330 __ cmp(r0, Operand(1));
337 __ b(ne, &argc_two_or_more); 331 __ b(ne, &argc_two_or_more);
338 STATIC_ASSERT(kSmiTag == 0);
339 __ ldr(r2, MemOperand(sp)); // Get the argument from the stack. 332 __ ldr(r2, MemOperand(sp)); // Get the argument from the stack.
340 __ tst(r2, r2); 333 __ tst(r2, r2);
341 __ b(ne, &not_empty_array); 334 __ b(ne, &not_empty_array);
342 __ Drop(1); // Adjust stack. 335 __ Drop(1); // Adjust stack.
343 __ mov(r0, Operand::Zero()); // Treat this as a call with argc of zero. 336 __ mov(r0, Operand::Zero()); // Treat this as a call with argc of zero.
344 __ b(&empty_array); 337 __ b(&empty_array);
345 338
346 __ bind(&not_empty_array); 339 __ bind(&not_empty_array);
340 STATIC_ASSERT(kSmiTag == 0);
347 __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC); 341 __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
348 __ b(ne, call_generic_code); 342 __ b(ne, call_generic_code);
349 343
350 // Handle construction of an empty array of a certain size. Bail out if size 344 // Handle construction of an empty array of a certain size. Bail out if size
351 // is too large to actually allocate an elements array. 345 // is too large to actually allocate an elements array.
352 STATIC_ASSERT(kSmiTag == 0); 346 STATIC_ASSERT(kSmiTag == 0);
353 __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize)); 347 __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
354 __ b(ge, call_generic_code); 348 __ b(ge, call_generic_code);
355 349
356 // r0: argc 350 // r0: argc
(...skipping 11 matching lines...)
368 true, 362 true,
369 call_generic_code); 363 call_generic_code);
370 __ IncrementCounter(counters->array_function_native(), 1, r2, r4); 364 __ IncrementCounter(counters->array_function_native(), 1, r2, r4);
371 // Set up return value, remove receiver and argument from stack and return. 365 // Set up return value, remove receiver and argument from stack and return.
372 __ mov(r0, r3); 366 __ mov(r0, r3);
373 __ add(sp, sp, Operand(2 * kPointerSize)); 367 __ add(sp, sp, Operand(2 * kPointerSize));
374 __ Jump(lr); 368 __ Jump(lr);
375 369
376 // Handle construction of an array from a list of arguments. 370 // Handle construction of an array from a list of arguments.
377 __ bind(&argc_two_or_more); 371 __ bind(&argc_two_or_more);
378 __ mov(r2, Operand(r0, LSL, kSmiTagSize)); // Convet argc to a smi. 372 __ SmiTag(r2, r0);
379 373
380 // r0: argc 374 // r0: argc
381 // r1: constructor 375 // r1: constructor
382 // r2: array_size (smi) 376 // r2: array_size (smi)
383 // sp[0]: last argument 377 // sp[0]: last argument
384 AllocateJSArray(masm, 378 AllocateJSArray(masm,
385 r1, 379 r1,
386 r2, 380 r2,
387 r3, 381 r3,
388 r4, 382 r4,
(...skipping 82 matching lines...)
471 // -- sp[...]: constructor arguments 465 // -- sp[...]: constructor arguments
472 // ----------------------------------- 466 // -----------------------------------
473 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; 467 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
474 468
475 // Get the InternalArray function. 469 // Get the InternalArray function.
476 GenerateLoadInternalArrayFunction(masm, r1); 470 GenerateLoadInternalArrayFunction(masm, r1);
477 471
478 if (FLAG_debug_code) { 472 if (FLAG_debug_code) {
479 // Initial map for the builtin InternalArray functions should be maps. 473 // Initial map for the builtin InternalArray functions should be maps.
480 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 474 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
481 __ tst(r2, Operand(kSmiTagMask)); 475 __ SmiTst(r2);
482 __ Assert(ne, "Unexpected initial map for InternalArray function"); 476 __ Assert(ne, "Unexpected initial map for InternalArray function");
483 __ CompareObjectType(r2, r3, r4, MAP_TYPE); 477 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
484 __ Assert(eq, "Unexpected initial map for InternalArray function"); 478 __ Assert(eq, "Unexpected initial map for InternalArray function");
485 } 479 }
486 480
487 // Run the native code for the InternalArray function called as a normal 481 // Run the native code for the InternalArray function called as a normal
488 // function. 482 // function.
489 ArrayNativeCode(masm, &generic_array_code); 483 ArrayNativeCode(masm, &generic_array_code);
490 484
491 // Jump to the generic array code if the specialized code cannot handle the 485 // Jump to the generic array code if the specialized code cannot handle the
(...skipping 13 matching lines...)
505 // -- sp[...]: constructor arguments 499 // -- sp[...]: constructor arguments
506 // ----------------------------------- 500 // -----------------------------------
507 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; 501 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
508 502
509 // Get the Array function. 503 // Get the Array function.
510 GenerateLoadArrayFunction(masm, r1); 504 GenerateLoadArrayFunction(masm, r1);
511 505
512 if (FLAG_debug_code) { 506 if (FLAG_debug_code) {
513 // Initial map for the builtin Array functions should be maps. 507 // Initial map for the builtin Array functions should be maps.
514 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 508 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
515 __ tst(r2, Operand(kSmiTagMask)); 509 __ SmiTst(r2);
516 __ Assert(ne, "Unexpected initial map for Array function"); 510 __ Assert(ne, "Unexpected initial map for Array function");
517 __ CompareObjectType(r2, r3, r4, MAP_TYPE); 511 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
518 __ Assert(eq, "Unexpected initial map for Array function"); 512 __ Assert(eq, "Unexpected initial map for Array function");
519 } 513 }
520 514
521 // Run the native code for the Array function called as a normal function. 515 // Run the native code for the Array function called as a normal function.
522 ArrayNativeCode(masm, &generic_array_code); 516 ArrayNativeCode(masm, &generic_array_code);
523 517
524 // Jump to the generic array code if the specialized code cannot handle 518 // Jump to the generic array code if the specialized code cannot handle
525 // the construction. 519 // the construction.
(...skipping 12 matching lines...)
538 // -- r2 : type info cell 532 // -- r2 : type info cell
539 // -- lr : return address 533 // -- lr : return address
540 // -- sp[...]: constructor arguments 534 // -- sp[...]: constructor arguments
541 // ----------------------------------- 535 // -----------------------------------
542 536
543 if (FLAG_debug_code) { 537 if (FLAG_debug_code) {
544 // The array construct code is only set for the builtin and internal 538 // The array construct code is only set for the builtin and internal
545 // Array functions which always have a map. 539 // Array functions which always have a map.
546 // Initial map for the builtin Array function should be a map. 540 // Initial map for the builtin Array function should be a map.
547 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 541 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
548 __ tst(r3, Operand(kSmiTagMask)); 542 __ SmiTst(r3);
549 __ Assert(ne, "Unexpected initial map for Array function"); 543 __ Assert(ne, "Unexpected initial map for Array function");
550 __ CompareObjectType(r3, r3, r4, MAP_TYPE); 544 __ CompareObjectType(r3, r3, r4, MAP_TYPE);
551 __ Assert(eq, "Unexpected initial map for Array function"); 545 __ Assert(eq, "Unexpected initial map for Array function");
552 } 546 }
553 Label generic_constructor; 547 Label generic_constructor;
554 // Run the native code for the Array function called as a constructor. 548 // Run the native code for the Array function called as a constructor.
555 ArrayNativeCode(masm, &generic_constructor); 549 ArrayNativeCode(masm, &generic_constructor);
556 550
557 // Jump to the generic construct code in case the specialized code cannot 551 // Jump to the generic construct code in case the specialized code cannot
558 // handle the construction. 552 // handle the construction.
(...skipping 212 matching lines...)
771 // Should never count constructions for api objects. 765 // Should never count constructions for api objects.
772 ASSERT(!is_api_function || !count_constructions); 766 ASSERT(!is_api_function || !count_constructions);
773 767
774 Isolate* isolate = masm->isolate(); 768 Isolate* isolate = masm->isolate();
775 769
776 // Enter a construct frame. 770 // Enter a construct frame.
777 { 771 {
778 FrameScope scope(masm, StackFrame::CONSTRUCT); 772 FrameScope scope(masm, StackFrame::CONSTRUCT);
779 773
780 // Preserve the two incoming parameters on the stack. 774 // Preserve the two incoming parameters on the stack.
781 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); 775 __ SmiTag(r0);
782 __ push(r0); // Smi-tagged arguments count. 776 __ push(r0); // Smi-tagged arguments count.
783 __ push(r1); // Constructor function. 777 __ push(r1); // Constructor function.
784 778
785 // Try to allocate the object without transitioning into C code. If any of 779 // Try to allocate the object without transitioning into C code. If any of
786 // the preconditions is not met, the code bails out to the runtime call. 780 // the preconditions is not met, the code bails out to the runtime call.
787 Label rt_call, allocated; 781 Label rt_call, allocated;
788 if (FLAG_inline_new) { 782 if (FLAG_inline_new) {
789 Label undo_allocation; 783 Label undo_allocation;
790 #ifdef ENABLE_DEBUGGER_SUPPORT 784 #ifdef ENABLE_DEBUGGER_SUPPORT
791 ExternalReference debug_step_in_fp = 785 ExternalReference debug_step_in_fp =
(...skipping 132 matching lines...)
924 // Initialize the FixedArray. 918 // Initialize the FixedArray.
925 // r1: constructor 919 // r1: constructor
926 // r3: number of elements in properties array 920 // r3: number of elements in properties array
927 // r4: JSObject 921 // r4: JSObject
928 // r5: FixedArray (not tagged) 922 // r5: FixedArray (not tagged)
929 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex); 923 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
930 __ mov(r2, r5); 924 __ mov(r2, r5);
931 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); 925 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
932 __ str(r6, MemOperand(r2, kPointerSize, PostIndex)); 926 __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
933 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); 927 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
934 __ mov(r0, Operand(r3, LSL, kSmiTagSize)); 928 __ SmiTag(r0, r3);
935 __ str(r0, MemOperand(r2, kPointerSize, PostIndex)); 929 __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
936 930
937 // Initialize the fields to undefined. 931 // Initialize the fields to undefined.
938 // r1: constructor function 932 // r1: constructor function
939 // r2: First element of FixedArray (not tagged) 933 // r2: First element of FixedArray (not tagged)
940 // r3: number of elements in properties array 934 // r3: number of elements in properties array
941 // r4: JSObject 935 // r4: JSObject
942 // r5: FixedArray (not tagged) 936 // r5: FixedArray (not tagged)
943 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object. 937 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
944 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize); 938 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
(...skipping 52 matching lines...)
997 // sp[1]: receiver 991 // sp[1]: receiver
998 // sp[2]: constructor function 992 // sp[2]: constructor function
999 // sp[3]: number of arguments (smi-tagged) 993 // sp[3]: number of arguments (smi-tagged)
1000 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); 994 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
1001 __ ldr(r3, MemOperand(sp, 3 * kPointerSize)); 995 __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
1002 996
1003 // Set up pointer to last argument. 997 // Set up pointer to last argument.
1004 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); 998 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
1005 999
1006 // Set up number of arguments for function call below 1000 // Set up number of arguments for function call below
1007 __ mov(r0, Operand(r3, LSR, kSmiTagSize)); 1001 __ SmiUntag(r0, r3);
1008 1002
1009 // Copy arguments and receiver to the expression stack. 1003 // Copy arguments and receiver to the expression stack.
1010 // r0: number of arguments 1004 // r0: number of arguments
1011 // r1: constructor function 1005 // r1: constructor function
1012 // r2: address of last argument (caller sp) 1006 // r2: address of last argument (caller sp)
1013 // r3: number of arguments (smi-tagged) 1007 // r3: number of arguments (smi-tagged)
1014 // sp[0]: receiver 1008 // sp[0]: receiver
1015 // sp[1]: receiver 1009 // sp[1]: receiver
1016 // sp[2]: constructor function 1010 // sp[2]: constructor function
1017 // sp[3]: number of arguments (smi-tagged) 1011 // sp[3]: number of arguments (smi-tagged)
(...skipping 434 matching lines...)
1452 1446
1453 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1447 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1454 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); 1448 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
1455 __ b(ge, &shift_arguments); 1449 __ b(ge, &shift_arguments);
1456 1450
1457 __ bind(&convert_to_object); 1451 __ bind(&convert_to_object);
1458 1452
1459 { 1453 {
1460 // Enter an internal frame in order to preserve argument count. 1454 // Enter an internal frame in order to preserve argument count.
1461 FrameScope scope(masm, StackFrame::INTERNAL); 1455 FrameScope scope(masm, StackFrame::INTERNAL);
1462 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged. 1456 __ SmiTag(r0);
1463 __ push(r0); 1457 __ push(r0);
1464 1458
1465 __ push(r2); 1459 __ push(r2);
1466 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1460 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1467 __ mov(r2, r0); 1461 __ mov(r2, r0);
1468 1462
1469 __ pop(r0); 1463 __ pop(r0);
1470 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); 1464 __ SmiUntag(r0);
1471 1465
1472 // Exit the internal frame. 1466 // Exit the internal frame.
1473 } 1467 }
1474 1468
1475 // Restore the function to r1, and the flag to r4. 1469 // Restore the function to r1, and the flag to r4.
1476 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); 1470 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1477 __ mov(r4, Operand::Zero()); 1471 __ mov(r4, Operand::Zero());
1478 __ jmp(&patch_receiver); 1472 __ jmp(&patch_receiver);
1479 1473
1480 // Use the global receiver object from the called function as the 1474 // Use the global receiver object from the called function as the
(...skipping 82 matching lines...)
1563 } 1557 }
1564 1558
1565 // 5b. Get the code to call from the function and check that the number of 1559 // 5b. Get the code to call from the function and check that the number of
1566 // expected arguments matches what we're providing. If so, jump 1560 // expected arguments matches what we're providing. If so, jump
1567 // (tail-call) to the code in register edx without checking arguments. 1561 // (tail-call) to the code in register edx without checking arguments.
1568 // r0: actual number of arguments 1562 // r0: actual number of arguments
1569 // r1: function 1563 // r1: function
1570 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1564 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1571 __ ldr(r2, 1565 __ ldr(r2,
1572 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); 1566 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
1573 __ mov(r2, Operand(r2, ASR, kSmiTagSize)); 1567 __ SmiUntag(r2);
1574 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); 1568 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1575 __ SetCallKind(r5, CALL_AS_METHOD); 1569 __ SetCallKind(r5, CALL_AS_METHOD);
1576 __ cmp(r2, r0); // Check formal and actual parameter counts. 1570 __ cmp(r2, r0); // Check formal and actual parameter counts.
1577 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1571 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1578 RelocInfo::CODE_TARGET, 1572 RelocInfo::CODE_TARGET,
1579 ne); 1573 ne);
1580 1574
1581 ParameterCount expected(0); 1575 ParameterCount expected(0);
1582 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, 1576 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
1583 NullCallWrapper(), CALL_AS_METHOD); 1577 NullCallWrapper(), CALL_AS_METHOD);
(...skipping 18 matching lines...)
1602 1596
1603 // Check the stack for overflow. We are not trying to catch 1597 // Check the stack for overflow. We are not trying to catch
1604 // interruptions (e.g. debug break and preemption) here, so the "real stack 1598 // interruptions (e.g. debug break and preemption) here, so the "real stack
1605 // limit" is checked. 1599 // limit" is checked.
1606 Label okay; 1600 Label okay;
1607 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 1601 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
1608 // Make r2 the space we have left. The stack might already be overflowed 1602 // Make r2 the space we have left. The stack might already be overflowed
1609 // here which will cause r2 to become negative. 1603 // here which will cause r2 to become negative.
1610 __ sub(r2, sp, r2); 1604 __ sub(r2, sp, r2);
1611 // Check if the arguments will overflow the stack. 1605 // Check if the arguments will overflow the stack.
1612 __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1606 __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0));
1613 __ b(gt, &okay); // Signed comparison. 1607 __ b(gt, &okay); // Signed comparison.
1614 1608
1615 // Out of stack space. 1609 // Out of stack space.
1616 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1610 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1617 __ push(r1); 1611 __ push(r1);
1618 __ push(r0); 1612 __ push(r0);
1619 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 1613 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1620 // End of stack check. 1614 // End of stack check.
1621 1615
1622 // Push current limit and index. 1616 // Push current limit and index.
(...skipping 89 matching lines...)
1712 // Test if the copy loop has finished copying all the elements from the 1706 // Test if the copy loop has finished copying all the elements from the
1713 // arguments object. 1707 // arguments object.
1714 __ bind(&entry); 1708 __ bind(&entry);
1715 __ ldr(r1, MemOperand(fp, kLimitOffset)); 1709 __ ldr(r1, MemOperand(fp, kLimitOffset));
1716 __ cmp(r0, r1); 1710 __ cmp(r0, r1);
1717 __ b(ne, &loop); 1711 __ b(ne, &loop);
1718 1712
1719 // Invoke the function. 1713 // Invoke the function.
1720 Label call_proxy; 1714 Label call_proxy;
1721 ParameterCount actual(r0); 1715 ParameterCount actual(r0);
1722 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); 1716 __ SmiUntag(r0);
1723 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1717 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1724 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1718 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1725 __ b(ne, &call_proxy); 1719 __ b(ne, &call_proxy);
1726 __ InvokeFunction(r1, actual, CALL_FUNCTION, 1720 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1727 NullCallWrapper(), CALL_AS_METHOD); 1721 NullCallWrapper(), CALL_AS_METHOD);
1728 1722
1729 frame_scope.GenerateLeaveFrame(); 1723 frame_scope.GenerateLeaveFrame();
1730 __ add(sp, sp, Operand(3 * kPointerSize)); 1724 __ add(sp, sp, Operand(3 * kPointerSize));
1731 __ Jump(lr); 1725 __ Jump(lr);
1732 1726
1733 // Invoke the function proxy. 1727 // Invoke the function proxy.
1734 __ bind(&call_proxy); 1728 __ bind(&call_proxy);
1735 __ push(r1); // add function proxy as last argument 1729 __ push(r1); // add function proxy as last argument
1736 __ add(r0, r0, Operand(1)); 1730 __ add(r0, r0, Operand(1));
1737 __ mov(r2, Operand::Zero()); 1731 __ mov(r2, Operand::Zero());
1738 __ SetCallKind(r5, CALL_AS_METHOD); 1732 __ SetCallKind(r5, CALL_AS_METHOD);
1739 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY); 1733 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
1740 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1734 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1741 RelocInfo::CODE_TARGET); 1735 RelocInfo::CODE_TARGET);
1742 1736
1743 // Tear down the internal frame and remove function, receiver and args. 1737 // Tear down the internal frame and remove function, receiver and args.
1744 } 1738 }
1745 __ add(sp, sp, Operand(3 * kPointerSize)); 1739 __ add(sp, sp, Operand(3 * kPointerSize));
1746 __ Jump(lr); 1740 __ Jump(lr);
1747 } 1741 }
1748 1742
1749 1743
1750 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 1744 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1751 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); 1745 __ SmiTag(r0);
1752 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 1746 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1753 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit()); 1747 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
1754 __ add(fp, sp, Operand(3 * kPointerSize)); 1748 __ add(fp, sp, Operand(3 * kPointerSize));
1755 } 1749 }
1756 1750
1757 1751
1758 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 1752 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1759 // ----------- S t a t e ------------- 1753 // ----------- S t a t e -------------
1760 // -- r0 : result being passed through 1754 // -- r0 : result being passed through
1761 // ----------------------------------- 1755 // -----------------------------------
1762 // Get the number of arguments passed (as a smi), tear down the frame and 1756 // Get the number of arguments passed (as a smi), tear down the frame and
1763 // then tear down the parameters. 1757 // then tear down the parameters.
1764 __ ldr(r1, MemOperand(fp, -3 * kPointerSize)); 1758 __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
1765 __ mov(sp, fp); 1759 __ mov(sp, fp);
1766 __ ldm(ia_w, sp, fp.bit() | lr.bit()); 1760 __ ldm(ia_w, sp, fp.bit() | lr.bit());
1767 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize)); 1761 __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
1768 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver 1762 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
1769 } 1763 }
1770 1764
1771 1765
1772 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 1766 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1773 // ----------- S t a t e ------------- 1767 // ----------- S t a t e -------------
1774 // -- r0 : actual number of arguments 1768 // -- r0 : actual number of arguments
1775 // -- r1 : function (passed through to callee) 1769 // -- r1 : function (passed through to callee)
1776 // -- r2 : expected number of arguments 1770 // -- r2 : expected number of arguments
1777 // -- r3 : code entry to call 1771 // -- r3 : code entry to call
(...skipping 10 matching lines...)
1788 1782
1789 { // Enough parameters: actual >= expected 1783 { // Enough parameters: actual >= expected
1790 __ bind(&enough); 1784 __ bind(&enough);
1791 EnterArgumentsAdaptorFrame(masm); 1785 EnterArgumentsAdaptorFrame(masm);
1792 1786
1793 // Calculate copy start address into r0 and copy end address into r2. 1787 // Calculate copy start address into r0 and copy end address into r2.
1794 // r0: actual number of arguments as a smi 1788 // r0: actual number of arguments as a smi
1795 // r1: function 1789 // r1: function
1796 // r2: expected number of arguments 1790 // r2: expected number of arguments
1797 // r3: code entry to call 1791 // r3: code entry to call
1798 __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1792 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
1799 // adjust for return address and receiver 1793 // adjust for return address and receiver
1800 __ add(r0, r0, Operand(2 * kPointerSize)); 1794 __ add(r0, r0, Operand(2 * kPointerSize));
1801 __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2)); 1795 __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
1802 1796
1803 // Copy the arguments (including the receiver) to the new stack frame. 1797 // Copy the arguments (including the receiver) to the new stack frame.
1804 // r0: copy start address 1798 // r0: copy start address
1805 // r1: function 1799 // r1: function
1806 // r2: copy end address 1800 // r2: copy end address
1807 // r3: code entry to call 1801 // r3: code entry to call
1808 1802
(...skipping 10 matching lines...)
1819 1813
1820 { // Too few parameters: Actual < expected 1814 { // Too few parameters: Actual < expected
1821 __ bind(&too_few); 1815 __ bind(&too_few);
1822 EnterArgumentsAdaptorFrame(masm); 1816 EnterArgumentsAdaptorFrame(masm);
1823 1817
1824 // Calculate copy start address into r0 and copy end address is fp. 1818 // Calculate copy start address into r0 and copy end address is fp.
1825 // r0: actual number of arguments as a smi 1819 // r0: actual number of arguments as a smi
1826 // r1: function 1820 // r1: function
1827 // r2: expected number of arguments 1821 // r2: expected number of arguments
1828 // r3: code entry to call 1822 // r3: code entry to call
1829 __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1823 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
1830 1824
1831 // Copy the arguments (including the receiver) to the new stack frame. 1825 // Copy the arguments (including the receiver) to the new stack frame.
1832 // r0: copy start address 1826 // r0: copy start address
1833 // r1: function 1827 // r1: function
1834 // r2: expected number of arguments 1828 // r2: expected number of arguments
1835 // r3: code entry to call 1829 // r3: code entry to call
1836 Label copy; 1830 Label copy;
1837 __ bind(&copy); 1831 __ bind(&copy);
1838 // Adjust load for return address and receiver. 1832 // Adjust load for return address and receiver.
1839 __ ldr(ip, MemOperand(r0, 2 * kPointerSize)); 1833 __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
(...skipping 35 matching lines...)
1875 __ bind(&dont_adapt_arguments); 1869 __ bind(&dont_adapt_arguments);
1876 __ Jump(r3); 1870 __ Jump(r3);
1877 } 1871 }
1878 1872
1879 1873
1880 #undef __ 1874 #undef __
1881 1875
1882 } } // namespace v8::internal 1876 } } // namespace v8::internal
1883 1877
1884 #endif // V8_TARGET_ARCH_ARM 1878 #endif // V8_TARGET_ARCH_ARM
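
One non-obvious pattern in the one-argument Array construction path above is the combined check __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC) followed by __ b(ne, call_generic_code), which rejects the argument if it is either negative or not a smi using a single instruction. A minimal standalone model of that predicate, assuming the 32-bit values kIntptrSignBit == 0x80000000 and kSmiTagMask == 1 (assumed here, not taken from this file):

    #include <cassert>
    #include <cstdint>

    // Assumed 32-bit constants (hypothetical stand-ins, not read from this file).
    static const uint32_t kIntptrSignBit = 0x80000000u;
    static const uint32_t kSmiTagMask = 1u;

    // Models and_(scratch, value, Operand(kIntptrSignBit | kSmiTagMask), SetCC)
    // followed by b(ne, ...): the branch is not taken only when both the sign
    // bit and the smi tag bit are clear, i.e. the value is a non-negative smi.
    static bool IsNonNegativeSmi(uint32_t word) {
      return (word & (kIntptrSignBit | kSmiTagMask)) == 0;
    }

    int main() {
      assert(IsNonNegativeSmi(7u << 1));                          // smi 7
      assert(!IsNonNegativeSmi((7u << 1) | 1u));                  // not a smi
      assert(!IsNonNegativeSmi(static_cast<uint32_t>(-7) << 1));  // negative smi
      return 0;
    }
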