OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
(...skipping 23 matching lines...) |
34 // SP : address of last argument in argument array. | 34 // SP : address of last argument in argument array. |
35 // SP + 8*R4 - 8 : address of first argument in argument array. | 35 // SP + 8*R4 - 8 : address of first argument in argument array. |
36 // SP + 8*R4 : address of return value. | 36 // SP + 8*R4 : address of return value. |
37 // R5 : address of the runtime function to call. | 37 // R5 : address of the runtime function to call. |
38 // R4 : number of arguments to the call. | 38 // R4 : number of arguments to the call. |
39 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { | 39 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { |
40 const intptr_t thread_offset = NativeArguments::thread_offset(); | 40 const intptr_t thread_offset = NativeArguments::thread_offset(); |
41 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 41 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); |
42 const intptr_t argv_offset = NativeArguments::argv_offset(); | 42 const intptr_t argv_offset = NativeArguments::argv_offset(); |
43 const intptr_t retval_offset = NativeArguments::retval_offset(); | 43 const intptr_t retval_offset = NativeArguments::retval_offset(); |
44 const intptr_t exitframe_last_param_slot_from_fp = 1; | |
45 | 44 |
46 __ SetPrologueOffset(); | 45 __ SetPrologueOffset(); |
47 __ Comment("CallToRuntimeStub"); | 46 __ Comment("CallToRuntimeStub"); |
48 __ EnterStubFrame(); | 47 __ EnterStubFrame(); |
49 | 48 |
50 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << R28)) != 0); | 49 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << R28)) != 0); |
51 __ LoadIsolate(R28); | 50 __ LoadIsolate(R28); |
52 | 51 |
53 // Save exit frame information to enable stack walking as we are about | 52 // Save exit frame information to enable stack walking as we are about |
54 // to transition to Dart VM C++ code. | 53 // to transition to Dart VM C++ code. |
(...skipping 29 matching lines...) |
84 | 83 |
85 // There are no runtime calls to closures, so we do not need to set the tag | 84 // There are no runtime calls to closures, so we do not need to set the tag |
86 // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. | 85 // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. |
87 ASSERT(argc_tag_offset == 1 * kWordSize); | 86 ASSERT(argc_tag_offset == 1 * kWordSize); |
88 __ mov(R1, R4); // Set argc in NativeArguments. | 87 __ mov(R1, R4); // Set argc in NativeArguments. |
89 | 88 |
90 ASSERT(argv_offset == 2 * kWordSize); | 89 ASSERT(argv_offset == 2 * kWordSize); |
91 __ add(R2, ZR, Operand(R4, LSL, 3)); | 90 __ add(R2, ZR, Operand(R4, LSL, 3)); |
92 __ add(R2, FP, Operand(R2)); // Compute argv. | 91 __ add(R2, FP, Operand(R2)); // Compute argv. |
93 // Set argv in NativeArguments. | 92 // Set argv in NativeArguments. |
94 __ AddImmediate(R2, R2, exitframe_last_param_slot_from_fp * kWordSize); | 93 __ AddImmediate(R2, R2, kParamEndSlotFromFp * kWordSize); |
95 | 94 |
96 ASSERT(retval_offset == 3 * kWordSize); | 95 ASSERT(retval_offset == 3 * kWordSize); |
97 __ AddImmediate(R3, R2, kWordSize); | 96 __ AddImmediate(R3, R2, kWordSize); |
98 | 97 |
99 __ StoreToOffset(R0, SP, thread_offset); | 98 __ StoreToOffset(R0, SP, thread_offset); |
100 __ StoreToOffset(R1, SP, argc_tag_offset); | 99 __ StoreToOffset(R1, SP, argc_tag_offset); |
101 __ StoreToOffset(R2, SP, argv_offset); | 100 __ StoreToOffset(R2, SP, argv_offset); |
102 __ StoreToOffset(R3, SP, retval_offset); | 101 __ StoreToOffset(R3, SP, retval_offset); |
103 __ mov(R0, SP); // Pass the pointer to the NativeArguments. | 102 __ mov(R0, SP); // Pass the pointer to the NativeArguments. |
104 | 103 |
(...skipping 234 matching lines...) |
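The argv computation in CallToRuntimeStub above is plain address arithmetic: R2 = FP + argc * 8, adjusted by kParamEndSlotFromFp words, with the return-value slot one word above that. Below is a minimal C++ sketch of the same arithmetic, assuming 8-byte words and taking kParamEndSlotFromFp to be 1 (the value of the old inline constant exitframe_last_param_slot_from_fp it replaces); the names and struct are illustrative, not the VM's.

#include <cstdint>

const intptr_t kWordSize = 8;            // ARM64 word size
const intptr_t kParamEndSlotFromFp = 1;  // assumed: matches the old inline constant

struct NativeArgsAddresses {
  uintptr_t argv;    // argv value stored into the NativeArguments block
  uintptr_t retval;  // return-value address, one word above argv
};

// Mirrors:
//   add R2, ZR, R4, LSL #3                                 // R2 = argc * 8
//   add R2, FP, R2
//   AddImmediate R2, R2, kParamEndSlotFromFp * kWordSize   // argv
//   AddImmediate R3, R2, kWordSize                         // retval
NativeArgsAddresses ComputeExitFrameAddresses(uintptr_t fp, intptr_t argc) {
  const uintptr_t argv = fp + argc * kWordSize + kParamEndSlotFromFp * kWordSize;
  return NativeArgsAddresses{argv, argv + kWordSize};
}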
339 // R4: arguments descriptor array. | 338 // R4: arguments descriptor array. |
340 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 339 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { |
341 // Create a stub frame as we are pushing some objects on the stack before | 340 // Create a stub frame as we are pushing some objects on the stack before |
342 // calling into the runtime. | 341 // calling into the runtime. |
343 __ EnterStubFrame(); | 342 __ EnterStubFrame(); |
344 // Setup space on stack for return value and preserve arguments descriptor. | 343 // Setup space on stack for return value and preserve arguments descriptor. |
345 __ Push(R4); | 344 __ Push(R4); |
346 __ PushObject(Object::null_object()); | 345 __ PushObject(Object::null_object()); |
347 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 346 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); |
348 // Get Code object result and restore arguments descriptor array. | 347 // Get Code object result and restore arguments descriptor array. |
349 __ Pop(R0); | 348 __ Pop(CODE_REG); |
350 __ Pop(R4); | 349 __ Pop(R4); |
351 // Remove the stub frame. | 350 // Remove the stub frame. |
352 __ LeaveStubFrame(); | 351 __ LeaveStubFrame(); |
353 // Jump to the dart function. | 352 // Jump to the dart function. |
354 __ LoadFieldFromOffset(R0, R0, Code::entry_point_offset()); | 353 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
355 __ br(R0); | 354 __ br(R0); |
356 } | 355 } |
357 | 356 |
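GenerateCallStaticFunctionStub above, and several stubs further down (FixCallersTarget, FixAllocationStubTarget, the breakpoint stubs, OptimizeFunction), wrap CallRuntime in the same stack protocol: preserve any live registers, push a null object to reserve a result slot, let the runtime entry overwrite that slot with a Code object, then pop it - now into CODE_REG rather than R0 - and branch through Code::entry_point_offset(). Here is a tiny self-contained C++ model of that slot discipline, using a std::vector as the stack and made-up values; it is illustrative only, not the VM's data structures.

#include <cassert>
#include <vector>

int main() {
  std::vector<long> stack;
  long r4 = 0x1234;              // arguments descriptor (stand-in value)
  stack.push_back(r4);           // __ Push(R4)
  stack.push_back(0);            // __ PushObject(null)  -- result slot
  stack.back() = 0x5678;         // runtime call writes the patched Code here
  long code_reg = stack.back();  // __ Pop(CODE_REG)
  stack.pop_back();
  r4 = stack.back();             // __ Pop(R4)
  stack.pop_back();
  assert(code_reg == 0x5678 && r4 == 0x1234);
  return 0;                      // the real stub then jumps via CODE_REG's entry point
}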
358 | 357 |
359 // Called from a static call only when an invalid code has been entered | 358 // Called from a static call only when an invalid code has been entered |
360 // (invalid because its function was optimized or deoptimized). | 359 // (invalid because its function was optimized or deoptimized). |
361 // R4: arguments descriptor array. | 360 // R4: arguments descriptor array. |
362 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 361 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
| 362 // Load code pointer to this stub from the thread: |
| 363 // The one that is passed in is not correct - it points to the code object |
| 364 // that needs to be replaced. |
| 365 __ ldr(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset())); |
363 // Create a stub frame as we are pushing some objects on the stack before | 366 // Create a stub frame as we are pushing some objects on the stack before |
364 // calling into the runtime. | 367 // calling into the runtime. |
365 __ EnterStubFrame(); | 368 __ EnterStubFrame(); |
366 // Setup space on stack for return value and preserve arguments descriptor. | 369 // Setup space on stack for return value and preserve arguments descriptor. |
367 __ Push(R4); | 370 __ Push(R4); |
368 __ PushObject(Object::null_object()); | 371 __ PushObject(Object::null_object()); |
369 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 372 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); |
370 // Get Code object result and restore arguments descriptor array. | 373 // Get Code object result and restore arguments descriptor array. |
371 __ Pop(R0); | 374 __ Pop(CODE_REG); |
372 __ Pop(R4); | 375 __ Pop(R4); |
373 // Remove the stub frame. | 376 // Remove the stub frame. |
374 __ LeaveStubFrame(); | 377 __ LeaveStubFrame(); |
375 // Jump to the dart function. | 378 // Jump to the dart function. |
376 __ LoadFieldFromOffset(R0, R0, Code::entry_point_offset()); | 379 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
377 __ br(R0); | 380 __ br(R0); |
378 } | 381 } |
379 | 382 |
380 | 383 |
381 // Called from object allocate instruction when the allocation stub has been | 384 // Called from object allocate instruction when the allocation stub has been |
382 // disabled. | 385 // disabled. |
383 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { | 386 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { |
| 387 // Load code pointer to this stub from the thread: |
| 388 // The one that is passed in is not correct - it points to the code object |
| 389 // that needs to be replaced. |
| 390 __ ldr(CODE_REG, Address(THR, Thread::fix_allocation_stub_code_offset())); |
384 __ EnterStubFrame(); | 391 __ EnterStubFrame(); |
385 // Setup space on stack for return value. | 392 // Setup space on stack for return value. |
386 __ PushObject(Object::null_object()); | 393 __ PushObject(Object::null_object()); |
387 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); | 394 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); |
388 // Get Code object result. | 395 // Get Code object result. |
389 __ Pop(R0); | 396 __ Pop(CODE_REG); |
390 // Remove the stub frame. | 397 // Remove the stub frame. |
391 __ LeaveStubFrame(); | 398 __ LeaveStubFrame(); |
392 // Jump to the dart function. | 399 // Jump to the dart function. |
393 __ LoadFieldFromOffset(R0, R0, Code::entry_point_offset()); | 400 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
394 __ br(R0); | 401 __ br(R0); |
395 } | 402 } |
396 | 403 |
397 | 404 |
398 // Input parameters: | 405 // Input parameters: |
399 // R2: smi-tagged argument count, may be zero. | 406 // R2: smi-tagged argument count, may be zero. |
400 // FP[kParamEndSlotFromFp + 1]: last argument. | 407 // FP[kParamEndSlotFromFp + 1]: last argument. |
401 static void PushArgumentsArray(Assembler* assembler) { | 408 static void PushArgumentsArray(Assembler* assembler) { |
402 // Allocate array to store arguments of caller. | 409 // Allocate array to store arguments of caller. |
403 __ LoadObject(R1, Object::null_object()); | 410 __ LoadObject(R1, Object::null_object()); |
(...skipping 37 matching lines...) |
441 // Stack after TagAndPushPP() below: | 448 // Stack after TagAndPushPP() below: |
442 // +------------------+ | 449 // +------------------+ |
443 // | Saved PP | <- PP | 450 // | Saved PP | <- PP |
444 // +------------------+ | 451 // +------------------+ |
445 // | PC marker | <- TOS | 452 // | PC marker | <- TOS |
446 // +------------------+ | 453 // +------------------+ |
447 // | Saved FP | <- FP of stub | 454 // | Saved FP | <- FP of stub |
448 // +------------------+ | 455 // +------------------+ |
449 // | return-address | (deoptimization point) | 456 // | return-address | (deoptimization point) |
450 // +------------------+ | 457 // +------------------+ |
| 458 // | Saved CODE_REG | |
| 459 // +------------------+ |
451 // | ... | <- SP of optimized frame | 460 // | ... | <- SP of optimized frame |
452 // | 461 // |
453 // Parts of the code cannot GC, part of the code can GC. | 462 // Parts of the code cannot GC, part of the code can GC. |
454 static void GenerateDeoptimizationSequence(Assembler* assembler, | 463 static void GenerateDeoptimizationSequence(Assembler* assembler, |
455 bool preserve_result) { | 464 DeoptStubKind kind) { |
456 // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 465 // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
457 // is no need to set the correct PC marker or load PP, since they get patched. | 466 // is no need to set the correct PC marker or load PP, since they get patched. |
458 __ EnterStubFrame(); | 467 __ EnterStubFrame(); |
459 | 468 |
460 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry | 469 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry |
461 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. | 470 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. |
462 const intptr_t saved_result_slot_from_fp = | 471 const intptr_t saved_result_slot_from_fp = |
463 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0); | 472 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0); |
464 // Result in R0 is preserved as part of pushing all registers below. | 473 // Result in R0 is preserved as part of pushing all registers below. |
465 | 474 |
466 // Push registers in their enumeration order: lowest register number at | 475 // Push registers in their enumeration order: lowest register number at |
467 // lowest address. | 476 // lowest address. |
468 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { | 477 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { |
469 const Register r = static_cast<Register>(i); | 478 const Register r = static_cast<Register>(i); |
470 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); | 479 if (r == CODE_REG) { |
| 480 COMPILE_ASSERT(R25 > CODE_REG); |
| 481 __ ldr(R25, Address(FP, 2 * kWordSize)); |
| 482 __ str(R25, Address(SP, -1 * kWordSize, Address::PreIndex)); |
| 483 } else { |
| 484 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); |
| 485 } |
471 } | 486 } |
472 | 487 |
473 for (intptr_t reg_idx = kNumberOfVRegisters - 1; reg_idx >= 0; reg_idx--) { | 488 for (intptr_t reg_idx = kNumberOfVRegisters - 1; reg_idx >= 0; reg_idx--) { |
474 VRegister vreg = static_cast<VRegister>(reg_idx); | 489 VRegister vreg = static_cast<VRegister>(reg_idx); |
475 __ PushQuad(vreg); | 490 __ PushQuad(vreg); |
476 } | 491 } |
477 | 492 |
478 __ mov(R0, SP); // Pass address of saved registers block. | 493 __ mov(R0, SP); // Pass address of saved registers block. |
| 494 __ LoadImmediate(R1, kind == kLazyDeopt ? 1 : 0); |
479 __ ReserveAlignedFrameSpace(0); | 495 __ ReserveAlignedFrameSpace(0); |
480 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 1); | 496 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); |
481 // Result (R0) is stack-size (FP - SP) in bytes. | 497 // Result (R0) is stack-size (FP - SP) in bytes. |
482 | 498 |
| 499 const bool preserve_result = (kind == kLazyDeopt); |
483 if (preserve_result) { | 500 if (preserve_result) { |
484 // Restore result into R1 temporarily. | 501 // Restore result into R1 temporarily. |
485 __ LoadFromOffset(R1, FP, saved_result_slot_from_fp * kWordSize); | 502 __ LoadFromOffset(R1, FP, saved_result_slot_from_fp * kWordSize); |
486 } | 503 } |
487 | 504 |
488 // There is a Dart Frame on the stack. We must restore PP and leave frame. | 505 // There is a Dart Frame on the stack. We must restore PP and leave frame. |
| 506 __ RestoreCodePointer(); |
489 __ LeaveStubFrame(); | 507 __ LeaveStubFrame(); |
490 __ sub(SP, FP, Operand(R0)); | 508 __ sub(SP, FP, Operand(R0)); |
491 | 509 |
492 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 510 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
493 // is no need to set the correct PC marker or load PP, since they get patched. | 511 // is no need to set the correct PC marker or load PP, since they get patched. |
494 __ EnterStubFrame(); | 512 __ EnterStubFrame(); |
495 | 513 |
496 if (preserve_result) { | 514 if (preserve_result) { |
497 __ Push(R1); // Preserve result as first local. | 515 __ Push(R1); // Preserve result as first local. |
498 } | 516 } |
499 __ ReserveAlignedFrameSpace(0); | 517 __ ReserveAlignedFrameSpace(0); |
500 __ mov(R0, FP); // Pass last FP as parameter in R0. | 518 __ mov(R0, FP); // Pass last FP as parameter in R0. |
501 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); | 519 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); |
502 if (preserve_result) { | 520 if (preserve_result) { |
503 // Restore result into R1. | 521 // Restore result into R1. |
504 __ LoadFromOffset(R1, FP, kFirstLocalSlotFromFp * kWordSize); | 522 __ LoadFromOffset(R1, FP, kFirstLocalSlotFromFp * kWordSize); |
505 } | 523 } |
506 // Code above cannot cause GC. | 524 // Code above cannot cause GC. |
507 // There is a Dart Frame on the stack. We must restore PP and leave frame. | 525 // There is a Dart Frame on the stack. We must restore PP and leave frame. |
| 526 __ RestoreCodePointer(); |
508 __ LeaveStubFrame(); | 527 __ LeaveStubFrame(); |
509 | 528 |
510 // Frame is fully rewritten at this point and it is safe to perform a GC. | 529 // Frame is fully rewritten at this point and it is safe to perform a GC. |
511 // Materialize any objects that were deferred by FillFrame because they | 530 // Materialize any objects that were deferred by FillFrame because they |
512 // require allocation. | 531 // require allocation. |
513 // Enter stub frame with loading PP. The caller's PP is not materialized yet. | 532 // Enter stub frame with loading PP. The caller's PP is not materialized yet. |
514 __ EnterStubFrame(); | 533 __ EnterStubFrame(); |
515 if (preserve_result) { | 534 if (preserve_result) { |
516 __ Push(R1); // Preserve result, it will be GC-d here. | 535 __ Push(R1); // Preserve result, it will be GC-d here. |
517 } | 536 } |
518 __ Push(ZR); // Space for the result. | 537 __ Push(ZR); // Space for the result. |
519 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); | 538 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); |
520 // Result tells stub how many bytes to remove from the expression stack | 539 // Result tells stub how many bytes to remove from the expression stack |
521 // of the bottom-most frame. They were used as materialization arguments. | 540 // of the bottom-most frame. They were used as materialization arguments. |
522 __ Pop(R1); | 541 __ Pop(R1); |
523 __ SmiUntag(R1); | 542 __ SmiUntag(R1); |
524 if (preserve_result) { | 543 if (preserve_result) { |
525 __ Pop(R0); // Restore result. | 544 __ Pop(R0); // Restore result. |
526 } | 545 } |
527 __ LeaveStubFrame(); | 546 __ LeaveStubFrame(); |
528 // Remove materialization arguments. | 547 // Remove materialization arguments. |
529 __ add(SP, SP, Operand(R1)); | 548 __ add(SP, SP, Operand(R1)); |
530 __ ret(); | 549 __ ret(); |
531 } | 550 } |
532 | 551 |
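Two details of GenerateDeoptimizationSequence above are easy to miss. First, the CPU registers are pushed from the highest index down to R0, so register i lands (kNumberOfCpuRegisters - 1 - i) slots below the first local slot; for R0 that is exactly the saved_result_slot_from_fp expression. Second, CODE_REG is not pushed live (by this point it no longer holds the optimized frame's code object); instead the value spilled in the "Saved CODE_REG" slot at FP + 2 words is pushed in its place via R25. The sketch below covers the slot arithmetic only, keeps the frame constants as parameters rather than guessing them, and assumes, as the stub does, that the first pushed register lands in the first local slot.

#include <cstdint>

// FP-relative slot of a saved CPU register inside the pushed-register block.
// For reg_index == R0 (== 0) this reduces to the stub's expression:
//   kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0)
intptr_t SavedRegisterSlotFromFp(intptr_t first_local_slot_from_fp,
                                 intptr_t num_cpu_registers,
                                 intptr_t reg_index) {
  // reg_index is pushed after (num_cpu_registers - 1 - reg_index) higher-
  // numbered registers, so it sits that many slots further below FP.
  return first_local_slot_from_fp - (num_cpu_registers - 1 - reg_index);
}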
533 | 552 |
534 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { | 553 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { |
535 // Correct return address to point just after the call that is being | 554 // Correct return address to point just after the call that is being |
536 // deoptimized. | 555 // deoptimized. |
537 __ AddImmediate(LR, LR, -CallPattern::kLengthInBytes); | 556 __ AddImmediate(LR, LR, -CallPattern::kDeoptCallLengthInBytes); |
538 GenerateDeoptimizationSequence(assembler, true); // Preserve R0. | 557 // Push zap value instead of CODE_REG for lazy deopt. |
| 558 __ LoadImmediate(TMP, 0xf1f1f1f1); |
| 559 __ Push(TMP); |
| 560 GenerateDeoptimizationSequence(assembler, kLazyDeopt); |
539 } | 561 } |
540 | 562 |
541 | 563 |
542 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 564 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
543 GenerateDeoptimizationSequence(assembler, false); // Don't preserve R0. | 565 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
544 } | 566 } |
545 | 567 |
546 | 568 |
547 static void GenerateDispatcherCode(Assembler* assembler, | 569 static void GenerateDispatcherCode(Assembler* assembler, |
548 Label* call_target_function) { | 570 Label* call_target_function) { |
549 __ Comment("NoSuchMethodDispatch"); | 571 __ Comment("NoSuchMethodDispatch"); |
550 // When lazily generated invocation dispatchers are disabled, the | 572 // When lazily generated invocation dispatchers are disabled, the |
551 // miss-handler may return null. | 573 // miss-handler may return null. |
552 __ CompareObject(R0, Object::null_object()); | 574 __ CompareObject(R0, Object::null_object()); |
553 __ b(call_target_function, NE); | 575 __ b(call_target_function, NE); |
(...skipping 40 matching lines...) |
594 __ Push(R4); | 616 __ Push(R4); |
595 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); | 617 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); |
596 // Remove arguments. | 618 // Remove arguments. |
597 __ Drop(3); | 619 __ Drop(3); |
598 __ Pop(R0); // Get result into R0 (target function). | 620 __ Pop(R0); // Get result into R0 (target function). |
599 | 621 |
600 // Restore IC data and arguments descriptor. | 622 // Restore IC data and arguments descriptor. |
601 __ Pop(R4); | 623 __ Pop(R4); |
602 __ Pop(R5); | 624 __ Pop(R5); |
603 | 625 |
| 626 __ RestoreCodePointer(); |
604 __ LeaveStubFrame(); | 627 __ LeaveStubFrame(); |
605 | 628 |
606 if (!FLAG_lazy_dispatchers) { | 629 if (!FLAG_lazy_dispatchers) { |
607 Label call_target_function; | 630 Label call_target_function; |
608 GenerateDispatcherCode(assembler, &call_target_function); | 631 GenerateDispatcherCode(assembler, &call_target_function); |
609 __ Bind(&call_target_function); | 632 __ Bind(&call_target_function); |
610 } | 633 } |
611 | 634 |
612 // Tail-call to target function. | 635 // Tail-call to target function. |
| 636 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
613 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | 637 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
614 __ br(R2); | 638 __ br(R2); |
615 } | 639 } |
616 | 640 |
617 | 641 |
618 // Called for inline allocation of arrays. | 642 // Called for inline allocation of arrays. |
619 // Input parameters: | 643 // Input parameters: |
620 // LR: return address. | 644 // LR: return address. |
621 // R2: array length as Smi. | 645 // R2: array length as Smi. |
622 // R1: array element type (either NULL or an instantiated type). | 646 // R1: array element type (either NULL or an instantiated type). |
(...skipping 136 matching lines...) |
759 __ Pop(R2); | 783 __ Pop(R2); |
760 __ Pop(R0); | 784 __ Pop(R0); |
761 __ LeaveStubFrame(); | 785 __ LeaveStubFrame(); |
762 __ ret(); | 786 __ ret(); |
763 } | 787 } |
764 | 788 |
765 | 789 |
766 // Called when invoking Dart code from C++ (VM code). | 790 // Called when invoking Dart code from C++ (VM code). |
767 // Input parameters: | 791 // Input parameters: |
768 // LR : points to return address. | 792 // LR : points to return address. |
769 // R0 : entrypoint of the Dart function to call. | 793 // R0 : code object of the Dart function to call. |
770 // R1 : arguments descriptor array. | 794 // R1 : arguments descriptor array. |
771 // R2 : arguments array. | 795 // R2 : arguments array. |
772 // R3 : current thread. | 796 // R3 : current thread. |
773 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 797 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { |
774 __ Comment("InvokeDartCodeStub"); | 798 __ Comment("InvokeDartCodeStub"); |
775 | 799 |
776 // Copy the C stack pointer (R31) into the stack pointer we'll actually use | 800 // Copy the C stack pointer (R31) into the stack pointer we'll actually use |
777 // to access the stack, and put the C stack pointer at the stack limit. | 801 // to access the stack, and put the C stack pointer at the stack limit. |
778 __ SetupDartSP(Isolate::GetSpecifiedStackSize()); | 802 __ SetupDartSP(Isolate::GetSpecifiedStackSize()); |
779 __ EnterFrame(0); | 803 __ EnterFrame(0); |
780 | 804 |
781 // Save the callee-saved registers. | 805 // Save the callee-saved registers. |
782 for (int i = kAbiFirstPreservedCpuReg; i <= kAbiLastPreservedCpuReg; i++) { | 806 for (int i = kAbiFirstPreservedCpuReg; i <= kAbiLastPreservedCpuReg; i++) { |
783 const Register r = static_cast<Register>(i); | 807 const Register r = static_cast<Register>(i); |
784 // We use str instead of the Push macro because we will be pushing the PP | 808 // We use str instead of the Push macro because we will be pushing the PP |
785 // register when it is not holding a pool-pointer since we are coming from | 809 // register when it is not holding a pool-pointer since we are coming from |
786 // C++ code. | 810 // C++ code. |
787 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); | 811 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); |
788 } | 812 } |
789 | 813 |
790 // Save the bottom 64-bits of callee-saved V registers. | 814 // Save the bottom 64-bits of callee-saved V registers. |
791 for (int i = kAbiFirstPreservedFpuReg; i <= kAbiLastPreservedFpuReg; i++) { | 815 for (int i = kAbiFirstPreservedFpuReg; i <= kAbiLastPreservedFpuReg; i++) { |
792 const VRegister r = static_cast<VRegister>(i); | 816 const VRegister r = static_cast<VRegister>(i); |
793 __ PushDouble(r); | 817 __ PushDouble(r); |
794 } | 818 } |
795 | 819 |
796 // We now load the pool pointer(PP) as we are about to invoke dart code and we | |
797 // could potentially invoke some intrinsic functions which need the PP to be | |
798 // set up. | |
799 __ LoadPoolPointer(); | |
800 | |
801 // Set up THR, which caches the current thread in Dart code. | 820 // Set up THR, which caches the current thread in Dart code. |
802 if (THR != R3) { | 821 if (THR != R3) { |
803 __ mov(THR, R3); | 822 __ mov(THR, R3); |
804 } | 823 } |
805 // Load Isolate pointer into temporary register R5. | 824 // Load Isolate pointer into temporary register R5. |
806 __ LoadIsolate(R5); | 825 __ LoadIsolate(R5); |
807 | 826 |
808 // Save the current VMTag on the stack. | 827 // Save the current VMTag on the stack. |
809 __ LoadFromOffset(R4, R5, Isolate::vm_tag_offset()); | 828 __ LoadFromOffset(R4, R5, Isolate::vm_tag_offset()); |
810 __ Push(R4); | 829 __ Push(R4); |
(...skipping 32 matching lines...) |
843 __ LoadImmediate(R1, 0); | 862 __ LoadImmediate(R1, 0); |
844 __ Bind(&push_arguments); | 863 __ Bind(&push_arguments); |
845 __ ldr(R3, Address(R2)); | 864 __ ldr(R3, Address(R2)); |
846 __ Push(R3); | 865 __ Push(R3); |
847 __ add(R1, R1, Operand(1)); | 866 __ add(R1, R1, Operand(1)); |
848 __ add(R2, R2, Operand(kWordSize)); | 867 __ add(R2, R2, Operand(kWordSize)); |
849 __ cmp(R1, Operand(R5)); | 868 __ cmp(R1, Operand(R5)); |
850 __ b(&push_arguments, LT); | 869 __ b(&push_arguments, LT); |
851 __ Bind(&done_push_arguments); | 870 __ Bind(&done_push_arguments); |
852 | 871 |
| 872 // We now load the pool pointer(PP) with a GC safe value as we are about to |
| 873 // invoke dart code. We don't need a real object pool here. |
| 874 // Smi zero does not work because ARM64 assumes PP to be untagged. |
| 875 __ LoadObject(PP, Object::null_object()); |
| 876 |
853 // Call the Dart code entrypoint. | 877 // Call the Dart code entrypoint. |
| 878 __ ldr(CODE_REG, Address(R0, VMHandles::kOffsetOfRawPtrInHandle)); |
| 879 __ ldr(R0, FieldAddress(CODE_REG, Code::entry_point_offset())); |
854 __ blr(R0); // R4 is the arguments descriptor array. | 880 __ blr(R0); // R4 is the arguments descriptor array. |
855 __ Comment("InvokeDartCodeStub return"); | 881 __ Comment("InvokeDartCodeStub return"); |
856 | 882 |
857 // Restore constant pool pointer after return. | |
858 __ LoadPoolPointer(); | |
859 | |
860 // Get rid of arguments pushed on the stack. | 883 // Get rid of arguments pushed on the stack. |
861 __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize); | 884 __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize); |
862 | 885 |
863 __ LoadIsolate(R28); | 886 __ LoadIsolate(R28); |
864 | 887 |
865 // Restore the saved top exit frame info and top resource back into the | 888 // Restore the saved top exit frame info and top resource back into the |
866 // Isolate structure. Uses R6 as a temporary register for this. | 889 // Isolate structure. Uses R6 as a temporary register for this. |
867 __ Pop(R6); | 890 __ Pop(R6); |
868 __ StoreToOffset(R6, THR, Thread::top_exit_frame_info_offset()); | 891 __ StoreToOffset(R6, THR, Thread::top_exit_frame_info_offset()); |
869 __ Pop(R6); | 892 __ Pop(R6); |
(...skipping 11 matching lines...) |
881 | 904 |
882 // Restore C++ ABI callee-saved registers. | 905 // Restore C++ ABI callee-saved registers. |
883 for (int i = kAbiLastPreservedCpuReg; i >= kAbiFirstPreservedCpuReg; i--) { | 906 for (int i = kAbiLastPreservedCpuReg; i >= kAbiFirstPreservedCpuReg; i--) { |
884 Register r = static_cast<Register>(i); | 907 Register r = static_cast<Register>(i); |
885 // We use ldr instead of the Pop macro because we will be popping the PP | 908 // We use ldr instead of the Pop macro because we will be popping the PP |
886 // register when it is not holding a pool-pointer since we are returning to | 909 // register when it is not holding a pool-pointer since we are returning to |
887 // C++ code. We also skip the dart stack pointer SP, since we are still | 910 // C++ code. We also skip the dart stack pointer SP, since we are still |
888 // using it as the stack pointer. | 911 // using it as the stack pointer. |
889 __ ldr(r, Address(SP, 1 * kWordSize, Address::PostIndex)); | 912 __ ldr(r, Address(SP, 1 * kWordSize, Address::PostIndex)); |
890 } | 913 } |
891 __ set_constant_pool_allowed(false); | |
892 | 914 |
893 // Restore the frame pointer and C stack pointer and return. | 915 // Restore the frame pointer and C stack pointer and return. |
894 __ LeaveFrame(); | 916 __ LeaveFrame(); |
895 __ mov(CSP, SP); | 917 __ mov(CSP, SP); |
896 __ ret(); | 918 __ ret(); |
897 } | 919 } |
898 | 920 |
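InvokeDartCodeStub above now receives the Code object of the target function (through a VM handle) in R0 rather than a raw entry point: the two loads before blr first unwrap the handle and then read the entry point out of the Code object. A rough C++ sketch of that double indirection follows, with made-up struct layouts standing in for the handle and Code representations; the real FieldAddress load also strips the heap-object tag, which is ignored here.

#include <cstdint>

struct RawCode {
  uintptr_t entry_point;  // stand-in for the field behind Code::entry_point_offset()
};

struct CodeHandle {
  RawCode* raw;           // stand-in for VMHandles::kOffsetOfRawPtrInHandle
};

// Mirrors:
//   ldr CODE_REG, [R0, #kOffsetOfRawPtrInHandle]                // unwrap handle
//   ldr R0, FieldAddress(CODE_REG, Code::entry_point_offset())
//   blr R0
uintptr_t EntryPointFromHandle(const CodeHandle* handle) {
  RawCode* code = handle->raw;
  return code->entry_point;
}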
899 | 921 |
900 // Called for inline allocation of contexts. | 922 // Called for inline allocation of contexts. |
901 // Input: | 923 // Input: |
(...skipping 165 matching lines...) |
1067 // Restore callee-saved registers, tear down frame. | 1089 // Restore callee-saved registers, tear down frame. |
1068 __ LeaveCallRuntimeFrame(); | 1090 __ LeaveCallRuntimeFrame(); |
1069 __ ret(); | 1091 __ ret(); |
1070 } | 1092 } |
1071 | 1093 |
1072 | 1094 |
1073 // Called for inline allocation of objects. | 1095 // Called for inline allocation of objects. |
1074 // Input parameters: | 1096 // Input parameters: |
1075 // LR : return address. | 1097 // LR : return address. |
1076 // SP + 0 : type arguments object (only if class is parameterized). | 1098 // SP + 0 : type arguments object (only if class is parameterized). |
1077 void StubCode::GenerateAllocationStubForClass( | 1099 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, |
1078 Assembler* assembler, const Class& cls, | 1100 const Class& cls) { |
1079 uword* entry_patch_offset, uword* patch_code_pc_offset) { | |
1080 *entry_patch_offset = assembler->CodeSize(); | |
1081 // The generated code is different if the class is parameterized. | 1101 // The generated code is different if the class is parameterized. |
1082 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; | 1102 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; |
1083 ASSERT(!is_cls_parameterized || | 1103 ASSERT(!is_cls_parameterized || |
1084 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); | 1104 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); |
1085 // kInlineInstanceSize is a constant used as a threshold for determining | 1105 // kInlineInstanceSize is a constant used as a threshold for determining |
1086 // when the object initialization should be done as a loop or as | 1106 // when the object initialization should be done as a loop or as |
1087 // straight line code. | 1107 // straight line code. |
1088 const int kInlineInstanceSize = 12; | 1108 const int kInlineInstanceSize = 12; |
1089 const intptr_t instance_size = cls.instance_size(); | 1109 const intptr_t instance_size = cls.instance_size(); |
1090 ASSERT(instance_size > 0); | 1110 ASSERT(instance_size > 0); |
(...skipping 99 matching lines...) |
1190 // Push null type arguments. | 1210 // Push null type arguments. |
1191 __ PushObject(Object::null_object()); | 1211 __ PushObject(Object::null_object()); |
1192 } | 1212 } |
1193 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. | 1213 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. |
1194 __ Drop(2); // Pop arguments. | 1214 __ Drop(2); // Pop arguments. |
1195 __ Pop(R0); // Pop result (newly allocated object). | 1215 __ Pop(R0); // Pop result (newly allocated object). |
1196 // R0: new object | 1216 // R0: new object |
1197 // Restore the frame pointer. | 1217 // Restore the frame pointer. |
1198 __ LeaveStubFrame(); | 1218 __ LeaveStubFrame(); |
1199 __ ret(); | 1219 __ ret(); |
1200 *patch_code_pc_offset = assembler->CodeSize(); | |
1201 __ BranchPatchable(*StubCode::FixAllocationStubTarget_entry()); | |
1202 } | 1220 } |
1203 | 1221 |
1204 | 1222 |
1205 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function | 1223 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function |
1206 // from the entry code of a dart function after an error in passed argument | 1224 // from the entry code of a dart function after an error in passed argument |
1207 // name or number is detected. | 1225 // name or number is detected. |
1208 // Input parameters: | 1226 // Input parameters: |
1209 // LR : return address. | 1227 // LR : return address. |
1210 // SP : address of last argument. | 1228 // SP : address of last argument. |
1211 // R4: arguments descriptor array. | 1229 // R4: arguments descriptor array. |
(...skipping 296 matching lines...) |
1508 // Pass IC data object. | 1526 // Pass IC data object. |
1509 __ Push(R5); | 1527 __ Push(R5); |
1510 __ CallRuntime(handle_ic_miss, num_args + 1); | 1528 __ CallRuntime(handle_ic_miss, num_args + 1); |
1511 // Remove the call arguments pushed earlier, including the IC data object. | 1529 // Remove the call arguments pushed earlier, including the IC data object. |
1512 __ Drop(num_args + 1); | 1530 __ Drop(num_args + 1); |
1513 // Pop returned function object into R0. | 1531 // Pop returned function object into R0. |
1514 // Restore arguments descriptor array and IC data array. | 1532 // Restore arguments descriptor array and IC data array. |
1515 __ Pop(R0); // Pop returned function object into R0. | 1533 __ Pop(R0); // Pop returned function object into R0. |
1516 __ Pop(R5); // Restore IC Data. | 1534 __ Pop(R5); // Restore IC Data. |
1517 __ Pop(R4); // Restore arguments descriptor array. | 1535 __ Pop(R4); // Restore arguments descriptor array. |
| 1536 if (range_collection_mode == kCollectRanges) { |
| 1537 __ RestoreCodePointer(); |
| 1538 } |
1518 __ LeaveStubFrame(); | 1539 __ LeaveStubFrame(); |
1519 Label call_target_function; | 1540 Label call_target_function; |
1520 if (!FLAG_lazy_dispatchers) { | 1541 if (!FLAG_lazy_dispatchers) { |
1521 GenerateDispatcherCode(assembler, &call_target_function); | 1542 GenerateDispatcherCode(assembler, &call_target_function); |
1522 } else { | 1543 } else { |
1523 __ b(&call_target_function); | 1544 __ b(&call_target_function); |
1524 } | 1545 } |
1525 | 1546 |
1526 __ Bind(&found); | 1547 __ Bind(&found); |
1527 __ Comment("Update caller's counter"); | 1548 __ Comment("Update caller's counter"); |
1528 // R6: pointer to an IC data check group. | 1549 // R6: pointer to an IC data check group. |
1529 const intptr_t target_offset = ICData::TargetIndexFor(num_args) * kWordSize; | 1550 const intptr_t target_offset = ICData::TargetIndexFor(num_args) * kWordSize; |
1530 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; | 1551 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; |
1531 __ LoadFromOffset(R0, R6, target_offset); | 1552 __ LoadFromOffset(R0, R6, target_offset); |
1532 | 1553 |
1533 if (FLAG_optimization_counter_threshold >= 0) { | 1554 if (FLAG_optimization_counter_threshold >= 0) { |
1534 // Update counter. | 1555 // Update counter. |
1535 __ LoadFromOffset(R1, R6, count_offset); | 1556 __ LoadFromOffset(R1, R6, count_offset); |
1536 __ adds(R1, R1, Operand(Smi::RawValue(1))); | 1557 __ adds(R1, R1, Operand(Smi::RawValue(1))); |
1537 __ LoadImmediate(R2, Smi::RawValue(Smi::kMaxValue)); | 1558 __ LoadImmediate(R2, Smi::RawValue(Smi::kMaxValue)); |
1538 __ csel(R1, R2, R1, VS); // Overflow. | 1559 __ csel(R1, R2, R1, VS); // Overflow. |
1539 __ StoreToOffset(R1, R6, count_offset); | 1560 __ StoreToOffset(R1, R6, count_offset); |
1540 } | 1561 } |
1541 | 1562 |
1542 __ Comment("Call target"); | 1563 __ Comment("Call target"); |
1543 __ Bind(&call_target_function); | 1564 __ Bind(&call_target_function); |
1544 // R0: target function. | 1565 // R0: target function. |
1545 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | |
1546 if (range_collection_mode == kCollectRanges) { | 1566 if (range_collection_mode == kCollectRanges) { |
| 1567 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
1547 __ ldr(R1, Address(SP, 0 * kWordSize)); | 1568 __ ldr(R1, Address(SP, 0 * kWordSize)); |
1548 if (num_args == 2) { | 1569 if (num_args == 2) { |
1549 __ ldr(R3, Address(SP, 1 * kWordSize)); | 1570 __ ldr(R3, Address(SP, 1 * kWordSize)); |
1550 } | 1571 } |
1551 __ EnterStubFrame(); | 1572 __ EnterStubFrame(); |
1552 __ Push(R5); | 1573 __ Push(R5); |
1553 if (num_args == 2) { | 1574 if (num_args == 2) { |
1554 __ Push(R3); | 1575 __ Push(R3); |
1555 } | 1576 } |
1556 __ Push(R1); | 1577 __ Push(R1); |
| 1578 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
1557 __ blr(R2); | 1579 __ blr(R2); |
1558 | 1580 |
1559 Label done; | 1581 Label done; |
1560 __ ldr(R5, Address(FP, kFirstLocalSlotFromFp * kWordSize)); | 1582 __ ldr(R5, Address(FP, kFirstLocalSlotFromFp * kWordSize)); |
1561 __ UpdateRangeFeedback(R0, 2, R5, R1, R4, &done); | 1583 __ UpdateRangeFeedback(R0, 2, R5, R1, R4, &done); |
1562 __ Bind(&done); | 1584 __ Bind(&done); |
1563 __ LeaveStubFrame(); | 1585 __ LeaveStubFrame(); |
1564 __ ret(); | 1586 __ ret(); |
1565 } else { | 1587 } else { |
| 1588 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
| 1589 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
1566 __ br(R2); | 1590 __ br(R2); |
1567 } | 1591 } |
1568 | 1592 |
1569 if (FLAG_support_debugger && !optimized) { | 1593 if (FLAG_support_debugger && !optimized) { |
1570 __ Bind(&stepping); | 1594 __ Bind(&stepping); |
1571 __ EnterStubFrame(); | 1595 __ EnterStubFrame(); |
1572 __ Push(R5); // Preserve IC data. | 1596 __ Push(R5); // Preserve IC data. |
1573 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1597 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
1574 __ Pop(R5); | 1598 __ Pop(R5); |
| 1599 __ RestoreCodePointer(); |
1575 __ LeaveStubFrame(); | 1600 __ LeaveStubFrame(); |
1576 __ b(&done_stepping); | 1601 __ b(&done_stepping); |
1577 } | 1602 } |
1578 } | 1603 } |
1579 | 1604 |
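The counter updates in the IC stubs (here and in the unoptimized static-call stub below) are saturating increments: adds sets the overflow flag, and csel ... VS replaces the wrapped value with Smi::RawValue(Smi::kMaxValue). The same idea in plain C++ looks roughly like the sketch below, ignoring the Smi tagging of the operands and using a GCC/Clang builtin to stand in for the overflow flag; it is illustrative only.

#include <cstdint>

// Mirrors:
//   adds R1, R1, #increment    // sets the V (overflow) flag
//   csel R1, R2, R1, VS        // R2 holds the clamp value
int64_t IncrementWithClamp(int64_t count, int64_t increment, int64_t max_value) {
  int64_t result;
  if (__builtin_add_overflow(count, increment, &result)) {
    return max_value;  // overflow: keep the clamp value instead of wrapping
  }
  return result;
}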
1580 | 1605 |
1581 // Use inline cache data array to invoke the target or continue in inline | 1606 // Use inline cache data array to invoke the target or continue in inline |
1582 // cache miss handler. Stub for 1-argument check (receiver class). | 1607 // cache miss handler. Stub for 1-argument check (receiver class). |
1583 // LR: return address. | 1608 // LR: return address. |
1584 // R5: inline cache data object. | 1609 // R5: inline cache data object. |
(...skipping 124 matching lines...) |
1709 __ LoadImmediate(R2, Smi::RawValue(Smi::kMaxValue)); | 1734 __ LoadImmediate(R2, Smi::RawValue(Smi::kMaxValue)); |
1710 __ csel(R1, R2, R1, VS); // Overflow. | 1735 __ csel(R1, R2, R1, VS); // Overflow. |
1711 __ StoreToOffset(R1, R6, count_offset); | 1736 __ StoreToOffset(R1, R6, count_offset); |
1712 } | 1737 } |
1713 | 1738 |
1714 // Load arguments descriptor into R4. | 1739 // Load arguments descriptor into R4. |
1715 __ LoadFieldFromOffset(R4, R5, ICData::arguments_descriptor_offset()); | 1740 __ LoadFieldFromOffset(R4, R5, ICData::arguments_descriptor_offset()); |
1716 | 1741 |
1717 // Get function and call it, if possible. | 1742 // Get function and call it, if possible. |
1718 __ LoadFromOffset(R0, R6, target_offset); | 1743 __ LoadFromOffset(R0, R6, target_offset); |
| 1744 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
1719 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | 1745 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
1720 __ br(R2); | 1746 __ br(R2); |
1721 | 1747 |
1722 if (FLAG_support_debugger) { | 1748 if (FLAG_support_debugger) { |
1723 __ Bind(&stepping); | 1749 __ Bind(&stepping); |
1724 __ EnterStubFrame(); | 1750 __ EnterStubFrame(); |
1725 __ Push(R5); // Preserve IC data. | 1751 __ Push(R5); // Preserve IC data. |
1726 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1752 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
1727 __ Pop(R5); | 1753 __ Pop(R5); |
| 1754 __ RestoreCodePointer(); |
1728 __ LeaveStubFrame(); | 1755 __ LeaveStubFrame(); |
1729 __ b(&done_stepping); | 1756 __ b(&done_stepping); |
1730 } | 1757 } |
1731 } | 1758 } |
1732 | 1759 |
1733 | 1760 |
1734 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { | 1761 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { |
1735 GenerateUsageCounterIncrement(assembler, R6); | 1762 GenerateUsageCounterIncrement(assembler, R6); |
1736 GenerateNArgsCheckInlineCacheStub( | 1763 GenerateNArgsCheckInlineCacheStub( |
1737 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, | 1764 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, |
(...skipping 18 matching lines...) |
1756 __ EnterStubFrame(); | 1783 __ EnterStubFrame(); |
1757 __ Push(R5); // Save IC Data. | 1784 __ Push(R5); // Save IC Data. |
1758 __ Push(R4); // Save arg. desc. | 1785 __ Push(R4); // Save arg. desc. |
1759 __ Push(R0); // Pass function. | 1786 __ Push(R0); // Pass function. |
1760 __ CallRuntime(kCompileFunctionRuntimeEntry, 1); | 1787 __ CallRuntime(kCompileFunctionRuntimeEntry, 1); |
1761 __ Pop(R0); // Restore argument. | 1788 __ Pop(R0); // Restore argument. |
1762 __ Pop(R4); // Restore arg desc. | 1789 __ Pop(R4); // Restore arg desc. |
1763 __ Pop(R5); // Restore IC Data. | 1790 __ Pop(R5); // Restore IC Data. |
1764 __ LeaveStubFrame(); | 1791 __ LeaveStubFrame(); |
1765 | 1792 |
| 1793 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
1766 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | 1794 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
1767 __ br(R2); | 1795 __ br(R2); |
1768 } | 1796 } |
1769 | 1797 |
1770 | 1798 |
1771 // R5: Contains an ICData. | 1799 // R5: Contains an ICData. |
1772 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { | 1800 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { |
1773 __ EnterStubFrame(); | 1801 __ EnterStubFrame(); |
1774 __ Push(R5); | 1802 __ Push(R5); |
1775 __ PushObject(Object::null_object()); // Space for result. | 1803 __ PushObject(Object::null_object()); // Space for result. |
1776 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 1804 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); |
1777 __ Pop(R0); | 1805 __ Pop(CODE_REG); |
1778 __ Pop(R5); | 1806 __ Pop(R5); |
1779 __ LeaveStubFrame(); | 1807 __ LeaveStubFrame(); |
| 1808 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
1780 __ br(R0); | 1809 __ br(R0); |
1781 } | 1810 } |
1782 | 1811 |
1783 | 1812 |
1784 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { | 1813 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { |
1785 __ EnterStubFrame(); | 1814 __ EnterStubFrame(); |
1786 __ PushObject(Object::null_object()); // Space for result. | 1815 __ PushObject(Object::null_object()); // Space for result. |
1787 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 1816 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); |
1788 __ Pop(R0); | 1817 __ Pop(CODE_REG); |
1789 __ LeaveStubFrame(); | 1818 __ LeaveStubFrame(); |
| 1819 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
1790 __ br(R0); | 1820 __ br(R0); |
1791 } | 1821 } |
1792 | 1822 |
1793 // Called only from unoptimized code. All relevant registers have been saved. | 1823 // Called only from unoptimized code. All relevant registers have been saved. |
1794 void StubCode::GenerateDebugStepCheckStub( | 1824 void StubCode::GenerateDebugStepCheckStub( |
1795 Assembler* assembler) { | 1825 Assembler* assembler) { |
1796 // Check single stepping. | 1826 // Check single stepping. |
1797 Label stepping, done_stepping; | 1827 Label stepping, done_stepping; |
1798 __ LoadIsolate(R1); | 1828 __ LoadIsolate(R1); |
1799 __ LoadFromOffset( | 1829 __ LoadFromOffset( |
(...skipping 157 matching lines...) |
1957 // R6: function to be re-optimized. | 1987 // R6: function to be re-optimized. |
1958 // R4: argument descriptor (preserved). | 1988 // R4: argument descriptor (preserved). |
1959 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1989 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
1960 __ EnterStubFrame(); | 1990 __ EnterStubFrame(); |
1961 __ Push(R4); | 1991 __ Push(R4); |
1962 // Setup space on stack for the return value. | 1992 // Setup space on stack for the return value. |
1963 __ PushObject(Object::null_object()); | 1993 __ PushObject(Object::null_object()); |
1964 __ Push(R6); | 1994 __ Push(R6); |
1965 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 1995 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
1966 __ Pop(R0); // Discard argument. | 1996 __ Pop(R0); // Discard argument. |
1967 __ Pop(R0); // Get Code object | 1997 __ Pop(CODE_REG); // Get Code object |
1968 __ Pop(R4); // Restore argument descriptor. | 1998 __ Pop(R4); // Restore argument descriptor. |
1969 __ LoadFieldFromOffset(R0, R0, Code::entry_point_offset()); | 1999 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
1970 __ LeaveStubFrame(); | 2000 __ LeaveStubFrame(); |
1971 __ br(R0); | 2001 __ br(R0); |
1972 __ brk(0); | 2002 __ brk(0); |
1973 } | 2003 } |
1974 | 2004 |
1975 | 2005 |
1976 // Does identical check (object references are equal or not equal) with special | 2006 // Does identical check (object references are equal or not equal) with special |
1977 // checks for boxed numbers. | 2007 // checks for boxed numbers. |
1978 // Left and right are pushed on stack. | 2008 // Left and right are pushed on stack. |
1979 // Return Zero condition flag set if equal. | 2009 // Return Zero condition flag set if equal. |
(...skipping 72 matching lines...) |
2052 const Register right = R0; | 2082 const Register right = R0; |
2053 __ LoadFromOffset(left, SP, 1 * kWordSize); | 2083 __ LoadFromOffset(left, SP, 1 * kWordSize); |
2054 __ LoadFromOffset(right, SP, 0 * kWordSize); | 2084 __ LoadFromOffset(right, SP, 0 * kWordSize); |
2055 GenerateIdenticalWithNumberCheckStub(assembler, left, right); | 2085 GenerateIdenticalWithNumberCheckStub(assembler, left, right); |
2056 __ ret(); | 2086 __ ret(); |
2057 | 2087 |
2058 if (FLAG_support_debugger) { | 2088 if (FLAG_support_debugger) { |
2059 __ Bind(&stepping); | 2089 __ Bind(&stepping); |
2060 __ EnterStubFrame(); | 2090 __ EnterStubFrame(); |
2061 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 2091 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
| 2092 __ RestoreCodePointer(); |
2062 __ LeaveStubFrame(); | 2093 __ LeaveStubFrame(); |
2063 __ b(&done_stepping); | 2094 __ b(&done_stepping); |
2064 } | 2095 } |
2065 } | 2096 } |
2066 | 2097 |
2067 | 2098 |
2068 // Called from optimized code only. | 2099 // Called from optimized code only. |
2069 // LR: return address. | 2100 // LR: return address. |
2070 // SP + 4: left operand. | 2101 // SP + 4: left operand. |
2071 // SP + 0: right operand. | 2102 // SP + 0: right operand. |
(...skipping 38 matching lines...) |
2110 __ CompareRegisters(R4, R0); | 2141 __ CompareRegisters(R4, R0); |
2111 __ b(&update, NE); | 2142 __ b(&update, NE); |
2112 | 2143 |
2113 __ Bind(&call_target_function); | 2144 __ Bind(&call_target_function); |
2114 // Call the target found in the cache. For a class id match, this is a | 2145 // Call the target found in the cache. For a class id match, this is a |
2115 // proper target for the given name and arguments descriptor. If the | 2146 // proper target for the given name and arguments descriptor. If the |
2116 // illegal class id was found, the target is a cache miss handler that can | 2147 // illegal class id was found, the target is a cache miss handler that can |
2117 // be invoked as a normal Dart function. | 2148 // be invoked as a normal Dart function. |
2118 __ add(TMP, R2, Operand(R3, LSL, 3)); | 2149 __ add(TMP, R2, Operand(R3, LSL, 3)); |
2119 __ LoadFieldFromOffset(R0, TMP, base + kWordSize); | 2150 __ LoadFieldFromOffset(R0, TMP, base + kWordSize); |
| 2151 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
2120 __ LoadFieldFromOffset(R1, R0, Function::entry_point_offset()); | 2152 __ LoadFieldFromOffset(R1, R0, Function::entry_point_offset()); |
2121 } | 2153 } |
2122 | 2154 |
2123 | 2155 |
2124 // Called from megamorphic calls. | 2156 // Called from megamorphic calls. |
2125 // R0: receiver. | 2157 // R0: receiver. |
2126 // R1: lookup cache. | 2158 // R1: lookup cache. |
2127 // Result: | 2159 // Result: |
2128 // R1: entry point. | 2160 // R1: entry point. |
2129 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { | 2161 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { |
2130 EmitMegamorphicLookup(assembler, R0, R1, R1); | 2162 EmitMegamorphicLookup(assembler, R0, R1, R1); |
2131 __ ret(); | 2163 __ ret(); |
2132 } | 2164 } |
2133 | 2165 |
2134 } // namespace dart | 2166 } // namespace dart |
2135 | 2167 |
2136 #endif // defined TARGET_ARCH_ARM64 | 2168 #endif // defined TARGET_ARCH_ARM64 |