| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 297 matching lines...) |
| 308 // Get the global function with the given index. | 308 // Get the global function with the given index. |
| 309 Handle<JSFunction> function( | 309 Handle<JSFunction> function( |
| 310 JSFunction::cast(isolate->native_context()->get(index))); | 310 JSFunction::cast(isolate->native_context()->get(index))); |
| 311 // Load its initial map. The global functions all have initial maps. | 311 // Load its initial map. The global functions all have initial maps. |
| 312 __ Move(prototype, Handle<Map>(function->initial_map())); | 312 __ Move(prototype, Handle<Map>(function->initial_map())); |
| 313 // Load the prototype from the initial map. | 313 // Load the prototype from the initial map. |
| 314 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); | 314 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); |
| 315 } | 315 } |
| 316 | 316 |
| 317 | 317 |
| 318 void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm, | 318 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, |
| 319 Register dst, | 319 Register dst, |
| 320 Register src, | 320 Register src, |
| 321 bool inobject, | 321 bool inobject, |
| 322 int index) { | 322 int index, |
| 323 Representation representation) { |
| 324 ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); |
| 323 int offset = index * kPointerSize; | 325 int offset = index * kPointerSize; |
| 324 if (!inobject) { | 326 if (!inobject) { |
| 325 // Calculate the offset into the properties array. | 327 // Calculate the offset into the properties array. |
| 326 offset = offset + FixedArray::kHeaderSize; | 328 offset = offset + FixedArray::kHeaderSize; |
| 327 __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset)); | 329 __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset)); |
| 328 src = dst; | 330 src = dst; |
| 329 } | 331 } |
| 330 __ ldr(dst, FieldMemOperand(src, offset)); | 332 __ ldr(dst, FieldMemOperand(src, offset)); |
| 331 } | 333 } |
| 332 | 334 |
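Aside on the GenerateFastPropertyLoad change above: the helper now takes the field's Representation and asserts it is never Double when double tracking is on, presumably because unboxed-double fields are served by the dedicated LoadFieldStub path further down rather than by this raw load. The addressing it emits, sketched in plain C++ (illustrative only; RawFieldAt is a hypothetical accessor, not a real V8 helper):

// In-object fields live inside the JSObject itself; all other fast fields
// live in the separate properties FixedArray hanging off the object.
static Object* LoadFastProperty(JSObject* obj, bool inobject, int index) {
  if (inobject) {
    // ldr(dst, FieldMemOperand(receiver, index * kPointerSize))
    return RawFieldAt(obj, index * kPointerSize);  // hypothetical accessor
  }
  // ldr(dst, FieldMemOperand(receiver, JSObject::kPropertiesOffset))
  FixedArray* properties = obj->properties();
  // ldr(dst, FieldMemOperand(properties,
  //                          FixedArray::kHeaderSize + index * kPointerSize))
  return properties->get(index);
}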
| (...skipping 111 matching lines...) |
| 444 void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, | 446 void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, |
| 445 Handle<JSObject> object, | 447 Handle<JSObject> object, |
| 446 LookupResult* lookup, | 448 LookupResult* lookup, |
| 447 Handle<Map> transition, | 449 Handle<Map> transition, |
| 448 Handle<Name> name, | 450 Handle<Name> name, |
| 449 Register receiver_reg, | 451 Register receiver_reg, |
| 450 Register name_reg, | 452 Register name_reg, |
| 451 Register value_reg, | 453 Register value_reg, |
| 452 Register scratch1, | 454 Register scratch1, |
| 453 Register scratch2, | 455 Register scratch2, |
| 456 Register scratch3, |
| 454 Label* miss_label, | 457 Label* miss_label, |
| 455 Label* miss_restore_name) { | 458 Label* miss_restore_name, |
| 459 Label* slow) { |
| 456 // r0 : value | 460 // r0 : value |
| 457 Label exit; | 461 Label exit; |
| 458 | 462 |
| 459 // Check that the map of the object hasn't changed. | 463 // Check that the map of the object hasn't changed. |
| 460 __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label, | 464 __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label, |
| 461 DO_SMI_CHECK, REQUIRE_EXACT_MAP); | 465 DO_SMI_CHECK, REQUIRE_EXACT_MAP); |
| 462 | 466 |
| 463 // Perform global security token check if needed. | 467 // Perform global security token check if needed. |
| 464 if (object->IsJSGlobalProxy()) { | 468 if (object->IsJSGlobalProxy()) { |
| 465 __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label); | 469 __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label); |
| 466 } | 470 } |
| 467 | 471 |
| 468 int descriptor = transition->LastAdded(); | 472 int descriptor = transition->LastAdded(); |
| 469 DescriptorArray* descriptors = transition->instance_descriptors(); | 473 DescriptorArray* descriptors = transition->instance_descriptors(); |
| 470 PropertyDetails details = descriptors->GetDetails(descriptor); | 474 PropertyDetails details = descriptors->GetDetails(descriptor); |
| 471 Representation representation = details.representation(); | 475 Representation representation = details.representation(); |
| 472 ASSERT(!representation.IsNone()); | 476 ASSERT(!representation.IsNone()); |
| 473 | 477 |
| 474 // Ensure no transitions to deprecated maps are followed. | 478 // Ensure no transitions to deprecated maps are followed. |
| 475 __ CheckMapDeprecated(transition, scratch1, miss_label); | 479 __ CheckMapDeprecated(transition, scratch1, miss_label); |
| 476 | 480 |
| 477 if (FLAG_track_fields && representation.IsSmi()) { | |
| 478 __ JumpIfNotSmi(value_reg, miss_label); | |
| 479 } else if (FLAG_track_double_fields && representation.IsDouble()) { | |
| 480 Label do_store; | |
| 481 __ JumpIfSmi(value_reg, &do_store); | |
| 482 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, | |
| 483 miss_label, DONT_DO_SMI_CHECK); | |
| 484 __ bind(&do_store); | |
| 485 } | |
| 486 | |
| 487 // Check that we are allowed to write this. | 481 // Check that we are allowed to write this. |
| 488 if (object->GetPrototype()->IsJSObject()) { | 482 if (object->GetPrototype()->IsJSObject()) { |
| 489 JSObject* holder; | 483 JSObject* holder; |
| 490 // holder == object indicates that no property was found. | 484 // holder == object indicates that no property was found. |
| 491 if (lookup->holder() != *object) { | 485 if (lookup->holder() != *object) { |
| 492 holder = lookup->holder(); | 486 holder = lookup->holder(); |
| 493 } else { | 487 } else { |
| 494 // Find the top object. | 488 // Find the top object. |
| 495 holder = *object; | 489 holder = *object; |
| 496 do { | 490 do { |
| 497 holder = JSObject::cast(holder->GetPrototype()); | 491 holder = JSObject::cast(holder->GetPrototype()); |
| 498 } while (holder->GetPrototype()->IsJSObject()); | 492 } while (holder->GetPrototype()->IsJSObject()); |
| 499 } | 493 } |
| 500 Register holder_reg = CheckPrototypes( | 494 Register holder_reg = CheckPrototypes( |
| 501 object, receiver_reg, Handle<JSObject>(holder), name_reg, | 495 object, receiver_reg, Handle<JSObject>(holder), name_reg, |
| 502 scratch1, scratch2, name, miss_restore_name); | 496 scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER); |
| 503 // If no property was found, and the holder (the last object in the | 497 // If no property was found, and the holder (the last object in the |
| 504 // prototype chain) is in slow mode, we need to do a negative lookup on the | 498 // prototype chain) is in slow mode, we need to do a negative lookup on the |
| 505 // holder. | 499 // holder. |
| 506 if (lookup->holder() == *object) { | 500 if (lookup->holder() == *object) { |
| 507 if (holder->IsJSGlobalObject()) { | 501 if (holder->IsJSGlobalObject()) { |
| 508 GenerateCheckPropertyCell( | 502 GenerateCheckPropertyCell( |
| 509 masm, | 503 masm, |
| 510 Handle<GlobalObject>(GlobalObject::cast(holder)), | 504 Handle<GlobalObject>(GlobalObject::cast(holder)), |
| 511 name, | 505 name, |
| 512 scratch1, | 506 scratch1, |
| 513 miss_restore_name); | 507 miss_restore_name); |
| 514 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) { | 508 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) { |
| 515 GenerateDictionaryNegativeLookup( | 509 GenerateDictionaryNegativeLookup( |
| 516 masm, miss_restore_name, holder_reg, name, scratch1, scratch2); | 510 masm, miss_restore_name, holder_reg, name, scratch1, scratch2); |
| 517 } | 511 } |
| 518 } | 512 } |
| 519 } | 513 } |
| 520 | 514 |
| 515 Register storage_reg = name_reg; |
| 516 |
| 517 if (FLAG_track_fields && representation.IsSmi()) { |
| 518 __ JumpIfNotSmi(value_reg, miss_restore_name); |
| 519 } else if (FLAG_track_double_fields && representation.IsDouble()) { |
| 520 Label do_store, heap_number; |
| 521 __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); |
| 522 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow); |
| 523 |
| 524 __ JumpIfNotSmi(value_reg, &heap_number); |
| 525 __ SmiUntag(scratch1, value_reg); |
| 526 __ vmov(s0, scratch1); |
| 527 __ vcvt_f64_s32(d0, s0); |
| 528 __ jmp(&do_store); |
| 529 |
| 530 __ bind(&heap_number); |
| 531 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, |
| 532 miss_restore_name, DONT_DO_SMI_CHECK); |
| 533 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); |
| 534 |
| 535 __ bind(&do_store); |
| 536 __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); |
| 537 } |
| 538 |
| 521 // Stub never generated for non-global objects that require access | 539 // Stub never generated for non-global objects that require access |
| 522 // checks. | 540 // checks. |
| 523 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); | 541 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); |
| 524 | 542 |
| 525 // Perform map transition for the receiver if necessary. | 543 // Perform map transition for the receiver if necessary. |
| 526 if (object->map()->unused_property_fields() == 0) { | 544 if (object->map()->unused_property_fields() == 0) { |
| 527 // The properties must be extended before we can store the value. | 545 // The properties must be extended before we can store the value. |
| 528 // We jump to a runtime call that extends the properties array. | 546 // We jump to a runtime call that extends the properties array. |
| 529 __ push(receiver_reg); | 547 __ push(receiver_reg); |
| 530 __ mov(r2, Operand(transition)); | 548 __ mov(r2, Operand(transition)); |
| 531 __ Push(r2, r0); | 549 __ Push(r2, r0); |
| 532 __ TailCallExternalReference( | 550 __ TailCallExternalReference( |
| 533 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), | 551 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), |
| 534 masm->isolate()), | 552 masm->isolate()), |
| 535 3, | 553 3, |
| 536 1); | 554 1); |
| 537 return; | 555 return; |
| 538 } | 556 } |
| 539 | 557 |
| 540 // Update the map of the object. | 558 // Update the map of the object. |
| 541 __ mov(scratch1, Operand(transition)); | 559 __ mov(scratch1, Operand(transition)); |
| 542 __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); | 560 __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
| 543 | 561 |
| 544 // Update the write barrier for the map field and pass the now unused | 562 // Update the write barrier for the map field and pass the now unused |
| 545 // name_reg as scratch register. | 563 // name_reg as scratch register. |
| 546 __ RecordWriteField(receiver_reg, | 564 __ RecordWriteField(receiver_reg, |
| 547 HeapObject::kMapOffset, | 565 HeapObject::kMapOffset, |
| 548 scratch1, | 566 scratch1, |
| 549 name_reg, | 567 scratch2, |
| 550 kLRHasNotBeenSaved, | 568 kLRHasNotBeenSaved, |
| 551 kDontSaveFPRegs, | 569 kDontSaveFPRegs, |
| 552 OMIT_REMEMBERED_SET, | 570 OMIT_REMEMBERED_SET, |
| 553 OMIT_SMI_CHECK); | 571 OMIT_SMI_CHECK); |
| 554 | 572 |
| 555 int index = transition->instance_descriptors()->GetFieldIndex( | 573 int index = transition->instance_descriptors()->GetFieldIndex( |
| 556 transition->LastAdded()); | 574 transition->LastAdded()); |
| 557 | 575 |
| 558 // Adjust for the number of properties stored in the object. Even in the | 576 // Adjust for the number of properties stored in the object. Even in the |
| 559 // face of a transition we can use the old map here because the size of the | 577 // face of a transition we can use the old map here because the size of the |
| 560 // object and the number of in-object properties is not going to change. | 578 // object and the number of in-object properties is not going to change. |
| 561 index -= object->map()->inobject_properties(); | 579 index -= object->map()->inobject_properties(); |
| 562 | 580 |
| 563 // TODO(verwaest): Share this code as a code stub. | 581 // TODO(verwaest): Share this code as a code stub. |
| 564 if (index < 0) { | 582 if (index < 0) { |
| 565 // Set the property straight into the object. | 583 // Set the property straight into the object. |
| 566 int offset = object->map()->instance_size() + (index * kPointerSize); | 584 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 567 __ str(value_reg, FieldMemOperand(receiver_reg, offset)); | 585 if (FLAG_track_double_fields && representation.IsDouble()) { |
| 586 __ str(storage_reg, FieldMemOperand(receiver_reg, offset)); |
| 587 } else { |
| 588 __ str(value_reg, FieldMemOperand(receiver_reg, offset)); |
| 589 } |
| 568 | 590 |
| 569 if (!FLAG_track_fields || !representation.IsSmi()) { | 591 if (!FLAG_track_fields || !representation.IsSmi()) { |
| 570 // Skip updating write barrier if storing a smi. | 592 // Skip updating write barrier if storing a smi. |
| 571 __ JumpIfSmi(value_reg, &exit); | 593 __ JumpIfSmi(value_reg, &exit); |
| 572 | 594 |
| 573 // Update the write barrier for the array address. | 595 // Update the write barrier for the array address. |
| 574 // Pass the now unused name_reg as a scratch register. | 596 // Pass the now unused name_reg as a scratch register. |
| 575 __ mov(name_reg, value_reg); | 597 if (!FLAG_track_double_fields || !representation.IsDouble()) { |
| 598 __ mov(name_reg, value_reg); |
| 599 } else { |
| 600 ASSERT(storage_reg.is(name_reg)); |
| 601 } |
| 576 __ RecordWriteField(receiver_reg, | 602 __ RecordWriteField(receiver_reg, |
| 577 offset, | 603 offset, |
| 578 name_reg, | 604 name_reg, |
| 579 scratch1, | 605 scratch1, |
| 580 kLRHasNotBeenSaved, | 606 kLRHasNotBeenSaved, |
| 581 kDontSaveFPRegs); | 607 kDontSaveFPRegs); |
| 582 } | 608 } |
| 583 } else { | 609 } else { |
| 584 // Write to the properties array. | 610 // Write to the properties array. |
| 585 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 611 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 586 // Get the properties array | 612 // Get the properties array |
| 587 __ ldr(scratch1, | 613 __ ldr(scratch1, |
| 588 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | 614 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 589 __ str(value_reg, FieldMemOperand(scratch1, offset)); | 615 if (FLAG_track_double_fields && representation.IsDouble()) { |
| 616 __ str(storage_reg, FieldMemOperand(scratch1, offset)); |
| 617 } else { |
| 618 __ str(value_reg, FieldMemOperand(scratch1, offset)); |
| 619 } |
| 590 | 620 |
| 591 if (!FLAG_track_fields || !representation.IsSmi()) { | 621 if (!FLAG_track_fields || !representation.IsSmi()) { |
| 592 // Skip updating write barrier if storing a smi. | 622 // Skip updating write barrier if storing a smi. |
| 593 __ JumpIfSmi(value_reg, &exit); | 623 __ JumpIfSmi(value_reg, &exit); |
| 594 | 624 |
| 595 // Update the write barrier for the array address. | 625 // Update the write barrier for the array address. |
| 596 // Ok to clobber receiver_reg and name_reg, since we return. | 626 // Ok to clobber receiver_reg and name_reg, since we return. |
| 597 __ mov(name_reg, value_reg); | 627 if (!FLAG_track_double_fields || !representation.IsDouble()) { |
| 628 __ mov(name_reg, value_reg); |
| 629 } else { |
| 630 ASSERT(storage_reg.is(name_reg)); |
| 631 } |
| 598 __ RecordWriteField(scratch1, | 632 __ RecordWriteField(scratch1, |
| 599 offset, | 633 offset, |
| 600 name_reg, | 634 name_reg, |
| 601 receiver_reg, | 635 receiver_reg, |
| 602 kLRHasNotBeenSaved, | 636 kLRHasNotBeenSaved, |
| 603 kDontSaveFPRegs); | 637 kDontSaveFPRegs); |
| 604 } | 638 } |
| 605 } | 639 } |
| 606 | 640 |
| 607 // Return the value (register r0). | 641 // Return the value (register r0). |
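The core of the new double handling in GenerateStoreTransition: the raw value never lands in the field; a freshly allocated HeapNumber box does, and the incoming smi or heap number is converted to an unboxed double with VFP and written into that box. A rough C++ equivalent (illustrative only; BoxValueAsDouble and AllocateHeapNumberOrBail are hypothetical stand-ins, with the real code branching to the slow and miss_restore_name labels instead of returning):

static HeapNumber* BoxValueAsDouble(Object* value) {
  double unboxed;
  if (value->IsSmi()) {
    // SmiUntag + vmov s0, scratch + vcvt_f64_s32 d0, s0
    unboxed = static_cast<double>(Smi::cast(value)->value());
  } else if (value->IsHeapNumber()) {
    // vldr d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)
    unboxed = HeapNumber::cast(value)->value();
  } else {
    return NULL;  // real code branches to miss_restore_name via CheckMap
  }
  HeapNumber* box = AllocateHeapNumberOrBail();  // lands in storage_reg (aliases name_reg)
  box->set_value(unboxed);  // vstr d0, FieldMemOperand(storage_reg, kValueOffset)
  return box;  // the box, not value_reg, is stored into the field and seen by the barrier
}

GenerateStoreField in the next hunk performs the same conversion but reuses the HeapNumber already sitting in the field instead of allocating a new one, writes the double in place, and returns r0 immediately since neither the map nor the write barrier needs updating.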
| (...skipping 37 matching lines...) |
| 645 // Adjust for the number of properties stored in the object. Even in the | 679 // Adjust for the number of properties stored in the object. Even in the |
| 646 // face of a transition we can use the old map here because the size of the | 680 // face of a transition we can use the old map here because the size of the |
| 647 // object and the number of in-object properties is not going to change. | 681 // object and the number of in-object properties is not going to change. |
| 648 index -= object->map()->inobject_properties(); | 682 index -= object->map()->inobject_properties(); |
| 649 | 683 |
| 650 Representation representation = lookup->representation(); | 684 Representation representation = lookup->representation(); |
| 651 ASSERT(!representation.IsNone()); | 685 ASSERT(!representation.IsNone()); |
| 652 if (FLAG_track_fields && representation.IsSmi()) { | 686 if (FLAG_track_fields && representation.IsSmi()) { |
| 653 __ JumpIfNotSmi(value_reg, miss_label); | 687 __ JumpIfNotSmi(value_reg, miss_label); |
| 654 } else if (FLAG_track_double_fields && representation.IsDouble()) { | 688 } else if (FLAG_track_double_fields && representation.IsDouble()) { |
| 655 Label do_store; | 689 // Load the double storage. |
| 656 __ JumpIfSmi(value_reg, &do_store); | 690 if (index < 0) { |
| 657 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, | 691 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 692 __ ldr(scratch1, FieldMemOperand(receiver_reg, offset)); |
| 693 } else { |
| 694 __ ldr(scratch1, |
| 695 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 696 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 697 __ ldr(scratch1, FieldMemOperand(scratch1, offset)); |
| 698 } |
| 699 |
| 700 // Store the value into the storage. |
| 701 Label do_store, heap_number; |
| 702 __ JumpIfNotSmi(value_reg, &heap_number); |
| 703 __ SmiUntag(scratch2, value_reg); |
| 704 __ vmov(s0, scratch2); |
| 705 __ vcvt_f64_s32(d0, s0); |
| 706 __ jmp(&do_store); |
| 707 |
| 708 __ bind(&heap_number); |
| 709 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, |
| 658 miss_label, DONT_DO_SMI_CHECK); | 710 miss_label, DONT_DO_SMI_CHECK); |
| 711 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); |
| 712 |
| 659 __ bind(&do_store); | 713 __ bind(&do_store); |
| 714 __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); |
| 715 // Return the value (register r0). |
| 716 ASSERT(value_reg.is(r0)); |
| 717 __ Ret(); |
| 718 return; |
| 660 } | 719 } |
| 661 | 720 |
| 662 // TODO(verwaest): Share this code as a code stub. | 721 // TODO(verwaest): Share this code as a code stub. |
| 663 if (index < 0) { | 722 if (index < 0) { |
| 664 // Set the property straight into the object. | 723 // Set the property straight into the object. |
| 665 int offset = object->map()->instance_size() + (index * kPointerSize); | 724 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 666 __ str(value_reg, FieldMemOperand(receiver_reg, offset)); | 725 __ str(value_reg, FieldMemOperand(receiver_reg, offset)); |
| 667 | 726 |
| 668 if (!FLAG_track_fields || !representation.IsSmi()) { | 727 if (!FLAG_track_fields || !representation.IsSmi()) { |
| 669 // Skip updating write barrier if storing a smi. | 728 // Skip updating write barrier if storing a smi. |
| (...skipping 632 matching lines...) |
| 1302 if (!global.is_null()) { | 1361 if (!global.is_null()) { |
| 1303 GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss); | 1362 GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss); |
| 1304 } | 1363 } |
| 1305 | 1364 |
| 1306 HandlerFrontendFooter(success, &miss); | 1365 HandlerFrontendFooter(success, &miss); |
| 1307 } | 1366 } |
| 1308 | 1367 |
| 1309 | 1368 |
| 1310 void BaseLoadStubCompiler::GenerateLoadField(Register reg, | 1369 void BaseLoadStubCompiler::GenerateLoadField(Register reg, |
| 1311 Handle<JSObject> holder, | 1370 Handle<JSObject> holder, |
| 1312 PropertyIndex field) { | 1371 PropertyIndex field, |
| 1372 Representation representation) { |
| 1313 if (!reg.is(receiver())) __ mov(receiver(), reg); | 1373 if (!reg.is(receiver())) __ mov(receiver(), reg); |
| 1314 if (kind() == Code::LOAD_IC) { | 1374 if (kind() == Code::LOAD_IC) { |
| 1315 LoadFieldStub stub(field.is_inobject(holder), | 1375 LoadFieldStub stub(field.is_inobject(holder), |
| 1316 field.translate(holder)); | 1376 field.translate(holder), |
| 1377 representation); |
| 1317 GenerateTailCall(masm(), stub.GetCode(isolate())); | 1378 GenerateTailCall(masm(), stub.GetCode(isolate())); |
| 1318 } else { | 1379 } else { |
| 1319 KeyedLoadFieldStub stub(field.is_inobject(holder), | 1380 KeyedLoadFieldStub stub(field.is_inobject(holder), |
| 1320 field.translate(holder)); | 1381 field.translate(holder), |
| 1382 representation); |
| 1321 GenerateTailCall(masm(), stub.GetCode(isolate())); | 1383 GenerateTailCall(masm(), stub.GetCode(isolate())); |
| 1322 } | 1384 } |
| 1323 } | 1385 } |
| 1324 | 1386 |
| 1325 | 1387 |
| 1326 void BaseLoadStubCompiler::GenerateLoadConstant(Handle<JSFunction> value) { | 1388 void BaseLoadStubCompiler::GenerateLoadConstant(Handle<JSFunction> value) { |
| 1327 // Return the constant value. | 1389 // Return the constant value. |
| 1328 __ LoadHeapObject(r0, value); | 1390 __ LoadHeapObject(r0, value); |
| 1329 __ Ret(); | 1391 __ Ret(); |
| 1330 } | 1392 } |
| (...skipping 205 matching lines...) |
| 1536 | 1598 |
| 1537 const int argc = arguments().immediate(); | 1599 const int argc = arguments().immediate(); |
| 1538 | 1600 |
| 1539 // Get the receiver of the function from the stack into r0. | 1601 // Get the receiver of the function from the stack into r0. |
| 1540 __ ldr(r0, MemOperand(sp, argc * kPointerSize)); | 1602 __ ldr(r0, MemOperand(sp, argc * kPointerSize)); |
| 1541 // Check that the receiver isn't a smi. | 1603 // Check that the receiver isn't a smi. |
| 1542 __ JumpIfSmi(r0, &miss); | 1604 __ JumpIfSmi(r0, &miss); |
| 1543 | 1605 |
| 1544 // Do the right check and compute the holder register. | 1606 // Do the right check and compute the holder register. |
| 1545 Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss); | 1607 Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss); |
| 1546 GenerateFastPropertyLoad(masm(), r1, reg, holder, index); | 1608 GenerateFastPropertyLoad(masm(), r1, reg, index.is_inobject(holder), |
| 1609 index.translate(holder), Representation::Tagged()); |
| 1547 | 1610 |
| 1548 GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_); | 1611 GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_); |
| 1549 | 1612 |
| 1550 // Handle call cache miss. | 1613 // Handle call cache miss. |
| 1551 __ bind(&miss); | 1614 __ bind(&miss); |
| 1552 GenerateMissBranch(); | 1615 GenerateMissBranch(); |
| 1553 | 1616 |
| 1554 // Return the generated code. | 1617 // Return the generated code. |
| 1555 return GetCode(Code::FIELD, name); | 1618 return GetCode(Code::FIELD, name); |
| 1556 } | 1619 } |
| (...skipping 2153 matching lines...) |
| 3710 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); | 3773 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); |
| 3711 } | 3774 } |
| 3712 } | 3775 } |
| 3713 | 3776 |
| 3714 | 3777 |
| 3715 #undef __ | 3778 #undef __ |
| 3716 | 3779 |
| 3717 } } // namespace v8::internal | 3780 } } // namespace v8::internal |
| 3718 | 3781 |
| 3719 #endif // V8_TARGET_ARCH_ARM | 3782 #endif // V8_TARGET_ARCH_ARM |
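Taken together, the two store helpers in this file now encode roughly the following policy once field representations are tracked (a condensed sketch, not V8 code; Miss, StoreBoxedDouble and StoreTagged are hypothetical stand-ins for the branch and str/vstr sequences above):

static Object* StoreTrackedField(Representation representation, Object* value) {
  if (FLAG_track_fields && representation.IsSmi() && !value->IsSmi()) {
    return Miss();            // hypothetical: bail so the runtime can generalize the field
  }
  if (FLAG_track_double_fields && representation.IsDouble()) {
    StoreBoxedDouble(value);  // the HeapNumber-box path sketched earlier; barrier sees the box
  } else {
    StoreTagged(value);       // plain str; the write barrier is skipped only for smi values
  }
  return value;               // both stubs return the stored value in r0
}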