Chromium Code Reviews

Diff: src/x64/stub-cache-x64.cc

Issue 12810006: Change LookupForWrite to always do a full lookup and check the result. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed comments Created 7 years, 9 months ago
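The new code path hands GenerateStoreField the caller's full LookupResult and, when no property was found on the chain and the holder is a slow-mode (dictionary) object, emits a negative dictionary lookup so the generated stub misses instead of blindly adding an own property. The sketch below illustrates the kind of scenario the regression test added with this change (test/mjsunit/regress/negative_lookup.js) is aimed at; it is not the actual test. The property names, the delete loop used to push the prototype into dictionary mode, and d8's print helper are assumptions made for this example.

    // Illustrative only: warm up a store IC on receivers whose slow-mode
    // prototype has no 'x', then introduce a read-only 'x' on that prototype.
    var proto = {};
    for (var i = 0; i < 100; i++) proto["p" + i] = i;     // grow the prototype
    for (var i = 0; i < 100; i++) delete proto["p" + i];  // deletes normalize it to dictionary mode

    function store(o) { o.x = 42; }

    store(Object.create(proto));  // store stub compiled for a fresh property 'x'
    store(Object.create(proto));

    // A read-only 'x' now lives on the dictionary-mode prototype. The cached
    // stub must miss here (the emitted negative lookup fails) rather than
    // install an own, writable 'x' on the receiver.
    Object.defineProperty(proto, "x", { value: 0, writable: false });
    var o = Object.create(proto);
    store(o);

    print(o.x);                    // 0: the sloppy-mode store was silently ignored
    print(o.hasOwnProperty("x"));  // false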
Other files in this change: src/stub-cache.cc | test/mjsunit/regress/negative_lookup.js
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 698 matching lines...)
     __ bind(label);
     __ Move(this->name(), name);
   }
 }
 
 
 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
 // but may be destroyed if store is successful.
 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                       Handle<JSObject> object,
-                                      int index,
+                                      LookupResult* lookup,
                                       Handle<Map> transition,
                                       Handle<Name> name,
                                       Register receiver_reg,
                                       Register name_reg,
                                       Register value_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Label* miss_label,
                                       Label* miss_restore_name) {
-  LookupResult lookup(masm->isolate());
-  object->Lookup(*name, &lookup);
-  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
-    // In sloppy mode, we could just return the value and be done. However, we
-    // might be in strict mode, where we have to throw. Since we cannot tell,
-    // go into slow case unconditionally.
-    __ jmp(miss_label);
-    return;
-  }
-
   // Check that the map of the object hasn't changed.
   CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
                                              : REQUIRE_EXACT_MAP;
   __ CheckMap(receiver_reg, Handle<Map>(object->map()),
               miss_label, DO_SMI_CHECK, mode);
 
   // Perform global security token check if needed.
   if (object->IsJSGlobalProxy()) {
     __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
   }
 
   // Check that we are allowed to write this.
   if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
     JSObject* holder;
-    if (lookup.IsFound()) {
-      holder = lookup.holder();
+    // holder == object indicates that no property was found.
+    if (lookup->holder() != *object) {
+      holder = lookup->holder();
     } else {
       // Find the top object.
       holder = *object;
       do {
         holder = JSObject::cast(holder->GetPrototype());
       } while (holder->GetPrototype()->IsJSObject());
     }
-    CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
-                    scratch1, scratch2, name, miss_restore_name);
+    Register holder_reg = CheckPrototypes(
+        object, receiver_reg, Handle<JSObject>(holder), name_reg,
+        scratch1, scratch2, name, miss_restore_name);
+    // If no property was found, and the holder (the last object in the
+    // prototype chain) is in slow mode, we need to do a negative lookup on the
+    // holder.
+    if (lookup->holder() == *object &&
+        !holder->HasFastProperties() &&
+        !holder->IsJSGlobalProxy() &&
+        !holder->IsJSGlobalObject()) {
+      GenerateDictionaryNegativeLookup(
+          masm, miss_restore_name, holder_reg, name, scratch1, scratch2);
+    }
   }
 
   // Stub never generated for non-global objects that require access
   // checks.
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
 
   // Perform map transition for the receiver if necessary.
   if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
     // The properties must be extended before we can store the value.
     // We jump to a runtime call that extends the properties array.
     __ pop(scratch1);  // Return address.
     __ push(receiver_reg);
     __ Push(transition);
     __ push(value_reg);
     __ push(scratch1);
     __ TailCallExternalReference(
         ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                           masm->isolate()),
         3,
         1);
     return;
   }
 
+  int index;
   if (!transition.is_null()) {
     // Update the map of the object.
     __ Move(scratch1, transition);
     __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
 
     // Update the write barrier for the map field and pass the now unused
     // name_reg as scratch register.
     __ RecordWriteField(receiver_reg,
                         HeapObject::kMapOffset,
                         scratch1,
                         name_reg,
                         kDontSaveFPRegs,
                         OMIT_REMEMBERED_SET,
                         OMIT_SMI_CHECK);
+    index = transition->instance_descriptors()->GetFieldIndex(
+        transition->LastAdded());
+  } else {
+    index = lookup->GetFieldIndex().field_index();
   }
 
   // Adjust for the number of properties stored in the object. Even in the
   // face of a transition we can use the old map here because the size of the
   // object and the number of in-object properties is not going to change.
   index -= object->map()->inobject_properties();
 
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
(...skipping 2590 matching lines...)
     TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
   }
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64
