| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 883 matching lines...) |
| 894 // ignore null and undefined in contrast to the specification; see | 894 // ignore null and undefined in contrast to the specification; see |
| 895 // ECMA-262 section 12.6.4. | 895 // ECMA-262 section 12.6.4. |
| 896 VisitForAccumulatorValue(stmt->enumerable()); | 896 VisitForAccumulatorValue(stmt->enumerable()); |
| 897 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | 897 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
| 898 __ j(equal, &exit); | 898 __ j(equal, &exit); |
| 899 Register null_value = rdi; | 899 Register null_value = rdi; |
| 900 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 900 __ LoadRoot(null_value, Heap::kNullValueRootIndex); |
| 901 __ cmpq(rax, null_value); | 901 __ cmpq(rax, null_value); |
| 902 __ j(equal, &exit); | 902 __ j(equal, &exit); |
| 903 | 903 |
| 904 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); |
| 905 |
| 904 // Convert the object to a JS object. | 906 // Convert the object to a JS object. |
| 905 Label convert, done_convert; | 907 Label convert, done_convert; |
| 906 __ JumpIfSmi(rax, &convert); | 908 __ JumpIfSmi(rax, &convert); |
| 907 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); | 909 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); |
| 908 __ j(above_equal, &done_convert); | 910 __ j(above_equal, &done_convert); |
| 909 __ bind(&convert); | 911 __ bind(&convert); |
| 910 __ push(rax); | 912 __ push(rax); |
| 911 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 913 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 912 __ bind(&done_convert); | 914 __ bind(&done_convert); |
| 913 __ push(rax); | 915 __ push(rax); |
| 914 | 916 |
| 915 // Check for proxies. | 917 // Check for proxies. |
| 916 Label call_runtime; | 918 Label call_runtime; |
| 917 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 919 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
| 918 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx); | 920 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx); |
| 919 __ j(below_equal, &call_runtime); | 921 __ j(below_equal, &call_runtime); |
| 920 | 922 |
| 921 // Check cache validity in generated code. This is a fast case for | 923 // Check cache validity in generated code. This is a fast case for |
| 922 // the JSObject::IsSimpleEnum cache validity checks. If we cannot | 924 // the JSObject::IsSimpleEnum cache validity checks. If we cannot |
| 923 // guarantee cache validity, call the runtime system to check cache | 925 // guarantee cache validity, call the runtime system to check cache |
| 924 // validity or get the property names in a fixed array. | 926 // validity or get the property names in a fixed array. |
| 925 Label next; | 927 __ CheckEnumCache(null_value, &call_runtime); |
| 926 Register empty_fixed_array_value = r8; | |
| 927 __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex); | |
| 928 Register empty_descriptor_array_value = r9; | |
| 929 __ LoadRoot(empty_descriptor_array_value, | |
| 930 Heap::kEmptyDescriptorArrayRootIndex); | |
| 931 __ movq(rcx, rax); | |
| 932 __ bind(&next); | |
| 933 | |
| 934 // Check that there are no elements. Register rcx contains the | |
| 935 // current JS object we've reached through the prototype chain. | |
| 936 __ cmpq(empty_fixed_array_value, | |
| 937 FieldOperand(rcx, JSObject::kElementsOffset)); | |
| 938 __ j(not_equal, &call_runtime); | |
| 939 | |
| 940 // Check that instance descriptors are not empty so that we can | |
| 941 // check for an enum cache. Leave the map in rbx for the subsequent | |
| 942 // prototype load. | |
| 943 __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset)); | |
| 944 __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset)); | |
| 945 __ JumpIfSmi(rdx, &call_runtime); | |
| 946 | |
| 947 // Check that there is an enum cache in the non-empty instance | |
| 948 // descriptors (rdx). This is the case if the next enumeration | |
| 949 // index field does not contain a smi. | |
| 950 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset)); | |
| 951 __ JumpIfSmi(rdx, &call_runtime); | |
| 952 | |
| 953 // For all objects but the receiver, check that the cache is empty. | |
| 954 Label check_prototype; | |
| 955 __ cmpq(rcx, rax); | |
| 956 __ j(equal, &check_prototype, Label::kNear); | |
| 957 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset)); | |
| 958 __ cmpq(rdx, empty_fixed_array_value); | |
| 959 __ j(not_equal, &call_runtime); | |
| 960 | |
| 961 // Load the prototype from the map and loop if non-null. | |
| 962 __ bind(&check_prototype); | |
| 963 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset)); | |
| 964 __ cmpq(rcx, null_value); | |
| 965 __ j(not_equal, &next); | |
| 966 | 928 |
| 967 // The enum cache is valid. Load the map of the object being | 929 // The enum cache is valid. Load the map of the object being |
| 968 // iterated over and use the cache for the iteration. | 930 // iterated over and use the cache for the iteration. |
| 969 Label use_cache; | 931 Label use_cache; |
| 970 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); | 932 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); |
| 971 __ jmp(&use_cache, Label::kNear); | 933 __ jmp(&use_cache, Label::kNear); |
| 972 | 934 |
| 973 // Get the set of properties to enumerate. | 935 // Get the set of properties to enumerate. |
| 974 __ bind(&call_runtime); | 936 __ bind(&call_runtime); |
| 975 __ push(rax); // Duplicate the enumerable object on the stack. | 937 __ push(rax); // Duplicate the enumerable object on the stack. |
| (...skipping 31 matching lines...) |
| 1007 __ j(above, &non_proxy); | 969 __ j(above, &non_proxy); |
| 1008 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy | 970 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy |
| 1009 __ bind(&non_proxy); | 971 __ bind(&non_proxy); |
| 1010 __ push(rbx); // Smi | 972 __ push(rbx); // Smi |
| 1011 __ push(rax); // Array | 973 __ push(rax); // Array |
| 1012 __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); | 974 __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); |
| 1013 __ push(rax); // Fixed array length (as smi). | 975 __ push(rax); // Fixed array length (as smi). |
| 1014 __ Push(Smi::FromInt(0)); // Initial index. | 976 __ Push(Smi::FromInt(0)); // Initial index. |
| 1015 | 977 |
| 1016 // Generate code for doing the condition check. | 978 // Generate code for doing the condition check. |
| 979 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); |
| 1017 __ bind(&loop); | 980 __ bind(&loop); |
| 1018 __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index. | 981 __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index. |
| 1019 __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length. | 982 __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length. |
| 1020 __ j(above_equal, loop_statement.break_label()); | 983 __ j(above_equal, loop_statement.break_label()); |
| 1021 | 984 |
| 1022 // Get the current entry of the array into register rbx. | 985 // Get the current entry of the array into register rbx. |
| 1023 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); | 986 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); |
| 1024 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2); | 987 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2); |
| 1025 __ movq(rbx, FieldOperand(rbx, | 988 __ movq(rbx, FieldOperand(rbx, |
| 1026 index.reg, | 989 index.reg, |
| (...skipping 25 matching lines...) |
| 1052 __ Cmp(rax, Smi::FromInt(0)); | 1015 __ Cmp(rax, Smi::FromInt(0)); |
| 1053 __ j(equal, loop_statement.continue_label()); | 1016 __ j(equal, loop_statement.continue_label()); |
| 1054 __ movq(rbx, rax); | 1017 __ movq(rbx, rax); |
| 1055 | 1018 |
| 1056 // Update the 'each' property or variable from the possibly filtered | 1019 // Update the 'each' property or variable from the possibly filtered |
| 1057 // entry in register rbx. | 1020 // entry in register rbx. |
| 1058 __ bind(&update_each); | 1021 __ bind(&update_each); |
| 1059 __ movq(result_register(), rbx); | 1022 __ movq(result_register(), rbx); |
| 1060 // Perform the assignment as if via '='. | 1023 // Perform the assignment as if via '='. |
| 1061 { EffectContext context(this); | 1024 { EffectContext context(this); |
| 1062 EmitAssignment(stmt->each(), stmt->AssignmentId()); | 1025 EmitAssignment(stmt->each()); |
| 1063 } | 1026 } |
| 1064 | 1027 |
| 1065 // Generate code for the body of the loop. | 1028 // Generate code for the body of the loop. |
| 1066 Visit(stmt->body()); | 1029 Visit(stmt->body()); |
| 1067 | 1030 |
| 1068 // Generate code for going to the next element by incrementing the | 1031 // Generate code for going to the next element by incrementing the |
| 1069 // index (smi) stored on top of the stack. | 1032 // index (smi) stored on top of the stack. |
| 1070 __ bind(loop_statement.continue_label()); | 1033 __ bind(loop_statement.continue_label()); |
| 1071 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1)); | 1034 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1)); |
| 1072 | 1035 |
| 1073 EmitStackCheck(stmt, &loop); | 1036 EmitStackCheck(stmt, &loop); |
| 1074 __ jmp(&loop); | 1037 __ jmp(&loop); |
| 1075 | 1038 |
| 1076 // Remove the pointers stored on the stack. | 1039 // Remove the pointers stored on the stack. |
| 1077 __ bind(loop_statement.break_label()); | 1040 __ bind(loop_statement.break_label()); |
| 1078 __ addq(rsp, Immediate(5 * kPointerSize)); | 1041 __ addq(rsp, Immediate(5 * kPointerSize)); |
| 1079 | 1042 |
| 1080 // Exit and decrement the loop depth. | 1043 // Exit and decrement the loop depth. |
| 1044 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); |
| 1081 __ bind(&exit); | 1045 __ bind(&exit); |
| 1082 decrement_loop_depth(); | 1046 decrement_loop_depth(); |
| 1083 } | 1047 } |
| 1084 | 1048 |
| 1085 | 1049 |
| 1086 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, | 1050 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, |
| 1087 bool pretenure) { | 1051 bool pretenure) { |
| 1088 // Use the fast case closure allocation code that allocates in new | 1052 // Use the fast case closure allocation code that allocates in new |
| 1089 // space for nested functions that don't need literals cloning. If | 1053 // space for nested functions that don't need literals cloning. If |
| 1090 // we're running with the --always-opt or the --prepare-always-opt | 1054 // we're running with the --always-opt or the --prepare-always-opt |
| (...skipping 702 matching lines...) |
| 1793 OverwriteMode mode) { | 1757 OverwriteMode mode) { |
| 1794 __ pop(rdx); | 1758 __ pop(rdx); |
| 1795 BinaryOpStub stub(op, mode); | 1759 BinaryOpStub stub(op, mode); |
| 1796 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. | 1760 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. |
| 1797 __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); | 1761 __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); |
| 1798 patch_site.EmitPatchInfo(); | 1762 patch_site.EmitPatchInfo(); |
| 1799 context()->Plug(rax); | 1763 context()->Plug(rax); |
| 1800 } | 1764 } |
| 1801 | 1765 |
| 1802 | 1766 |
| 1803 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { | 1767 void FullCodeGenerator::EmitAssignment(Expression* expr) { |
| 1804 // Invalid left-hand sides are rewritten to have a 'throw | 1768 // Invalid left-hand sides are rewritten to have a 'throw |
| 1805 // ReferenceError' on the left-hand side. | 1769 // ReferenceError' on the left-hand side. |
| 1806 if (!expr->IsValidLeftHandSide()) { | 1770 if (!expr->IsValidLeftHandSide()) { |
| 1807 VisitForEffect(expr); | 1771 VisitForEffect(expr); |
| 1808 return; | 1772 return; |
| 1809 } | 1773 } |
| 1810 | 1774 |
| 1811 // Left-hand side can only be a property, a global or a (parameter or local) | 1775 // Left-hand side can only be a property, a global or a (parameter or local) |
| 1812 // slot. | 1776 // slot. |
| 1813 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; | 1777 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; |
| (...skipping 31 matching lines...) |
| 1845 __ movq(rcx, rax); | 1809 __ movq(rcx, rax); |
| 1846 __ pop(rdx); | 1810 __ pop(rdx); |
| 1847 __ pop(rax); // Restore value. | 1811 __ pop(rax); // Restore value. |
| 1848 Handle<Code> ic = is_classic_mode() | 1812 Handle<Code> ic = is_classic_mode() |
| 1849 ? isolate()->builtins()->KeyedStoreIC_Initialize() | 1813 ? isolate()->builtins()->KeyedStoreIC_Initialize() |
| 1850 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); | 1814 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); |
| 1851 __ call(ic); | 1815 __ call(ic); |
| 1852 break; | 1816 break; |
| 1853 } | 1817 } |
| 1854 } | 1818 } |
| 1855 PrepareForBailoutForId(bailout_ast_id, TOS_REG); | |
| 1856 context()->Plug(rax); | 1819 context()->Plug(rax); |
| 1857 } | 1820 } |
| 1858 | 1821 |
| 1859 | 1822 |
| 1860 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 1823 void FullCodeGenerator::EmitVariableAssignment(Variable* var, |
| 1861 Token::Value op) { | 1824 Token::Value op) { |
| 1862 if (var->IsUnallocated()) { | 1825 if (var->IsUnallocated()) { |
| 1863 // Global var, const, or let. | 1826 // Global var, const, or let. |
| 1864 __ Move(rcx, var->name()); | 1827 __ Move(rcx, var->name()); |
| 1865 __ movq(rdx, GlobalObjectOperand()); | 1828 __ movq(rdx, GlobalObjectOperand()); |
| (...skipping 2515 matching lines...) |
| 4381 *context_length = 0; | 4344 *context_length = 0; |
| 4382 return previous_; | 4345 return previous_; |
| 4383 } | 4346 } |
| 4384 | 4347 |
| 4385 | 4348 |
| 4386 #undef __ | 4349 #undef __ |
| 4387 | 4350 |
| 4388 } } // namespace v8::internal | 4351 } } // namespace v8::internal |
| 4389 | 4352 |
| 4390 #endif // V8_TARGET_ARCH_X64 | 4353 #endif // V8_TARGET_ARCH_X64 |
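
A rough C++-level sketch of the fast-path check performed before for-in can use the enum cache: the logic the old inlined assembly above encodes, and which the `CheckEnumCache` macro-assembler helper on the new side is assumed to fold into a single call. Walk the prototype chain and fall back to the runtime unless every object has empty elements, a built enum cache, and, for everything but the receiver, an empty cache. The types and field names below are illustrative stand-ins, not V8's real heap layout or API.

```cpp
// Illustrative only: toy stand-ins for the heap objects the generated code
// inspects, not V8's actual object layout.
#include <string>
#include <vector>

struct EnumCache {
  // Cached enumerable property names; empty means the object contributes none.
  std::vector<std::string> keys;
};

struct ToyObject {
  std::vector<int> elements;  // indexed (array-like) properties
  EnumCache* enum_cache;      // nullptr if the descriptors carry no cache
  ToyObject* prototype;       // nullptr terminates the chain
};

// Mirrors the old inline check: for-in may use the receiver's enum cache only
// if no object on the prototype chain has elements, every object has an enum
// cache, and every object except the receiver has an empty cache (i.e. adds
// no enumerable properties of its own).
bool CanUseEnumCache(const ToyObject* receiver) {
  for (const ToyObject* current = receiver; current != nullptr;
       current = current->prototype) {
    if (!current->elements.empty()) return false;      // elements defeat the cache
    if (current->enum_cache == nullptr) return false;  // no cache to rely on
    if (current != receiver && !current->enum_cache->keys.empty()) {
      return false;  // a prototype would add its own enumerable names
    }
  }
  return true;
}
```

When this check fails, the generated code jumps to `call_runtime`, which either re-validates the cache or materializes the property names in a fixed array, as the surrounding comments in the diff describe.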