OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 740 matching lines...)
751 | 751 |
752 | 752 |
753 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm, | 753 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm, |
754 Register object, | 754 Register object, |
755 Register key, | 755 Register key, |
756 Register scratch1, | 756 Register scratch1, |
757 Register scratch2, | 757 Register scratch2, |
758 Register scratch3, | 758 Register scratch3, |
759 Label* unmapped_case, | 759 Label* unmapped_case, |
760 Label* slow_case) { | 760 Label* slow_case) { |
761 Heap* heap = masm->isolate()->heap(); | |
762 | |
763 // Check that the receiver is a JSObject. Because of the map check | 761 // Check that the receiver is a JSObject. Because of the map check |
764 // later, we do not need to check for interceptors or whether it | 762 // later, we do not need to check for interceptors or whether it |
765 // requires access checks. | 763 // requires access checks. |
766 __ JumpIfSmi(object, slow_case); | 764 __ JumpIfSmi(object, slow_case); |
767 // Check that the object is some kind of JSObject. | 765 // Check that the object is some kind of JSObject. |
768 __ GetObjectType(object, scratch1, scratch2); | 766 __ GetObjectType(object, scratch1, scratch2); |
769 __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE)); | 767 __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE)); |
770 | 768 |
771 // Check that the key is a positive smi. | 769 // Check that the key is a positive smi. |
772 __ And(scratch1, key, Operand(0x80000001)); | 770 __ And(scratch1, key, Operand(0x80000001)); |
773 __ Branch(slow_case, ne, scratch1, Operand(zero_reg)); | 771 __ Branch(slow_case, ne, scratch1, Operand(zero_reg)); |
774 | 772 |
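The positive-smi test above depends on V8's 32-bit smi encoding (value << 1, tag bit 0 clear): AND-ing the raw word with 0x80000001 yields zero exactly when both the smi tag bit and the sign bit are clear. A minimal standalone sketch of the same predicate, assuming that encoding (the Smi/IsPositiveSmi helpers are illustrative, not V8 APIs):

    #include <cassert>
    #include <cstdint>

    // Assumed 32-bit smi encoding: smi(v) == v << 1, low bit is the tag (0).
    uint32_t Smi(int32_t v) { return static_cast<uint32_t>(v) << 1; }

    // Mirrors the masked test emitted above: zero iff the word is a
    // non-negative smi (bit 0 = smi tag, bit 31 = sign).
    bool IsPositiveSmi(uint32_t raw) { return (raw & 0x80000001u) == 0; }

    int main() {
      assert(IsPositiveSmi(Smi(0)));
      assert(IsPositiveSmi(Smi(42)));
      assert(!IsPositiveSmi(Smi(-1)));     // sign bit set
      assert(!IsPositiveSmi(Smi(7) | 1));  // tag bit set, not a smi
      return 0;
    }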
775 // Load the elements into scratch1 and check its map. | 773 // Load the elements into scratch1 and check its map. |
776 Handle<Map> arguments_map(heap->non_strict_arguments_elements_map()); | |
777 __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset)); | 774 __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset)); |
778 __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK); | 775 __ CheckMap(scratch1, |
779 | 776 scratch2, |
| 777 Heap::kNonStrictArgumentsElementsMapRootIndex, |
| 778 slow_case, |
| 779 DONT_DO_SMI_CHECK); |
780 // Check if element is in the range of mapped arguments. If not, jump | 780 // Check if element is in the range of mapped arguments. If not, jump |
781 // to the unmapped lookup with the parameter map in scratch1. | 781 // to the unmapped lookup with the parameter map in scratch1. |
782 __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); | 782 __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); |
783 __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2))); | 783 __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2))); |
784 __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2)); | 784 __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2)); |
785 | 785 |
786 // Load element index and check whether it is the hole. | 786 // Load element index and check whether it is the hole. |
787 const int kOffset = | 787 const int kOffset = |
788 FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag; | 788 FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag; |
789 | 789 |
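The kOffset constant above folds in the parameter-map layout for sloppy arguments: element 1 is the backing store (the unmapped-lookup helper below reads it at kHeaderSize + kPointerSize), element 0 conventionally holds the context, so the mapped slots start at element 2; kHeapObjectTag is subtracted because the array pointer is tagged. A small arithmetic check under assumed MIPS32 values (4-byte pointers, 8-byte FixedArray header, tag 1), with illustrative constant names:

    #include <cassert>

    const int kPointerSize = 4;                           // assumed MIPS32
    const int kHeapObjectTag = 1;
    const int kFixedArrayHeaderSize = 2 * kPointerSize;   // map + length

    // Mapped slot for key k lives at tagged_map + kOffset + k * kPointerSize,
    // i.e. at element index k + 2 of the parameter map.
    const int kOffset = kFixedArrayHeaderSize + 2 * kPointerSize - kHeapObjectTag;

    int main() {
      assert(kOffset == 15);
      assert(kOffset + 3 * kPointerSize ==
             kFixedArrayHeaderSize + 5 * kPointerSize - kHeapObjectTag);
      return 0;
    }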
(...skipping 23 matching lines...)
813 Register parameter_map, | 813 Register parameter_map, |
814 Register scratch, | 814 Register scratch, |
815 Label* slow_case) { | 815 Label* slow_case) { |
816 // Element is in arguments backing store, which is referenced by the | 816 // Element is in arguments backing store, which is referenced by the |
817 // second element of the parameter_map. The parameter_map register | 817 // second element of the parameter_map. The parameter_map register |
818 // must be loaded with the parameter map of the arguments object and is | 818 // must be loaded with the parameter map of the arguments object and is |
819 // overwritten. | 819 // overwritten. |
820 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; | 820 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; |
821 Register backing_store = parameter_map; | 821 Register backing_store = parameter_map; |
822 __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset)); | 822 __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset)); |
823 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); | 823 __ CheckMap(backing_store, |
824 __ CheckMap(backing_store, scratch, fixed_array_map, slow_case, | 824 scratch, |
| 825 Heap::kFixedArrayMapRootIndex, |
| 826 slow_case, |
825 DONT_DO_SMI_CHECK); | 827 DONT_DO_SMI_CHECK); |
826 __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset)); | 828 __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset)); |
827 __ Branch(slow_case, Ugreater_equal, key, Operand(scratch)); | 829 __ Branch(slow_case, Ugreater_equal, key, Operand(scratch)); |
828 __ li(scratch, Operand(kPointerSize >> 1)); | 830 __ li(scratch, Operand(kPointerSize >> 1)); |
829 __ Mul(scratch, key, scratch); | 831 __ Mul(scratch, key, scratch); |
830 __ Addu(scratch, | 832 __ Addu(scratch, |
831 scratch, | 833 scratch, |
832 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 834 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
833 __ Addu(scratch, backing_store, scratch); | 835 __ Addu(scratch, backing_store, scratch); |
834 return MemOperand(scratch); | 836 return MemOperand(scratch); |
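The returned operand is built by multiplying the smi key by kPointerSize >> 1: the smi already carries a factor of two, so the product is the untagged index times kPointerSize, a plain byte offset into the backing store. A hedged sketch of that scaling under the same assumed 32-bit encoding (helper names are illustrative):

    #include <cassert>
    #include <cstdint>

    const int kPointerSize = 4;  // assumed MIPS32
    uint32_t Smi(int32_t v) { return static_cast<uint32_t>(v) << 1; }

    // Multiplying a smi key by kPointerSize >> 1 cancels the smi shift and
    // leaves index * kPointerSize, as the Mul above does.
    uint32_t ElementByteOffset(uint32_t smi_key) {
      return smi_key * (kPointerSize >> 1);
    }

    int main() {
      assert(ElementByteOffset(Smi(0)) == 0);
      assert(ElementByteOffset(Smi(3)) == 3 * kPointerSize);
      assert(ElementByteOffset(Smi(100)) == 400);
      return 0;
    }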
(...skipping 411 matching lines...)
1246 // element to the array by writing to array[array.length]. | 1248 // element to the array by writing to array[array.length]. |
1247 __ bind(&extra); | 1249 __ bind(&extra); |
1248 // Condition code from comparing key and array length is still available. | 1250 // Condition code from comparing key and array length is still available. |
1249 // Only support writing to array[array.length]. | 1251 // Only support writing to array[array.length]. |
1250 __ Branch(&slow, ne, key, Operand(t0)); | 1252 __ Branch(&slow, ne, key, Operand(t0)); |
1251 // Check for room in the elements backing store. | 1253 // Check for room in the elements backing store. |
1252 // Both the key and the length of FixedArray are smis. | 1254 // Both the key and the length of FixedArray are smis. |
1253 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); | 1255 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
1254 __ Branch(&slow, hs, key, Operand(t0)); | 1256 __ Branch(&slow, hs, key, Operand(t0)); |
1255 __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); | 1257 __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); |
1256 __ Branch(&check_if_double_array, ne, elements_map, | 1258 __ Branch( |
1257 Operand(masm->isolate()->factory()->fixed_array_map())); | 1259 &check_if_double_array, ne, elements_map, Heap::kFixedArrayMapRootIndex); |
| 1260 |
1258 // Calculate key + 1 as smi. | 1261 // Calculate key + 1 as smi. |
1259 STATIC_ASSERT(kSmiTag == 0); | 1262 STATIC_ASSERT(kSmiTag == 0); |
1260 __ Addu(t0, key, Operand(Smi::FromInt(1))); | 1263 __ Addu(t0, key, Operand(Smi::FromInt(1))); |
1261 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1264 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
1262 __ Branch(&fast_object_without_map_check); | 1265 __ Branch(&fast_object_without_map_check); |
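The key + 1 computation stays in smi form: with kSmiTag == 0 a smi is just the value shifted left by one, so adding the raw representation of Smi::FromInt(1) produces the smi for value + 1 without untagging, which is what the STATIC_ASSERT guards. A small sketch of that identity under the assumed encoding:

    #include <cassert>
    #include <cstdint>

    // Assumed 32-bit encoding with kSmiTag == 0: smi(v) == v << 1.
    uint32_t Smi(int32_t v) { return static_cast<uint32_t>(v) << 1; }

    int main() {
      // Adding raw smi words adds the values, so a single Addu can bump
      // both the key and the stored array length.
      for (int32_t k = 0; k < 1000; ++k) {
        assert(Smi(k) + Smi(1) == Smi(k + 1));
      }
      return 0;
    }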
1263 | 1266 |
1264 __ bind(&check_if_double_array); | 1267 __ bind(&check_if_double_array); |
1265 __ Branch(&slow, ne, elements_map, | 1268 __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex); |
1266 Operand(masm->isolate()->factory()->fixed_double_array_map())); | |
1267 // Add 1 to key, and go to common element store code for doubles. | 1269 // Add 1 to key, and go to common element store code for doubles. |
1268 STATIC_ASSERT(kSmiTag == 0); | 1270 STATIC_ASSERT(kSmiTag == 0); |
1269 __ Addu(t0, key, Operand(Smi::FromInt(1))); | 1271 __ Addu(t0, key, Operand(Smi::FromInt(1))); |
1270 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1272 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
1271 __ jmp(&fast_double_without_map_check); | 1273 __ jmp(&fast_double_without_map_check); |
1272 | 1274 |
1273 // Array case: Get the length and the elements array from the JS | 1275 // Array case: Get the length and the elements array from the JS |
1274 // array. Check that the array is in fast mode (and writable); if it | 1276 // array. Check that the array is in fast mode (and writable); if it |
1275 // is, the length is always a smi. | 1277 // is, the length is always a smi. |
1276 __ bind(&array); | 1278 __ bind(&array); |
1277 __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 1279 __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
1278 | 1280 |
1279 // Check the key against the length in the array. | 1281 // Check the key against the length in the array. |
1280 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1282 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
1281 __ Branch(&extra, hs, key, Operand(t0)); | 1283 __ Branch(&extra, hs, key, Operand(t0)); |
1282 // Fall through to fast case. | 1284 // Fall through to fast case. |
1283 | 1285 |
1284 __ bind(&fast_object_with_map_check); | 1286 __ bind(&fast_object_with_map_check); |
1285 Register scratch_value = t0; | 1287 Register scratch_value = t0; |
1286 Register address = t1; | 1288 Register address = t1; |
1287 __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); | 1289 __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); |
1288 __ Branch(&fast_double_with_map_check, ne, elements_map, | 1290 __ Branch(&fast_double_with_map_check, |
1289 Operand(masm->isolate()->factory()->fixed_array_map())); | 1291 ne, |
| 1292 elements_map, |
| 1293 Heap::kFixedArrayMapRootIndex); |
1290 __ bind(&fast_object_without_map_check); | 1294 __ bind(&fast_object_without_map_check); |
1291 // Smi stores don't require further checks. | 1295 // Smi stores don't require further checks. |
1292 Label non_smi_value; | 1296 Label non_smi_value; |
1293 __ JumpIfNotSmi(value, &non_smi_value); | 1297 __ JumpIfNotSmi(value, &non_smi_value); |
1294 // It's irrelevant whether array is smi-only or not when writing a smi. | 1298 // It's irrelevant whether array is smi-only or not when writing a smi. |
1295 __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1299 __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
1296 __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize); | 1300 __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize); |
1297 __ Addu(address, address, scratch_value); | 1301 __ Addu(address, address, scratch_value); |
1298 __ sw(value, MemOperand(address)); | 1302 __ sw(value, MemOperand(address)); |
1299 __ Ret(USE_DELAY_SLOT); | 1303 __ Ret(USE_DELAY_SLOT); |
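The store address above scales the smi key with a shift, sll by kPointerSizeLog2 - kSmiTagSize, which is the same byte offset the lookup helpers compute with a multiply by kPointerSize >> 1. A quick equivalence check under assumed MIPS32 values (illustrative constants, not taken from this CL):

    #include <cassert>
    #include <cstdint>

    const int kPointerSize = 4;      // assumed MIPS32
    const int kPointerSizeLog2 = 2;
    const int kSmiTagSize = 1;

    uint32_t Smi(int32_t v) { return static_cast<uint32_t>(v) << 1; }

    int main() {
      for (int32_t k = 0; k < 100; ++k) {
        uint32_t by_shift = Smi(k) << (kPointerSizeLog2 - kSmiTagSize);
        uint32_t by_mul = Smi(k) * (kPointerSize >> 1);
        assert(by_shift == by_mul);
        assert(by_shift == static_cast<uint32_t>(k) * kPointerSize);
      }
      return 0;
    }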
(...skipping 16 matching lines...)
1316 value, | 1320 value, |
1317 kRAHasNotBeenSaved, | 1321 kRAHasNotBeenSaved, |
1318 kDontSaveFPRegs, | 1322 kDontSaveFPRegs, |
1319 EMIT_REMEMBERED_SET, | 1323 EMIT_REMEMBERED_SET, |
1320 OMIT_SMI_CHECK); | 1324 OMIT_SMI_CHECK); |
1321 __ Ret(); | 1325 __ Ret(); |
1322 | 1326 |
1323 __ bind(&fast_double_with_map_check); | 1327 __ bind(&fast_double_with_map_check); |
1324 // Check for fast double array case. If this fails, call through to the | 1328 // Check for fast double array case. If this fails, call through to the |
1325 // runtime. | 1329 // runtime. |
1326 __ Branch(&slow, ne, elements_map, | 1330 __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex); |
1327 Operand(masm->isolate()->factory()->fixed_double_array_map())); | |
1328 __ bind(&fast_double_without_map_check); | 1331 __ bind(&fast_double_without_map_check); |
1329 __ StoreNumberToDoubleElements(value, | 1332 __ StoreNumberToDoubleElements(value, |
1330 key, | 1333 key, |
1331 receiver, | 1334 receiver, |
1332 elements, | 1335 elements, |
1333 a3, | 1336 a3, |
1334 t0, | 1337 t0, |
1335 t1, | 1338 t1, |
1336 t2, | 1339 t2, |
1337 &transition_double_elements); | 1340 &transition_double_elements); |
(...skipping 414 matching lines...)
1752 Register reg = Register::from_code(Assembler::GetRs(instr_at_patch)); | 1755 Register reg = Register::from_code(Assembler::GetRs(instr_at_patch)); |
1753 patcher.masm()->andi(at, reg, kSmiTagMask); | 1756 patcher.masm()->andi(at, reg, kSmiTagMask); |
1754 patcher.ChangeBranchCondition(eq); | 1757 patcher.ChangeBranchCondition(eq); |
1755 } | 1758 } |
1756 } | 1759 } |
1757 | 1760 |
1758 | 1761 |
1759 } } // namespace v8::internal | 1762 } } // namespace v8::internal |
1760 | 1763 |
1761 #endif // V8_TARGET_ARCH_MIPS | 1764 #endif // V8_TARGET_ARCH_MIPS |