| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| (...skipping 86 matching lines...) | |
| 98 __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset)); | 98 __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset)); |
| 99 __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); | 99 __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 100 // t0: source FixedArray | 100 // t0: source FixedArray |
| 101 // t1: number of elements (smi-tagged) | 101 // t1: number of elements (smi-tagged) |
| 102 | 102 |
| 103 // Allocate new FixedDoubleArray. | 103 // Allocate new FixedDoubleArray. |
| 104 __ sll(scratch, t1, 2); | 104 __ sll(scratch, t1, 2); |
| 105 __ Addu(scratch, scratch, FixedDoubleArray::kHeaderSize); | 105 __ Addu(scratch, scratch, FixedDoubleArray::kHeaderSize); |
| 106 __ AllocateInNewSpace(scratch, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS); | 106 __ AllocateInNewSpace(scratch, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS); |
| 107 // t2: destination FixedDoubleArray, not tagged as heap object | 107 // t2: destination FixedDoubleArray, not tagged as heap object |
| | 108 // Set destination FixedDoubleArray's length and map. |
| 108 __ LoadRoot(t5, Heap::kFixedDoubleArrayMapRootIndex); | 109 __ LoadRoot(t5, Heap::kFixedDoubleArrayMapRootIndex); |
| | 110 __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset)); |
| 109 __ sw(t5, MemOperand(t2, HeapObject::kMapOffset)); | 111 __ sw(t5, MemOperand(t2, HeapObject::kMapOffset)); |
| 110 // Set destination FixedDoubleArray's length. | |
| 111 __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset)); | |
| 112 // Update receiver's map. | 112 // Update receiver's map. |
| 113 | 113 |
| 114 __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset)); | 114 __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset)); |
| 115 __ RecordWriteField(a2, | 115 __ RecordWriteField(a2, |
| 116 HeapObject::kMapOffset, | 116 HeapObject::kMapOffset, |
| 117 a3, | 117 a3, |
| 118 t5, | 118 t5, |
| 119 kRAHasBeenSaved, | 119 kRAHasBeenSaved, |
| 120 kDontSaveFPRegs, | 120 kDontSaveFPRegs, |
| 121 EMIT_REMEMBERED_SET, | 121 EMIT_REMEMBERED_SET, |
| (...skipping 30 matching lines...) | |
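
A note on the allocation size computed in the hunk above: the length in t1 is still smi-tagged, i.e. already shifted left by one on 32-bit targets, so the `sll`/`Addu` pair feeding `AllocateInNewSpace` yields `length * 8 == length * kDoubleSize` plus the header. A minimal sketch of that arithmetic, with constants that mirror V8's 32-bit layout as an assumption:

```cpp
#include <cstdint>

// Assumed 32-bit layout: a smi is the value shifted left by one, a
// FixedDoubleArray header is a map word plus a length word (8 bytes),
// and each element is an unboxed 8-byte double.
constexpr int32_t kDoubleSize = 8;
constexpr int32_t kFixedDoubleArrayHeaderSize = 8;

int32_t FixedDoubleArrayByteSize(int32_t smi_tagged_length) {
  // smi_tagged_length == length << 1, so two more left shifts multiply the
  // untagged length by kDoubleSize, exactly as the sll/Addu pair above does.
  return (smi_tagged_length << 2) + kFixedDoubleArrayHeaderSize;
}
```
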
| 152 // Call into runtime if GC is required. | 152 // Call into runtime if GC is required. |
| 153 __ bind(&gc_required); | 153 __ bind(&gc_required); |
| 154 __ pop(ra); | 154 __ pop(ra); |
| 155 __ Branch(fail); | 155 __ Branch(fail); |
| 156 | 156 |
| 157 // Convert and copy elements. | 157 // Convert and copy elements. |
| 158 __ bind(&loop); | 158 __ bind(&loop); |
| 159 __ lw(t5, MemOperand(a3)); | 159 __ lw(t5, MemOperand(a3)); |
| 160 __ Addu(a3, a3, kIntSize); | 160 __ Addu(a3, a3, kIntSize); |
| 161 // t5: current element | 161 // t5: current element |
| 162 __ JumpIfNotSmi(t5, &convert_hole); | 162 __ UntagAndJumpIfNotSmi(t5, t5, &convert_hole); |
| 163 | 163 |
| 164 // Normal smi, convert to double and store. | 164 // Normal smi, convert to double and store. |
| 165 __ SmiUntag(t5); | |
| 166 if (fpu_supported) { | 165 if (fpu_supported) { |
| 167 CpuFeatures::Scope scope(FPU); | 166 CpuFeatures::Scope scope(FPU); |
| 168 __ mtc1(t5, f0); | 167 __ mtc1(t5, f0); |
| 169 __ cvt_d_w(f0, f0); | 168 __ cvt_d_w(f0, f0); |
| 170 __ sdc1(f0, MemOperand(t3)); | 169 __ sdc1(f0, MemOperand(t3)); |
| 171 __ Addu(t3, t3, kDoubleSize); | 170 __ Addu(t3, t3, kDoubleSize); |
| 172 } else { | 171 } else { |
| 173 FloatingPointHelper::ConvertIntToDouble(masm, | 172 FloatingPointHelper::ConvertIntToDouble(masm, |
| 174 t5, | 173 t5, |
| 175 FloatingPointHelper::kCoreRegisters, | 174 FloatingPointHelper::kCoreRegisters, |
| 176 f0, | 175 f0, |
| 177 a0, | 176 a0, |
| 178 a1, | 177 a1, |
| 179 t7, | 178 t7, |
| 180 f0); | 179 f0); |
| 181 __ sw(a0, MemOperand(t3)); // mantissa | 180 __ sw(a0, MemOperand(t3)); // mantissa |
| 182 __ sw(a1, MemOperand(t3, kIntSize)); // exponent | 181 __ sw(a1, MemOperand(t3, kIntSize)); // exponent |
| 183 __ Addu(t3, t3, kDoubleSize); | 182 __ Addu(t3, t3, kDoubleSize); |
| 184 } | 183 } |
| 185 __ Branch(&entry); | 184 __ Branch(&entry); |
| 186 | 185 |
| 187 // Hole found, store the-hole NaN. | 186 // Hole found, store the-hole NaN. |
| 188 __ bind(&convert_hole); | 187 __ bind(&convert_hole); |
| 189 if (FLAG_debug_code) { | 188 if (FLAG_debug_code) { |
| | 189 // Restore a "smi-untagged" heap object. |
| | 190 __ SmiTag(t5); |
| | 191 __ Or(t5, t5, Operand(1)); |
| 190 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 192 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 191 __ Assert(eq, "object found in smi-only array", at, Operand(t5)); | 193 __ Assert(eq, "object found in smi-only array", at, Operand(t5)); |
| 192 } | 194 } |
| 193 __ sw(t0, MemOperand(t3)); // mantissa | 195 __ sw(t0, MemOperand(t3)); // mantissa |
| 194 __ sw(t1, MemOperand(t3, kIntSize)); // exponent | 196 __ sw(t1, MemOperand(t3, kIntSize)); // exponent |
| 195 __ Addu(t3, t3, kDoubleSize); | 197 __ Addu(t3, t3, kDoubleSize); |
| 196 | 198 |
| 197 __ bind(&entry); | 199 __ bind(&entry); |
| 198 __ Branch(&loop, lt, t3, Operand(t2)); | 200 __ Branch(&loop, lt, t3, Operand(t2)); |
| 199 | 201 |
| (...skipping 18 matching lines...) | |
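
For reference, the smi-to-double loop above amounts to the following per-element conversion. This is a hedged sketch only: the two hole words are placeholders for the "hole NaN" pattern that the skipped setup code loads into t0/t1, and a little-endian layout (as on this MIPS target) is assumed:

```cpp
#include <cstdint>
#include <cstring>

// Placeholder bit pattern for the "hole" double; the real values come from
// the skipped setup code that loads them into t0/t1 (assumption).
constexpr uint32_t kHoleLower32 = 0xFFFFFFFF;  // mantissa word
constexpr uint32_t kHoleUpper32 = 0x7FF7FFFF;  // exponent word

// src: FixedArray element slots (smis or the hole sentinel).
// dst: FixedDoubleArray payload, viewed as pairs of 32-bit words (little endian).
void ConvertSmiElementsToDouble(const int32_t* src, uint32_t* dst, int length) {
  for (int i = 0; i < length; ++i) {
    int32_t element = src[i];
    if ((element & 1) == 0) {
      // Smi: untag (arithmetic shift right by one) and convert to double.
      double d = static_cast<double>(element >> 1);
      std::memcpy(&dst[2 * i], &d, sizeof(d));
    } else {
      // Anything that is not a smi must be the hole; store the hole NaN.
      dst[2 * i] = kHoleLower32;      // mantissa word
      dst[2 * i + 1] = kHoleUpper32;  // exponent word
    }
  }
}
```
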
| 218 __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset)); | 220 __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset)); |
| 219 __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); | 221 __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 220 // t0: source FixedArray | 222 // t0: source FixedArray |
| 221 // t1: number of elements (smi-tagged) | 223 // t1: number of elements (smi-tagged) |
| 222 | 224 |
| 223 // Allocate new FixedArray. | 225 // Allocate new FixedArray. |
| 224 __ sll(a0, t1, 1); | 226 __ sll(a0, t1, 1); |
| 225 __ Addu(a0, a0, FixedDoubleArray::kHeaderSize); | 227 __ Addu(a0, a0, FixedDoubleArray::kHeaderSize); |
| 226 __ AllocateInNewSpace(a0, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS); | 228 __ AllocateInNewSpace(a0, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS); |
| 227 // t2: destination FixedArray, not tagged as heap object | 229 // t2: destination FixedArray, not tagged as heap object |
| | 230 // Set destination FixedDoubleArray's length and map. |
| 228 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); | 231 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); |
| | 232 __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset)); |
| 229 __ sw(t5, MemOperand(t2, HeapObject::kMapOffset)); | 233 __ sw(t5, MemOperand(t2, HeapObject::kMapOffset)); |
| 230 // Set destination FixedDoubleArray's length. | |
| 231 __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset)); | |
| 232 | 234 |
| 233 // Prepare for conversion loop. | 235 // Prepare for conversion loop. |
| 234 __ Addu(t0, t0, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); | 236 __ Addu(t0, t0, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); |
| 235 __ Addu(a3, t2, Operand(FixedArray::kHeaderSize)); | 237 __ Addu(a3, t2, Operand(FixedArray::kHeaderSize)); |
| 236 __ Addu(t2, t2, Operand(kHeapObjectTag)); | 238 __ Addu(t2, t2, Operand(kHeapObjectTag)); |
| 237 __ sll(t1, t1, 1); | 239 __ sll(t1, t1, 1); |
| 238 __ Addu(t1, a3, t1); | 240 __ Addu(t1, a3, t1); |
| 239 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); | 241 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); |
| 240 __ LoadRoot(t5, Heap::kHeapNumberMapRootIndex); | 242 __ LoadRoot(t5, Heap::kHeapNumberMapRootIndex); |
| 241 // Using offsetted addresses. | 243 // Using offsetted addresses. |
| (...skipping 84 matching lines...) | |
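
The "Prepare for conversion loop" part of the hunk above sets up raw cursors for the skipped loop body: the source pointer is biased by +4 so it addresses the upper (exponent) word of each little-endian double, presumably so the hole can be recognized from that word alone, and the destination end pointer is derived from the smi-tagged length (each destination slot is a 4-byte tagged pointer, so the byte distance is smi_length * 2). A rough, illustrative equivalent of that setup:

```cpp
#include <cstdint>

// Illustrative constants for the assumed 32-bit layout.
constexpr uintptr_t kHeapObjectTag = 1;
constexpr uintptr_t kFixedDoubleArrayHeaderSize = 8;
constexpr uintptr_t kFixedArrayHeaderSize = 8;

struct LoopCursors {
  const uint32_t* src_upper_word;  // exponent half of the current source double
  uint32_t* dst;                   // next destination element slot
  uint32_t* dst_end;               // one past the last destination slot
};

LoopCursors PrepareDoubleToObjectLoop(uintptr_t tagged_src,
                                      uintptr_t untagged_dst,
                                      int32_t smi_tagged_length) {
  LoopCursors c;
  c.src_upper_word = reinterpret_cast<const uint32_t*>(
      tagged_src - kHeapObjectTag + kFixedDoubleArrayHeaderSize + 4);
  c.dst = reinterpret_cast<uint32_t*>(untagged_dst + kFixedArrayHeaderSize);
  // smi_tagged_length == length << 1 and each destination slot is 4 bytes wide.
  c.dst_end = c.dst + (smi_tagged_length >> 1);
  return c;
}
```
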
| 326 __ Branch(&check_sequential, eq, at, Operand(zero_reg)); | 328 __ Branch(&check_sequential, eq, at, Operand(zero_reg)); |
| 327 | 329 |
| 328 // Dispatch on the indirect string shape: slice or cons. | 330 // Dispatch on the indirect string shape: slice or cons. |
| 329 Label cons_string; | 331 Label cons_string; |
| 330 __ And(at, result, Operand(kSlicedNotConsMask)); | 332 __ And(at, result, Operand(kSlicedNotConsMask)); |
| 331 __ Branch(&cons_string, eq, at, Operand(zero_reg)); | 333 __ Branch(&cons_string, eq, at, Operand(zero_reg)); |
| 332 | 334 |
| 333 // Handle slices. | 335 // Handle slices. |
| 334 Label indirect_string_loaded; | 336 Label indirect_string_loaded; |
| 335 __ lw(result, FieldMemOperand(string, SlicedString::kOffsetOffset)); | 337 __ lw(result, FieldMemOperand(string, SlicedString::kOffsetOffset)); |
| | 338 __ lw(string, FieldMemOperand(string, SlicedString::kParentOffset)); |
| 336 __ sra(at, result, kSmiTagSize); | 339 __ sra(at, result, kSmiTagSize); |
| 337 __ Addu(index, index, at); | 340 __ Addu(index, index, at); |
| 338 __ lw(string, FieldMemOperand(string, SlicedString::kParentOffset)); | |
| 339 __ jmp(&indirect_string_loaded); | 341 __ jmp(&indirect_string_loaded); |
| 340 | 342 |
| 341 // Handle cons strings. | 343 // Handle cons strings. |
| 342 // Check whether the right hand side is the empty string (i.e. if | 344 // Check whether the right hand side is the empty string (i.e. if |
| 343 // this is really a flat string in a cons string). If that is not | 345 // this is really a flat string in a cons string). If that is not |
| 344 // the case we would rather go to the runtime system now to flatten | 346 // the case we would rather go to the runtime system now to flatten |
| 345 // the string. | 347 // the string. |
| 346 __ bind(&cons_string); | 348 __ bind(&cons_string); |
| 347 __ lw(result, FieldMemOperand(string, ConsString::kSecondOffset)); | 349 __ lw(result, FieldMemOperand(string, ConsString::kSecondOffset)); |
| 348 __ LoadRoot(at, Heap::kEmptyStringRootIndex); | 350 __ LoadRoot(at, Heap::kEmptyStringRootIndex); |
| (...skipping 51 matching lines...) | |
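
The dispatch in the hunk above resolves one level of string indirection before the character is loaded: a sliced string adds its (smi-encoded) offset to the index and redirects to its parent, while a cons string is only followed when its right-hand side is the empty string (the skipped lines then continue with the first component); otherwise the stub bails out to the runtime, which will flatten the string. A schematic version, with made-up types standing in for the real heap layout:

```cpp
#include <cstdint>

// Schematic stand-ins for the heap objects involved (not V8's real layout).
struct String {
  bool is_indirect;
  bool is_sliced;        // otherwise a cons string when indirect
  bool is_empty;
  String* parent;        // slice parent
  int32_t smi_offset;    // slice offset, smi-tagged (value << 1)
  String* first;         // cons first component
  String* second;        // cons second component (empty when already flat)
};

// Returns the string actually holding the character, or nullptr to signal
// "call the runtime" (the slow case handled outside this stub).
String* FollowIndirection(String* string, int32_t* index) {
  if (!string->is_indirect) return string;
  if (string->is_sliced) {
    *index += string->smi_offset >> 1;  // untag the slice offset
    return string->parent;
  }
  // Cons string: only a flat cons (empty second component) is handled here.
  if (!string->second->is_empty) return nullptr;
  return string->first;
}
```
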
| 400 __ Addu(at, string, index); | 402 __ Addu(at, string, index); |
| 401 __ lbu(result, MemOperand(at)); | 403 __ lbu(result, MemOperand(at)); |
| 402 __ bind(&done); | 404 __ bind(&done); |
| 403 } | 405 } |
| 404 | 406 |
| 405 #undef __ | 407 #undef __ |
| 406 | 408 |
| 407 } } // namespace v8::internal | 409 } } // namespace v8::internal |
| 408 | 410 |
| 409 #endif // V8_TARGET_ARCH_MIPS | 411 #endif // V8_TARGET_ARCH_MIPS |