| OLD | NEW | 
|     1 // Copyright 2011 the V8 project authors. All rights reserved. |     1 // Copyright 2011 the V8 project authors. All rights reserved. | 
|     2 // Redistribution and use in source and binary forms, with or without |     2 // Redistribution and use in source and binary forms, with or without | 
|     3 // modification, are permitted provided that the following conditions are |     3 // modification, are permitted provided that the following conditions are | 
|     4 // met: |     4 // met: | 
|     5 // |     5 // | 
|     6 //     * Redistributions of source code must retain the above copyright |     6 //     * Redistributions of source code must retain the above copyright | 
|     7 //       notice, this list of conditions and the following disclaimer. |     7 //       notice, this list of conditions and the following disclaimer. | 
|     8 //     * Redistributions in binary form must reproduce the above |     8 //     * Redistributions in binary form must reproduce the above | 
|     9 //       copyright notice, this list of conditions and the following |     9 //       copyright notice, this list of conditions and the following | 
|    10 //       disclaimer in the documentation and/or other materials provided |    10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 104 matching lines...) |
|   115   __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); |   115   __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); | 
|   116   __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |   116   __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); | 
|   117   __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset)); |   117   __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | 
|   118   __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset)); |   118   __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset)); | 
|   119   __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); |   119   __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); | 
|   120   __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); |   120   __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); | 
|   121   __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset)); |   121   __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset)); | 
|   122   __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); |   122   __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); | 
|   123   __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset)); |   123   __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset)); | 
|   124  |   124  | 
|   125  |  | 
|   126   // Initialize the code pointer in the function to be the one |   125   // Initialize the code pointer in the function to be the one | 
|   127   // found in the shared function info object. |   126   // found in the shared function info object. | 
|   128   __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); |   127   __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); | 
|   129   __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |   128   __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); | 
|   130   __ str(r3, FieldMemOperand(r0, JSFunction::kCodeEntryOffset)); |   129   __ str(r3, FieldMemOperand(r0, JSFunction::kCodeEntryOffset)); | 
|   131  |   130  | 
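
All of these stores go through FieldMemOperand, which folds the heap-object tag out of the offset. Its definition is not part of this diff; a minimal sketch of what it presumably does (treat as an assumption):

    // Sketch (assumption; helper not shown in this CL): tagged object
    // pointers are off by kHeapObjectTag, so field offsets are corrected
    // at the addressing level rather than in each instruction.
    inline MemOperand FieldMemOperand(Register object, int offset) {
      return MemOperand(object, offset - kHeapObjectTag);
    }
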
|   132   // Return result. The argument function info has been popped already. |   131   // Return result. The argument function info has been popped already. | 
|   133   __ Ret(); |   132   __ Ret(); | 
|   134  |   133  | 
|   135   // Create a new closure through the slower runtime call. |   134   // Create a new closure through the slower runtime call. | 
| (...skipping 14 matching lines...) |
|   150                         r0, |   149                         r0, | 
|   151                         r1, |   150                         r1, | 
|   152                         r2, |   151                         r2, | 
|   153                         &gc, |   152                         &gc, | 
|   154                         TAG_OBJECT); |   153                         TAG_OBJECT); | 
|   155  |   154  | 
|   156   // Load the function from the stack. |   155   // Load the function from the stack. | 
|   157   __ ldr(r3, MemOperand(sp, 0)); |   156   __ ldr(r3, MemOperand(sp, 0)); | 
|   158  |   157  | 
|   159   // Set up the object header. |   158   // Set up the object header. | 
|   160   __ LoadRoot(r2, Heap::kFunctionContextMapRootIndex); |   159   __ LoadRoot(r1, Heap::kFunctionContextMapRootIndex); | 
|   161   __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |  | 
|   162   __ mov(r2, Operand(Smi::FromInt(length))); |   160   __ mov(r2, Operand(Smi::FromInt(length))); | 
|   163   __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); |   161   __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); | 
 |   162   __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | 
|   164  |   163  | 
|   165   // Set up the fixed slots. |   164   // Set up the fixed slots; copy the global object from the previous context. | 
 |   165   __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 
|   166   __ mov(r1, Operand(Smi::FromInt(0))); |   166   __ mov(r1, Operand(Smi::FromInt(0))); | 
|   167   __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX))); |   167   __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX))); | 
|   168   __ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX))); |   168   __ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 
|   169   __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX))); |   169   __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX))); | 
|   170  |   170   __ str(r2, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX))); | 
|   171   // Copy the global object from the previous context. |  | 
|   172   __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |  | 
|   173   __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX))); |  | 
|   174  |   171  | 
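
Folding the global-object copy into the fixed-slot setup lets the ldr issue several instructions before the final str consumes its result, hiding the load latency. The slot addressing relies on Context::SlotOffset; its definition is not in this diff, but it presumably amounts to:

    // Sketch (assumption): context slots follow the FixedArray header,
    // and the tag is folded in because cp holds a tagged pointer.
    static int SlotOffset(int index) {
      return FixedArray::kHeaderSize + index * kPointerSize - kHeapObjectTag;
    }
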
|   175   // Initialize the rest of the slots to undefined. |   172   // Initialize the rest of the slots to undefined. | 
|   176   __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); |   173   __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); | 
|   177   for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { |   174   for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | 
|   178     __ str(r1, MemOperand(r0, Context::SlotOffset(i))); |   175     __ str(r1, MemOperand(r0, Context::SlotOffset(i))); | 
|   179   } |   176   } | 
|   180  |   177  | 
|   181   // Remove the on-stack argument and return. |   178   // Remove the on-stack argument and return. | 
|   182   __ mov(cp, r0); |   179   __ mov(cp, r0); | 
|   183   __ pop(); |   180   __ pop(); | 
| (...skipping 38 matching lines...) |
|   222   if (FLAG_debug_code) { |   219   if (FLAG_debug_code) { | 
|   223     const char* message = "Expected 0 as a Smi sentinel"; |   220     const char* message = "Expected 0 as a Smi sentinel"; | 
|   224     __ cmp(r3, Operand::Zero()); |   221     __ cmp(r3, Operand::Zero()); | 
|   225     __ Assert(eq, message); |   222     __ Assert(eq, message); | 
|   226   } |   223   } | 
|   227   __ ldr(r3, GlobalObjectOperand()); |   224   __ ldr(r3, GlobalObjectOperand()); | 
|   228   __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset)); |   225   __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset)); | 
|   229   __ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX)); |   226   __ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX)); | 
|   230   __ bind(&after_sentinel); |   227   __ bind(&after_sentinel); | 
|   231  |   228  | 
|   232   // Set up the fixed slots. |   229   // Set up the fixed slots; copy the global object from the previous context. | 
 |   230   __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX)); | 
|   233   __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX)); |   231   __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX)); | 
|   234   __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX)); |   232   __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX)); | 
|   235   __ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX)); |   233   __ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX)); | 
|   236  |   234   __ str(r2, ContextOperand(r0, Context::GLOBAL_INDEX)); | 
|   237   // Copy the global object from the previous context. |  | 
|   238   __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX)); |  | 
|   239   __ str(r1, ContextOperand(r0, Context::GLOBAL_INDEX)); |  | 
|   240  |   235  | 
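
The block-context version makes the same transformation through ContextOperand, which is shorthand for the MemOperand form used above (sketch; assumption, definition not shown in this CL):

    inline MemOperand ContextOperand(Register context, int index) {
      return MemOperand(context, Context::SlotOffset(index));
    }
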
|   241   // Initialize the rest of the slots to the hole value. |   236   // Initialize the rest of the slots to the hole value. | 
|   242   __ LoadRoot(r1, Heap::kTheHoleValueRootIndex); |   237   __ LoadRoot(r1, Heap::kTheHoleValueRootIndex); | 
|   243   for (int i = 0; i < slots_; i++) { |   238   for (int i = 0; i < slots_; i++) { | 
|   244     __ str(r1, ContextOperand(r0, i + Context::MIN_CONTEXT_SLOTS)); |   239     __ str(r1, ContextOperand(r0, i + Context::MIN_CONTEXT_SLOTS)); | 
|   245   } |   240   } | 
|   246  |   241  | 
|   247   // Remove the on-stack argument and return. |   242   // Remove the on-stack argument and return. | 
|   248   __ mov(cp, r0); |   243   __ mov(cp, r0); | 
|   249   __ add(sp, sp, Operand(2 * kPointerSize)); |   244   __ add(sp, sp, Operand(2 * kPointerSize)); | 
| (...skipping 69 matching lines...) |
|   319   __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |   314   __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 
|   320   __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |   315   __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); | 
|   321   __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |   316   __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 
|   322   __ b(eq, &slow_case); |   317   __ b(eq, &slow_case); | 
|   323  |   318  | 
|   324   FastCloneShallowArrayStub::Mode mode = mode_; |   319   FastCloneShallowArrayStub::Mode mode = mode_; | 
|   325   if (mode == CLONE_ANY_ELEMENTS) { |   320   if (mode == CLONE_ANY_ELEMENTS) { | 
|   326     Label double_elements, check_fast_elements; |   321     Label double_elements, check_fast_elements; | 
|   327     __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset)); |   322     __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset)); | 
|   328     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |   323     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); | 
|   329     __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex); |   324     __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex); | 
|   330     __ cmp(r0, ip); |  | 
|   331     __ b(ne, &check_fast_elements); |   325     __ b(ne, &check_fast_elements); | 
|   332     GenerateFastCloneShallowArrayCommon(masm, 0, |   326     GenerateFastCloneShallowArrayCommon(masm, 0, | 
|   333                                         COPY_ON_WRITE_ELEMENTS, &slow_case); |   327                                         COPY_ON_WRITE_ELEMENTS, &slow_case); | 
|   334     // Return and remove the on-stack parameters. |   328     // Return and remove the on-stack parameters. | 
|   335     __ add(sp, sp, Operand(3 * kPointerSize)); |   329     __ add(sp, sp, Operand(3 * kPointerSize)); | 
|   336     __ Ret(); |   330     __ Ret(); | 
|   337  |   331  | 
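
This hunk and several below replace a LoadRoot/cmp pair with a single CompareRoot call. The helper already exists in the ARM macro assembler; a sketch of what it presumably expands to, with ip as the conventional scratch register (assumption):

    void MacroAssembler::CompareRoot(Register obj, Heap::RootListIndex index) {
      ASSERT(!obj.is(ip));  // ip is clobbered as the scratch for the root.
      LoadRoot(ip, index);
      cmp(obj, ip);
    }
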
|   338     __ bind(&check_fast_elements); |   332     __ bind(&check_fast_elements); | 
|   339     __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |   333     __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex); | 
|   340     __ cmp(r0, ip); |  | 
|   341     __ b(ne, &double_elements); |   334     __ b(ne, &double_elements); | 
|   342     GenerateFastCloneShallowArrayCommon(masm, length_, |   335     GenerateFastCloneShallowArrayCommon(masm, length_, | 
|   343                                         CLONE_ELEMENTS, &slow_case); |   336                                         CLONE_ELEMENTS, &slow_case); | 
|   344     // Return and remove the on-stack parameters. |   337     // Return and remove the on-stack parameters. | 
|   345     __ add(sp, sp, Operand(3 * kPointerSize)); |   338     __ add(sp, sp, Operand(3 * kPointerSize)); | 
|   346     __ Ret(); |   339     __ Ret(); | 
|   347  |   340  | 
|   348     __ bind(&double_elements); |   341     __ bind(&double_elements); | 
|   349     mode = CLONE_DOUBLE_ELEMENTS; |   342     mode = CLONE_DOUBLE_ELEMENTS; | 
|   350     // Fall through to generate the code to handle double elements. |   343     // Fall through to generate the code to handle double elements. | 
| (...skipping 232 matching lines...) |
|   583                                      Register scratch2, |   576                                      Register scratch2, | 
|   584                                      Label* not_number) { |   577                                      Label* not_number) { | 
|   585   if (FLAG_debug_code) { |   578   if (FLAG_debug_code) { | 
|   586     __ AbortIfNotRootValue(heap_number_map, |   579     __ AbortIfNotRootValue(heap_number_map, | 
|   587                            Heap::kHeapNumberMapRootIndex, |   580                            Heap::kHeapNumberMapRootIndex, | 
|   588                            "HeapNumberMap register clobbered."); |   581                            "HeapNumberMap register clobbered."); | 
|   589   } |   582   } | 
|   590  |   583  | 
|   591   Label is_smi, done; |   584   Label is_smi, done; | 
|   592  |   585  | 
|   593   __ JumpIfSmi(object, &is_smi); |   586   // Smi check. | 
 |   587   __ UntagAndJumpIfSmi(scratch1, object, &is_smi); | 
 |   588   // Heap number check. | 
|   594   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number); |   589   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number); | 
|   595  |   590  | 
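
UntagAndJumpIfSmi is the new helper this CL threads through the file. Its definition is not shown here; assuming kSmiTag == 0 and kSmiTagSize == 1 (as STATIC_ASSERTs elsewhere in this file state), a plausible expansion is:

    // Sketch (assumption; definition not part of this diff).
    void MacroAssembler::UntagAndJumpIfSmi(Register dst,
                                           Register src,
                                           Label* smi_case) {
      // ASR by the tag size shifts the tag bit into the shifter carry.
      mov(dst, Operand(src, ASR, kSmiTagSize), SetCC);
      // Carry clear means the tag bit was 0, i.e. src was a smi; dst
      // already holds the untagged value at the jump target.
      b(cc, smi_case);
    }
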
|   596   // Handle loading a double from a heap number. |   591   // Handle loading a double from a heap number. | 
|   597   if (CpuFeatures::IsSupported(VFP3) && |   592   if (CpuFeatures::IsSupported(VFP3) && | 
|   598       destination == kVFPRegisters) { |   593       destination == kVFPRegisters) { | 
|   599     CpuFeatures::Scope scope(VFP3); |   594     CpuFeatures::Scope scope(VFP3); | 
|   600     // Load the double from tagged HeapNumber to double register. |   595     // Load the double from tagged HeapNumber to double register. | 
|   601     __ sub(scratch1, object, Operand(kHeapObjectTag)); |   596     __ sub(scratch1, object, Operand(kHeapObjectTag)); | 
|   602     __ vldr(dst, scratch1, HeapNumber::kValueOffset); |   597     __ vldr(dst, scratch1, HeapNumber::kValueOffset); | 
|   603   } else { |   598   } else { | 
|   604     ASSERT(destination == kCoreRegisters); |   599     ASSERT(destination == kCoreRegisters); | 
|   605     // Load the double from heap number to dst1 and dst2 in double format. |   600     // Load the double from heap number to dst1 and dst2 in double format. | 
|   606     __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset)); |   601     __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset)); | 
|   607   } |   602   } | 
|   608   __ jmp(&done); |   603   __ jmp(&done); | 
|   609  |   604  | 
|   610   // Handle loading a double from a smi. |   605   // Handle loading a double from a smi. | 
|   611   __ bind(&is_smi); |   606   __ bind(&is_smi); | 
|   612   if (CpuFeatures::IsSupported(VFP3)) { |   607   if (CpuFeatures::IsSupported(VFP3)) { | 
|   613     CpuFeatures::Scope scope(VFP3); |   608     CpuFeatures::Scope scope(VFP3); | 
|   614     // Convert smi to double using VFP instructions. |   609     // Convert smi to double using VFP instructions. | 
|   615     __ SmiUntag(scratch1, object); |  | 
|   616     __ vmov(dst.high(), scratch1); |   610     __ vmov(dst.high(), scratch1); | 
|   617     __ vcvt_f64_s32(dst, dst.high()); |   611     __ vcvt_f64_s32(dst, dst.high()); | 
|   618     if (destination == kCoreRegisters) { |   612     if (destination == kCoreRegisters) { | 
|   619       // Load the converted smi to dst1 and dst2 in double format. |   613       // Load the converted smi to dst1 and dst2 in double format. | 
|   620       __ vmov(dst1, dst2, dst); |   614       __ vmov(dst1, dst2, dst); | 
|   621     } |   615     } | 
|   622   } else { |   616   } else { | 
|   623     ASSERT(destination == kCoreRegisters); |   617     ASSERT(destination == kCoreRegisters); | 
|   624     // Write smi to dst1 and dst2 double format. |   618     // Write smi to dst1 and dst2 double format. | 
|   625     __ mov(scratch1, Operand(object)); |   619     __ mov(scratch1, Operand(object)); | 
| (...skipping 14 matching lines...) |
|   640                                                Register scratch1, |   634                                                Register scratch1, | 
|   641                                                Register scratch2, |   635                                                Register scratch2, | 
|   642                                                Register scratch3, |   636                                                Register scratch3, | 
|   643                                                DwVfpRegister double_scratch, |   637                                                DwVfpRegister double_scratch, | 
|   644                                                Label* not_number) { |   638                                                Label* not_number) { | 
|   645   if (FLAG_debug_code) { |   639   if (FLAG_debug_code) { | 
|   646     __ AbortIfNotRootValue(heap_number_map, |   640     __ AbortIfNotRootValue(heap_number_map, | 
|   647                            Heap::kHeapNumberMapRootIndex, |   641                            Heap::kHeapNumberMapRootIndex, | 
|   648                            "HeapNumberMap register clobbered."); |   642                            "HeapNumberMap register clobbered."); | 
|   649   } |   643   } | 
|   650   Label is_smi; |  | 
|   651   Label done; |   644   Label done; | 
|   652   Label not_in_int32_range; |   645   Label not_in_int32_range; | 
|   653  |   646  | 
|   654   __ JumpIfSmi(object, &is_smi); |   647   __ UntagAndJumpIfSmi(dst, object, &done); | 
|   655   __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset)); |   648   __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset)); | 
|   656   __ cmp(scratch1, heap_number_map); |   649   __ cmp(scratch1, heap_number_map); | 
|   657   __ b(ne, not_number); |   650   __ b(ne, not_number); | 
|   658   __ ConvertToInt32(object, |   651   __ ConvertToInt32(object, | 
|   659                     dst, |   652                     dst, | 
|   660                     scratch1, |   653                     scratch1, | 
|   661                     scratch2, |   654                     scratch2, | 
|   662                     double_scratch, |   655                     double_scratch, | 
|   663                     &not_in_int32_range); |   656                     &not_in_int32_range); | 
|   664   __ jmp(&done); |   657   __ jmp(&done); | 
|   665  |   658  | 
|   666   __ bind(&not_in_int32_range); |   659   __ bind(&not_in_int32_range); | 
|   667   __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); |   660   __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); | 
|   668   __ ldr(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset)); |   661   __ ldr(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset)); | 
|   669  |   662  | 
|   670   __ EmitOutOfInt32RangeTruncate(dst, |   663   __ EmitOutOfInt32RangeTruncate(dst, | 
|   671                                  scratch1, |   664                                  scratch1, | 
|   672                                  scratch2, |   665                                  scratch2, | 
|   673                                  scratch3); |   666                                  scratch3); | 
|   674   __ jmp(&done); |  | 
|   675  |  | 
|   676   __ bind(&is_smi); |  | 
|   677   __ SmiUntag(dst, object); |  | 
|   678   __ bind(&done); |   667   __ bind(&done); | 
|   679 } |   668 } | 
|   680  |   669  | 
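
The change in this function is the mechanical pattern repeated through the rest of the CL: the helper unifies the smi test and the untag, so the dedicated is_smi block disappears (register names illustrative):

    // Before: separate test, jump, and untag on a dedicated path.
    __ JumpIfSmi(object, &is_smi);
    // ... heap-number path ...
    __ jmp(&done);
    __ bind(&is_smi);
    __ SmiUntag(dst, object);
    __ bind(&done);

    // After: one macro; the smi path exits straight to done, dst untagged.
    __ UntagAndJumpIfSmi(dst, object, &done);
    // ... heap-number path falls through ...
    __ bind(&done);
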
|   681  |   670  | 
|   682 void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm, |   671 void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm, | 
|   683                                              Register int_scratch, |   672                                              Register int_scratch, | 
|   684                                              Destination destination, |   673                                              Destination destination, | 
|   685                                              DwVfpRegister double_dst, |   674                                              DwVfpRegister double_dst, | 
|   686                                              Register dst1, |   675                                              Register dst1, | 
|   687                                              Register dst2, |   676                                              Register dst2, | 
| (...skipping 152 matching lines...) |
|   840                                             DwVfpRegister double_scratch, |   829                                             DwVfpRegister double_scratch, | 
|   841                                             Label* not_int32) { |   830                                             Label* not_int32) { | 
|   842   ASSERT(!dst.is(object)); |   831   ASSERT(!dst.is(object)); | 
|   843   ASSERT(!scratch1.is(object) && !scratch2.is(object) && !scratch3.is(object)); |   832   ASSERT(!scratch1.is(object) && !scratch2.is(object) && !scratch3.is(object)); | 
|   844   ASSERT(!scratch1.is(scratch2) && |   833   ASSERT(!scratch1.is(scratch2) && | 
|   845          !scratch1.is(scratch3) && |   834          !scratch1.is(scratch3) && | 
|   846          !scratch2.is(scratch3)); |   835          !scratch2.is(scratch3)); | 
|   847  |   836  | 
|   848   Label done; |   837   Label done; | 
|   849  |   838  | 
|   850   // Untag the object into the destination register. |   839   __ UntagAndJumpIfSmi(dst, object, &done); | 
|   851   __ SmiUntag(dst, object); |  | 
|   852   // Just return if the object is a smi. |  | 
|   853   __ JumpIfSmi(object, &done); |  | 
|   854  |   840  | 
|   855   if (FLAG_debug_code) { |   841   if (FLAG_debug_code) { | 
|   856     __ AbortIfNotRootValue(heap_number_map, |   842     __ AbortIfNotRootValue(heap_number_map, | 
|   857                            Heap::kHeapNumberMapRootIndex, |   843                            Heap::kHeapNumberMapRootIndex, | 
|   858                            "HeapNumberMap register clobbered."); |   844                            "HeapNumberMap register clobbered."); | 
|   859   } |   845   } | 
|   860   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32); |   846   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32); | 
|   861  |   847  | 
|   862   // Object is a heap number. |   848   // Object is a heap number. | 
|   863   // Convert the floating point value to a 32-bit integer. |   849   // Convert the floating point value to a 32-bit integer. | 
| (...skipping 2439 matching lines...) |
|  3303       CHECK_EQ(2 * kIntSize, elem_out - elem_start); |  3289       CHECK_EQ(2 * kIntSize, elem_out - elem_start); | 
|  3304     } |  3290     } | 
|  3305 #endif |  3291 #endif | 
|  3306  |  3292  | 
|  3307     // Find the address of the r1'st entry in the cache, i.e., &r0[r1*12]. |  3293     // Find the address of the r1'st entry in the cache, i.e., &r0[r1*12]. | 
|  3308     __ add(r1, r1, Operand(r1, LSL, 1)); |  3294     __ add(r1, r1, Operand(r1, LSL, 1)); | 
|  3309     __ add(cache_entry, cache_entry, Operand(r1, LSL, 2)); |  3295     __ add(cache_entry, cache_entry, Operand(r1, LSL, 2)); | 
|  3310     // Check if cache matches: Double value is stored in uint32_t[2] array. |  3296     // Check if cache matches: Double value is stored in uint32_t[2] array. | 
|  3311     __ ldm(ia, cache_entry, r4.bit() | r5.bit() | r6.bit()); |  3297     __ ldm(ia, cache_entry, r4.bit() | r5.bit() | r6.bit()); | 
|  3312     __ cmp(r2, r4); |  3298     __ cmp(r2, r4); | 
|  3313     __ b(ne, &calculate); |  3299     __ cmp(r3, r5, eq); | 
|  3314     __ cmp(r3, r5); |  | 
|  3315     __ b(ne, &calculate); |  3300     __ b(ne, &calculate); | 
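
The fused form relies on ARM conditional execution rather than a second branch: the second cmp is predicated on eq, so it executes only when the low words matched; otherwise the ne result of the first compare survives to the branch. Annotated:

    __ cmp(r2, r4);        // Compare low words of the cached double.
    __ cmp(r3, r5, eq);    // Executed only if the low words were equal.
    __ b(ne, &calculate);  // Taken if either word pair differed.
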
|  3316     // Cache hit. Load result, cleanup and return. |  3301     // Cache hit. Load result, cleanup and return. | 
|  3317     Counters* counters = masm->isolate()->counters(); |  3302     Counters* counters = masm->isolate()->counters(); | 
|  3318     __ IncrementCounter( |  3303     __ IncrementCounter( | 
|  3319         counters->transcendental_cache_hit(), 1, scratch0, scratch1); |  3304         counters->transcendental_cache_hit(), 1, scratch0, scratch1); | 
|  3320     if (tagged) { |  3305     if (tagged) { | 
|  3321       // Pop input value from stack and load result into r0. |  3306       // Pop input value from stack and load result into r0. | 
|  3322       __ pop(); |  3307       __ pop(); | 
|  3323       __ mov(r0, Operand(r6)); |  3308       __ mov(r0, Operand(r6)); | 
|  3324     } else { |  3309     } else { | 
| (...skipping 136 matching lines...) |
|  3461   const Register heapnumbermap = r5; |  3446   const Register heapnumbermap = r5; | 
|  3462   const Register heapnumber = r0; |  3447   const Register heapnumber = r0; | 
|  3463   const DoubleRegister double_base = d1; |  3448   const DoubleRegister double_base = d1; | 
|  3464   const DoubleRegister double_exponent = d2; |  3449   const DoubleRegister double_exponent = d2; | 
|  3465   const DoubleRegister double_result = d3; |  3450   const DoubleRegister double_result = d3; | 
|  3466   const DoubleRegister double_scratch = d0; |  3451   const DoubleRegister double_scratch = d0; | 
|  3467   const SwVfpRegister single_scratch = s0; |  3452   const SwVfpRegister single_scratch = s0; | 
|  3468   const Register scratch = r9; |  3453   const Register scratch = r9; | 
|  3469   const Register scratch2 = r7; |  3454   const Register scratch2 = r7; | 
|  3470  |  3455  | 
|  3471   Label call_runtime, done, exponent_not_smi, int_exponent; |  3456   Label call_runtime, done, int_exponent; | 
|  3472   if (exponent_type_ == ON_STACK) { |  3457   if (exponent_type_ == ON_STACK) { | 
|  3473     Label base_is_smi, unpack_exponent; |  3458     Label base_is_smi, unpack_exponent; | 
|  3474     // The exponent and base are supplied as arguments on the stack. |  3459     // The exponent and base are supplied as arguments on the stack. | 
|  3475     // This can only happen if the stub is called from non-optimized code. |  3460     // This can only happen if the stub is called from non-optimized code. | 
|  3476     // Load input parameters from stack to double registers. |  3461     // Load input parameters from stack to double registers. | 
|  3477     __ ldr(base, MemOperand(sp, 1 * kPointerSize)); |  3462     __ ldr(base, MemOperand(sp, 1 * kPointerSize)); | 
|  3478     __ ldr(exponent, MemOperand(sp, 0 * kPointerSize)); |  3463     __ ldr(exponent, MemOperand(sp, 0 * kPointerSize)); | 
|  3479  |  3464  | 
|  3480     __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex); |  3465     __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex); | 
|  3481  |  3466  | 
|  3482     __ JumpIfSmi(base, &base_is_smi); |  3467     __ UntagAndJumpIfSmi(scratch, base, &base_is_smi); | 
|  3483     __ ldr(scratch, FieldMemOperand(base, JSObject::kMapOffset)); |  3468     __ ldr(scratch, FieldMemOperand(base, JSObject::kMapOffset)); | 
|  3484     __ cmp(scratch, heapnumbermap); |  3469     __ cmp(scratch, heapnumbermap); | 
|  3485     __ b(ne, &call_runtime); |  3470     __ b(ne, &call_runtime); | 
|  3486  |  3471  | 
|  3487     __ vldr(double_base, FieldMemOperand(base, HeapNumber::kValueOffset)); |  3472     __ vldr(double_base, FieldMemOperand(base, HeapNumber::kValueOffset)); | 
|  3488     __ jmp(&unpack_exponent); |  3473     __ jmp(&unpack_exponent); | 
|  3489  |  3474  | 
|  3490     __ bind(&base_is_smi); |  3475     __ bind(&base_is_smi); | 
|  3491     __ SmiUntag(base); |  3476     __ vmov(single_scratch, scratch); | 
|  3492     __ vmov(single_scratch, base); |  | 
|  3493     __ vcvt_f64_s32(double_base, single_scratch); |  3477     __ vcvt_f64_s32(double_base, single_scratch); | 
|  3494     __ bind(&unpack_exponent); |  3478     __ bind(&unpack_exponent); | 
|  3495  |  3479  | 
|  3496     __ JumpIfNotSmi(exponent, &exponent_not_smi); |  3480     __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); | 
|  3497     __ SmiUntag(exponent); |  | 
|  3498     __ jmp(&int_exponent); |  | 
|  3499  |  3481  | 
|  3500     __ bind(&exponent_not_smi); |  | 
|  3501     __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset)); |  3482     __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset)); | 
|  3502     __ cmp(scratch, heapnumbermap); |  3483     __ cmp(scratch, heapnumbermap); | 
|  3503     __ b(ne, &call_runtime); |  3484     __ b(ne, &call_runtime); | 
|  3504     __ vldr(double_exponent, |  3485     __ vldr(double_exponent, | 
|  3505             FieldMemOperand(exponent, HeapNumber::kValueOffset)); |  3486             FieldMemOperand(exponent, HeapNumber::kValueOffset)); | 
|  3506   } else if (exponent_type_ == TAGGED) { |  3487   } else if (exponent_type_ == TAGGED) { | 
|  3507     // Base is already in double_base. |  3488     // Base is already in double_base. | 
|  3508     __ JumpIfNotSmi(exponent, &exponent_not_smi); |  3489     __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); | 
|  3509     __ SmiUntag(exponent); |  | 
|  3510     __ jmp(&int_exponent); |  | 
|  3511  |  3490  | 
|  3512     __ bind(&exponent_not_smi); |  | 
|  3513     __ vldr(double_exponent, |  3491     __ vldr(double_exponent, | 
|  3514             FieldMemOperand(exponent, HeapNumber::kValueOffset)); |  3492             FieldMemOperand(exponent, HeapNumber::kValueOffset)); | 
|  3515   } |  3493   } | 
|  3516  |  3494  | 
|  3517   if (exponent_type_ != INTEGER) { |  3495   if (exponent_type_ != INTEGER) { | 
|  3518     Label int_exponent_convert; |  3496     Label int_exponent_convert; | 
|  3519     // Detect integer exponents stored as double. |  3497     // Detect integer exponents stored as double. | 
|  3520     __ vcvt_u32_f64(single_scratch, double_exponent); |  3498     __ vcvt_u32_f64(single_scratch, double_exponent); | 
|  3521     // We do not check for NaN or Infinity here because comparing numbers on |  3499     // We do not check for NaN or Infinity here because comparing numbers on | 
|  3522     // ARM correctly distinguishes NaNs.  We end up calling the built-in. |  3500     // ARM correctly distinguishes NaNs.  We end up calling the built-in. | 
| (...skipping 52 matching lines...) |
|  3575       __ CallCFunction( |  3553       __ CallCFunction( | 
|  3576           ExternalReference::power_double_double_function(masm->isolate()), |  3554           ExternalReference::power_double_double_function(masm->isolate()), | 
|  3577           0, 2); |  3555           0, 2); | 
|  3578     } |  3556     } | 
|  3579     __ pop(lr); |  3557     __ pop(lr); | 
|  3580     __ GetCFunctionDoubleResult(double_result); |  3558     __ GetCFunctionDoubleResult(double_result); | 
|  3581     __ jmp(&done); |  3559     __ jmp(&done); | 
|  3582  |  3560  | 
|  3583     __ bind(&int_exponent_convert); |  3561     __ bind(&int_exponent_convert); | 
|  3584     __ vcvt_u32_f64(single_scratch, double_exponent); |  3562     __ vcvt_u32_f64(single_scratch, double_exponent); | 
|  3585     __ vmov(exponent, single_scratch); |  3563     __ vmov(scratch, single_scratch); | 
|  3586   } |  3564   } | 
|  3587  |  3565  | 
|  3588   // Calculate power with integer exponent. |  3566   // Calculate power with integer exponent. | 
|  3589   __ bind(&int_exponent); |  3567   __ bind(&int_exponent); | 
|  3590  |  3568   // Exponent has been stored into scratch as an untagged integer. | 
|  3591   __ mov(scratch, exponent);  // Back up exponent. |  3569   __ mov(exponent, scratch);  // Back up exponent. | 
|  3592   __ vmov(double_scratch, double_base);  // Back up base. |  3570   __ vmov(double_scratch, double_base);  // Back up base. | 
|  3593   __ vmov(double_result, 1.0); |  3571   __ vmov(double_result, 1.0); | 
|  3594  |  3572  | 
|  3595   // Get absolute value of exponent. |  3573   // Get absolute value of exponent. | 
|  3596   __ cmp(scratch, Operand(0)); |  3574   __ cmp(scratch, Operand(0)); | 
|  3597   __ mov(scratch2, Operand(0), LeaveCC, mi); |  3575   __ mov(scratch2, Operand(0), LeaveCC, mi); | 
|  3598   __ sub(scratch, scratch2, scratch, LeaveCC, mi); |  3576   __ sub(scratch, scratch2, scratch, LeaveCC, mi); | 
|  3599  |  3577  | 
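
The absolute-value sequence just above is the same predication idiom: both instructions are conditional on mi, so they are no-ops for a non-negative exponent and compute 0 - scratch for a negative one. For scratch = -3: the cmp sets mi, scratch2 becomes 0, and scratch becomes 0 - (-3) = 3. Annotated:

    __ cmp(scratch, Operand(0));                      // N flag <- sign.
    __ mov(scratch2, Operand(0), LeaveCC, mi);        // scratch2 = 0, if negative.
    __ sub(scratch, scratch2, scratch, LeaveCC, mi);  // scratch = -scratch.
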
|  3600   Label while_true; |  3578   Label while_true; | 
|  3601   __ bind(&while_true); |  3579   __ bind(&while_true); | 
| (...skipping 489 matching lines...) |
|  4091   } |  4069   } | 
|  4092  |  4070  | 
|  4093   // Check that the left hand is a JS object and load map. |  4071   // Check that the left hand is a JS object and load map. | 
|  4094   __ JumpIfSmi(object, &not_js_object); |  4072   __ JumpIfSmi(object, &not_js_object); | 
|  4095   __ IsObjectJSObjectType(object, map, scratch, &not_js_object); |  4073   __ IsObjectJSObjectType(object, map, scratch, &not_js_object); | 
|  4096  |  4074  | 
|  4097   // If there is a call site cache don't look in the global cache, but do the |  4075   // If there is a call site cache don't look in the global cache, but do the | 
|  4098   // real lookup and update the call site cache. |  4076   // real lookup and update the call site cache. | 
|  4099   if (!HasCallSiteInlineCheck()) { |  4077   if (!HasCallSiteInlineCheck()) { | 
|  4100     Label miss; |  4078     Label miss; | 
|  4101     __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex); |  4079     __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex); | 
|  4102     __ cmp(function, ip); |  | 
|  4103     __ b(ne, &miss); |  4080     __ b(ne, &miss); | 
|  4104     __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex); |  4081     __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex); | 
|  4105     __ cmp(map, ip); |  | 
|  4106     __ b(ne, &miss); |  4082     __ b(ne, &miss); | 
|  4107     __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |  4083     __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | 
|  4108     __ Ret(HasArgsInRegisters() ? 0 : 2); |  4084     __ Ret(HasArgsInRegisters() ? 0 : 2); | 
|  4109  |  4085  | 
|  4110     __ bind(&miss); |  4086     __ bind(&miss); | 
|  4111   } |  4087   } | 
|  4112  |  4088  | 
|  4113   // Get the prototype of the function. |  4089   // Get the prototype of the function. | 
|  4114   __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); |  4090   __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); | 
|  4115  |  4091  | 
| (...skipping 604 matching lines...) |
|  4720   // regexp_data: RegExp data (FixedArray) |  4696   // regexp_data: RegExp data (FixedArray) | 
|  4721   // Check that the fourth object is a JSArray object. |  4697   // Check that the fourth object is a JSArray object. | 
|  4722   __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset)); |  4698   __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset)); | 
|  4723   __ JumpIfSmi(r0, &runtime); |  4699   __ JumpIfSmi(r0, &runtime); | 
|  4724   __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); |  4700   __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); | 
|  4725   __ b(ne, &runtime); |  4701   __ b(ne, &runtime); | 
|  4726   // Check that the JSArray is in fast case. |  4702   // Check that the JSArray is in fast case. | 
|  4727   __ ldr(last_match_info_elements, |  4703   __ ldr(last_match_info_elements, | 
|  4728          FieldMemOperand(r0, JSArray::kElementsOffset)); |  4704          FieldMemOperand(r0, JSArray::kElementsOffset)); | 
|  4729   __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); |  4705   __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); | 
|  4730   __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |  4706   __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex); | 
|  4731   __ cmp(r0, ip); |  | 
|  4732   __ b(ne, &runtime); |  4707   __ b(ne, &runtime); | 
|  4733   // Check that the last match info has space for the capture registers and the |  4708   // Check that the last match info has space for the capture registers and the | 
|  4734   // additional information. |  4709   // additional information. | 
|  4735   __ ldr(r0, |  4710   __ ldr(r0, | 
|  4736          FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset)); |  4711          FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset)); | 
|  4737   __ add(r2, r2, Operand(RegExpImpl::kLastMatchOverhead)); |  4712   __ add(r2, r2, Operand(RegExpImpl::kLastMatchOverhead)); | 
|  4738   __ cmp(r2, Operand(r0, ASR, kSmiTagSize)); |  4713   __ cmp(r2, Operand(r0, ASR, kSmiTagSize)); | 
|  4739   __ b(gt, &runtime); |  4714   __ b(gt, &runtime); | 
|  4740  |  4715  | 
|  4741   // Reset offset for possibly sliced string. |  4716   // Reset offset for possibly sliced string. | 
| (...skipping 333 matching lines...) |
|  5075   __ add(r3, r0, Operand(JSRegExpResult::kSize)); |  5050   __ add(r3, r0, Operand(JSRegExpResult::kSize)); | 
|  5076   __ mov(r4, Operand(factory->empty_fixed_array())); |  5051   __ mov(r4, Operand(factory->empty_fixed_array())); | 
|  5077   __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset)); |  5052   __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset)); | 
|  5078   __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset)); |  5053   __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset)); | 
|  5079   __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX)); |  5054   __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX)); | 
|  5080   __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); |  5055   __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | 
|  5081   __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |  5056   __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); | 
|  5082  |  5057  | 
|  5083   // Set input, index and length fields from arguments. |  5058   // Set input, index and length fields from arguments. | 
|  5084   __ ldr(r1, MemOperand(sp, kPointerSize * 0)); |  5059   __ ldr(r1, MemOperand(sp, kPointerSize * 0)); | 
 |  5060   __ ldr(r2, MemOperand(sp, kPointerSize * 1)); | 
 |  5061   __ ldr(r6, MemOperand(sp, kPointerSize * 2)); | 
|  5085   __ str(r1, FieldMemOperand(r0, JSRegExpResult::kInputOffset)); |  5062   __ str(r1, FieldMemOperand(r0, JSRegExpResult::kInputOffset)); | 
|  5086   __ ldr(r1, MemOperand(sp, kPointerSize * 1)); |  5063   __ str(r2, FieldMemOperand(r0, JSRegExpResult::kIndexOffset)); | 
|  5087   __ str(r1, FieldMemOperand(r0, JSRegExpResult::kIndexOffset)); |  5064   __ str(r6, FieldMemOperand(r0, JSArray::kLengthOffset)); | 
|  5088   __ ldr(r1, MemOperand(sp, kPointerSize * 2)); |  | 
|  5089   __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset)); |  | 
|  5090  |  5065  | 
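
Grouping the three argument loads ahead of the stores is presumably a scheduling fix: a load result is not available to the immediately following instruction on the ARM cores of interest, so each of the old ldr/str pairs stalled once. With independent destination registers the loads issue back to back and the latency is hidden:

    // Schematic of the reordering (registers as in the new code):
    //   before: ldr r1 ; str r1 ; ldr r1 ; str r1 ; ldr r1 ; str r1
    //   after:  ldr r1 ; ldr r2 ; ldr r6 ; str r1 ; str r2 ; str r6
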
|  5091   // Fill out the elements FixedArray. |  5066   // Fill out the elements FixedArray. | 
|  5092   // r0: JSArray, tagged. |  5067   // r0: JSArray, tagged. | 
|  5093   // r3: FixedArray, tagged. |  5068   // r3: FixedArray, tagged. | 
|  5094   // r5: Number of elements in array, untagged. |  5069   // r5: Number of elements in array, untagged. | 
|  5095  |  5070  | 
|  5096   // Set map. |  5071   // Set map. | 
|  5097   __ mov(r2, Operand(factory->fixed_array_map())); |  5072   __ mov(r2, Operand(factory->fixed_array_map())); | 
|  5098   __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset)); |  5073   __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset)); | 
|  5099   // Set FixedArray length. |  5074   // Set FixedArray length. | 
| (...skipping 329 matching lines...) |
|  5429   __ tst(code_, |  5404   __ tst(code_, | 
|  5430          Operand(kSmiTagMask | |  5405          Operand(kSmiTagMask | | 
|  5431                  ((~String::kMaxAsciiCharCode) << kSmiTagSize))); |  5406                  ((~String::kMaxAsciiCharCode) << kSmiTagSize))); | 
|  5432   __ b(ne, &slow_case_); |  5407   __ b(ne, &slow_case_); | 
|  5433  |  5408  | 
|  5434   __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); |  5409   __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); | 
|  5435   // At this point code register contains smi tagged ASCII char code. |  5410   // At this point code register contains smi tagged ASCII char code. | 
|  5436   STATIC_ASSERT(kSmiTag == 0); |  5411   STATIC_ASSERT(kSmiTag == 0); | 
|  5437   __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize)); |  5412   __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize)); | 
|  5438   __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); |  5413   __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); | 
|  5439   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |  5414   __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); | 
|  5440   __ cmp(result_, Operand(ip)); |  | 
|  5441   __ b(eq, &slow_case_); |  5415   __ b(eq, &slow_case_); | 
|  5442   __ bind(&exit_); |  5416   __ bind(&exit_); | 
|  5443 } |  5417 } | 
|  5444  |  5418  | 
|  5445  |  5419  | 
|  5446 void StringCharFromCodeGenerator::GenerateSlow( |  5420 void StringCharFromCodeGenerator::GenerateSlow( | 
|  5447     MacroAssembler* masm, |  5421     MacroAssembler* masm, | 
|  5448     const RuntimeCallHelper& call_helper) { |  5422     const RuntimeCallHelper& call_helper) { | 
|  5449   __ Abort("Unexpected fallthrough to CharFromCode slow case"); |  5423   __ Abort("Unexpected fallthrough to CharFromCode slow case"); | 
|  5450  |  5424  | 
| (...skipping 407 matching lines...) |
|  5858   __ Ldrd(r2, r3, MemOperand(sp, kToOffset)); |  5832   __ Ldrd(r2, r3, MemOperand(sp, kToOffset)); | 
|  5859   STATIC_ASSERT(kFromOffset == kToOffset + 4); |  5833   STATIC_ASSERT(kFromOffset == kToOffset + 4); | 
|  5860   STATIC_ASSERT(kSmiTag == 0); |  5834   STATIC_ASSERT(kSmiTag == 0); | 
|  5861   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); |  5835   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); | 
|  5862  |  5836  | 
|  5863   // I.e., arithmetic shift right by one un-smi-tags. |  5837   // I.e., arithmetic shift right by one un-smi-tags. | 
|  5864   __ mov(r2, Operand(r2, ASR, 1), SetCC); |  5838   __ mov(r2, Operand(r2, ASR, 1), SetCC); | 
|  5865   __ mov(r3, Operand(r3, ASR, 1), SetCC, cc); |  5839   __ mov(r3, Operand(r3, ASR, 1), SetCC, cc); | 
|  5866   // If either to or from had the smi tag bit set, then carry is set now. |  5840   // If either to or from had the smi tag bit set, then carry is set now. | 
|  5867   __ b(cs, &runtime);  // Either "from" or "to" is not a smi. |  5841   __ b(cs, &runtime);  // Either "from" or "to" is not a smi. | 
|  5868   __ b(mi, &runtime);  // From is negative. |  5842   // We want to bail out to runtime here if From is negative.  In that case, the | 
|  5869  |  5843   // next instruction is not executed and we fall through to bailing out to | 
 |  5844   // runtime.  pl is the opposite of mi. | 
|  5870   // Both r2 and r3 are untagged integers. |  5845   // Both r2 and r3 are untagged integers. | 
|  5871   __ sub(r2, r2, Operand(r3), SetCC); |  5846   __ sub(r2, r2, Operand(r3), SetCC, pl); | 
|  5872   __ b(mi, &runtime);  // Fail if from > to. |  5847   __ b(mi, &runtime);  // Fail if from > to. | 
|  5873  |  5848  | 
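
This check is worth spelling out, since three flag tricks do the work of four branches. ASR #1 un-smi-tags and moves the tag bit into the carry; the second mov is predicated on cc, so its SetCC publishes flags only when the first value really was a smi; and the new conditional sub keeps N set when from was negative, letting one b(mi) cover both the negative-from and from > to failures. Annotated:

    __ mov(r2, Operand(r2, ASR, 1), SetCC);      // Carry <- tag bit of 'to'.
    __ mov(r3, Operand(r3, ASR, 1), SetCC, cc);  // Only if 'to' was a smi.
    __ b(cs, &runtime);                          // Either tag bit set: bail.
    __ sub(r2, r2, Operand(r3), SetCC, pl);      // Skipped if 'from' < 0,
    __ b(mi, &runtime);                          // so mi bails either way.
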
|  5874   // Make sure first argument is a string. |  5849   // Make sure first argument is a string. | 
|  5875   __ ldr(r0, MemOperand(sp, kStringOffset)); |  5850   __ ldr(r0, MemOperand(sp, kStringOffset)); | 
|  5876   STATIC_ASSERT(kSmiTag == 0); |  5851   STATIC_ASSERT(kSmiTag == 0); | 
|  5877   __ JumpIfSmi(r0, &runtime); |  5852   __ JumpIfSmi(r0, &runtime); | 
|  5878   Condition is_string = masm->IsObjectStringType(r0, r1); |  5853   Condition is_string = masm->IsObjectStringType(r0, r1); | 
|  5879   __ b(NegateCondition(is_string), &runtime); |  5854   __ b(NegateCondition(is_string), &runtime); | 
|  5880  |  5855  | 
|  5881   // Short-cut for the case of trivial substring. |  5856   // Short-cut for the case of trivial substring. | 
| (...skipping 52 matching lines...) |
|  5934   __ CompareRoot(r5, Heap::kEmptyStringRootIndex); |  5909   __ CompareRoot(r5, Heap::kEmptyStringRootIndex); | 
|  5935   __ b(ne, &runtime); |  5910   __ b(ne, &runtime); | 
|  5936   __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset)); |  5911   __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset)); | 
|  5937   // Update instance type. |  5912   // Update instance type. | 
|  5938   __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset)); |  5913   __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset)); | 
|  5939   __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |  5914   __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); | 
|  5940   __ jmp(&underlying_unpacked); |  5915   __ jmp(&underlying_unpacked); | 
|  5941  |  5916  | 
|  5942   __ bind(&sliced_string); |  5917   __ bind(&sliced_string); | 
|  5943   // Sliced string.  Fetch parent and correct start index by offset. |  5918   // Sliced string.  Fetch parent and correct start index by offset. | 
|  5944   __ ldr(r5, FieldMemOperand(r0, SlicedString::kOffsetOffset)); |  5919   __ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset)); | 
|  5945   __ add(r3, r3, Operand(r5, ASR, 1)); |  | 
|  5946   __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset)); |  5920   __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset)); | 
 |  5921   __ add(r3, r3, Operand(r4, ASR, 1));  // Add offset to index. | 
|  5947   // Update instance type. |  5922   // Update instance type. | 
|  5948   __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset)); |  5923   __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset)); | 
|  5949   __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |  5924   __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); | 
|  5950   __ jmp(&underlying_unpacked); |  5925   __ jmp(&underlying_unpacked); | 
|  5951  |  5926  | 
|  5952   __ bind(&seq_or_external_string); |  5927   __ bind(&seq_or_external_string); | 
|  5953   // Sequential or external string.  Just move string to the expected register. |  5928   // Sequential or external string.  Just move string to the expected register. | 
|  5954   __ mov(r5, r0); |  5929   __ mov(r5, r0); | 
|  5955  |  5930  | 
|  5956   __ bind(&underlying_unpacked); |  5931   __ bind(&underlying_unpacked); | 
| (...skipping 1423 matching lines...) |
|  7380   __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r10, |  7355   __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r10, | 
|  7381                                  &slow_elements); |  7356                                  &slow_elements); | 
|  7382   __ Ret(); |  7357   __ Ret(); | 
|  7383 } |  7358 } | 
|  7384  |  7359  | 
|  7385 #undef __ |  7360 #undef __ | 
|  7386  |  7361  | 
|  7387 } }  // namespace v8::internal |  7362 } }  // namespace v8::internal | 
|  7388  |  7363  | 
|  7389 #endif  // V8_TARGET_ARCH_ARM |  7364 #endif  // V8_TARGET_ARCH_ARM | 