| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 61 matching lines...) | (...skipping 61 matching lines...) |
| 72   __ Addu(s0, a0, num_extra_args + 1); | 72   __ Addu(s0, a0, num_extra_args + 1); | 
| 73   __ sll(s1, s0, kPointerSizeLog2); | 73   __ sll(s1, s0, kPointerSizeLog2); | 
| 74   __ Subu(s1, s1, kPointerSize); | 74   __ Subu(s1, s1, kPointerSize); | 
| 75   __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | 75   __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | 
| 76 } | 76 } | 
| 77 | 77 | 
| 78 | 78 | 
| 79 // Load the built-in InternalArray function from the current context. | 79 // Load the built-in InternalArray function from the current context. | 
| 80 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, | 80 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, | 
| 81                                               Register result) { | 81                                               Register result) { | 
| 82   // Load the global context. | 82   // Load the native context. | 
| 83 | 83 | 
| 84   __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 84   __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 
| 85   __ lw(result, | 85   __ lw(result, | 
| 86         FieldMemOperand(result, GlobalObject::kGlobalContextOffset)); | 86         FieldMemOperand(result, GlobalObject::kNativeContextOffset)); | 
| 87   // Load the InternalArray function from the global context. | 87   // Load the InternalArray function from the native context. | 
| 88   __ lw(result, | 88   __ lw(result, | 
| 89          MemOperand(result, | 89          MemOperand(result, | 
| 90                     Context::SlotOffset( | 90                     Context::SlotOffset( | 
| 91                         Context::INTERNAL_ARRAY_FUNCTION_INDEX))); | 91                         Context::INTERNAL_ARRAY_FUNCTION_INDEX))); | 
| 92 } | 92 } | 
| 93 | 93 | 
| 94 | 94 | 
| 95 // Load the built-in Array function from the current context. | 95 // Load the built-in Array function from the current context. | 
| 96 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { | 96 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { | 
| 97   // Load the global context. | 97   // Load the native context. | 
| 98 | 98 | 
| 99   __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 99   __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 
| 100   __ lw(result, | 100   __ lw(result, | 
| 101         FieldMemOperand(result, GlobalObject::kGlobalContextOffset)); | 101         FieldMemOperand(result, GlobalObject::kNativeContextOffset)); | 
| 102   // Load the Array function from the global context. | 102   // Load the Array function from the native context. | 
| 103   __ lw(result, | 103   __ lw(result, | 
| 104         MemOperand(result, | 104         MemOperand(result, | 
| 105                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); | 105                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); | 
| 106 } | 106 } | 
| 107 | 107 | 
| 108 | 108 | 
| 109 // Allocate an empty JSArray. The allocated array is put into the result | 109 // Allocate an empty JSArray. The allocated array is put into the result | 
| 110 // register. An elements backing store is allocated with size initial_capacity | 110 // register. An elements backing store is allocated with size initial_capacity | 
| 111 // and filled with the hole values. | 111 // and filled with the hole values. | 
| 112 static void AllocateEmptyJSArray(MacroAssembler* masm, | 112 static void AllocateEmptyJSArray(MacroAssembler* masm, | 
| (...skipping 1312 matching lines...) | (...skipping 1312 matching lines...) |
| 1425     __ lw(a1, MemOperand(at)); | 1425     __ lw(a1, MemOperand(at)); | 
| 1426     __ li(t0, Operand(0, RelocInfo::NONE)); | 1426     __ li(t0, Operand(0, RelocInfo::NONE)); | 
| 1427     __ Branch(&patch_receiver); | 1427     __ Branch(&patch_receiver); | 
| 1428 | 1428 | 
| 1429     // Use the global receiver object from the called function as the | 1429     // Use the global receiver object from the called function as the | 
| 1430     // receiver. | 1430     // receiver. | 
| 1431     __ bind(&use_global_receiver); | 1431     __ bind(&use_global_receiver); | 
| 1432     const int kGlobalIndex = | 1432     const int kGlobalIndex = | 
| 1433         Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 1433         Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 
| 1434     __ lw(a2, FieldMemOperand(cp, kGlobalIndex)); | 1434     __ lw(a2, FieldMemOperand(cp, kGlobalIndex)); | 
| 1435     __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset)); | 1435     __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset)); | 
| 1436     __ lw(a2, FieldMemOperand(a2, kGlobalIndex)); | 1436     __ lw(a2, FieldMemOperand(a2, kGlobalIndex)); | 
| 1437     __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset)); | 1437     __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset)); | 
| 1438 | 1438 | 
| 1439     __ bind(&patch_receiver); | 1439     __ bind(&patch_receiver); | 
| 1440     __ sll(at, a0, kPointerSizeLog2); | 1440     __ sll(at, a0, kPointerSizeLog2); | 
| 1441     __ addu(a3, sp, at); | 1441     __ addu(a3, sp, at); | 
| 1442     __ sw(a2, MemOperand(a3, -kPointerSize)); | 1442     __ sw(a2, MemOperand(a3, -kPointerSize)); | 
| 1443 | 1443 | 
| 1444     __ Branch(&shift_arguments); | 1444     __ Branch(&shift_arguments); | 
| 1445   } | 1445   } | 
| (...skipping 172 matching lines...) | (...skipping 172 matching lines...) |
| 1618     __ push(a0); | 1618     __ push(a0); | 
| 1619     __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1619     __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 
| 1620     __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver. | 1620     __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver. | 
| 1621     __ Branch(&push_receiver); | 1621     __ Branch(&push_receiver); | 
| 1622 | 1622 | 
| 1623     // Use the current global receiver object as the receiver. | 1623     // Use the current global receiver object as the receiver. | 
| 1624     __ bind(&use_global_receiver); | 1624     __ bind(&use_global_receiver); | 
| 1625     const int kGlobalOffset = | 1625     const int kGlobalOffset = | 
| 1626         Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 1626         Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 
| 1627     __ lw(a0, FieldMemOperand(cp, kGlobalOffset)); | 1627     __ lw(a0, FieldMemOperand(cp, kGlobalOffset)); | 
| 1628     __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset)); | 1628     __ lw(a0, FieldMemOperand(a0, GlobalObject::kNativeContextOffset)); | 
| 1629     __ lw(a0, FieldMemOperand(a0, kGlobalOffset)); | 1629     __ lw(a0, FieldMemOperand(a0, kGlobalOffset)); | 
| 1630     __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); | 1630     __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); | 
| 1631 | 1631 | 
| 1632     // Push the receiver. | 1632     // Push the receiver. | 
| 1633     // a0: receiver | 1633     // a0: receiver | 
| 1634     __ bind(&push_receiver); | 1634     __ bind(&push_receiver); | 
| 1635     __ push(a0); | 1635     __ push(a0); | 
| 1636 | 1636 | 
| 1637     // Copy all arguments from the array to the stack. | 1637     // Copy all arguments from the array to the stack. | 
| 1638     Label entry, loop; | 1638     Label entry, loop; | 
| (...skipping 195 matching lines...) | (...skipping 195 matching lines...) |
| 1834   __ bind(&dont_adapt_arguments); | 1834   __ bind(&dont_adapt_arguments); | 
| 1835   __ Jump(a3); | 1835   __ Jump(a3); | 
| 1836 } | 1836 } | 
| 1837 | 1837 | 
| 1838 | 1838 | 
| 1839 #undef __ | 1839 #undef __ | 
| 1840 | 1840 | 
| 1841 } }  // namespace v8::internal | 1841 } }  // namespace v8::internal | 
| 1842 | 1842 | 
| 1843 #endif  // V8_TARGET_ARCH_MIPS | 1843 #endif  // V8_TARGET_ARCH_MIPS | 
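
A minimal sketch of the load pattern these hunks update, assuming the surrounding V8 MacroAssembler definitions from this file; the helper name `GenerateLoadNativeContext` is hypothetical and only illustrates the renamed field access (`kGlobalContextOffset` → `kNativeContextOffset`):

```cpp
// Hypothetical helper (not part of this change) showing the two-step load
// that the diff touches: the global object is still read from the
// GLOBAL_INDEX context slot, but the second load now goes through
// GlobalObject::kNativeContextOffset instead of kGlobalContextOffset.
static void GenerateLoadNativeContext(MacroAssembler* masm, Register result) {
  // Load the global object from the current context.
  __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the native context from the global object.
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
}
```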