Index: src/arm/code-stubs-arm.cc
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 5f2f68d0fbcff5351e62939e326f9994fd8f4df1..d9e3a3da9f7b3085239edb12284e00725d66461e 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -108,7 +108,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
 
   // Compute the function map in the current native context and set that
   // as the map of the allocated object.
-  __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
   __ ldr(r5, MemOperand(r2, Context::SlotOffset(map_index)));
   __ str(r5, FieldMemOperand(r0, HeapObject::kMapOffset));
@@ -242,12 +242,12 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
   __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
 
   // Set up the fixed slots, copy the global object from the previous context.
-  __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ mov(r1, Operand(Smi::FromInt(0)));
   __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
   __ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
   __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
-  __ str(r2, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ str(r2, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
 
   // Initialize the rest of the slots to undefined.
   __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
@@ -307,11 +307,11 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
   __ bind(&after_sentinel);
 
   // Set up the fixed slots, copy the global object from the previous context.
-  __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
+  __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
   __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX));
   __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX));
   __ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX));
-  __ str(r2, ContextOperand(r0, Context::GLOBAL_INDEX));
+  __ str(r2, ContextOperand(r0, Context::GLOBAL_OBJECT_INDEX));
 
   // Initialize the rest of the slots to the hole value.
   __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
@@ -4507,13 +4507,13 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
 
   // r0 = address of new object(s) (tagged)
   // r2 = argument count (tagged)
-  // Get the arguments boilerplate from the current (global) context into r4.
+  // Get the arguments boilerplate from the current native context into r4.
   const int kNormalOffset =
       Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
   const int kAliasedOffset =
       Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX);
 
-  __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
   __ cmp(r1, Operand::Zero());
   __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
@@ -4687,8 +4687,8 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
                         static_cast<AllocationFlags>(TAG_OBJECT |
                                                      SIZE_IN_WORDS));
 
-  // Get the arguments boilerplate from the current (global) context.
-  __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  // Get the arguments boilerplate from the current native context.
+  __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
   __ ldr(r4, MemOperand(r4, Context::SlotOffset(
       Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX)));
@@ -5204,7 +5204,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
   // Set empty properties FixedArray.
   // Set elements to point to FixedArray allocated right after the JSArray.
   // Interleave operations for better latency.
-  __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
+  __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
   __ add(r3, r0, Operand(JSRegExpResult::kSize));
   __ mov(r4, Operand(factory->empty_fixed_array()));
   __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
@@ -5313,7 +5313,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
     __ b(ne, &call);
     // Patch the receiver on the stack with the global receiver object.
-    __ ldr(r3, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+    __ ldr(r3,
+           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
     __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalReceiverOffset));
     __ str(r3, MemOperand(sp, argc_ * kPointerSize));
     __ bind(&call);