Chromium Code Reviews

Unified Diff: src/ia32/code-stubs-ia32.cc

Issue 10878047: Revert to code state of 3.13.1 plus r12350 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 8 years, 4 months ago
Index: src/ia32/code-stubs-ia32.cc
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 80b0f22ba4f94778a29630d861f12b125ffeb043..7be2b4f65f9d8a5a06c7ec651452f1192f7199e8 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -80,10 +80,10 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
? Context::FUNCTION_MAP_INDEX
: Context::STRICT_MODE_FUNCTION_MAP_INDEX;
- // Compute the function map in the current native context and set that
+ // Compute the function map in the current global context and set that
// as the map of the allocated object.
- __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
+ __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
__ mov(ebx, Operand(ecx, Context::SlotOffset(map_index)));
__ mov(FieldOperand(eax, JSObject::kMapOffset), ebx);
@@ -123,8 +123,8 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);
- // ecx holds native context, ebx points to fixed array of 3-element entries
- // (native context, optimized code, literals).
+ // ecx holds global context, ebx points to fixed array of 3-element entries
+ // (global context, optimized code, literals).
// Map must never be empty, so check the first elements.
Label install_optimized;
// Speculatively move code object into edx.
@@ -217,8 +217,8 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
__ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
// Copy the global object from the previous context.
- __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), ebx);
+ __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
// Initialize the rest of the slots to undefined.
__ mov(ebx, factory->undefined_value());
@@ -261,9 +261,9 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ mov(FieldOperand(eax, Context::kLengthOffset),
Immediate(Smi::FromInt(length)));
- // If this block context is nested in the native context we get a smi
+ // If this block context is nested in the global context we get a smi
// sentinel instead of a function. The block context should get the
- // canonical empty function of the native context as its closure which
+ // canonical empty function of the global context as its closure which
// we still have to look up.
Label after_sentinel;
__ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
@@ -273,7 +273,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ Assert(equal, message);
}
__ mov(ecx, GlobalObjectOperand());
- __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
+ __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
__ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
@@ -283,8 +283,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);
// Copy the global object from the previous context.
- __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
- __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx);
+ __ mov(ebx, ContextOperand(esi, Context::GLOBAL_INDEX));
+ __ mov(ContextOperand(eax, Context::GLOBAL_INDEX), ebx);
// Initialize the rest of the slots to the hole value.
if (slots_ == 1) {
@@ -3434,10 +3434,10 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// esp[0] = mapped parameter count (tagged)
// esp[8] = parameter count (tagged)
// esp[12] = address of receiver argument
- // Get the arguments boilerplate from the current native context into edi.
+ // Get the arguments boilerplate from the current (global) context into edi.
Label has_mapped_parameters, copy;
- __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
+ __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
__ mov(ebx, Operand(esp, 0 * kPointerSize));
__ test(ebx, ebx);
__ j(not_zero, &has_mapped_parameters, Label::kNear);
@@ -3627,9 +3627,9 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the allocation of both objects in one go.
__ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
- // Get the arguments boilerplate from the current native context.
- __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
+ // Get the arguments boilerplate from the current (global) context.
+ __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
const int offset =
Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
__ mov(edi, Operand(edi, offset));
@@ -4142,11 +4142,11 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set empty properties FixedArray.
// Set elements to point to FixedArray allocated right after the JSArray.
// Interleave operations for better latency.
- __ mov(edx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
+ __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
Factory* factory = masm->isolate()->factory();
__ mov(ecx, Immediate(factory->empty_fixed_array()));
__ lea(ebx, Operand(eax, JSRegExpResult::kSize));
- __ mov(edx, FieldOperand(edx, GlobalObject::kNativeContextOffset));
+ __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
__ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
__ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
__ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
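
Note on the pattern being restored: every hunk above replaces the same two-instruction sequence. Instead of loading the global object via Context::GLOBAL_OBJECT_INDEX and then reading GlobalObject::kNativeContextOffset, the reverted code loads Context::GLOBAL_INDEX and then reads GlobalObject::kGlobalContextOffset. The sketch below is a hypothetical helper, not part of this CL (the CL repeats the sequence inline at each call site with different destination registers: ecx, edi, edx); it only illustrates the restored access pattern, assuming the MacroAssembler conventions already visible in code-stubs-ia32.cc.

// Illustrative only: load the global context of the current context (esi)
// into |dst|, matching the two-mov sequence reinstated by this revert.
static void LoadGlobalContext(MacroAssembler* masm, Register dst) {
  // dst = current_context[Context::GLOBAL_INDEX]  (the global object slot)
  masm->mov(dst, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // dst = global_object's global context field
  masm->mov(dst, FieldOperand(dst, GlobalObject::kGlobalContextOffset));
}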
