Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 10832365: Rename Context::global to Context::global_object, (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed Michael's comments. Created 8 years, 4 months ago
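
The patch itself is mechanical: every place this file pulls the global object out of a context now uses the renamed slot constant Context::GLOBAL_OBJECT_INDEX instead of Context::GLOBAL_INDEX, and comments that said "(global) context" now say "native context". A minimal before/after sketch of the pattern, using only the accessors that appear in the diff below (this is an illustration of the rename, not an excerpt of any one function):

    // Before: load the global object slot, then the native context it holds.
    __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
    __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));

    // After: identical code generation; only the slot constant is renamed.
    __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
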
   1   // Copyright 2012 the V8 project authors. All rights reserved.
   2   // Redistribution and use in source and binary forms, with or without
   3   // modification, are permitted provided that the following conditions are
   4   // met:
   5   //
   6   //     * Redistributions of source code must retain the above copyright
   7   //       notice, this list of conditions and the following disclaimer.
   8   //     * Redistributions in binary form must reproduce the above
   9   //       copyright notice, this list of conditions and the following
  10   //       disclaimer in the documentation and/or other materials provided
(...skipping 60 matching lines...)
  71
  72     // Get the function info from the stack.
  73     __ movq(rdx, Operand(rsp, 1 * kPointerSize));
  74
  75     int map_index = (language_mode_ == CLASSIC_MODE)
  76         ? Context::FUNCTION_MAP_INDEX
  77         : Context::STRICT_MODE_FUNCTION_MAP_INDEX;
  78
  79     // Compute the function map in the current native context and set that
  80     // as the map of the allocated object.
  81 -   __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  81 +   __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  82     __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
  83     __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
  84     __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx);
  85
  86     // Initialize the rest of the function. We don't have to update the
  87     // write barrier because the allocated object is in new space.
  88     __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  89     __ LoadRoot(r8, Heap::kTheHoleValueRootIndex);
  90     __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
  91     __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
(...skipping 118 matching lines...)
 210     __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
 211     __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
 212
 213     // Set up the fixed slots.
 214     __ Set(rbx, 0);  // Set to NULL.
 215     __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
 216     __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
 217     __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
 218
 219     // Copy the global object from the previous context.
 220 -   __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
 220 +   __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
 221 -   __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
 221 +   __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx);
 222
 223     // Initialize the rest of the slots to undefined.
 224     __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
 225     for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
 226       __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
 227     }
 228
 229     // Return and remove the on-stack parameter.
 230     __ movq(rsi, rax);
 231     __ ret(1 * kPointerSize);
(...skipping 42 matching lines...)
 274     __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
 275     __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
 276     __ bind(&after_sentinel);
 277
 278     // Set up the fixed slots.
 279     __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
 280     __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
 281     __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);
 282
 283     // Copy the global object from the previous context.
 284 -   __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX));
 284 +   __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
 285 -   __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx);
 285 +   __ movq(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx);
 286
 287     // Initialize the rest of the slots to the hole value.
 288     __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
 289     for (int i = 0; i < slots_; i++) {
 290       __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
 291     }
 292
 293     // Return and remove the on-stack parameter.
 294     __ movq(rsi, rax);
 295     __ ret(2 * kPointerSize);
(...skipping 2153 matching lines...)
2449     __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));
2450
2451     // 3. Arguments object.
2452     __ addq(r8, Immediate(Heap::kArgumentsObjectSize));
2453
2454     // Do the allocation of all three objects in one go.
2455     __ AllocateInNewSpace(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);
2456
2457     // rax = address of new object(s) (tagged)
2458     // rcx = argument count (untagged)
2459 -   // Get the arguments boilerplate from the current (global) context into rdi.
2459 +   // Get the arguments boilerplate from the current native context into rdi.
2460     Label has_mapped_parameters, copy;
2461 -   __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2461 +   __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2462     __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
2463     __ testq(rbx, rbx);
2464     __ j(not_zero, &has_mapped_parameters, Label::kNear);
2465
2466     const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
2467     __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
2468     __ jmp(&copy, Label::kNear);
2469
2470     const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
2471     __ bind(&has_mapped_parameters);
(...skipping 193 matching lines...)
2665     __ bind(&try_allocate);
2666     __ testq(rcx, rcx);
2667     __ j(zero, &add_arguments_object, Label::kNear);
2668     __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
2669     __ bind(&add_arguments_object);
2670     __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
2671
2672     // Do the allocation of both objects in one go.
2673     __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
2674
2675 -   // Get the arguments boilerplate from the current (global) context.
2675 +   // Get the arguments boilerplate from the current native context.
2676 -   __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2676 +   __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2677     __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
2678     const int offset =
2679         Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
2680     __ movq(rdi, Operand(rdi, offset));
2681
2682     // Copy the JS object part.
2683     for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
2684       __ movq(rbx, FieldOperand(rdi, i));
2685       __ movq(FieldOperand(rax, i), rbx);
2686     }
(...skipping 509 matching lines...)
3196         rax,  // Out: Start of allocation (tagged).
3197         rcx,  // Out: End of allocation.
3198         rdx,  // Scratch register
3199         &slowcase,
3200         TAG_OBJECT);
3201     // rax: Start of allocated area, object-tagged.
3202     // rbx: Number of array elements as int32.
3203     // r8: Number of array elements as smi.
3204
3205     // Set JSArray map to global.regexp_result_map().
3206 -   __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
3206 +   __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
3207     __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
3208     __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
3209     __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
3210
3211     // Set empty properties FixedArray.
3212     __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
3213     __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
3214
3215     // Set elements to point to FixedArray allocated right after the JSArray.
3216     __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
(...skipping 3265 matching lines...)
6482   #endif
6483
6484     __ Ret();
6485   }
6486
6487   #undef __
6488
6489   } }  // namespace v8::internal
6490
6491   #endif  // V8_TARGET_ARCH_X64
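
A note on the two addressing forms that show up in this patch: some stubs use Operand(rsi, Context::SlotOffset(...)) directly and others use the ContextOperand helper. A hedged sketch of the equivalence, assuming ContextOperand(context, index) is the x64 macro-assembler shorthand for an operand at Context::SlotOffset(index) from the context register (that definition is an assumption, not part of this diff):

    // Assumed equivalence: both forms load the renamed global object slot
    // relative to the current context in rsi.
    __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));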