Chromium Code Reviews

Diff: src/x64/code-stubs-x64.cc

Issue 10878047: Revert to code state of 3.13.1 plus r12350 (Closed)
Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 8 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 58 matching lines...)
 
   __ IncrementCounter(counters->fast_new_closure_total(), 1);
 
   // Get the function info from the stack.
   __ movq(rdx, Operand(rsp, 1 * kPointerSize));
 
   int map_index = (language_mode_ == CLASSIC_MODE)
       ? Context::FUNCTION_MAP_INDEX
       : Context::STRICT_MODE_FUNCTION_MAP_INDEX;
 
-  // Compute the function map in the current native context and set that
+  // Compute the function map in the current global context and set that
   // as the map of the allocated object.
-  __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
+  __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
   __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
   __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx);
 
   // Initialize the rest of the function. We don't have to update the
   // write barrier because the allocated object is in new space.
   __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
   __ LoadRoot(r8, Heap::kTheHoleValueRootIndex);
   __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
   __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
   __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
(...skipping 20 matching lines...)
   __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
   __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);
 
   // Return and remove the on-stack parameter.
   __ ret(1 * kPointerSize);
 
   __ bind(&check_optimized);
 
   __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);
 
-  // rcx holds native context, ebx points to fixed array of 3-element entries
-  // (native context, optimized code, literals).
+  // rcx holds global context, ebx points to fixed array of 3-element entries
+  // (global context, optimized code, literals).
   // The optimized code map must never be empty, so check the first elements.
   Label install_optimized;
   // Speculatively move code object into edx.
   __ movq(rdx, FieldOperand(rbx, FixedArray::kHeaderSize + kPointerSize));
   __ cmpq(rcx, FieldOperand(rbx, FixedArray::kHeaderSize));
   __ j(equal, &install_optimized);
 
   // Iterate through the rest of map backwards. rdx holds an index.
   Label loop;
   Label restore;
(...skipping 75 matching lines...)
   __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
   __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
 
   // Set up the fixed slots.
   __ Set(rbx, 0);  // Set to NULL.
   __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
   __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
   __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
 
   // Copy the global object from the previous context.
-  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx);
+  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
 
   // Initialize the rest of the slots to undefined.
   __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
   for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
     __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
   }
 
   // Return and remove the on-stack parameter.
   __ movq(rsi, rax);
   __ ret(1 * kPointerSize);
(...skipping 20 matching lines...)
   __ movq(rcx, Operand(rsp, 1 * kPointerSize));
 
   // Get the serialized scope info from the stack.
   __ movq(rbx, Operand(rsp, 2 * kPointerSize));
 
   // Set up the object header.
   __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
   __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
   __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
 
-  // If this block context is nested in the native context we get a smi
+  // If this block context is nested in the global context we get a smi
   // sentinel instead of a function. The block context should get the
-  // canonical empty function of the native context as its closure which
+  // canonical empty function of the global context as its closure which
   // we still have to look up.
   Label after_sentinel;
   __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
   if (FLAG_debug_code) {
     const char* message = "Expected 0 as a Smi sentinel";
     __ cmpq(rcx, Immediate(0));
     __ Assert(equal, message);
   }
   __ movq(rcx, GlobalObjectOperand());
-  __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
+  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
   __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
   __ bind(&after_sentinel);
 
   // Set up the fixed slots.
   __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
   __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
   __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);
 
   // Copy the global object from the previous context.
-  __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
-  __ movq(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx);
+  __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX));
+  __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx);
 
   // Initialize the rest of the slots to the hole value.
   __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
   for (int i = 0; i < slots_; i++) {
     __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
   }
 
   // Return and remove the on-stack parameter.
   __ movq(rsi, rax);
   __ ret(2 * kPointerSize);
(...skipping 2153 matching lines...)
   __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));
 
   // 3. Arguments object.
   __ addq(r8, Immediate(Heap::kArgumentsObjectSize));
 
   // Do the allocation of all three objects in one go.
   __ AllocateInNewSpace(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);
 
   // rax = address of new object(s) (tagged)
   // rcx = argument count (untagged)
-  // Get the arguments boilerplate from the current native context into rdi.
+  // Get the arguments boilerplate from the current (global) context into rdi.
   Label has_mapped_parameters, copy;
-  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
+  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
   __ testq(rbx, rbx);
   __ j(not_zero, &has_mapped_parameters, Label::kNear);
 
   const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
   __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
   __ jmp(&copy, Label::kNear);
 
   const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
   __ bind(&has_mapped_parameters);
   __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
(...skipping 192 matching lines...)
   __ bind(&try_allocate);
   __ testq(rcx, rcx);
   __ j(zero, &add_arguments_object, Label::kNear);
   __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
   __ bind(&add_arguments_object);
   __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
 
   // Do the allocation of both objects in one go.
   __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
 
-  // Get the arguments boilerplate from the current native context.
-  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
+  // Get the arguments boilerplate from the current (global) context.
+  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
   const int offset =
       Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
   __ movq(rdi, Operand(rdi, offset));
 
   // Copy the JS object part.
   for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
     __ movq(rbx, FieldOperand(rdi, i));
     __ movq(FieldOperand(rax, i), rbx);
   }
 
(...skipping 508 matching lines...)
                         rax,  // Out: Start of allocation (tagged).
                         rcx,  // Out: End of allocation.
                         rdx,  // Scratch register
                         &slowcase,
                         TAG_OBJECT);
   // rax: Start of allocated area, object-tagged.
   // rbx: Number of array elements as int32.
   // r8: Number of array elements as smi.
 
   // Set JSArray map to global.regexp_result_map().
-  __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
-  __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
+  __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
+  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
   __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
   __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
 
   // Set empty properties FixedArray.
   __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
   __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
 
   // Set elements to point to FixedArray allocated right after the JSArray.
   __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
   __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
(...skipping 3264 matching lines...)
 #endif
 
   __ Ret();
 }
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64
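
Note: every changed hunk in this file is the same one-for-one rename. The trunk identifiers Context::GLOBAL_OBJECT_INDEX and GlobalObject::kNativeContextOffset go back to the 3.13.1 names Context::GLOBAL_INDEX and GlobalObject::kGlobalContextOffset, with the code comments reworded to match; the emitted instruction sequences are otherwise untouched. The sketch below shows the pointer chase those three movq instructions encode, as a minimal illustration only; the struct layouts and the LookupGlobalContextSlot helper are stand-ins invented here, not actual V8 API, and real V8 objects are tagged and accessed through the named offsets rather than raw pointers.

// A minimal sketch, assuming simplified untagged layouts, of the
// context -> global object -> global context -> slot traversal that each
// changed load sequence performs. Illustrative only; not V8 API.
struct Context;

struct GlobalObject {
  Context* global_context;  // field read via GlobalObject::kGlobalContextOffset
};

struct Context {
  GlobalObject* global_object;  // slot read via Context::GLOBAL_INDEX
  void* slots[128];             // further slots, addressed via Context::SlotOffset(i)
};

// Rough equivalent of, e.g.:
//   __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
//   __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
//   __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
void* LookupGlobalContextSlot(Context* current /* rsi */, int slot_index) {
  GlobalObject* global = current->global_object;      // context -> global object
  Context* global_context = global->global_context;   // global object -> global context
  return global_context->slots[slot_index];           // global context -> requested slot
}

Because rsi always holds the current context on entry to these stubs, the first step is a plain slot load off rsi, which is why the revert touches only the names on the two dereference steps and leaves the surrounding code generation alone.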