OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
69 | 69 |
70 __ IncrementCounter(counters->fast_new_closure_total(), 1); | 70 __ IncrementCounter(counters->fast_new_closure_total(), 1); |
71 | 71 |
72 // Get the function info from the stack. | 72 // Get the function info from the stack. |
73 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 73 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); |
74 | 74 |
75 int map_index = (language_mode_ == CLASSIC_MODE) | 75 int map_index = (language_mode_ == CLASSIC_MODE) |
76 ? Context::FUNCTION_MAP_INDEX | 76 ? Context::FUNCTION_MAP_INDEX |
77 : Context::STRICT_MODE_FUNCTION_MAP_INDEX; | 77 : Context::STRICT_MODE_FUNCTION_MAP_INDEX; |
78 | 78 |
79 // Compute the function map in the current global context and set that | 79 // Compute the function map in the current native context and set that |
80 // as the map of the allocated object. | 80 // as the map of the allocated object. |
81 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 81 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
82 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset)); | 82 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); |
83 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index))); | 83 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index))); |
84 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx); | 84 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx); |
85 | 85 |
86 // Initialize the rest of the function. We don't have to update the | 86 // Initialize the rest of the function. We don't have to update the |
87 // write barrier because the allocated object is in new space. | 87 // write barrier because the allocated object is in new space. |
88 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex); | 88 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex); |
89 __ LoadRoot(r8, Heap::kTheHoleValueRootIndex); | 89 __ LoadRoot(r8, Heap::kTheHoleValueRootIndex); |
90 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex); | 90 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex); |
91 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx); | 91 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx); |
92 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx); | 92 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx); |
(...skipping 20 matching lines...) Expand all Loading... |
113 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); | 113 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); |
114 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); | 114 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); |
115 | 115 |
116 // Return and remove the on-stack parameter. | 116 // Return and remove the on-stack parameter. |
117 __ ret(1 * kPointerSize); | 117 __ ret(1 * kPointerSize); |
118 | 118 |
119 __ bind(&check_optimized); | 119 __ bind(&check_optimized); |
120 | 120 |
121 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1); | 121 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1); |
122 | 122 |
123 // rcx holds global context, ebx points to fixed array of 3-element entries | 123 // rcx holds native context, ebx points to fixed array of 3-element entries |
124 // (global context, optimized code, literals). | 124 // (native context, optimized code, literals). |
125 // The optimized code map must never be empty, so check the first elements. | 125 // The optimized code map must never be empty, so check the first elements. |
126 Label install_optimized; | 126 Label install_optimized; |
127 // Speculatively move code object into edx. | 127 // Speculatively move code object into edx. |
128 __ movq(rdx, FieldOperand(rbx, FixedArray::kHeaderSize + kPointerSize)); | 128 __ movq(rdx, FieldOperand(rbx, FixedArray::kHeaderSize + kPointerSize)); |
129 __ cmpq(rcx, FieldOperand(rbx, FixedArray::kHeaderSize)); | 129 __ cmpq(rcx, FieldOperand(rbx, FixedArray::kHeaderSize)); |
130 __ j(equal, &install_optimized); | 130 __ j(equal, &install_optimized); |
131 | 131 |
132 // Iterate through the rest of map backwards. rdx holds an index. | 132 // Iterate through the rest of map backwards. rdx holds an index. |
133 Label loop; | 133 Label loop; |
134 Label restore; | 134 Label restore; |
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
252 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 252 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
253 | 253 |
254 // Get the serialized scope info from the stack. | 254 // Get the serialized scope info from the stack. |
255 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); | 255 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); |
256 | 256 |
257 // Set up the object header. | 257 // Set up the object header. |
258 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); | 258 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); |
259 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 259 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
260 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 260 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
261 | 261 |
262 // If this block context is nested in the global context we get a smi | 262 // If this block context is nested in the native context we get a smi |
263 // sentinel instead of a function. The block context should get the | 263 // sentinel instead of a function. The block context should get the |
264 // canonical empty function of the global context as its closure which | 264 // canonical empty function of the native context as its closure which |
265 // we still have to look up. | 265 // we still have to look up. |
266 Label after_sentinel; | 266 Label after_sentinel; |
267 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); | 267 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); |
268 if (FLAG_debug_code) { | 268 if (FLAG_debug_code) { |
269 const char* message = "Expected 0 as a Smi sentinel"; | 269 const char* message = "Expected 0 as a Smi sentinel"; |
270 __ cmpq(rcx, Immediate(0)); | 270 __ cmpq(rcx, Immediate(0)); |
271 __ Assert(equal, message); | 271 __ Assert(equal, message); |
272 } | 272 } |
273 __ movq(rcx, GlobalObjectOperand()); | 273 __ movq(rcx, GlobalObjectOperand()); |
274 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset)); | 274 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); |
275 __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); | 275 __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); |
276 __ bind(&after_sentinel); | 276 __ bind(&after_sentinel); |
277 | 277 |
278 // Set up the fixed slots. | 278 // Set up the fixed slots. |
279 __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); | 279 __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); |
280 __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); | 280 __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); |
281 __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); | 281 __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); |
282 | 282 |
283 // Copy the global object from the previous context. | 283 // Copy the global object from the previous context. |
284 __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX)); | 284 __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX)); |
(...skipping 2167 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2452 __ addq(r8, Immediate(Heap::kArgumentsObjectSize)); | 2452 __ addq(r8, Immediate(Heap::kArgumentsObjectSize)); |
2453 | 2453 |
2454 // Do the allocation of all three objects in one go. | 2454 // Do the allocation of all three objects in one go. |
2455 __ AllocateInNewSpace(r8, rax, rdx, rdi, &runtime, TAG_OBJECT); | 2455 __ AllocateInNewSpace(r8, rax, rdx, rdi, &runtime, TAG_OBJECT); |
2456 | 2456 |
2457 // rax = address of new object(s) (tagged) | 2457 // rax = address of new object(s) (tagged) |
2458 // rcx = argument count (untagged) | 2458 // rcx = argument count (untagged) |
2459 // Get the arguments boilerplate from the current (global) context into rdi. | 2459 // Get the arguments boilerplate from the current native context into rdi. |
2460 Label has_mapped_parameters, copy; | 2460 Label has_mapped_parameters, copy; |
2461 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 2461 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
2462 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset)); | 2462 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); |
2463 __ testq(rbx, rbx); | 2463 __ testq(rbx, rbx); |
2464 __ j(not_zero, &has_mapped_parameters, Label::kNear); | 2464 __ j(not_zero, &has_mapped_parameters, Label::kNear); |
2465 | 2465 |
2466 const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX; | 2466 const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX; |
2467 __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex))); | 2467 __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex))); |
2468 __ jmp(©, Label::kNear); | 2468 __ jmp(©, Label::kNear); |
2469 | 2469 |
2470 const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX; | 2470 const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX; |
2471 __ bind(&has_mapped_parameters); | 2471 __ bind(&has_mapped_parameters); |
2472 __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex))); | 2472 __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex))); |
(...skipping 194 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2667 __ j(zero, &add_arguments_object, Label::kNear); | 2667 __ j(zero, &add_arguments_object, Label::kNear); |
2668 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); | 2668 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); |
2669 __ bind(&add_arguments_object); | 2669 __ bind(&add_arguments_object); |
2670 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); | 2670 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); |
2671 | 2671 |
2672 // Do the allocation of both objects in one go. | 2672 // Do the allocation of both objects in one go. |
2673 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); | 2673 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); |
2674 | 2674 |
2675 // Get the arguments boilerplate from the current (global) context. | 2675 // Get the arguments boilerplate from the current native context. |
2676 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 2676 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
2677 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset)); | 2677 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); |
2678 const int offset = | 2678 const int offset = |
2679 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); | 2679 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); |
2680 __ movq(rdi, Operand(rdi, offset)); | 2680 __ movq(rdi, Operand(rdi, offset)); |
2681 | 2681 |
2682 // Copy the JS object part. | 2682 // Copy the JS object part. |
2683 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 2683 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
2684 __ movq(rbx, FieldOperand(rdi, i)); | 2684 __ movq(rbx, FieldOperand(rdi, i)); |
2685 __ movq(FieldOperand(rax, i), rbx); | 2685 __ movq(FieldOperand(rax, i), rbx); |
2686 } | 2686 } |
2687 | 2687 |
(...skipping 509 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3197 rcx, // Out: End of allocation. | 3197 rcx, // Out: End of allocation. |
3198 rdx, // Scratch register | 3198 rdx, // Scratch register |
3199 &slowcase, | 3199 &slowcase, |
3200 TAG_OBJECT); | 3200 TAG_OBJECT); |
3201 // rax: Start of allocated area, object-tagged. | 3201 // rax: Start of allocated area, object-tagged. |
3202 // rbx: Number of array elements as int32. | 3202 // rbx: Number of array elements as int32. |
3203 // r8: Number of array elements as smi. | 3203 // r8: Number of array elements as smi. |
3204 | 3204 |
3205 // Set JSArray map to global.regexp_result_map(). | 3205 // Set JSArray map to global.regexp_result_map(). |
3206 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX)); | 3206 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX)); |
3207 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset)); | 3207 __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset)); |
3208 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX)); | 3208 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX)); |
3209 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx); | 3209 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx); |
3210 | 3210 |
3211 // Set empty properties FixedArray. | 3211 // Set empty properties FixedArray. |
3212 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); | 3212 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); |
3213 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); | 3213 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); |
3214 | 3214 |
3215 // Set elements to point to FixedArray allocated right after the JSArray. | 3215 // Set elements to point to FixedArray allocated right after the JSArray. |
3216 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); | 3216 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); |
3217 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); | 3217 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); |
(...skipping 3264 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6482 #endif | 6482 #endif |
6483 | 6483 |
6484 __ Ret(); | 6484 __ Ret(); |
6485 } | 6485 } |
6486 | 6486 |
6487 #undef __ | 6487 #undef __ |
6488 | 6488 |
6489 } } // namespace v8::internal | 6489 } } // namespace v8::internal |
6490 | 6490 |
6491 #endif // V8_TARGET_ARCH_X64 | 6491 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |