Chromium Code Reviews

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 71163006: Merge bleeding_edge r17376:17693. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Fix all.gyp Created 7 years, 1 month ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 19 matching lines...)
30 #if V8_TARGET_ARCH_X64 30 #if V8_TARGET_ARCH_X64
31 31
32 #include "bootstrapper.h" 32 #include "bootstrapper.h"
33 #include "codegen.h" 33 #include "codegen.h"
34 #include "cpu-profiler.h" 34 #include "cpu-profiler.h"
35 #include "assembler-x64.h" 35 #include "assembler-x64.h"
36 #include "macro-assembler-x64.h" 36 #include "macro-assembler-x64.h"
37 #include "serialize.h" 37 #include "serialize.h"
38 #include "debug.h" 38 #include "debug.h"
39 #include "heap.h" 39 #include "heap.h"
40 #include "isolate-inl.h"
40 41
41 namespace v8 { 42 namespace v8 {
42 namespace internal { 43 namespace internal {
43 44
44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) 45 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
45 : Assembler(arg_isolate, buffer, size), 46 : Assembler(arg_isolate, buffer, size),
46 generating_stub_(false), 47 generating_stub_(false),
47 allow_stub_calls_(true), 48 allow_stub_calls_(true),
48 has_frame_(false), 49 has_frame_(false),
49 root_array_available_(true) { 50 root_array_available_(true) {
(...skipping 22 matching lines...)
72 73
73 Operand MacroAssembler::ExternalOperand(ExternalReference target, 74 Operand MacroAssembler::ExternalOperand(ExternalReference target,
74 Register scratch) { 75 Register scratch) {
75 if (root_array_available_ && !Serializer::enabled()) { 76 if (root_array_available_ && !Serializer::enabled()) {
76 intptr_t delta = RootRegisterDelta(target); 77 intptr_t delta = RootRegisterDelta(target);
77 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { 78 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
78 Serializer::TooLateToEnableNow(); 79 Serializer::TooLateToEnableNow();
79 return Operand(kRootRegister, static_cast<int32_t>(delta)); 80 return Operand(kRootRegister, static_cast<int32_t>(delta));
80 } 81 }
81 } 82 }
82 movq(scratch, target); 83 Move(scratch, target);
83 return Operand(scratch, 0); 84 return Operand(scratch, 0);
84 } 85 }
85 86
86 87
87 void MacroAssembler::Load(Register destination, ExternalReference source) { 88 void MacroAssembler::Load(Register destination, ExternalReference source) {
88 if (root_array_available_ && !Serializer::enabled()) { 89 if (root_array_available_ && !Serializer::enabled()) {
89 intptr_t delta = RootRegisterDelta(source); 90 intptr_t delta = RootRegisterDelta(source);
90 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { 91 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
91 Serializer::TooLateToEnableNow(); 92 Serializer::TooLateToEnableNow();
92 movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); 93 movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
93 return; 94 return;
94 } 95 }
95 } 96 }
96 // Safe code. 97 // Safe code.
97 if (destination.is(rax)) { 98 if (destination.is(rax)) {
98 load_rax(source); 99 load_rax(source);
99 } else { 100 } else {
100 movq(kScratchRegister, source); 101 Move(kScratchRegister, source);
101 movq(destination, Operand(kScratchRegister, 0)); 102 movq(destination, Operand(kScratchRegister, 0));
102 } 103 }
103 } 104 }
104 105
105 106
106 void MacroAssembler::Store(ExternalReference destination, Register source) { 107 void MacroAssembler::Store(ExternalReference destination, Register source) {
107 if (root_array_available_ && !Serializer::enabled()) { 108 if (root_array_available_ && !Serializer::enabled()) {
108 intptr_t delta = RootRegisterDelta(destination); 109 intptr_t delta = RootRegisterDelta(destination);
109 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { 110 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
110 Serializer::TooLateToEnableNow(); 111 Serializer::TooLateToEnableNow();
111 movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source); 112 movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
112 return; 113 return;
113 } 114 }
114 } 115 }
115 // Safe code. 116 // Safe code.
116 if (source.is(rax)) { 117 if (source.is(rax)) {
117 store_rax(destination); 118 store_rax(destination);
118 } else { 119 } else {
119 movq(kScratchRegister, destination); 120 Move(kScratchRegister, destination);
120 movq(Operand(kScratchRegister, 0), source); 121 movq(Operand(kScratchRegister, 0), source);
121 } 122 }
122 } 123 }
123 124
124 125
125 void MacroAssembler::LoadAddress(Register destination, 126 void MacroAssembler::LoadAddress(Register destination,
126 ExternalReference source) { 127 ExternalReference source) {
127 if (root_array_available_ && !Serializer::enabled()) { 128 if (root_array_available_ && !Serializer::enabled()) {
128 intptr_t delta = RootRegisterDelta(source); 129 intptr_t delta = RootRegisterDelta(source);
129 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { 130 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
130 Serializer::TooLateToEnableNow(); 131 Serializer::TooLateToEnableNow();
131 lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); 132 lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
132 return; 133 return;
133 } 134 }
134 } 135 }
135 // Safe code. 136 // Safe code.
136 movq(destination, source); 137 Move(destination, source);
137 } 138 }
138 139
139 140
140 int MacroAssembler::LoadAddressSize(ExternalReference source) { 141 int MacroAssembler::LoadAddressSize(ExternalReference source) {
141 if (root_array_available_ && !Serializer::enabled()) { 142 if (root_array_available_ && !Serializer::enabled()) {
142 // This calculation depends on the internals of LoadAddress. 143 // This calculation depends on the internals of LoadAddress.
143 // Its correctness is ensured by the asserts in the Call 144 // Its correctness is ensured by the asserts in the Call
144 // instruction below. 145 // instruction below.
145 intptr_t delta = RootRegisterDelta(source); 146 intptr_t delta = RootRegisterDelta(source);
146 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { 147 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
147 Serializer::TooLateToEnableNow(); 148 Serializer::TooLateToEnableNow();
148 // Operand is lea(scratch, Operand(kRootRegister, delta)); 149 // Operand is lea(scratch, Operand(kRootRegister, delta));
149 // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7. 150 // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
150 int size = 4; 151 int size = 4;
151 if (!is_int8(static_cast<int32_t>(delta))) { 152 if (!is_int8(static_cast<int32_t>(delta))) {
152 size += 3; // Need full four-byte displacement in lea. 153 size += 3; // Need full four-byte displacement in lea.
153 } 154 }
154 return size; 155 return size;
155 } 156 }
156 } 157 }
157 // Size of movq(destination, src); 158 // Size of movq(destination, src);
158 return Assembler::kMoveAddressIntoScratchRegisterInstructionLength; 159 return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
159 } 160 }
160 161
161 162
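When the root array register is live and serialization is off, the functions above address an ExternalReference as kRootRegister plus a 32-bit displacement instead of materializing a 64-bit immediate. A minimal standalone sketch of the size computation in LoadAddressSize, assuming standard x64 lea encodings (the helper name is hypothetical):

    // REX.W + 0x8D + ModRM + disp8 = 4 bytes; a disp32 costs 3 bytes more.
    int LeaFromRootRegisterSize(int32_t delta) {
      int size = 4;
      if (delta < -128 || delta > 127) size += 3;  // disp8 -> disp32
      return size;
    }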
162 void MacroAssembler::PushAddress(ExternalReference source) { 163 void MacroAssembler::PushAddress(ExternalReference source) {
163 int64_t address = reinterpret_cast<int64_t>(source.address()); 164 int64_t address = reinterpret_cast<int64_t>(source.address());
164 if (is_int32(address) && !Serializer::enabled()) { 165 if (is_int32(address) && !Serializer::enabled()) {
165 if (emit_debug_code()) { 166 if (emit_debug_code()) {
166 movq(kScratchRegister, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 167 movq(kScratchRegister, kZapValue, RelocInfo::NONE64);
167 } 168 }
168 push(Immediate(static_cast<int32_t>(address))); 169 push(Immediate(static_cast<int32_t>(address)));
169 return; 170 return;
170 } 171 }
171 LoadAddress(kScratchRegister, source); 172 LoadAddress(kScratchRegister, source);
172 push(kScratchRegister); 173 push(kScratchRegister);
173 } 174 }
174 175
175 176
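PushAddress can take the 4-byte fast path above only because push imm32 sign-extends to 64 bits on x64, which is exactly what the is_int32(address) guard protects. An illustration with a hypothetical value:

    // push(Immediate(0x80000000)) would push 0xFFFFFFFF80000000, not
    // 0x0000000080000000, so such addresses must go through
    // LoadAddress(kScratchRegister, ...) followed by push(kScratchRegister).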
176 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { 177 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
(...skipping 91 matching lines...)
268 Register scratch, 269 Register scratch,
269 Condition cc, 270 Condition cc,
270 Label* branch, 271 Label* branch,
271 Label::Distance distance) { 272 Label::Distance distance) {
272 if (Serializer::enabled()) { 273 if (Serializer::enabled()) {
273 // Can't do arithmetic on external references if it might get serialized. 274 // Can't do arithmetic on external references if it might get serialized.
274 // The mask isn't really an address. We load it as an external reference in 275 // The mask isn't really an address. We load it as an external reference in
275 // case the size of the new space is different between the snapshot maker 276 // case the size of the new space is different between the snapshot maker
276 // and the running system. 277 // and the running system.
277 if (scratch.is(object)) { 278 if (scratch.is(object)) {
278 movq(kScratchRegister, ExternalReference::new_space_mask(isolate())); 279 Move(kScratchRegister, ExternalReference::new_space_mask(isolate()));
279 and_(scratch, kScratchRegister); 280 and_(scratch, kScratchRegister);
280 } else { 281 } else {
281 movq(scratch, ExternalReference::new_space_mask(isolate())); 282 Move(scratch, ExternalReference::new_space_mask(isolate()));
282 and_(scratch, object); 283 and_(scratch, object);
283 } 284 }
284 movq(kScratchRegister, ExternalReference::new_space_start(isolate())); 285 Move(kScratchRegister, ExternalReference::new_space_start(isolate()));
285 cmpq(scratch, kScratchRegister); 286 cmpq(scratch, kScratchRegister);
286 j(cc, branch, distance); 287 j(cc, branch, distance);
287 } else { 288 } else {
288 ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask()))); 289 ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask())));
289 intptr_t new_space_start = 290 intptr_t new_space_start =
290 reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart()); 291 reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart());
291 movq(kScratchRegister, -new_space_start, RelocInfo::NONE64); 292 movq(kScratchRegister, reinterpret_cast<Address>(-new_space_start),
293 RelocInfo::NONE64);
292 if (scratch.is(object)) { 294 if (scratch.is(object)) {
293 addq(scratch, kScratchRegister); 295 addq(scratch, kScratchRegister);
294 } else { 296 } else {
295 lea(scratch, Operand(object, kScratchRegister, times_1, 0)); 297 lea(scratch, Operand(object, kScratchRegister, times_1, 0));
296 } 298 }
297 and_(scratch, 299 and_(scratch,
298 Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask()))); 300 Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask())));
299 j(cc, branch, distance); 301 j(cc, branch, distance);
300 } 302 }
301 } 303 }
302 304
303 305
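When the serializer is off, the branch above reduces the new-space check to plain pointer arithmetic: add the negated space start, then mask. A rough C++ sketch, assuming NewSpaceMask() keeps exactly the address bits that cannot vary inside the new-space reservation:

    // Sketch only; start/mask semantics inferred from the code above.
    bool InNewSpaceSketch(uintptr_t addr, uintptr_t start, uintptr_t mask) {
      return ((addr - start) & mask) == 0;  // zero iff addr is inside
    }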
304 void MacroAssembler::RecordWriteField( 306 void MacroAssembler::RecordWriteField(
305 Register object, 307 Register object,
306 int offset, 308 int offset,
307 Register value, 309 Register value,
308 Register dst, 310 Register dst,
309 SaveFPRegsMode save_fp, 311 SaveFPRegsMode save_fp,
310 RememberedSetAction remembered_set_action, 312 RememberedSetAction remembered_set_action,
311 SmiCheck smi_check) { 313 SmiCheck smi_check) {
312 // The compiled code assumes that record write doesn't change the
313 // context register, so we check that none of the clobbered
314 // registers are rsi.
315 ASSERT(!value.is(rsi) && !dst.is(rsi));
316
317 // First, check if a write barrier is even needed. The tests below 314 // First, check if a write barrier is even needed. The tests below
318 // catch stores of Smis. 315 // catch stores of Smis.
319 Label done; 316 Label done;
320 317
321 // Skip barrier if writing a smi. 318 // Skip barrier if writing a smi.
322 if (smi_check == INLINE_SMI_CHECK) { 319 if (smi_check == INLINE_SMI_CHECK) {
323 JumpIfSmi(value, &done); 320 JumpIfSmi(value, &done);
324 } 321 }
325 322
326 // Although the object register is tagged, the offset is relative to the start 323 // Although the object register is tagged, the offset is relative to the start
(...skipping 10 matching lines...)
337 } 334 }
338 335
339 RecordWrite( 336 RecordWrite(
340 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); 337 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
341 338
342 bind(&done); 339 bind(&done);
343 340
344 // Clobber clobbered input registers when running with the debug-code flag 341 // Clobber clobbered input registers when running with the debug-code flag
345 // turned on to provoke errors. 342 // turned on to provoke errors.
346 if (emit_debug_code()) { 343 if (emit_debug_code()) {
347 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 344 movq(value, kZapValue, RelocInfo::NONE64);
348 movq(dst, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 345 movq(dst, kZapValue, RelocInfo::NONE64);
349 } 346 }
350 } 347 }
351 348
352 349
353 void MacroAssembler::RecordWriteArray(Register object, 350 void MacroAssembler::RecordWriteArray(Register object,
354 Register value, 351 Register value,
355 Register index, 352 Register index,
356 SaveFPRegsMode save_fp, 353 SaveFPRegsMode save_fp,
357 RememberedSetAction remembered_set_action, 354 RememberedSetAction remembered_set_action,
358 SmiCheck smi_check) { 355 SmiCheck smi_check) {
(...skipping 12 matching lines...)
371 FixedArray::kHeaderSize - kHeapObjectTag)); 368 FixedArray::kHeaderSize - kHeapObjectTag));
372 369
373 RecordWrite( 370 RecordWrite(
374 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); 371 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
375 372
376 bind(&done); 373 bind(&done);
377 374
378 // Clobber clobbered input registers when running with the debug-code flag 375 // Clobber clobbered input registers when running with the debug-code flag
379 // turned on to provoke errors. 376 // turned on to provoke errors.
380 if (emit_debug_code()) { 377 if (emit_debug_code()) {
381 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 378 movq(value, kZapValue, RelocInfo::NONE64);
382 movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 379 movq(index, kZapValue, RelocInfo::NONE64);
383 } 380 }
384 } 381 }
385 382
386 383
387 void MacroAssembler::RecordWrite(Register object, 384 void MacroAssembler::RecordWrite(Register object,
388 Register address, 385 Register address,
389 Register value, 386 Register value,
390 SaveFPRegsMode fp_mode, 387 SaveFPRegsMode fp_mode,
391 RememberedSetAction remembered_set_action, 388 RememberedSetAction remembered_set_action,
392 SmiCheck smi_check) { 389 SmiCheck smi_check) {
393 // The compiled code assumes that record write doesn't change the
394 // context register, so we check that none of the clobbered
395 // registers are rsi.
396 ASSERT(!value.is(rsi) && !address.is(rsi));
397
398 ASSERT(!object.is(value)); 390 ASSERT(!object.is(value));
399 ASSERT(!object.is(address)); 391 ASSERT(!object.is(address));
400 ASSERT(!value.is(address)); 392 ASSERT(!value.is(address));
401 AssertNotSmi(object); 393 AssertNotSmi(object);
402 394
403 if (remembered_set_action == OMIT_REMEMBERED_SET && 395 if (remembered_set_action == OMIT_REMEMBERED_SET &&
404 !FLAG_incremental_marking) { 396 !FLAG_incremental_marking) {
405 return; 397 return;
406 } 398 }
407 399
(...skipping 29 matching lines...)
437 Label::kNear); 429 Label::kNear);
438 430
439 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); 431 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
440 CallStub(&stub); 432 CallStub(&stub);
441 433
442 bind(&done); 434 bind(&done);
443 435
444 // Clobber clobbered registers when running with the debug-code flag 436 // Clobber clobbered registers when running with the debug-code flag
445 // turned on to provoke errors. 437 // turned on to provoke errors.
446 if (emit_debug_code()) { 438 if (emit_debug_code()) {
447 movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 439 movq(address, kZapValue, RelocInfo::NONE64);
448 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 440 movq(value, kZapValue, RelocInfo::NONE64);
449 } 441 }
450 } 442 }
451 443
452 444
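In all three barrier helpers, --debug-code builds finish by overwriting the clobbered registers with kZapValue, a recognizable garbage constant, so any stale use downstream fails fast instead of silently reading leftovers. The patch drops the BitCast<int64_t> wrapper because the movq overload used here now takes the Address-typed constant directly; the exact bit pattern (0xdeadbeedbeadbeef on 64-bit hosts) is an assumption from this V8 era.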
453 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { 445 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
454 if (emit_debug_code()) Check(cc, reason); 446 if (emit_debug_code()) Check(cc, reason);
455 } 447 }
456 448
457 449
458 void MacroAssembler::AssertFastElements(Register elements) { 450 void MacroAssembler::AssertFastElements(Register elements) {
(...skipping 67 matching lines...)
526 RecordComment(msg); 518 RecordComment(msg);
527 } 519 }
528 520
529 if (FLAG_trap_on_abort) { 521 if (FLAG_trap_on_abort) {
530 int3(); 522 int3();
531 return; 523 return;
532 } 524 }
533 #endif 525 #endif
534 526
535 push(rax); 527 push(rax);
536 movq(kScratchRegister, p0, RelocInfo::NONE64); 528 movq(kScratchRegister, reinterpret_cast<Smi*>(p0), RelocInfo::NONE64);
537 push(kScratchRegister); 529 push(kScratchRegister);
538 movq(kScratchRegister, 530 movq(kScratchRegister, Smi::FromInt(static_cast<int>(p1 - p0)),
539 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
540 RelocInfo::NONE64); 531 RelocInfo::NONE64);
541 push(kScratchRegister); 532 push(kScratchRegister);
542 533
543 if (!has_frame_) { 534 if (!has_frame_) {
544 // We don't actually want to generate a pile of code for this, so just 535 // We don't actually want to generate a pile of code for this, so just
545 // claim there is a stack frame, without generating one. 536 // claim there is a stack frame, without generating one.
546 FrameScope scope(this, StackFrame::NONE); 537 FrameScope scope(this, StackFrame::NONE);
547 CallRuntime(Runtime::kAbort, 2); 538 CallRuntime(Runtime::kAbort, 2);
548 } else { 539 } else {
549 CallRuntime(Runtime::kAbort, 2); 540 CallRuntime(Runtime::kAbort, 2);
(...skipping 152 matching lines...)
702 const int kLevelOffset = Offset( 693 const int kLevelOffset = Offset(
703 ExternalReference::handle_scope_level_address(isolate()), 694 ExternalReference::handle_scope_level_address(isolate()),
704 next_address); 695 next_address);
705 ExternalReference scheduled_exception_address = 696 ExternalReference scheduled_exception_address =
706 ExternalReference::scheduled_exception_address(isolate()); 697 ExternalReference::scheduled_exception_address(isolate());
707 698
708 // Allocate HandleScope in callee-save registers. 699 // Allocate HandleScope in callee-save registers.
709 Register prev_next_address_reg = r14; 700 Register prev_next_address_reg = r14;
710 Register prev_limit_reg = rbx; 701 Register prev_limit_reg = rbx;
711 Register base_reg = r15; 702 Register base_reg = r15;
712 movq(base_reg, next_address); 703 Move(base_reg, next_address);
713 movq(prev_next_address_reg, Operand(base_reg, kNextOffset)); 704 movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
714 movq(prev_limit_reg, Operand(base_reg, kLimitOffset)); 705 movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
715 addl(Operand(base_reg, kLevelOffset), Immediate(1)); 706 addl(Operand(base_reg, kLevelOffset), Immediate(1));
716 707
717 if (FLAG_log_timer_events) { 708 if (FLAG_log_timer_events) {
718 FrameScope frame(this, StackFrame::MANUAL); 709 FrameScope frame(this, StackFrame::MANUAL);
719 PushSafepointRegisters(); 710 PushSafepointRegisters();
720 PrepareCallCFunction(1); 711 PrepareCallCFunction(1);
721 LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate())); 712 LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
722 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1); 713 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
(...skipping 40 matching lines...)
763 754
764 // No more valid handles (the result handle was the last one). Restore 755 // No more valid handles (the result handle was the last one). Restore
765 // previous handle scope. 756 // previous handle scope.
766 subl(Operand(base_reg, kLevelOffset), Immediate(1)); 757 subl(Operand(base_reg, kLevelOffset), Immediate(1));
767 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); 758 movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
768 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); 759 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
769 j(not_equal, &delete_allocated_handles); 760 j(not_equal, &delete_allocated_handles);
770 bind(&leave_exit_frame); 761 bind(&leave_exit_frame);
771 762
772 // Check if the function scheduled an exception. 763 // Check if the function scheduled an exception.
773 movq(rsi, scheduled_exception_address); 764 Move(rsi, scheduled_exception_address);
774 Cmp(Operand(rsi, 0), factory->the_hole_value()); 765 Cmp(Operand(rsi, 0), factory->the_hole_value());
775 j(not_equal, &promote_scheduled_exception); 766 j(not_equal, &promote_scheduled_exception);
776 bind(&exception_handled); 767 bind(&exception_handled);
777 768
778 #if ENABLE_EXTRA_CHECKS 769 #if ENABLE_EXTRA_CHECKS
779 // Check if the function returned a valid JavaScript value. 770 // Check if the function returned a valid JavaScript value.
780 Label ok; 771 Label ok;
781 Register return_value = rax; 772 Register return_value = rax;
782 Register map = rcx; 773 Register map = rcx;
783 774
(...skipping 158 matching lines...)
942 933
943 934
944 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) { 935 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
945 xorps(dst, dst); 936 xorps(dst, dst);
946 cvtlsi2sd(dst, src); 937 cvtlsi2sd(dst, src);
947 } 938 }
948 939
949 940
950 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) { 941 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
951 ASSERT(!r.IsDouble()); 942 ASSERT(!r.IsDouble());
952 if (r.IsByte()) { 943 if (r.IsInteger8()) {
944 movsxbq(dst, src);
945 } else if (r.IsUInteger8()) {
953 movzxbl(dst, src); 946 movzxbl(dst, src);
947 } else if (r.IsInteger16()) {
948 movsxwq(dst, src);
949 } else if (r.IsUInteger16()) {
950 movzxwl(dst, src);
954 } else if (r.IsInteger32()) { 951 } else if (r.IsInteger32()) {
955 movl(dst, src); 952 movl(dst, src);
956 } else { 953 } else {
957 movq(dst, src); 954 movq(dst, src);
958 } 955 }
959 } 956 }
960 957
961 958
962 void MacroAssembler::Store(const Operand& dst, Register src, Representation r) { 959 void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
963 ASSERT(!r.IsDouble()); 960 ASSERT(!r.IsDouble());
964 if (r.IsByte()) { 961 if (r.IsInteger8() || r.IsUInteger8()) {
965 movb(dst, src); 962 movb(dst, src);
963 } else if (r.IsInteger16() || r.IsUInteger16()) {
964 movw(dst, src);
966 } else if (r.IsInteger32()) { 965 } else if (r.IsInteger32()) {
967 movl(dst, src); 966 movl(dst, src);
968 } else { 967 } else {
969 movq(dst, src); 968 movq(dst, src);
970 } 969 }
971 } 970 }
972 971
973 972
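The widened Load above distinguishes signed from unsigned sub-word representations: signed loads sign-extend (movsxbq/movsxwq) while unsigned loads zero-extend (movzxbl/movzxwl); stores need no such split because truncation is identical either way. A self-contained illustration of why it matters (hypothetical values):

    int8_t   byte_in_memory = static_cast<int8_t>(0xFF);
    int64_t  sign_extended  = byte_in_memory;               // movsxbq -> -1
    uint64_t zero_extended  = static_cast<uint8_t>(0xFF);   // movzxbl -> 255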
974 void MacroAssembler::Set(Register dst, int64_t x) { 973 void MacroAssembler::Set(Register dst, int64_t x) {
975 if (x == 0) { 974 if (x == 0) {
976 xorl(dst, dst); 975 xorl(dst, dst);
977 } else if (is_uint32(x)) { 976 } else if (is_uint32(x)) {
978 movl(dst, Immediate(static_cast<uint32_t>(x))); 977 movl(dst, Immediate(static_cast<uint32_t>(x)));
979 } else if (is_int32(x)) { 978 } else if (is_int32(x)) {
980 movq(dst, Immediate(static_cast<int32_t>(x))); 979 movq(dst, Immediate(static_cast<int32_t>(x)));
981 } else { 980 } else {
982 movq(dst, x, RelocInfo::NONE64); 981 movq(dst, x);
983 } 982 }
984 } 983 }
985 984
986 985
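Set picks the shortest x64 encoding for the immediate; approximate sizes, assuming no extended-register REX prefix:

    // xorl dst, dst    : 2 bytes, also clears the upper 32 bits
    // movl dst, imm32  : 5 bytes, zero-extends (is_uint32 case)
    // movq dst, imm32  : 7 bytes, sign-extends (is_int32 case)
    // movq dst, imm64  : 10 bytes, last resort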
987 void MacroAssembler::Set(const Operand& dst, int64_t x) { 986 void MacroAssembler::Set(const Operand& dst, int64_t x) {
988 if (is_int32(x)) { 987 if (is_int32(x)) {
989 movq(dst, Immediate(static_cast<int32_t>(x))); 988 movq(dst, Immediate(static_cast<int32_t>(x)));
990 } else { 989 } else {
991 Set(kScratchRegister, x); 990 Set(kScratchRegister, x);
992 movq(dst, kScratchRegister); 991 movq(dst, kScratchRegister);
(...skipping 44 matching lines...)
1037 if (value == 1) { 1036 if (value == 1) {
1038 return kSmiConstantRegister; 1037 return kSmiConstantRegister;
1039 } 1038 }
1040 LoadSmiConstant(kScratchRegister, source); 1039 LoadSmiConstant(kScratchRegister, source);
1041 return kScratchRegister; 1040 return kScratchRegister;
1042 } 1041 }
1043 1042
1044 1043
1045 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { 1044 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
1046 if (emit_debug_code()) { 1045 if (emit_debug_code()) {
1047 movq(dst, 1046 movq(dst, Smi::FromInt(kSmiConstantRegisterValue), RelocInfo::NONE64);
1048 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
1049 RelocInfo::NONE64);
1050 cmpq(dst, kSmiConstantRegister); 1047 cmpq(dst, kSmiConstantRegister);
1051 if (allow_stub_calls()) { 1048 if (allow_stub_calls()) {
1052 Assert(equal, kUninitializedKSmiConstantRegister); 1049 Assert(equal, kUninitializedKSmiConstantRegister);
1053 } else { 1050 } else {
1054 Label ok; 1051 Label ok;
1055 j(equal, &ok, Label::kNear); 1052 j(equal, &ok, Label::kNear);
1056 int3(); 1053 int3();
1057 bind(&ok); 1054 bind(&ok);
1058 } 1055 }
1059 } 1056 }
(...skipping 26 matching lines...)
1086 case 2: 1083 case 2:
1087 lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0)); 1084 lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
1088 break; 1085 break;
1089 case 1: 1086 case 1:
1090 movq(dst, kSmiConstantRegister); 1087 movq(dst, kSmiConstantRegister);
1091 break; 1088 break;
1092 case 0: 1089 case 0:
1093 UNREACHABLE(); 1090 UNREACHABLE();
1094 return; 1091 return;
1095 default: 1092 default:
1096 movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE64); 1093 movq(dst, source, RelocInfo::NONE64);
1097 return; 1094 return;
1098 } 1095 }
1099 if (negative) { 1096 if (negative) {
1100 neg(dst); 1097 neg(dst);
1101 } 1098 }
1102 } 1099 }
1103 1100
1104 1101
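LoadSmiConstant leans on kSmiConstantRegister, which is expected to hold Smi::FromInt(kSmiConstantRegisterValue), i.e. Smi(1) per GetSmiConstant above: small multiples can be synthesized with a short lea, and negatives with a trailing neg, avoiding the 10-byte movq of the full 64-bit smi bit pattern. From the case 2 branch above:

    // Smi(2) = Smi(1) + Smi(1): one 4-byte lea instead of a 10-byte movq.
    lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
    if (negative) neg(dst);  // e.g. Smi(-2)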
1105 void MacroAssembler::Integer32ToSmi(Register dst, Register src) { 1102 void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
1106 STATIC_ASSERT(kSmiTag == 0); 1103 STATIC_ASSERT(kSmiTag == 0);
(...skipping 403 matching lines...)
1510 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { 1507 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1511 if (constant->value() != 0) { 1508 if (constant->value() != 0) {
1512 addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value())); 1509 addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
1513 } 1510 }
1514 } 1511 }
1515 1512
1516 1513
1517 void MacroAssembler::SmiAddConstant(Register dst, 1514 void MacroAssembler::SmiAddConstant(Register dst,
1518 Register src, 1515 Register src,
1519 Smi* constant, 1516 Smi* constant,
1520 Label* on_not_smi_result, 1517 SmiOperationExecutionMode mode,
1518 Label* bailout_label,
1521 Label::Distance near_jump) { 1519 Label::Distance near_jump) {
1522 if (constant->value() == 0) { 1520 if (constant->value() == 0) {
1523 if (!dst.is(src)) { 1521 if (!dst.is(src)) {
1524 movq(dst, src); 1522 movq(dst, src);
1525 } 1523 }
1526 } else if (dst.is(src)) { 1524 } else if (dst.is(src)) {
1527 ASSERT(!dst.is(kScratchRegister)); 1525 ASSERT(!dst.is(kScratchRegister));
1528
1529 Label done;
1530 LoadSmiConstant(kScratchRegister, constant); 1526 LoadSmiConstant(kScratchRegister, constant);
1531 addq(dst, kScratchRegister); 1527 addq(dst, kScratchRegister);
1532 j(no_overflow, &done, Label::kNear); 1528 if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
1533 // Restore src. 1529 j(no_overflow, bailout_label, near_jump);
1534 subq(dst, kScratchRegister); 1530 ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER));
1535 jmp(on_not_smi_result, near_jump); 1531 subq(dst, kScratchRegister);
1536 bind(&done); 1532 } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
1533 if (mode.Contains(PRESERVE_SOURCE_REGISTER)) {
1534 Label done;
1535 j(no_overflow, &done, Label::kNear);
1536 subq(dst, kScratchRegister);
1537 jmp(bailout_label, near_jump);
1538 bind(&done);
1539 } else {
1540 // Bail out on overflow without preserving src.
1541 j(overflow, bailout_label, near_jump);
1542 }
1543 } else {
1544 CHECK(mode.IsEmpty());
1545 }
1537 } else { 1546 } else {
1547 ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER));
1548 ASSERT(mode.Contains(BAILOUT_ON_OVERFLOW));
1538 LoadSmiConstant(dst, constant); 1549 LoadSmiConstant(dst, constant);
1539 addq(dst, src); 1550 addq(dst, src);
1540 j(overflow, on_not_smi_result, near_jump); 1551 j(overflow, bailout_label, near_jump);
1541 } 1552 }
1542 } 1553 }
1543 1554
1544 1555
1545 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) { 1556 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1546 if (constant->value() == 0) { 1557 if (constant->value() == 0) {
1547 if (!dst.is(src)) { 1558 if (!dst.is(src)) {
1548 movq(dst, src); 1559 movq(dst, src);
1549 } 1560 }
1550 } else if (dst.is(src)) { 1561 } else if (dst.is(src)) {
(...skipping 11 matching lines...)
1562 LoadSmiConstant(dst, Smi::FromInt(-constant->value())); 1573 LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
1563 addq(dst, src); 1574 addq(dst, src);
1564 } 1575 }
1565 } 1576 }
1566 } 1577 }
1567 1578
1568 1579
1569 void MacroAssembler::SmiSubConstant(Register dst, 1580 void MacroAssembler::SmiSubConstant(Register dst,
1570 Register src, 1581 Register src,
1571 Smi* constant, 1582 Smi* constant,
1572 Label* on_not_smi_result, 1583 SmiOperationExecutionMode mode,
1584 Label* bailout_label,
1573 Label::Distance near_jump) { 1585 Label::Distance near_jump) {
1574 if (constant->value() == 0) { 1586 if (constant->value() == 0) {
1575 if (!dst.is(src)) { 1587 if (!dst.is(src)) {
1576 movq(dst, src); 1588 movq(dst, src);
1577 } 1589 }
1578 } else if (dst.is(src)) { 1590 } else if (dst.is(src)) {
1579 ASSERT(!dst.is(kScratchRegister)); 1591 ASSERT(!dst.is(kScratchRegister));
1592 LoadSmiConstant(kScratchRegister, constant);
1593 subq(dst, kScratchRegister);
1594 if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
1595 j(no_overflow, bailout_label, near_jump);
1596 ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER));
1597 addq(dst, kScratchRegister);
1598 } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
1599 if (mode.Contains(PRESERVE_SOURCE_REGISTER)) {
1600 Label done;
1601 j(no_overflow, &done, Label::kNear);
1602 addq(dst, kScratchRegister);
1603 jmp(bailout_label, near_jump);
1604 bind(&done);
1605 } else {
1606 // Bailout if overflow without reserving src.
1607 j(overflow, bailout_label, near_jump);
1608 }
1609 } else {
1610 CHECK(mode.IsEmpty());
1611 }
1612 } else {
1613 ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER));
1614 ASSERT(mode.Contains(BAILOUT_ON_OVERFLOW));
1580 if (constant->value() == Smi::kMinValue) { 1615 if (constant->value() == Smi::kMinValue) {
1581 // Subtracting min-value from any non-negative value will overflow. 1616 ASSERT(!dst.is(kScratchRegister));
1582 // We test the non-negativeness before doing the subtraction. 1617 movq(dst, src);
1583 testq(src, src);
1584 j(not_sign, on_not_smi_result, near_jump);
1585 LoadSmiConstant(kScratchRegister, constant); 1618 LoadSmiConstant(kScratchRegister, constant);
1586 subq(dst, kScratchRegister); 1619 subq(dst, kScratchRegister);
1587 } else { 1620 j(overflow, bailout_label, near_jump);
1588 // Subtract by adding the negation.
1589 LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
1590 addq(kScratchRegister, dst);
1591 j(overflow, on_not_smi_result, near_jump);
1592 movq(dst, kScratchRegister);
1593 }
1594 } else {
1595 if (constant->value() == Smi::kMinValue) {
1596 // Subtracting min-value from any non-negative value will overflow.
1597 // We test the non-negativeness before doing the subtraction.
1598 testq(src, src);
1599 j(not_sign, on_not_smi_result, near_jump);
1600 LoadSmiConstant(dst, constant);
1601 // Adding and subtracting the min-value gives the same result, it only
1602 // differs on the overflow bit, which we don't check here.
1603 addq(dst, src);
1604 } else { 1621 } else {
1605 // Subtract by adding the negation. 1622 // Subtract by adding the negation.
1606 LoadSmiConstant(dst, Smi::FromInt(-(constant->value()))); 1623 LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
1607 addq(dst, src); 1624 addq(dst, src);
1608 j(overflow, on_not_smi_result, near_jump); 1625 j(overflow, bailout_label, near_jump);
1609 } 1626 }
1610 } 1627 }
1611 } 1628 }
1612 1629
1613 1630
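The new overloads of SmiAddConstant/SmiSubConstant replace the single on_not_smi_result label with a SmiOperationExecutionMode describing when to take bailout_label (on overflow or on no-overflow) and whether src must still be readable there. A hypothetical call site, assuming the mode type follows V8's EnumSet pattern (the Contains/IsEmpty calls above suggest it):

    SmiOperationExecutionMode mode;
    mode.Add(PRESERVE_SOURCE_REGISTER);  // src intact at the bailout
    mode.Add(BAILOUT_ON_OVERFLOW);       // jump out if the add overflows
    masm->SmiAddConstant(dst, src, Smi::FromInt(1), mode, &deopt);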
1614 void MacroAssembler::SmiNeg(Register dst, 1631 void MacroAssembler::SmiNeg(Register dst,
1615 Register src, 1632 Register src,
1616 Label* on_smi_result, 1633 Label* on_smi_result,
1617 Label::Distance near_jump) { 1634 Label::Distance near_jump) {
1618 if (dst.is(src)) { 1635 if (dst.is(src)) {
(...skipping 1390 matching lines...)
3009 // Preserve original value. 3026 // Preserve original value.
3010 SmiToInteger32(kScratchRegister, maybe_number); 3027 SmiToInteger32(kScratchRegister, maybe_number);
3011 Cvtlsi2sd(xmm_scratch, kScratchRegister); 3028 Cvtlsi2sd(xmm_scratch, kScratchRegister);
3012 movsd(FieldOperand(elements, index, times_8, 3029 movsd(FieldOperand(elements, index, times_8,
3013 FixedDoubleArray::kHeaderSize - elements_offset), 3030 FixedDoubleArray::kHeaderSize - elements_offset),
3014 xmm_scratch); 3031 xmm_scratch);
3015 bind(&done); 3032 bind(&done);
3016 } 3033 }
3017 3034
3018 3035
3019 void MacroAssembler::CompareMap(Register obj, 3036 void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
3020 Handle<Map> map,
3021 Label* early_success) {
3022 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); 3037 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
3023 } 3038 }
3024 3039
3025 3040
3026 void MacroAssembler::CheckMap(Register obj, 3041 void MacroAssembler::CheckMap(Register obj,
3027 Handle<Map> map, 3042 Handle<Map> map,
3028 Label* fail, 3043 Label* fail,
3029 SmiCheckType smi_check_type) { 3044 SmiCheckType smi_check_type) {
3030 if (smi_check_type == DO_SMI_CHECK) { 3045 if (smi_check_type == DO_SMI_CHECK) {
3031 JumpIfSmi(obj, fail); 3046 JumpIfSmi(obj, fail);
3032 } 3047 }
3033 3048
3034 Label success; 3049 CompareMap(obj, map);
3035 CompareMap(obj, map, &success);
3036 j(not_equal, fail); 3050 j(not_equal, fail);
3037 bind(&success);
3038 } 3051 }
3039 3052
3040 3053
3041 void MacroAssembler::ClampUint8(Register reg) { 3054 void MacroAssembler::ClampUint8(Register reg) {
3042 Label done; 3055 Label done;
3043 testl(reg, Immediate(0xFFFFFF00)); 3056 testl(reg, Immediate(0xFFFFFF00));
3044 j(zero, &done, Label::kNear); 3057 j(zero, &done, Label::kNear);
3045 setcc(negative, reg); // 1 if negative, 0 if positive. 3058 setcc(negative, reg); // 1 if negative, 0 if positive.
3046 decb(reg); // 0 if negative, 255 if positive. 3059 decb(reg); // 0 if negative, 255 if positive.
3047 bind(&done); 3060 bind(&done);
(...skipping 64 matching lines...)
3112 } 3125 }
3113 3126
3114 bind(&done); 3127 bind(&done);
3115 } 3128 }
3116 3129
3117 3130
3118 void MacroAssembler::TruncateDoubleToI(Register result_reg, 3131 void MacroAssembler::TruncateDoubleToI(Register result_reg,
3119 XMMRegister input_reg) { 3132 XMMRegister input_reg) {
3120 Label done; 3133 Label done;
3121 cvttsd2siq(result_reg, input_reg); 3134 cvttsd2siq(result_reg, input_reg);
3122 movq(kScratchRegister, 3135 movq(kScratchRegister, V8_INT64_C(0x8000000000000000));
3123 V8_INT64_C(0x8000000000000000),
3124 RelocInfo::NONE64);
3125 cmpq(result_reg, kScratchRegister); 3136 cmpq(result_reg, kScratchRegister);
3126 j(not_equal, &done, Label::kNear); 3137 j(not_equal, &done, Label::kNear);
3127 3138
3128 subq(rsp, Immediate(kDoubleSize)); 3139 subq(rsp, Immediate(kDoubleSize));
3129 movsd(MemOperand(rsp, 0), input_reg); 3140 movsd(MemOperand(rsp, 0), input_reg);
3130 SlowTruncateToI(result_reg, rsp, 0); 3141 SlowTruncateToI(result_reg, rsp, 0);
3131 addq(rsp, Immediate(kDoubleSize)); 3142 addq(rsp, Immediate(kDoubleSize));
3132 3143
3133 bind(&done); 3144 bind(&done);
3134 } 3145 }
(...skipping 129 matching lines...)
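In TruncateDoubleToI above, cvttsd2siq yields the x64 "integer indefinite" value 0x8000000000000000 for NaN or out-of-range inputs, so that single sentinel selects the slow path. A C-level sketch (fallback name taken from the code above):

    int64_t result = static_cast<int64_t>(input);  // hardware cvttsd2siq
    if (result == INT64_MIN) {                     // 0x8000000000000000
      result = SlowTruncateToI(input);             // stub/runtime fallback
    }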
3264 if (emit_debug_code()) { 3275 if (emit_debug_code()) {
3265 Condition is_smi = CheckSmi(object); 3276 Condition is_smi = CheckSmi(object);
3266 Check(is_smi, kOperandIsNotASmi); 3277 Check(is_smi, kOperandIsNotASmi);
3267 } 3278 }
3268 } 3279 }
3269 3280
3270 3281
3271 void MacroAssembler::AssertZeroExtended(Register int32_register) { 3282 void MacroAssembler::AssertZeroExtended(Register int32_register) {
3272 if (emit_debug_code()) { 3283 if (emit_debug_code()) {
3273 ASSERT(!int32_register.is(kScratchRegister)); 3284 ASSERT(!int32_register.is(kScratchRegister));
3274 movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64); 3285 movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
3275 cmpq(kScratchRegister, int32_register); 3286 cmpq(kScratchRegister, int32_register);
3276 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); 3287 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
3277 } 3288 }
3278 } 3289 }
3279 3290
3280 3291
3281 void MacroAssembler::AssertString(Register object) { 3292 void MacroAssembler::AssertString(Register object) {
3282 if (emit_debug_code()) { 3293 if (emit_debug_code()) {
3283 testb(object, Immediate(kSmiTagMask)); 3294 testb(object, Immediate(kSmiTagMask));
3284 Check(not_equal, kOperandIsASmiAndNotAString); 3295 Check(not_equal, kOperandIsASmiAndNotAString);
(...skipping 367 matching lines...)
3652 3663
3653 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { 3664 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
3654 if (frame_mode == BUILD_STUB_FRAME) { 3665 if (frame_mode == BUILD_STUB_FRAME) {
3655 push(rbp); // Caller's frame pointer. 3666 push(rbp); // Caller's frame pointer.
3656 movq(rbp, rsp); 3667 movq(rbp, rsp);
3657 push(rsi); // Callee's context. 3668 push(rsi); // Callee's context.
3658 Push(Smi::FromInt(StackFrame::STUB)); 3669 Push(Smi::FromInt(StackFrame::STUB));
3659 } else { 3670 } else {
3660 PredictableCodeSizeScope predictible_code_size_scope(this, 3671 PredictableCodeSizeScope predictible_code_size_scope(this,
3661 kNoCodeAgeSequenceLength); 3672 kNoCodeAgeSequenceLength);
3662 if (FLAG_optimize_for_size && FLAG_age_code) { 3673 if (isolate()->IsCodePreAgingActive()) {
3663 // Pre-age the code. 3674 // Pre-age the code.
3664 Call(isolate()->builtins()->MarkCodeAsExecutedOnce(), 3675 Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
3665 RelocInfo::CODE_AGE_SEQUENCE); 3676 RelocInfo::CODE_AGE_SEQUENCE);
3666 Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength); 3677 Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
3667 } else { 3678 } else {
3668 push(rbp); // Caller's frame pointer. 3679 push(rbp); // Caller's frame pointer.
3669 movq(rbp, rsp); 3680 movq(rbp, rsp);
3670 push(rsi); // Callee's context. 3681 push(rsi); // Callee's context.
3671 push(rdi); // Callee's JS function. 3682 push(rdi); // Callee's JS function.
3672 } 3683 }
(...skipping 390 matching lines...)
4063 4074
4064 4075
4065 void MacroAssembler::Allocate(int object_size, 4076 void MacroAssembler::Allocate(int object_size,
4066 Register result, 4077 Register result,
4067 Register result_end, 4078 Register result_end,
4068 Register scratch, 4079 Register scratch,
4069 Label* gc_required, 4080 Label* gc_required,
4070 AllocationFlags flags) { 4081 AllocationFlags flags) {
4071 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); 4082 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
4072 ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize); 4083 ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
4073 if (!FLAG_inline_new) { 4084 if (!FLAG_inline_new ||
4085 // TODO(mstarzinger): Implement more efficiently by keeping the
4086 // bump-pointer allocation area empty instead of recompiling code.
4087 isolate()->heap_profiler()->is_tracking_allocations()) {
4074 if (emit_debug_code()) { 4088 if (emit_debug_code()) {
4075 // Trash the registers to simulate an allocation failure. 4089 // Trash the registers to simulate an allocation failure.
4076 movl(result, Immediate(0x7091)); 4090 movl(result, Immediate(0x7091));
4077 if (result_end.is_valid()) { 4091 if (result_end.is_valid()) {
4078 movl(result_end, Immediate(0x7191)); 4092 movl(result_end, Immediate(0x7191));
4079 } 4093 }
4080 if (scratch.is_valid()) { 4094 if (scratch.is_valid()) {
4081 movl(scratch, Immediate(0x7291)); 4095 movl(scratch, Immediate(0x7291));
4082 } 4096 }
4083 } 4097 }
4084 jmp(gc_required); 4098 jmp(gc_required);
4085 return; 4099 return;
4086 } 4100 }
4087 ASSERT(!result.is(result_end)); 4101 ASSERT(!result.is(result_end));
4088 4102
4089 // Load address of new object into result. 4103 // Load address of new object into result.
4090 LoadAllocationTopHelper(result, scratch, flags); 4104 LoadAllocationTopHelper(result, scratch, flags);
4091 4105
4092 if (isolate()->heap_profiler()->is_tracking_allocations()) {
4093 RecordObjectAllocation(isolate(), result, object_size);
4094 }
4095
4096 // Align the next allocation. Storing the filler map without checking top is 4106 // Align the next allocation. Storing the filler map without checking top is
4097 // safe in new-space because the limit of the heap is aligned there. 4107 // safe in new-space because the limit of the heap is aligned there.
4098 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 4108 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
4099 testq(result, Immediate(kDoubleAlignmentMask)); 4109 testq(result, Immediate(kDoubleAlignmentMask));
4100 Check(zero, kAllocationIsNotDoubleAligned); 4110 Check(zero, kAllocationIsNotDoubleAligned);
4101 } 4111 }
4102 4112
4103 // Calculate new top and bail out if new space is exhausted. 4113 // Calculate new top and bail out if new space is exhausted.
4104 ExternalReference allocation_limit = 4114 ExternalReference allocation_limit =
4105 AllocationUtils::GetAllocationLimitReference(isolate(), flags); 4115 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
(...skipping 41 matching lines...)
4147 } 4157 }
4148 4158
4149 4159
4150 void MacroAssembler::Allocate(Register object_size, 4160 void MacroAssembler::Allocate(Register object_size,
4151 Register result, 4161 Register result,
4152 Register result_end, 4162 Register result_end,
4153 Register scratch, 4163 Register scratch,
4154 Label* gc_required, 4164 Label* gc_required,
4155 AllocationFlags flags) { 4165 AllocationFlags flags) {
4156 ASSERT((flags & SIZE_IN_WORDS) == 0); 4166 ASSERT((flags & SIZE_IN_WORDS) == 0);
4157 if (!FLAG_inline_new) { 4167 if (!FLAG_inline_new ||
4168 // TODO(mstarzinger): Implement more efficiently by keeping the
4169 // bump-pointer allocation area empty instead of recompiling code.
4170 isolate()->heap_profiler()->is_tracking_allocations()) {
4158 if (emit_debug_code()) { 4171 if (emit_debug_code()) {
4159 // Trash the registers to simulate an allocation failure. 4172 // Trash the registers to simulate an allocation failure.
4160 movl(result, Immediate(0x7091)); 4173 movl(result, Immediate(0x7091));
4161 movl(result_end, Immediate(0x7191)); 4174 movl(result_end, Immediate(0x7191));
4162 if (scratch.is_valid()) { 4175 if (scratch.is_valid()) {
4163 movl(scratch, Immediate(0x7291)); 4176 movl(scratch, Immediate(0x7291));
4164 } 4177 }
4165 // object_size is left unchanged by this function. 4178 // object_size is left unchanged by this function.
4166 } 4179 }
4167 jmp(gc_required); 4180 jmp(gc_required);
4168 return; 4181 return;
4169 } 4182 }
4170 ASSERT(!result.is(result_end)); 4183 ASSERT(!result.is(result_end));
4171 4184
4172 // Load address of new object into result. 4185 // Load address of new object into result.
4173 LoadAllocationTopHelper(result, scratch, flags); 4186 LoadAllocationTopHelper(result, scratch, flags);
4174 4187
4175 if (isolate()->heap_profiler()->is_tracking_allocations()) {
4176 RecordObjectAllocation(isolate(), result, object_size);
4177 }
4178
4179 // Align the next allocation. Storing the filler map without checking top is 4188 // Align the next allocation. Storing the filler map without checking top is
4180 // safe in new-space because the limit of the heap is aligned there. 4189 // safe in new-space because the limit of the heap is aligned there.
4181 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 4190 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
4182 testq(result, Immediate(kDoubleAlignmentMask)); 4191 testq(result, Immediate(kDoubleAlignmentMask));
4183 Check(zero, kAllocationIsNotDoubleAligned); 4192 Check(zero, kAllocationIsNotDoubleAligned);
4184 } 4193 }
4185 4194
4186 // Calculate new top and bail out if new space is exhausted. 4195 // Calculate new top and bail out if new space is exhausted.
4187 ExternalReference allocation_limit = 4196 ExternalReference allocation_limit =
4188 AllocationUtils::GetAllocationLimitReference(isolate(), flags); 4197 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
(...skipping 210 matching lines...)
4399 void MacroAssembler::CopyBytes(Register destination, 4408 void MacroAssembler::CopyBytes(Register destination,
4400 Register source, 4409 Register source,
4401 Register length, 4410 Register length,
4402 int min_length, 4411 int min_length,
4403 Register scratch) { 4412 Register scratch) {
4404 ASSERT(min_length >= 0); 4413 ASSERT(min_length >= 0);
4405 if (emit_debug_code()) { 4414 if (emit_debug_code()) {
4406 cmpl(length, Immediate(min_length)); 4415 cmpl(length, Immediate(min_length));
4407 Assert(greater_equal, kInvalidMinLength); 4416 Assert(greater_equal, kInvalidMinLength);
4408 } 4417 }
4409 Label loop, done, short_string, short_loop; 4418 Label short_loop, len8, len16, len24, done, short_string;
4410 4419
4411 const int kLongStringLimit = 20; 4420 const int kLongStringLimit = 4 * kPointerSize;
4412 if (min_length <= kLongStringLimit) { 4421 if (min_length <= kLongStringLimit) {
4413 cmpl(length, Immediate(kLongStringLimit)); 4422 cmpl(length, Immediate(kPointerSize));
4414 j(less_equal, &short_string); 4423 j(below, &short_string, Label::kNear);
4415 } 4424 }
4416 4425
4417 ASSERT(source.is(rsi)); 4426 ASSERT(source.is(rsi));
4418 ASSERT(destination.is(rdi)); 4427 ASSERT(destination.is(rdi));
4419 ASSERT(length.is(rcx)); 4428 ASSERT(length.is(rcx));
4420 4429
4430 if (min_length <= kLongStringLimit) {
4431 cmpl(length, Immediate(2 * kPointerSize));
4432 j(below_equal, &len8, Label::kNear);
4433 cmpl(length, Immediate(3 * kPointerSize));
4434 j(below_equal, &len16, Label::kNear);
4435 cmpl(length, Immediate(4 * kPointerSize));
4436 j(below_equal, &len24, Label::kNear);
4437 }
4438
4421 // Because source is 8-byte aligned in our uses of this function, 4439 // Because source is 8-byte aligned in our uses of this function,
4422 // we keep source aligned for the rep movs operation by copying the odd bytes 4440 // we keep source aligned for the rep movs operation by copying the odd bytes
4423 // at the end of the ranges. 4441 // at the end of the ranges.
4424 movq(scratch, length); 4442 movq(scratch, length);
4425 shrl(length, Immediate(kPointerSizeLog2)); 4443 shrl(length, Immediate(kPointerSizeLog2));
4426 repmovsq(); 4444 repmovsq();
4427 // Move remaining bytes of length. 4445 // Move remaining bytes of length.
4428 andl(scratch, Immediate(kPointerSize - 1)); 4446 andl(scratch, Immediate(kPointerSize - 1));
4429 movq(length, Operand(source, scratch, times_1, -kPointerSize)); 4447 movq(length, Operand(source, scratch, times_1, -kPointerSize));
4430 movq(Operand(destination, scratch, times_1, -kPointerSize), length); 4448 movq(Operand(destination, scratch, times_1, -kPointerSize), length);
4431 addq(destination, scratch); 4449 addq(destination, scratch);
4432 4450
4433 if (min_length <= kLongStringLimit) { 4451 if (min_length <= kLongStringLimit) {
4434 jmp(&done); 4452 jmp(&done, Label::kNear);
4453 bind(&len24);
4454 movq(scratch, Operand(source, 2 * kPointerSize));
4455 movq(Operand(destination, 2 * kPointerSize), scratch);
4456 bind(&len16);
4457 movq(scratch, Operand(source, kPointerSize));
4458 movq(Operand(destination, kPointerSize), scratch);
4459 bind(&len8);
4460 movq(scratch, Operand(source, 0));
4461 movq(Operand(destination, 0), scratch);
4462 // Move remaining bytes of length.
4463 movq(scratch, Operand(source, length, times_1, -kPointerSize));
4464 movq(Operand(destination, length, times_1, -kPointerSize), scratch);
4465 addq(destination, length);
4466 jmp(&done, Label::kNear);
4435 4467
4436 bind(&short_string); 4468 bind(&short_string);
4437 if (min_length == 0) { 4469 if (min_length == 0) {
4438 testl(length, length); 4470 testl(length, length);
4439 j(zero, &done); 4471 j(zero, &done, Label::kNear);
4440 } 4472 }
4441 lea(scratch, Operand(destination, length, times_1, 0));
4442 4473
4443 bind(&short_loop); 4474 bind(&short_loop);
4444 movb(length, Operand(source, 0)); 4475 movb(scratch, Operand(source, 0));
4445 movb(Operand(destination, 0), length); 4476 movb(Operand(destination, 0), scratch);
4446 incq(source); 4477 incq(source);
4447 incq(destination); 4478 incq(destination);
4448 cmpq(destination, scratch); 4479 decl(length);
4449 j(not_equal, &short_loop); 4480 j(not_zero, &short_loop);
4481 }
4450 4482
4451 bind(&done); 4483 bind(&done);
4452 }
4453 } 4484 }
4454 4485
4455 4486
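The rewritten CopyBytes dispatches copies of at most 4 * kPointerSize bytes to straight-line qword moves (the len8/len16/len24 labels) and keeps rep movsq for longer runs; in both paths the 1-7 trailing bytes are handled by one overlapping qword ending exactly at the end of the range. A C sketch of that tail trick, valid for len >= 8:

    // The second copy rewrites up to 7 already-copied bytes, harmlessly.
    memcpy(dst, src, (len / 8) * 8);          // stands in for repmovsq
    memcpy(dst + len - 8, src + len - 8, 8);  // overlapping tail qword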
4456 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset, 4487 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
4457 Register end_offset, 4488 Register end_offset,
4458 Register filler) { 4489 Register filler) {
4459 Label loop, entry; 4490 Label loop, entry;
4460 jmp(&entry); 4491 jmp(&entry);
4461 bind(&loop); 4492 bind(&loop);
4462 movq(Operand(start_offset, 0), filler); 4493 movq(Operand(start_offset, 0), filler);
(...skipping 456 matching lines...)
4919 Register receiver_reg, 4950 Register receiver_reg,
4920 Register scratch_reg, 4951 Register scratch_reg,
4921 Label* no_memento_found) { 4952 Label* no_memento_found) {
4922 ExternalReference new_space_start = 4953 ExternalReference new_space_start =
4923 ExternalReference::new_space_start(isolate()); 4954 ExternalReference::new_space_start(isolate());
4924 ExternalReference new_space_allocation_top = 4955 ExternalReference new_space_allocation_top =
4925 ExternalReference::new_space_allocation_top_address(isolate()); 4956 ExternalReference::new_space_allocation_top_address(isolate());
4926 4957
4927 lea(scratch_reg, Operand(receiver_reg, 4958 lea(scratch_reg, Operand(receiver_reg,
4928 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag)); 4959 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
4929 movq(kScratchRegister, new_space_start); 4960 Move(kScratchRegister, new_space_start);
4930 cmpq(scratch_reg, kScratchRegister); 4961 cmpq(scratch_reg, kScratchRegister);
4931 j(less, no_memento_found); 4962 j(less, no_memento_found);
4932 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); 4963 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top));
4933 j(greater, no_memento_found); 4964 j(greater, no_memento_found);
4934 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), 4965 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
4935 Heap::kAllocationMementoMapRootIndex); 4966 Heap::kAllocationMementoMapRootIndex);
4936 } 4967 }
4937 4968
4938 4969
4939 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, 4970 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
4940 Register object, 4971 Register object,
4941 Register object_size) { 4972 Register scratch0,
4942 FrameScope frame(this, StackFrame::EXIT); 4973 Register scratch1,
4943 PushSafepointRegisters(); 4974 Label* found) {
4944 PrepareCallCFunction(3); 4975 ASSERT(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister)));
4945 // In case object is rdx 4976 ASSERT(!scratch1.is(scratch0));
4946 movq(kScratchRegister, object); 4977 Register current = scratch0;
4947 movq(arg_reg_3, object_size); 4978 Label loop_again;
4948 movq(arg_reg_2, kScratchRegister); 4979
4949 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); 4980 movq(current, object);
4950 CallCFunction( 4981
4951 ExternalReference::record_object_allocation_function(isolate), 3); 4982 // Loop based on the map going up the prototype chain.
4952 PopSafepointRegisters(); 4983 bind(&loop_again);
4984 movq(current, FieldOperand(current, HeapObject::kMapOffset));
4985 movq(scratch1, FieldOperand(current, Map::kBitField2Offset));
4986 and_(scratch1, Immediate(Map::kElementsKindMask));
4987 shr(scratch1, Immediate(Map::kElementsKindShift));
4988 cmpq(scratch1, Immediate(DICTIONARY_ELEMENTS));
4989 j(equal, found);
4990 movq(current, FieldOperand(current, Map::kPrototypeOffset));
4991 CompareRoot(current, Heap::kNullValueRootIndex);
4992 j(not_equal, &loop_again);
4953 } 4993 }
4954 4994
4955 4995
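JumpIfDictionaryInPrototypeChain, new in this merge, walks map->prototype links until it reaches null, branching to found if any map along the way has DICTIONARY_ELEMENTS. A rough C++ analogue of the emitted loop (accessor names assumed):

    Object* current = object;
    do {
      Map* map = HeapObject::cast(current)->map();
      if (map->elements_kind() == DICTIONARY_ELEMENTS) return true;  // found
      current = map->prototype();
    } while (current != heap->null_value());
    return false;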
4956 void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
4957 Register object,
4958 int object_size) {
4959 FrameScope frame(this, StackFrame::EXIT);
4960 PushSafepointRegisters();
4961 PrepareCallCFunction(3);
4962 movq(arg_reg_2, object);
4963 movq(arg_reg_3, Immediate(object_size));
4964 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
4965 CallCFunction(
4966 ExternalReference::record_object_allocation_function(isolate), 3);
4967 PopSafepointRegisters();
4968 }
4969
4970
4971 } } // namespace v8::internal 4996 } } // namespace v8::internal
4972 4997
4973 #endif // V8_TARGET_ARCH_X64 4998 #endif // V8_TARGET_ARCH_X64