Chromium Code Reviews

Unified Diff: src/x64/full-codegen-x64.cc

Issue 71163006: Merge bleeding_edge r17376:17693. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Fix all.gyp Created 7 years, 1 month ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 297 matching lines...)
 }


 void FullCodeGenerator::EmitProfilingCounterReset() {
   int reset_value = FLAG_interrupt_budget;
   if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
     // Self-optimization is a one-off thing; if it fails, don't try again.
     reset_value = Smi::kMaxValue;
   }
   __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
-  __ movq(kScratchRegister,
-          reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)),
-          RelocInfo::NONE64);
+  __ Move(kScratchRegister, Smi::FromInt(reset_value));
   __ movq(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
 }


 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                 Label* back_edge_target) {
   Comment cmnt(masm_, "[ Back edge bookkeeping");
   Label ok;

   int weight = 1;
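The only change in this hunk replaces the three-instruction movq of a reinterpret_cast'd Smi immediate with a single __ Move(kScratchRegister, Smi::FromInt(reset_value)), letting the macro assembler pick the encoding; the reset-value logic itself is untouched. A minimal stand-alone sketch of that logic, in plain C++ with kInterruptBudget and kSmiMaxValue as assumed stand-ins for FLAG_interrupt_budget and Smi::kMaxValue:

#include <climits>

// Assumed stand-ins; the real values come from V8's flags and the Smi class.
constexpr int kInterruptBudget = 0x1800;
constexpr int kSmiMaxValue = INT_MAX;

// Mirrors the value selection baked into the generated code: a normal reset
// reloads the interrupt budget, while a one-off self-optimization saturates
// the counter so a failed attempt is effectively never retried.
int ProfilingCounterResetValue(bool should_self_optimize, bool retry_self_opt) {
  int reset_value = kInterruptBudget;
  if (should_self_optimize && !retry_self_opt) {
    reset_value = kSmiMaxValue;
  }
  return reset_value;
}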
(...skipping 1259 matching lines...)
   if (expression == NULL) {
     __ PushRoot(Heap::kNullValueRootIndex);
   } else {
     VisitForStackValue(expression);
   }
 }


 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   Comment cmnt(masm_, "[ ObjectLiteral");
+
+  int depth = 1;
+  expr->BuildConstantProperties(isolate(), &depth);
   Handle<FixedArray> constant_properties = expr->constant_properties();
   int flags = expr->fast_elements()
       ? ObjectLiteral::kFastElements
       : ObjectLiteral::kNoFlags;
   flags |= expr->has_function()
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
   int properties_count = constant_properties->length() / 2;
   if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
-      expr->depth() > 1 || Serializer::enabled() ||
+      depth > 1 || Serializer::enabled() ||
       flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
     __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
     __ Push(Smi::FromInt(expr->literal_index()));
     __ Push(constant_properties);
     __ Push(Smi::FromInt(flags));
     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   } else {
     __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
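In this hunk the literal's nesting depth is computed up front by BuildConstantProperties(isolate(), &depth) instead of being read from expr->depth() inside the condition. A rough sketch of the resulting slow-path test, in plain C++ with illustrative parameter names rather than the V8 API:

// True when VisitObjectLiteral must fall back to Runtime::kCreateObjectLiteral
// instead of using FastCloneShallowObjectStub.
bool NeedsRuntimeObjectLiteral(bool track_double_fields, bool may_store_doubles,
                               int depth, bool serializer_enabled,
                               int flags, int fast_elements_flags,
                               int properties_count, int max_cloned_properties) {
  return (track_double_fields && may_store_doubles) ||  // fields may hold doubles
         depth > 1 ||                                   // nested literal
         serializer_enabled ||                          // building a snapshot
         flags != fast_elements_flags ||                // not plain fast elements
         properties_count > max_cloned_properties;      // too large for the stub
}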
(...skipping 98 matching lines...)
     context()->PlugTOS();
   } else {
     context()->Plug(rax);
   }
 }


 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   Comment cmnt(masm_, "[ ArrayLiteral");

+  int depth = 1;
+  expr->BuildConstantElements(isolate(), &depth);
   ZoneList<Expression*>* subexprs = expr->values();
   int length = subexprs->length();
   Handle<FixedArray> constant_elements = expr->constant_elements();
   ASSERT_EQ(2, constant_elements->length());
   ElementsKind constant_elements_kind =
       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
   bool has_constant_fast_elements =
       IsFastObjectElementsKind(constant_elements_kind);
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));

   Heap* heap = isolate()->heap();
   if (has_constant_fast_elements &&
       constant_elements_values->map() == heap->fixed_cow_array_map()) {
     // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
     // change, so it's possible to specialize the stub in advance.
     __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
     __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Move(rbx, Smi::FromInt(expr->literal_index()));
     __ Move(rcx, constant_elements);
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
         DONT_TRACK_ALLOCATION_SITE,
         length);
     __ CallStub(&stub);
-  } else if (expr->depth() > 1) {
+  } else if (depth > 1 || Serializer::enabled() ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Push(Smi::FromInt(expr->literal_index()));
     __ Push(constant_elements);
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
-  } else if (Serializer::enabled() ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-    __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
-    __ Push(Smi::FromInt(expr->literal_index()));
-    __ Push(constant_elements);
-    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
     ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
            FLAG_smi_only_arrays);
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
     AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
         ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;

     // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
     // change, so it's possible to specialize the stub in advance.
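Here the depth likewise comes from BuildConstantElements, and the separate Runtime::kCreateArrayLiteralShallow branch is folded away, so one condition now routes every literal that cannot use the shallow-clone stub to Runtime::kCreateArrayLiteral. A sketch of the merged test, with illustrative names only:

// True when the array literal must be built by Runtime::kCreateArrayLiteral
// rather than by FastCloneShallowArrayStub.
bool NeedsRuntimeArrayLiteral(int depth, bool serializer_enabled,
                              int length, int max_cloned_length) {
  return depth > 1 || serializer_enabled || length > max_cloned_length;
}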
(...skipping 1245 matching lines...)

   __ JumpIfSmi(rax, if_false);
   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   Split(equal, if_true, if_false, fall_through);

   context()->Plug(if_true, if_false);
 }


+void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
+  ZoneList<Expression*>* args = expr->arguments();
+  ASSERT(args->length() == 1);
+
+  VisitForAccumulatorValue(args->at(0));
+
+  Label materialize_true, materialize_false;
+  Label* if_true = NULL;
+  Label* if_false = NULL;
+  Label* fall_through = NULL;
+  context()->PrepareTest(&materialize_true, &materialize_false,
+                         &if_true, &if_false, &fall_through);
+
+  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
+  __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
+  __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
+          Immediate(0x80000000));
+  __ j(not_equal, if_false);
+  __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
+          Immediate(0x00000000));
+  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
+  Split(equal, if_true, if_false, fall_through);
+
+  context()->Plug(if_true, if_false);
+}
+
+
 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 1);

   VisitForAccumulatorValue(args->at(0));

   Label materialize_true, materialize_false;
   Label* if_true = NULL;
   Label* if_false = NULL;
   Label* fall_through = NULL;
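The new EmitIsMinusZero intrinsic checks the heap-number map and then compares the upper 32 bits of the stored double (the HeapNumber exponent word) against 0x80000000 and the lower 32 bits (the mantissa word) against 0, because that is exactly the IEEE 754 bit pattern of -0.0: only the sign bit is set. A stand-alone C++ sketch of the same test, assuming the usual 64-bit double layout and not using any V8 code:

#include <cstdint>
#include <cstring>

bool IsMinusZero(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));            // reinterpret the double
  uint32_t upper = static_cast<uint32_t>(bits >> 32);  // sign + exponent + high mantissa
  uint32_t lower = static_cast<uint32_t>(bits);        // low mantissa word
  // -0.0 sets only the sign bit: upper word 0x80000000, lower word 0.
  return upper == 0x80000000u && lower == 0u;
}

// IsMinusZero(-0.0) == true; IsMinusZero(0.0) and IsMinusZero(-1.0) == false.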
(...skipping 320 matching lines...)
       Operand stamp_operand = __ ExternalOperand(stamp);
       __ movq(scratch, stamp_operand);
       __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
       __ j(not_equal, &runtime, Label::kNear);
       __ movq(result, FieldOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
       __ jmp(&done);
     }
     __ bind(&runtime);
     __ PrepareCallCFunction(2);
     __ movq(arg_reg_1, object);
     __ movq(arg_reg_2, index, RelocInfo::NONE64);
     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
     __ jmp(&done);
   }

   __ bind(&not_date_object);
   __ CallRuntime(Runtime::kThrowNotDateError, 0);
   __ bind(&done);
   context()->Plug(rax);
 }
(...skipping 240 matching lines...)

   __ bind(&done);
   context()->Plug(result);
 }


 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT_EQ(2, args->length());

-  VisitForStackValue(args->at(0));
-  VisitForStackValue(args->at(1));
+  if (FLAG_new_string_add) {
+    VisitForStackValue(args->at(0));
+    VisitForAccumulatorValue(args->at(1));

-  StringAddStub stub(STRING_ADD_CHECK_BOTH);
-  __ CallStub(&stub);
+    __ pop(rdx);
+    NewStringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+    __ CallStub(&stub);
+  } else {
+    VisitForStackValue(args->at(0));
+    VisitForStackValue(args->at(1));
+
+    StringAddStub stub(STRING_ADD_CHECK_BOTH);
+    __ CallStub(&stub);
+  }
   context()->Plug(rax);
 }
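With FLAG_new_string_add the left operand is pushed, the right operand is left in the accumulator, and the left is then popped into rdx before calling NewStringAddStub(STRING_ADD_CHECK_BOTH, NOT_TENURED); without the flag the old both-operands-on-the-stack StringAddStub path is kept. A plain-C++ sketch of the operand plumbing, where the comments restate only what the hunk shows and the function name is illustrative:

// Compiles, but only documents the two emitted sequences.
void EmitStringAddSketch(bool new_string_add) {
  if (new_string_add) {
    // VisitForStackValue(left)         -> left pushed on the expression stack
    // VisitForAccumulatorValue(right)  -> right ends up in rax
    // pop rdx                          -> left moved into rdx
    // CallStub(NewStringAddStub(STRING_ADD_CHECK_BOTH, NOT_TENURED))
  } else {
    // VisitForStackValue(left); VisitForStackValue(right)
    // CallStub(StringAddStub(STRING_ADD_CHECK_BOTH))
  }
  // Either way the result is returned in rax and plugged into the context.
}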


 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT_EQ(2, args->length());

   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));

   StringCompareStub stub;
   __ CallStub(&stub);
   context()->Plug(rax);
 }


-void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
-  // Load the argument on the stack and call the stub.
-  TranscendentalCacheStub stub(TranscendentalCache::SIN,
-                               TranscendentalCacheStub::TAGGED);
-  ZoneList<Expression*>* args = expr->arguments();
-  ASSERT(args->length() == 1);
-  VisitForStackValue(args->at(0));
-  __ CallStub(&stub);
-  context()->Plug(rax);
-}
-
-
-void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
-  // Load the argument on the stack and call the stub.
-  TranscendentalCacheStub stub(TranscendentalCache::COS,
-                               TranscendentalCacheStub::TAGGED);
-  ZoneList<Expression*>* args = expr->arguments();
-  ASSERT(args->length() == 1);
-  VisitForStackValue(args->at(0));
-  __ CallStub(&stub);
-  context()->Plug(rax);
-}
-
-
-void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
-  // Load the argument on the stack and call the stub.
-  TranscendentalCacheStub stub(TranscendentalCache::TAN,
-                               TranscendentalCacheStub::TAGGED);
-  ZoneList<Expression*>* args = expr->arguments();
-  ASSERT(args->length() == 1);
-  VisitForStackValue(args->at(0));
-  __ CallStub(&stub);
-  context()->Plug(rax);
-}
-
-
 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
   // Load the argument on the stack and call the stub.
   TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                TranscendentalCacheStub::TAGGED);
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 1);
   VisitForStackValue(args->at(0));
   __ CallStub(&stub);
   context()->Plug(rax);
 }
(...skipping 675 matching lines...)
   }

   // We need a second deoptimization point after loading the value
   // in case evaluating the property load my have a side effect.
   if (assign_type == VARIABLE) {
     PrepareForBailout(expr->expression(), TOS_REG);
   } else {
     PrepareForBailoutForId(prop->LoadId(), TOS_REG);
   }

-  // Call ToNumber only if operand is not a smi.
-  Label no_conversion;
+  // Inline smi case if we are in a loop.
+  Label done, stub_call;
+  JumpPatchSite patch_site(masm_);
   if (ShouldInlineSmiCase(expr->op())) {
-    __ JumpIfSmi(rax, &no_conversion, Label::kNear);
+    Label slow;
+    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
+
+    // Save result for postfix expressions.
+    if (expr->is_postfix()) {
+      if (!context()->IsEffect()) {
+        // Save the result on the stack. If we have a named or keyed property
+        // we store the result under the receiver that is currently on top
+        // of the stack.
+        switch (assign_type) {
+          case VARIABLE:
+            __ push(rax);
+            break;
+          case NAMED_PROPERTY:
+            __ movq(Operand(rsp, kPointerSize), rax);
+            break;
+          case KEYED_PROPERTY:
+            __ movq(Operand(rsp, 2 * kPointerSize), rax);
+            break;
+        }
+      }
+    }
+
+    SmiOperationExecutionMode mode;
+    mode.Add(PRESERVE_SOURCE_REGISTER);
+    mode.Add(BAILOUT_ON_NO_OVERFLOW);
+    if (expr->op() == Token::INC) {
+      __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
+    } else {
+      __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
+    }
+    __ jmp(&stub_call, Label::kNear);
+    __ bind(&slow);
   }
+
   ToNumberStub convert_stub;
   __ CallStub(&convert_stub);
-  __ bind(&no_conversion);

   // Save result for postfix expressions.
   if (expr->is_postfix()) {
     if (!context()->IsEffect()) {
       // Save the result on the stack. If we have a named or keyed property
       // we store the result under the receiver that is currently on top
       // of the stack.
       switch (assign_type) {
         case VARIABLE:
           __ push(rax);
           break;
         case NAMED_PROPERTY:
           __ movq(Operand(rsp, kPointerSize), rax);
           break;
         case KEYED_PROPERTY:
           __ movq(Operand(rsp, 2 * kPointerSize), rax);
           break;
       }
     }
   }

-  // Inline smi case if we are in a loop.
-  Label done, stub_call;
-  JumpPatchSite patch_site(masm_);
-
-  if (ShouldInlineSmiCase(expr->op())) {
-    if (expr->op() == Token::INC) {
-      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
-    } else {
-      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
-    }
-    __ j(overflow, &stub_call, Label::kNear);
-    // We could eliminate this smi check if we split the code at
-    // the first smi check before calling ToNumber.
-    patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
-
-    __ bind(&stub_call);
-    // Call stub. Undo operation first.
-    if (expr->op() == Token::INC) {
-      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
-    } else {
-      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
-    }
-  }
-
   // Record position before stub call.
   SetSourcePosition(expr->position());

   // Call stub for +1/-1.
+  __ bind(&stub_call);
   __ movq(rdx, rax);
   __ Move(rax, Smi::FromInt(1));
   BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
   CallIC(stub.GetCode(isolate()),
          RelocInfo::CODE_TARGET,
          expr->CountBinOpFeedbackId());
   patch_site.EmitPatchInfo();
   __ bind(&done);

   // Store the value returned in rax.
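The rewritten smi fast path for ++/-- uses SmiAddConstant/SmiSubConstant with a SmiOperationExecutionMode that preserves the source register and bails out on overflow, jumping straight to done on success; the old undo-then-retry sequence disappears, and ToNumberStub plus BinaryOpStub become the shared slow path. A stand-alone sketch of why the fast path can skip the stub entirely, assuming V8's x64 convention of keeping a 32-bit smi in the upper half of the 64-bit word (plain C++ using the GCC/Clang overflow builtin, not V8 code):

#include <cstdint>
#include <optional>

// A smi on x64 is assumed here to be the 32-bit value shifted left by 32 bits.
std::optional<int64_t> SmiAddConstantNoOverflow(int64_t smi, int32_t delta) {
  int64_t shifted_delta = static_cast<int64_t>(delta) << 32;  // like Smi::FromInt(delta)
  int64_t result;
  if (__builtin_add_overflow(smi, shifted_delta, &result)) {
    // Overflow: the generated code falls through to ToNumberStub + BinaryOpStub.
    return std::nullopt;
  }
  // Still a valid smi: the generated code jumps to 'done' with no stub call.
  return result;
}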
(...skipping 490 matching lines...)

   ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
             Assembler::target_address_at(call_target_address));
   return OSR_AFTER_STACK_CHECK;
 }


 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64