Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-ia32.cc

Issue 16013003: Fix hole handling, and ensure smi representation is handled properly (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 7 months ago
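
The "smi representation" half of this change is mechanical: throughout the chunk builder, guards and asserts that previously accepted only Tagged values are widened to IsSmiOrTagged(), so operands that stay in smi representation also fall through to the generic LArithmeticT / LCmpT paths instead of tripping an assert. Below is a minimal, self-contained sketch of that dispatch pattern, assuming simplified stand-in types (Rep, Representation, SelectArithmeticPath are illustrative, not V8's real classes):

    #include <cassert>
    #include <cstdio>

    // Illustrative stand-in for v8::internal::Representation (not the real class).
    enum class Rep { kSmi, kTagged, kInteger32, kDouble };

    struct Representation {
      Rep kind;
      bool IsInteger32() const { return kind == Rep::kInteger32; }
      bool IsDouble() const { return kind == Rep::kDouble; }
      bool IsTagged() const { return kind == Rep::kTagged; }
      // A smi is just a specially encoded tagged value, so both kinds are
      // acceptable on the generic path -- this is the check the patch moves to.
      bool IsSmiOrTagged() const {
        return kind == Rep::kSmi || kind == Rep::kTagged;
      }
    };

    // Mirrors the shape of DoAdd/DoSub/DoMul/... in the diff below: fast
    // integer path, double path, otherwise the generic stub call.
    const char* SelectArithmeticPath(Representation r) {
      if (r.IsInteger32()) return "integer path (LAddI / LSubI / LMulI ...)";
      if (r.IsDouble()) return "double path (LArithmeticD)";
      assert(r.IsSmiOrTagged());  // previously assert(r.IsTagged()), which a
                                  // smi-represented operand would have tripped
      return "generic path (LArithmeticT stub call)";
    }

    int main() {
      std::printf("%s\n", SelectArithmeticPath({Rep::kSmi}));     // generic path
      std::printf("%s\n", SelectArithmeticPath({Rep::kDouble}));  // double path
    }
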
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 751 matching lines...)
762 } 762 }
763 763
764 764
765 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) { 765 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
766 return AssignEnvironment(new(zone()) LDeoptimize); 766 return AssignEnvironment(new(zone()) LDeoptimize);
767 } 767 }
768 768
769 769
770 LInstruction* LChunkBuilder::DoShift(Token::Value op, 770 LInstruction* LChunkBuilder::DoShift(Token::Value op,
771 HBitwiseBinaryOperation* instr) { 771 HBitwiseBinaryOperation* instr) {
772 if (instr->representation().IsTagged()) { 772 if (instr->representation().IsSmiOrTagged()) {
773 ASSERT(instr->left()->representation().IsTagged()); 773 ASSERT(instr->left()->representation().IsSmiOrTagged());
774 ASSERT(instr->right()->representation().IsTagged()); 774 ASSERT(instr->right()->representation().IsSmiOrTagged());
775 775
776 LOperand* context = UseFixed(instr->context(), esi); 776 LOperand* context = UseFixed(instr->context(), esi);
777 LOperand* left = UseFixed(instr->left(), edx); 777 LOperand* left = UseFixed(instr->left(), edx);
778 LOperand* right = UseFixed(instr->right(), eax); 778 LOperand* right = UseFixed(instr->right(), eax);
779 LArithmeticT* result = new(zone()) LArithmeticT(op, context, left, right); 779 LArithmeticT* result = new(zone()) LArithmeticT(op, context, left, right);
780 return MarkAsCall(DefineFixed(result, eax), instr); 780 return MarkAsCall(DefineFixed(result, eax), instr);
781 } 781 }
782 782
783 ASSERT(instr->representation().IsInteger32()); 783 ASSERT(instr->representation().IsInteger32());
784 ASSERT(instr->left()->representation().IsInteger32()); 784 ASSERT(instr->left()->representation().IsInteger32());
(...skipping 48 matching lines...)
833 833
834 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op, 834 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
835 HArithmeticBinaryOperation* instr) { 835 HArithmeticBinaryOperation* instr) {
836 ASSERT(op == Token::ADD || 836 ASSERT(op == Token::ADD ||
837 op == Token::DIV || 837 op == Token::DIV ||
838 op == Token::MOD || 838 op == Token::MOD ||
839 op == Token::MUL || 839 op == Token::MUL ||
840 op == Token::SUB); 840 op == Token::SUB);
841 HValue* left = instr->left(); 841 HValue* left = instr->left();
842 HValue* right = instr->right(); 842 HValue* right = instr->right();
843 ASSERT(left->representation().IsTagged()); 843 ASSERT(left->representation().IsSmiOrTagged());
844 ASSERT(right->representation().IsTagged()); 844 ASSERT(right->representation().IsSmiOrTagged());
845 LOperand* context = UseFixed(instr->context(), esi); 845 LOperand* context = UseFixed(instr->context(), esi);
846 LOperand* left_operand = UseFixed(left, edx); 846 LOperand* left_operand = UseFixed(left, edx);
847 LOperand* right_operand = UseFixed(right, eax); 847 LOperand* right_operand = UseFixed(right, eax);
848 LArithmeticT* result = 848 LArithmeticT* result =
849 new(zone()) LArithmeticT(op, context, left_operand, right_operand); 849 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
850 return MarkAsCall(DefineFixed(result, eax), instr); 850 return MarkAsCall(DefineFixed(result, eax), instr);
851 } 851 }
852 852
853 853
854 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) { 854 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
(...skipping 529 matching lines...)
1384 1384
1385 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) { 1385 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1386 if (instr->representation().IsInteger32()) { 1386 if (instr->representation().IsInteger32()) {
1387 ASSERT(instr->left()->representation().IsInteger32()); 1387 ASSERT(instr->left()->representation().IsInteger32());
1388 ASSERT(instr->right()->representation().IsInteger32()); 1388 ASSERT(instr->right()->representation().IsInteger32());
1389 1389
1390 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); 1390 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1391 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand()); 1391 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1392 return DefineSameAsFirst(new(zone()) LBitI(left, right)); 1392 return DefineSameAsFirst(new(zone()) LBitI(left, right));
1393 } else { 1393 } else {
1394 ASSERT(instr->representation().IsTagged()); 1394 ASSERT(instr->representation().IsSmiOrTagged());
1395 ASSERT(instr->left()->representation().IsTagged()); 1395 ASSERT(instr->left()->representation().IsSmiOrTagged());
1396 ASSERT(instr->right()->representation().IsTagged()); 1396 ASSERT(instr->right()->representation().IsSmiOrTagged());
1397 1397
1398 LOperand* context = UseFixed(instr->context(), esi); 1398 LOperand* context = UseFixed(instr->context(), esi);
1399 LOperand* left = UseFixed(instr->left(), edx); 1399 LOperand* left = UseFixed(instr->left(), edx);
1400 LOperand* right = UseFixed(instr->right(), eax); 1400 LOperand* right = UseFixed(instr->right(), eax);
1401 LArithmeticT* result = 1401 LArithmeticT* result =
1402 new(zone()) LArithmeticT(instr->op(), context, left, right); 1402 new(zone()) LArithmeticT(instr->op(), context, left, right);
1403 return MarkAsCall(DefineFixed(result, eax), instr); 1403 return MarkAsCall(DefineFixed(result, eax), instr);
1404 } 1404 }
1405 } 1405 }
1406 1406
(...skipping 20 matching lines...)
1427 return AssignEnvironment(DefineSameAsFirst(div)); 1427 return AssignEnvironment(DefineSameAsFirst(div));
1428 } 1428 }
1429 // The temporary operand is necessary to ensure that right is not allocated 1429 // The temporary operand is necessary to ensure that right is not allocated
1430 // into edx. 1430 // into edx.
1431 LOperand* temp = FixedTemp(edx); 1431 LOperand* temp = FixedTemp(edx);
1432 LOperand* dividend = UseFixed(instr->left(), eax); 1432 LOperand* dividend = UseFixed(instr->left(), eax);
1433 LOperand* divisor = UseRegister(instr->right()); 1433 LOperand* divisor = UseRegister(instr->right());
1434 LDivI* result = new(zone()) LDivI(dividend, divisor, temp); 1434 LDivI* result = new(zone()) LDivI(dividend, divisor, temp);
1435 return AssignEnvironment(DefineFixed(result, eax)); 1435 return AssignEnvironment(DefineFixed(result, eax));
1436 } else { 1436 } else {
1437 ASSERT(instr->representation().IsTagged()); 1437 ASSERT(instr->representation().IsSmiOrTagged());
1438 return DoArithmeticT(Token::DIV, instr); 1438 return DoArithmeticT(Token::DIV, instr);
1439 } 1439 }
1440 } 1440 }
1441 1441
1442 1442
1443 HValue* LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(HValue* dividend) { 1443 HValue* LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(HValue* dividend) {
1444 // A value with an integer representation does not need to be transformed. 1444 // A value with an integer representation does not need to be transformed.
1445 if (dividend->representation().IsInteger32()) { 1445 if (dividend->representation().IsInteger32()) {
1446 return dividend; 1446 return dividend;
1447 // A change from an integer32 can be replaced by the integer32 value. 1447 // A change from an integer32 can be replaced by the integer32 value.
(...skipping 82 matching lines...)
1530 LOperand* divisor = UseRegister(instr->right()); 1530 LOperand* divisor = UseRegister(instr->right());
1531 LModI* mod = new(zone()) LModI(value, divisor, temp); 1531 LModI* mod = new(zone()) LModI(value, divisor, temp);
1532 result = DefineFixed(mod, edx); 1532 result = DefineFixed(mod, edx);
1533 } 1533 }
1534 1534
1535 return (instr->CheckFlag(HValue::kBailoutOnMinusZero) || 1535 return (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1536 instr->CheckFlag(HValue::kCanBeDivByZero) || 1536 instr->CheckFlag(HValue::kCanBeDivByZero) ||
1537 instr->CheckFlag(HValue::kCanOverflow)) 1537 instr->CheckFlag(HValue::kCanOverflow))
1538 ? AssignEnvironment(result) 1538 ? AssignEnvironment(result)
1539 : result; 1539 : result;
1540 } else if (instr->representation().IsTagged()) { 1540 } else if (instr->representation().IsSmiOrTagged()) {
1541 return DoArithmeticT(Token::MOD, instr); 1541 return DoArithmeticT(Token::MOD, instr);
1542 } else { 1542 } else {
1543 ASSERT(instr->representation().IsDouble()); 1543 ASSERT(instr->representation().IsDouble());
1544 // We call a C function for double modulo. It can't trigger a GC. 1544 // We call a C function for double modulo. It can't trigger a GC.
1545 // We need to use fixed result register for the call. 1545 // We need to use fixed result register for the call.
1546 // TODO(fschneider): Allow any register as input registers. 1546 // TODO(fschneider): Allow any register as input registers.
1547 LOperand* left = UseFixedDouble(instr->left(), xmm2); 1547 LOperand* left = UseFixedDouble(instr->left(), xmm2);
1548 LOperand* right = UseFixedDouble(instr->right(), xmm1); 1548 LOperand* right = UseFixedDouble(instr->right(), xmm1);
1549 LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right); 1549 LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
1550 return MarkAsCall(DefineFixedDouble(result, xmm1), instr); 1550 return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
(...skipping 13 matching lines...)
1564 } 1564 }
1565 LMulI* mul = new(zone()) LMulI(left, right, temp); 1565 LMulI* mul = new(zone()) LMulI(left, right, temp);
1566 if (instr->CheckFlag(HValue::kCanOverflow) || 1566 if (instr->CheckFlag(HValue::kCanOverflow) ||
1567 instr->CheckFlag(HValue::kBailoutOnMinusZero)) { 1567 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1568 AssignEnvironment(mul); 1568 AssignEnvironment(mul);
1569 } 1569 }
1570 return DefineSameAsFirst(mul); 1570 return DefineSameAsFirst(mul);
1571 } else if (instr->representation().IsDouble()) { 1571 } else if (instr->representation().IsDouble()) {
1572 return DoArithmeticD(Token::MUL, instr); 1572 return DoArithmeticD(Token::MUL, instr);
1573 } else { 1573 } else {
1574 ASSERT(instr->representation().IsTagged()); 1574 ASSERT(instr->representation().IsSmiOrTagged());
1575 return DoArithmeticT(Token::MUL, instr); 1575 return DoArithmeticT(Token::MUL, instr);
1576 } 1576 }
1577 } 1577 }
1578 1578
1579 1579
1580 LInstruction* LChunkBuilder::DoSub(HSub* instr) { 1580 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1581 if (instr->representation().IsInteger32()) { 1581 if (instr->representation().IsInteger32()) {
1582 ASSERT(instr->left()->representation().IsInteger32()); 1582 ASSERT(instr->left()->representation().IsInteger32());
1583 ASSERT(instr->right()->representation().IsInteger32()); 1583 ASSERT(instr->right()->representation().IsInteger32());
1584 LOperand* left = UseRegisterAtStart(instr->left()); 1584 LOperand* left = UseRegisterAtStart(instr->left());
1585 LOperand* right = UseOrConstantAtStart(instr->right()); 1585 LOperand* right = UseOrConstantAtStart(instr->right());
1586 LSubI* sub = new(zone()) LSubI(left, right); 1586 LSubI* sub = new(zone()) LSubI(left, right);
1587 LInstruction* result = DefineSameAsFirst(sub); 1587 LInstruction* result = DefineSameAsFirst(sub);
1588 if (instr->CheckFlag(HValue::kCanOverflow)) { 1588 if (instr->CheckFlag(HValue::kCanOverflow)) {
1589 result = AssignEnvironment(result); 1589 result = AssignEnvironment(result);
1590 } 1590 }
1591 return result; 1591 return result;
1592 } else if (instr->representation().IsDouble()) { 1592 } else if (instr->representation().IsDouble()) {
1593 return DoArithmeticD(Token::SUB, instr); 1593 return DoArithmeticD(Token::SUB, instr);
1594 } else { 1594 } else {
1595 ASSERT(instr->representation().IsTagged()); 1595 ASSERT(instr->representation().IsSmiOrTagged());
1596 return DoArithmeticT(Token::SUB, instr); 1596 return DoArithmeticT(Token::SUB, instr);
1597 } 1597 }
1598 } 1598 }
1599 1599
1600 1600
1601 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) { 1601 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1602 if (instr->representation().IsInteger32()) { 1602 if (instr->representation().IsInteger32()) {
1603 // Check to see if it would be advantageous to use an lea instruction rather 1603 // Check to see if it would be advantageous to use an lea instruction rather
1604 // than an add. This is the case when no overflow check is needed and there 1604 // than an add. This is the case when no overflow check is needed and there
1605 // are multiple uses of the add's inputs, so using a 3-register add will 1605 // are multiple uses of the add's inputs, so using a 3-register add will
(...skipping 11 matching lines...)
1617 LInstruction* result = use_lea 1617 LInstruction* result = use_lea
1618 ? DefineAsRegister(add) 1618 ? DefineAsRegister(add)
1619 : DefineSameAsFirst(add); 1619 : DefineSameAsFirst(add);
1620 if (can_overflow) { 1620 if (can_overflow) {
1621 result = AssignEnvironment(result); 1621 result = AssignEnvironment(result);
1622 } 1622 }
1623 return result; 1623 return result;
1624 } else if (instr->representation().IsDouble()) { 1624 } else if (instr->representation().IsDouble()) {
1625 return DoArithmeticD(Token::ADD, instr); 1625 return DoArithmeticD(Token::ADD, instr);
1626 } else { 1626 } else {
1627 ASSERT(instr->representation().IsTagged()); 1627 ASSERT(instr->representation().IsSmiOrTagged());
1628 return DoArithmeticT(Token::ADD, instr); 1628 return DoArithmeticT(Token::ADD, instr);
1629 } 1629 }
1630 } 1630 }
1631 1631
1632 1632
1633 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) { 1633 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1634 LOperand* left = NULL; 1634 LOperand* left = NULL;
1635 LOperand* right = NULL; 1635 LOperand* right = NULL;
1636 if (instr->representation().IsInteger32()) { 1636 if (instr->representation().IsInteger32()) {
1637 ASSERT(instr->left()->representation().IsInteger32()); 1637 ASSERT(instr->left()->representation().IsInteger32());
(...skipping 23 matching lines...)
1661 UseFixedDouble(instr->right(), xmm1) : 1661 UseFixedDouble(instr->right(), xmm1) :
1662 UseFixed(instr->right(), eax); 1662 UseFixed(instr->right(), eax);
1663 LPower* result = new(zone()) LPower(left, right); 1663 LPower* result = new(zone()) LPower(left, right);
1664 return MarkAsCall(DefineFixedDouble(result, xmm3), instr, 1664 return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
1665 CAN_DEOPTIMIZE_EAGERLY); 1665 CAN_DEOPTIMIZE_EAGERLY);
1666 } 1666 }
1667 1667
1668 1668
1669 LInstruction* LChunkBuilder::DoRandom(HRandom* instr) { 1669 LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
1670 ASSERT(instr->representation().IsDouble()); 1670 ASSERT(instr->representation().IsDouble());
1671 ASSERT(instr->global_object()->representation().IsTagged()); 1671 ASSERT(instr->global_object()->representation().IsSmiOrTagged());
1672 LOperand* global_object = UseFixed(instr->global_object(), eax); 1672 LOperand* global_object = UseFixed(instr->global_object(), eax);
1673 LRandom* result = new(zone()) LRandom(global_object); 1673 LRandom* result = new(zone()) LRandom(global_object);
1674 return MarkAsCall(DefineFixedDouble(result, xmm1), instr); 1674 return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
1675 } 1675 }
1676 1676
1677 1677
1678 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) { 1678 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1679 ASSERT(instr->left()->representation().IsTagged()); 1679 ASSERT(instr->left()->representation().IsSmiOrTagged());
1680 ASSERT(instr->right()->representation().IsTagged()); 1680 ASSERT(instr->right()->representation().IsSmiOrTagged());
1681 LOperand* context = UseFixed(instr->context(), esi); 1681 LOperand* context = UseFixed(instr->context(), esi);
1682 LOperand* left = UseFixed(instr->left(), edx); 1682 LOperand* left = UseFixed(instr->left(), edx);
1683 LOperand* right = UseFixed(instr->right(), eax); 1683 LOperand* right = UseFixed(instr->right(), eax);
1684 LCmpT* result = new(zone()) LCmpT(context, left, right); 1684 LCmpT* result = new(zone()) LCmpT(context, left, right);
1685 return MarkAsCall(DefineFixed(result, eax), instr); 1685 return MarkAsCall(DefineFixed(result, eax), instr);
1686 } 1686 }
1687 1687
1688 1688
1689 LInstruction* LChunkBuilder::DoCompareIDAndBranch( 1689 LInstruction* LChunkBuilder::DoCompareIDAndBranch(
1690 HCompareIDAndBranch* instr) { 1690 HCompareIDAndBranch* instr) {
(...skipping 31 matching lines...)
1722 1722
1723 1723
1724 LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch( 1724 LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
1725 HCompareConstantEqAndBranch* instr) { 1725 HCompareConstantEqAndBranch* instr) {
1726 return new(zone()) LCmpConstantEqAndBranch( 1726 return new(zone()) LCmpConstantEqAndBranch(
1727 UseRegisterAtStart(instr->value())); 1727 UseRegisterAtStart(instr->value()));
1728 } 1728 }
1729 1729
1730 1730
1731 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) { 1731 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1732 ASSERT(instr->value()->representation().IsTagged()); 1732 ASSERT(instr->value()->representation().IsSmiOrTagged());
1733 LOperand* temp = TempRegister(); 1733 LOperand* temp = TempRegister();
1734 return new(zone()) LIsObjectAndBranch(UseRegister(instr->value()), temp); 1734 return new(zone()) LIsObjectAndBranch(UseRegister(instr->value()), temp);
1735 } 1735 }
1736 1736
1737 1737
1738 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) { 1738 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1739 ASSERT(instr->value()->representation().IsTagged()); 1739 ASSERT(instr->value()->representation().IsTagged());
1740 LOperand* temp = TempRegister(); 1740 LOperand* temp = TempRegister();
1741 return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp); 1741 return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp);
1742 } 1742 }
1743 1743
1744 1744
1745 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) { 1745 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1746 ASSERT(instr->value()->representation().IsTagged()); 1746 ASSERT(instr->value()->representation().IsTagged());
1747 return new(zone()) LIsSmiAndBranch(Use(instr->value())); 1747 return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1748 } 1748 }
1749 1749
1750 1750
1751 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch( 1751 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1752 HIsUndetectableAndBranch* instr) { 1752 HIsUndetectableAndBranch* instr) {
1753 ASSERT(instr ->value()->representation().IsTagged()); 1753 ASSERT(instr->value()->representation().IsTagged());
1754 return new(zone()) LIsUndetectableAndBranch( 1754 return new(zone()) LIsUndetectableAndBranch(
1755 UseRegisterAtStart(instr->value()), TempRegister()); 1755 UseRegisterAtStart(instr->value()), TempRegister());
1756 } 1756 }
1757 1757
1758 1758
1759 LInstruction* LChunkBuilder::DoStringCompareAndBranch( 1759 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1760 HStringCompareAndBranch* instr) { 1760 HStringCompareAndBranch* instr) {
1761 ASSERT(instr->left()->representation().IsTagged()); 1761 ASSERT(instr->left()->representation().IsTagged());
1762 ASSERT(instr->right()->representation().IsTagged()); 1762 ASSERT(instr->right()->representation().IsTagged());
1763 LOperand* context = UseFixed(instr->context(), esi); 1763 LOperand* context = UseFixed(instr->context(), esi);
(...skipping 138 matching lines...)
1902 return NULL; 1902 return NULL;
1903 } 1903 }
1904 1904
1905 1905
1906 LInstruction* LChunkBuilder::DoChange(HChange* instr) { 1906 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1907 Representation from = instr->from(); 1907 Representation from = instr->from();
1908 Representation to = instr->to(); 1908 Representation to = instr->to();
1909 if (from.IsSmi()) { 1909 if (from.IsSmi()) {
1910 if (to.IsTagged()) { 1910 if (to.IsTagged()) {
1911 LOperand* value = UseRegister(instr->value()); 1911 LOperand* value = UseRegister(instr->value());
1912 // For now, always deopt on hole. 1912 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1913 if (instr->value()->IsLoadKeyed() &&
1914 HLoadKeyed::cast(instr->value())->UsesMustHandleHole()) {
1915 return AssignEnvironment(
1916 DefineSameAsFirst(new(zone()) LCheckSmiAndReturn(value)));
1917 } else {
1918 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1919 }
1920 } 1913 }
1921 from = Representation::Tagged(); 1914 from = Representation::Tagged();
1922 } 1915 }
1923 // Only mark conversions that might need to allocate as calling rather than 1916 // Only mark conversions that might need to allocate as calling rather than
1924 // all changes. This makes simple, non-allocating conversion not have to force 1917 // all changes. This makes simple, non-allocating conversion not have to force
1925 // building a stack frame. 1918 // building a stack frame.
1926 if (from.IsTagged()) { 1919 if (from.IsTagged()) {
1927 if (to.IsDouble()) { 1920 if (to.IsDouble()) {
1928 info()->MarkAsDeferredCalling(); 1921 info()->MarkAsDeferredCalling();
1929 LOperand* value = UseRegister(instr->value()); 1922 LOperand* value = UseRegister(instr->value());
(...skipping 12 matching lines...) Expand all
1942 LOperand* value = UseRegister(val); 1935 LOperand* value = UseRegister(val);
1943 if (val->type().IsSmi()) { 1936 if (val->type().IsSmi()) {
1944 return DefineSameAsFirst(new(zone()) LDummyUse(value)); 1937 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1945 } 1938 }
1946 return AssignEnvironment( 1939 return AssignEnvironment(
1947 DefineSameAsFirst(new(zone()) LCheckSmiAndReturn(value))); 1940 DefineSameAsFirst(new(zone()) LCheckSmiAndReturn(value)));
1948 } else { 1941 } else {
1949 ASSERT(to.IsInteger32()); 1942 ASSERT(to.IsInteger32());
1950 if (instr->value()->type().IsSmi()) { 1943 if (instr->value()->type().IsSmi()) {
1951 LOperand* value = UseRegister(instr->value()); 1944 LOperand* value = UseRegister(instr->value());
1952 LInstruction* result = 1945 return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
1953 DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
1954 if (instr->value()->IsLoadKeyed()) {
1955 HLoadKeyed* load_keyed = HLoadKeyed::cast(instr->value());
1956 if (load_keyed->UsesMustHandleHole() &&
1957 load_keyed->hole_mode() == NEVER_RETURN_HOLE) {
1958 return AssignEnvironment(result);
1959 }
1960 }
1961 return result;
1962 } else { 1946 } else {
1963 bool truncating = instr->CanTruncateToInt32(); 1947 bool truncating = instr->CanTruncateToInt32();
1964 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { 1948 if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
1965 LOperand* value = UseRegister(instr->value()); 1949 LOperand* value = UseRegister(instr->value());
1966 LOperand* xmm_temp = 1950 LOperand* xmm_temp =
1967 (truncating && CpuFeatures::IsSupported(SSE3)) 1951 (truncating && CpuFeatures::IsSupported(SSE3))
1968 ? NULL 1952 ? NULL
1969 : FixedTemp(xmm1); 1953 : FixedTemp(xmm1);
1970 LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp); 1954 LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
1971 return AssignEnvironment(DefineSameAsFirst(res)); 1955 return AssignEnvironment(DefineSameAsFirst(res));
(...skipping 120 matching lines...)
2092 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) { 2076 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
2093 HValue* value = instr->value(); 2077 HValue* value = instr->value();
2094 Representation input_rep = value->representation(); 2078 Representation input_rep = value->representation();
2095 if (input_rep.IsDouble()) { 2079 if (input_rep.IsDouble()) {
2096 LOperand* reg = UseRegister(value); 2080 LOperand* reg = UseRegister(value);
2097 return DefineFixed(new(zone()) LClampDToUint8(reg), eax); 2081 return DefineFixed(new(zone()) LClampDToUint8(reg), eax);
2098 } else if (input_rep.IsInteger32()) { 2082 } else if (input_rep.IsInteger32()) {
2099 LOperand* reg = UseFixed(value, eax); 2083 LOperand* reg = UseFixed(value, eax);
2100 return DefineFixed(new(zone()) LClampIToUint8(reg), eax); 2084 return DefineFixed(new(zone()) LClampIToUint8(reg), eax);
2101 } else { 2085 } else {
2102 ASSERT(input_rep.IsTagged()); 2086 ASSERT(input_rep.IsSmiOrTagged());
2103 if (CpuFeatures::IsSupported(SSE2)) { 2087 if (CpuFeatures::IsSupported(SSE2)) {
2104 LOperand* reg = UseFixed(value, eax); 2088 LOperand* reg = UseFixed(value, eax);
2105 // Register allocator doesn't (yet) support allocation of double 2089 // Register allocator doesn't (yet) support allocation of double
2106 // temps. Reserve xmm1 explicitly. 2090 // temps. Reserve xmm1 explicitly.
2107 LOperand* temp = FixedTemp(xmm1); 2091 LOperand* temp = FixedTemp(xmm1);
2108 LClampTToUint8* result = new(zone()) LClampTToUint8(reg, temp); 2092 LClampTToUint8* result = new(zone()) LClampTToUint8(reg, temp);
2109 return AssignEnvironment(DefineFixed(result, eax)); 2093 return AssignEnvironment(DefineFixed(result, eax));
2110 } else { 2094 } else {
2111 LOperand* value = UseRegister(instr->value()); 2095 LOperand* value = UseRegister(instr->value());
2112 LClampTToUint8NoSSE2* res = 2096 LClampTToUint8NoSSE2* res =
(...skipping 682 matching lines...)
2795 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { 2779 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2796 LOperand* object = UseRegister(instr->object()); 2780 LOperand* object = UseRegister(instr->object());
2797 LOperand* index = UseTempRegister(instr->index()); 2781 LOperand* index = UseTempRegister(instr->index());
2798 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); 2782 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
2799 } 2783 }
2800 2784
2801 2785
2802 } } // namespace v8::internal 2786 } } // namespace v8::internal
2803 2787
2804 #endif // V8_TARGET_ARCH_IA32 2788 #endif // V8_TARGET_ARCH_IA32
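
On the "hole handling" half of the title: the old DoChange code above special-cased smi conversions whose input was an HLoadKeyed flagged UsesMustHandleHole(), wrapping them in LCheckSmiAndReturn / AssignEnvironment so the compiler could deoptimize if the keyed load produced the hole sentinel from a holey array; the new code drops those special cases and emits plain LDummyUse / LSmiUntag. The following standalone sketch only illustrates the underlying invariant, under the assumption of made-up names (kTheHole and LoadKeyed here are stand-ins, not V8's implementation):

    #include <cstdio>
    #include <optional>

    constexpr int kTheHole = -1;  // stand-in sentinel for an elided array slot

    // Models a keyed load from a possibly holey array: it may observe the
    // hole instead of a real element.
    std::optional<int> LoadKeyed(const int* elements, int index, bool holey) {
      int raw = elements[index];
      if (holey && raw == kTheHole) return std::nullopt;  // hole observed
      return raw;
    }

    int main() {
      int elements[] = {1, kTheHole, 3};
      // A consumer that assumes a smi must handle the hole explicitly,
      // mirroring the AssignEnvironment(...) (deopt on hole) that the old
      // chunk-builder code attached to these conversions.
      if (auto v = LoadKeyed(elements, 1, /*holey=*/true)) {
        std::printf("smi value: %d\n", *v);
      } else {
        std::printf("hole: would deoptimize here\n");
      }
    }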