Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 10837165: Lattice-based representation inference, powered by left/right specific type feedback for BinaryOps … (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: review feedback; fixed tests Created 8 years, 1 month ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 619 matching lines...)
630 // operation result to the caller of the stub. 630 // operation result to the caller of the stub.
631 __ TailCallExternalReference( 631 __ TailCallExternalReference(
632 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()), 632 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
633 3, 633 3,
634 1); 634 1);
635 } 635 }
636 636
637 637
638 class FloatingPointHelper : public AllStatic { 638 class FloatingPointHelper : public AllStatic {
639 public: 639 public:
640 enum ConvertUndefined {
641 CONVERT_UNDEFINED_TO_ZERO,
642 BAILOUT_ON_UNDEFINED
643 };
640 // Load the operands from rdx and rax into xmm0 and xmm1, as doubles. 644 // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
641 // If the operands are not both numbers, jump to not_numbers. 645 // If the operands are not both numbers, jump to not_numbers.
642 // Leaves rdx and rax unchanged. SmiOperands assumes both are smis. 646 // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
643 // NumberOperands assumes both are smis or heap numbers. 647 // NumberOperands assumes both are smis or heap numbers.
644 static void LoadSSE2SmiOperands(MacroAssembler* masm); 648 static void LoadSSE2SmiOperands(MacroAssembler* masm);
645 static void LoadSSE2NumberOperands(MacroAssembler* masm); 649 static void LoadSSE2NumberOperands(MacroAssembler* masm);
646 static void LoadSSE2UnknownOperands(MacroAssembler* masm, 650 static void LoadSSE2UnknownOperands(MacroAssembler* masm,
647 Label* not_numbers); 651 Label* not_numbers);
648 652
649 // Takes the operands in rdx and rax and loads them as integers in rax 653 // Takes the operands in rdx and rax and loads them as integers in rax
(...skipping 15 matching lines...)
665 // is NULL). 669 // is NULL).
666 // On success, both first and second hold Smi tagged values. 670 // On success, both first and second hold Smi tagged values.
667 // One of first or second must be non-Smi when entering. 671 // One of first or second must be non-Smi when entering.
668 static void NumbersToSmis(MacroAssembler* masm, 672 static void NumbersToSmis(MacroAssembler* masm,
669 Register first, 673 Register first,
670 Register second, 674 Register second,
671 Register scratch1, 675 Register scratch1,
672 Register scratch2, 676 Register scratch2,
673 Register scratch3, 677 Register scratch3,
674 Label* on_success, 678 Label* on_success,
675 Label* on_not_smis); 679 Label* on_not_smis,
680 ConvertUndefined convert_undefined);
676 }; 681 };
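
For context on the smi checks that dominate this file: on x64, V8 keeps a smi as a 32-bit payload in the upper half of a tagged 64-bit word, with the low tag bit clear (hence the STATIC_ASSERT(kSmiValueSize == 32) further down). A minimal sketch of that tagging scheme, assuming the standard x64 layout (tag 0, shift 32); illustrative only, not part of the patch:

  #include <cstdint>

  int64_t Integer32ToSmi(int32_t value) {
    return static_cast<int64_t>(value) << 32;   // payload in the upper word
  }

  int32_t SmiToInteger32(int64_t smi) {
    return static_cast<int32_t>(smi >> 32);     // arithmetic shift untags
  }

  bool IsSmi(int64_t tagged) {
    return (tagged & 1) == 0;                   // heap pointers have bit 0 set
  }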
677 682
678 683
679 // Get the integer part of a heap number. 684 // Get the integer part of a heap number.
680 // Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx. 685 // Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx.
681 void IntegerConvert(MacroAssembler* masm, 686 void IntegerConvert(MacroAssembler* masm,
682 Register result, 687 Register result,
683 Register source) { 688 Register source) {
684 // Result may be rcx. If result and source are the same register, source will 689 // Result may be rcx. If result and source are the same register, source will
685 // be overwritten. 690 // be overwritten.
(...skipping 304 matching lines...)
990 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; 995 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
991 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; 996 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
992 } 997 }
993 stream->Add("UnaryOpStub_%s_%s_%s", 998 stream->Add("UnaryOpStub_%s_%s_%s",
994 op_name, 999 op_name,
995 overwrite_name, 1000 overwrite_name,
996 UnaryOpIC::GetName(operand_type_)); 1001 UnaryOpIC::GetName(operand_type_));
997 } 1002 }
998 1003
999 1004
1005 void BinaryOpStub::Initialize() {}
1006
1007
1000 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 1008 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
1001 __ pop(rcx); // Save return address. 1009 __ pop(rcx); // Save return address.
1002 __ push(rdx); 1010 __ push(rdx);
1003 __ push(rax); 1011 __ push(rax);
1004 // Left and right arguments are now on top. 1012 // Left and right arguments are now on top.
1005 // Push this stub's key. Although the operation and the type info are
1006 // encoded into the key, the encoding is opaque, so push them too.
1007 __ Push(Smi::FromInt(MinorKey())); 1013 __ Push(Smi::FromInt(MinorKey()));
1008 __ Push(Smi::FromInt(op_));
1009 __ Push(Smi::FromInt(operands_type_));
1010 1014
1011 __ push(rcx); // Push return address. 1015 __ push(rcx); // Push return address.
1012 1016
1013 // Patch the caller to an appropriate specialized stub and return the 1017 // Patch the caller to an appropriate specialized stub and return the
1014 // operation result to the caller of the stub. 1018 // operation result to the caller of the stub.
1015 __ TailCallExternalReference( 1019 __ TailCallExternalReference(
1016 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), 1020 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
1017 masm->isolate()), 1021 masm->isolate()),
1018 5, 1022 3,
1019 1); 1023 1);
1020 } 1024 }
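
The old code pushed op_ and operands_type_ alongside the minor key, which is why kBinaryOp_Patch took five arguments; the new left/right type feedback is folded into MinorKey() itself, so only three arguments remain. A sketch of that style of bitfield packing (the field names and widths here are invented for illustration, not V8's actual key layout):

  #include <cstdint>

  uint32_t EncodeBinaryOpKey(uint32_t op, uint32_t mode,
                             uint32_t left_type, uint32_t right_type) {
    return (op & 0x7F)                  // bits 0..6:  operation token
         | ((mode & 0x3) << 7)          // bits 7..8:  overwrite mode
         | ((left_type & 0xF) << 9)     // bits 9..12: left type feedback
         | ((right_type & 0xF) << 13);  // bits 13..16: right type feedback
  }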
1021 1025
1022 1026
1023 void BinaryOpStub::Generate(MacroAssembler* masm) { 1027 static void BinaryOpStub_GenerateSmiCode(
1024 // Explicitly allow generation of nested stubs. It is safe here because
1025 // generation code does not use any raw pointers.
1026 AllowStubCallsScope allow_stub_calls(masm, true);
1027
1028 switch (operands_type_) {
1029 case BinaryOpIC::UNINITIALIZED:
1030 GenerateTypeTransition(masm);
1031 break;
1032 case BinaryOpIC::SMI:
1033 GenerateSmiStub(masm);
1034 break;
1035 case BinaryOpIC::INT32:
1036 UNREACHABLE();
1037 // The int32 case is identical to the Smi case. We avoid creating this
1038 // ic state on x64.
1039 break;
1040 case BinaryOpIC::HEAP_NUMBER:
1041 GenerateHeapNumberStub(masm);
1042 break;
1043 case BinaryOpIC::ODDBALL:
1044 GenerateOddballStub(masm);
1045 break;
1046 case BinaryOpIC::BOTH_STRING:
1047 GenerateBothStringStub(masm);
1048 break;
1049 case BinaryOpIC::STRING:
1050 GenerateStringStub(masm);
1051 break;
1052 case BinaryOpIC::GENERIC:
1053 GenerateGeneric(masm);
1054 break;
1055 default:
1056 UNREACHABLE();
1057 }
1058 }
1059
1060
1061 void BinaryOpStub::PrintName(StringStream* stream) {
1062 const char* op_name = Token::Name(op_);
1063 const char* overwrite_name;
1064 switch (mode_) {
1065 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
1066 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
1067 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
1068 default: overwrite_name = "UnknownOverwrite"; break;
1069 }
1070 stream->Add("BinaryOpStub_%s_%s_%s",
1071 op_name,
1072 overwrite_name,
1073 BinaryOpIC::GetName(operands_type_));
1074 }
1075
1076
1077 void BinaryOpStub::GenerateSmiCode(
1078 MacroAssembler* masm, 1028 MacroAssembler* masm,
1079 Label* slow, 1029 Label* slow,
1080 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { 1030 BinaryOpStub::SmiCodeGenerateHeapNumberResults allow_heapnumber_results,
1031 Token::Value op) {
1081 1032
1082 // Arguments to BinaryOpStub are in rdx and rax. 1033 // Arguments to BinaryOpStub are in rdx and rax.
1083 const Register left = rdx; 1034 const Register left = rdx;
1084 const Register right = rax; 1035 const Register right = rax;
1085 1036
1086 // We only generate heapnumber answers for overflowing calculations 1037 // We only generate heapnumber answers for overflowing calculations
1087 // for the four basic arithmetic operations and logical right shift by 0. 1038 // for the four basic arithmetic operations and logical right shift by 0.
1088 bool generate_inline_heapnumber_results = 1039 bool generate_inline_heapnumber_results =
1089 (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) && 1040 (allow_heapnumber_results == BinaryOpStub::ALLOW_HEAPNUMBER_RESULTS) &&
1090 (op_ == Token::ADD || op_ == Token::SUB || 1041 (op == Token::ADD || op == Token::SUB ||
1091 op_ == Token::MUL || op_ == Token::DIV || op_ == Token::SHR); 1042 op == Token::MUL || op == Token::DIV || op == Token::SHR);
1092 1043
1093 // Smi check of both operands. If op is BIT_OR, the check is delayed 1044 // Smi check of both operands. If op is BIT_OR, the check is delayed
1094 // until after the OR operation. 1045 // until after the OR operation.
1095 Label not_smis; 1046 Label not_smis;
1096 Label use_fp_on_smis; 1047 Label use_fp_on_smis;
1097 Label fail; 1048 Label fail;
1098 1049
1099 if (op_ != Token::BIT_OR) { 1050 if (op != Token::BIT_OR) {
1100 Comment smi_check_comment(masm, "-- Smi check arguments"); 1051 Comment smi_check_comment(masm, "-- Smi check arguments");
1101 __ JumpIfNotBothSmi(left, right, &not_smis); 1052 __ JumpIfNotBothSmi(left, right, &not_smis);
1102 } 1053 }
1103 1054
1104 Label smi_values; 1055 Label smi_values;
1105 __ bind(&smi_values); 1056 __ bind(&smi_values);
1106 // Perform the operation. 1057 // Perform the operation.
1107 Comment perform_smi(masm, "-- Perform smi operation"); 1058 Comment perform_smi(masm, "-- Perform smi operation");
1108 switch (op_) { 1059 switch (op) {
1109 case Token::ADD: 1060 case Token::ADD:
1110 ASSERT(right.is(rax)); 1061 ASSERT(right.is(rax));
1111 __ SmiAdd(right, right, left, &use_fp_on_smis); // ADD is commutative. 1062 __ SmiAdd(right, right, left, &use_fp_on_smis); // ADD is commutative.
1112 break; 1063 break;
1113 1064
1114 case Token::SUB: 1065 case Token::SUB:
1115 __ SmiSub(left, left, right, &use_fp_on_smis); 1066 __ SmiSub(left, left, right, &use_fp_on_smis);
1116 __ movq(rax, left); 1067 __ movq(rax, left);
1117 break; 1068 break;
1118 1069
(...skipping 51 matching lines...)
1170 } 1121 }
1171 1122
1172 // 5. Emit return of result in rax. Some operations have registers pushed. 1123 // 5. Emit return of result in rax. Some operations have registers pushed.
1173 __ ret(0); 1124 __ ret(0);
1174 1125
1175 if (use_fp_on_smis.is_linked()) { 1126 if (use_fp_on_smis.is_linked()) {
1176 // 6. For some operations emit inline code to perform floating point 1127 // 6. For some operations emit inline code to perform floating point
1177 // operations on known smis (e.g., if the result of the operation 1128 // operations on known smis (e.g., if the result of the operation
1178 // overflowed the smi range). 1129 // overflowed the smi range).
1179 __ bind(&use_fp_on_smis); 1130 __ bind(&use_fp_on_smis);
1180 if (op_ == Token::DIV || op_ == Token::MOD) { 1131 if (op == Token::DIV || op == Token::MOD) {
1181 // Restore left and right to rdx and rax. 1132 // Restore left and right to rdx and rax.
1182 __ movq(rdx, rcx); 1133 __ movq(rdx, rcx);
1183 __ movq(rax, rbx); 1134 __ movq(rax, rbx);
1184 } 1135 }
1185 1136
1186 if (generate_inline_heapnumber_results) { 1137 if (generate_inline_heapnumber_results) {
1187 __ AllocateHeapNumber(rcx, rbx, slow); 1138 __ AllocateHeapNumber(rcx, rbx, slow);
1188 Comment perform_float(masm, "-- Perform float operation on smis"); 1139 Comment perform_float(masm, "-- Perform float operation on smis");
1189 if (op_ == Token::SHR) { 1140 if (op == Token::SHR) {
1190 __ SmiToInteger32(left, left); 1141 __ SmiToInteger32(left, left);
1191 __ cvtqsi2sd(xmm0, left); 1142 __ cvtqsi2sd(xmm0, left);
1192 } else { 1143 } else {
1193 FloatingPointHelper::LoadSSE2SmiOperands(masm); 1144 FloatingPointHelper::LoadSSE2SmiOperands(masm);
1194 switch (op_) { 1145 switch (op) {
1195 case Token::ADD: __ addsd(xmm0, xmm1); break; 1146 case Token::ADD: __ addsd(xmm0, xmm1); break;
1196 case Token::SUB: __ subsd(xmm0, xmm1); break; 1147 case Token::SUB: __ subsd(xmm0, xmm1); break;
1197 case Token::MUL: __ mulsd(xmm0, xmm1); break; 1148 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1198 case Token::DIV: __ divsd(xmm0, xmm1); break; 1149 case Token::DIV: __ divsd(xmm0, xmm1); break;
1199 default: UNREACHABLE(); 1150 default: UNREACHABLE();
1200 } 1151 }
1201 } 1152 }
1202 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); 1153 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
1203 __ movq(rax, rcx); 1154 __ movq(rax, rcx);
1204 __ ret(0); 1155 __ ret(0);
1205 } else { 1156 } else {
1206 __ jmp(&fail); 1157 __ jmp(&fail);
1207 } 1158 }
1208 } 1159 }
1209 1160
1210 // 7. Non-smi operands reach the end of the code generated by 1161 // 7. Non-smi operands reach the end of the code generated by
1211 // GenerateSmiCode, and fall through to subsequent code, 1162 // GenerateSmiCode, and fall through to subsequent code,
1212 // with the operands in rdx and rax. 1163 // with the operands in rdx and rax.
1213 // But first we check if non-smi values are HeapNumbers holding 1164 // But first we check if non-smi values are HeapNumbers holding
1214 // values that could be smi. 1165 // values that could be smi.
1215 __ bind(&not_smis); 1166 __ bind(&not_smis);
1216 Comment done_comment(masm, "-- Enter non-smi code"); 1167 Comment done_comment(masm, "-- Enter non-smi code");
1168 FloatingPointHelper::ConvertUndefined convert_undefined =
1169 FloatingPointHelper::BAILOUT_ON_UNDEFINED;
1170 // This list must be in sync with BinaryOpPatch() behavior in ic.cc.
1171 if (op == Token::BIT_AND ||
1172 op == Token::BIT_OR ||
1173 op == Token::BIT_XOR ||
1174 op == Token::SAR ||
1175 op == Token::SHL ||
1176 op == Token::SHR) {
1177 convert_undefined = FloatingPointHelper::CONVERT_UNDEFINED_TO_ZERO;
1178 }
1217 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx, 1179 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx,
1218 &smi_values, &fail); 1180 &smi_values, &fail, convert_undefined);
1219 __ jmp(&smi_values); 1181 __ jmp(&smi_values);
1220 __ bind(&fail); 1182 __ bind(&fail);
1221 } 1183 }
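
The convert_undefined choice above mirrors JavaScript's conversion rules: the bitwise and shift operators go through ToInt32/ToUint32, and ToInt32(undefined) is 0, while the arithmetic operators go through ToNumber, and ToNumber(undefined) is NaN. A small illustrative predicate restating the list from the code (not part of the patch):

  // JS semantics being mirrored:
  //   undefined | 0  -->  ToInt32(undefined) == 0, so the result is 0
  //   undefined + 1  -->  ToNumber(undefined) is NaN, so the result is NaN
  bool ConvertsUndefinedToZero(Token::Value op) {
    return op == Token::BIT_AND || op == Token::BIT_OR ||
           op == Token::BIT_XOR || op == Token::SAR ||
           op == Token::SHL || op == Token::SHR;
  }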
1222 1184
1223 1185
1224 void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm, 1186 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm,
1225 Label* allocation_failure, 1187 Label* alloc_failure,
1226 Label* non_numeric_failure) { 1188 OverwriteMode mode);
1227 switch (op_) { 1189
1190
1191 static void BinaryOpStub_GenerateFloatingPointCode(MacroAssembler* masm,
1192 Label* allocation_failure,
1193 Label* non_numeric_failure,
1194 Token::Value op,
1195 OverwriteMode mode) {
1196 switch (op) {
1228 case Token::ADD: 1197 case Token::ADD:
1229 case Token::SUB: 1198 case Token::SUB:
1230 case Token::MUL: 1199 case Token::MUL:
1231 case Token::DIV: { 1200 case Token::DIV: {
1232 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure); 1201 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);
1233 1202
1234 switch (op_) { 1203 switch (op) {
1235 case Token::ADD: __ addsd(xmm0, xmm1); break; 1204 case Token::ADD: __ addsd(xmm0, xmm1); break;
1236 case Token::SUB: __ subsd(xmm0, xmm1); break; 1205 case Token::SUB: __ subsd(xmm0, xmm1); break;
1237 case Token::MUL: __ mulsd(xmm0, xmm1); break; 1206 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1238 case Token::DIV: __ divsd(xmm0, xmm1); break; 1207 case Token::DIV: __ divsd(xmm0, xmm1); break;
1239 default: UNREACHABLE(); 1208 default: UNREACHABLE();
1240 } 1209 }
1241 GenerateHeapResultAllocation(masm, allocation_failure); 1210 BinaryOpStub_GenerateHeapResultAllocation(
1211 masm, allocation_failure, mode);
1242 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); 1212 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
1243 __ ret(0); 1213 __ ret(0);
1244 break; 1214 break;
1245 } 1215 }
1246 case Token::MOD: { 1216 case Token::MOD: {
1247 // For MOD we jump to the allocation_failure label, to call runtime. 1217 // For MOD we jump to the allocation_failure label, to call runtime.
1248 __ jmp(allocation_failure); 1218 __ jmp(allocation_failure);
1249 break; 1219 break;
1250 } 1220 }
1251 case Token::BIT_OR: 1221 case Token::BIT_OR:
1252 case Token::BIT_AND: 1222 case Token::BIT_AND:
1253 case Token::BIT_XOR: 1223 case Token::BIT_XOR:
1254 case Token::SAR: 1224 case Token::SAR:
1255 case Token::SHL: 1225 case Token::SHL:
1256 case Token::SHR: { 1226 case Token::SHR: {
1257 Label non_smi_shr_result; 1227 Label non_smi_shr_result;
1258 Register heap_number_map = r9; 1228 Register heap_number_map = r9;
1259 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); 1229 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
1260 FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure, 1230 FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
1261 heap_number_map); 1231 heap_number_map);
1262 switch (op_) { 1232 switch (op) {
1263 case Token::BIT_OR: __ orl(rax, rcx); break; 1233 case Token::BIT_OR: __ orl(rax, rcx); break;
1264 case Token::BIT_AND: __ andl(rax, rcx); break; 1234 case Token::BIT_AND: __ andl(rax, rcx); break;
1265 case Token::BIT_XOR: __ xorl(rax, rcx); break; 1235 case Token::BIT_XOR: __ xorl(rax, rcx); break;
1266 case Token::SAR: __ sarl_cl(rax); break; 1236 case Token::SAR: __ sarl_cl(rax); break;
1267 case Token::SHL: __ shll_cl(rax); break; 1237 case Token::SHL: __ shll_cl(rax); break;
1268 case Token::SHR: { 1238 case Token::SHR: {
1269 __ shrl_cl(rax); 1239 __ shrl_cl(rax);
1270 // Check if result is negative. This can only happen for a shift 1240 // Check if result is negative. This can only happen for a shift
1271 // by zero. 1241 // by zero.
1272 __ testl(rax, rax); 1242 __ testl(rax, rax);
1273 __ j(negative, &non_smi_shr_result); 1243 __ j(negative, &non_smi_shr_result);
1274 break; 1244 break;
1275 } 1245 }
1276 default: UNREACHABLE(); 1246 default: UNREACHABLE();
1277 } 1247 }
1278 STATIC_ASSERT(kSmiValueSize == 32); 1248 STATIC_ASSERT(kSmiValueSize == 32);
1279 // Tag smi result and return. 1249 // Tag smi result and return.
1280 __ Integer32ToSmi(rax, rax); 1250 __ Integer32ToSmi(rax, rax);
1281 __ Ret(); 1251 __ Ret();
1282 1252
1283 // Logical shift right can produce an unsigned int32 that is not 1253 // Logical shift right can produce an unsigned int32 that is not
1284 // an int32, and so is not in the smi range. Allocate a heap number 1254 // an int32, and so is not in the smi range. Allocate a heap number
1285 // in that case. 1255 // in that case.
1286 if (op_ == Token::SHR) { 1256 if (op == Token::SHR) {
1287 __ bind(&non_smi_shr_result); 1257 __ bind(&non_smi_shr_result);
1288 Label allocation_failed; 1258 Label allocation_failed;
1289 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). 1259 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64).
1290 // Allocate heap number in new space. 1260 // Allocate heap number in new space.
1291 // Not using AllocateHeapNumber macro in order to reuse 1261 // Not using AllocateHeapNumber macro in order to reuse
1292 // already loaded heap_number_map. 1262 // already loaded heap_number_map.
1293 __ AllocateInNewSpace(HeapNumber::kSize, 1263 __ AllocateInNewSpace(HeapNumber::kSize,
1294 rax, 1264 rax,
1295 rdx, 1265 rdx,
1296 no_reg, 1266 no_reg,
(...skipping 16 matching lines...)
1313 __ Integer32ToSmi(rdx, rbx); 1283 __ Integer32ToSmi(rdx, rbx);
1314 __ jmp(allocation_failure); 1284 __ jmp(allocation_failure);
1315 } 1285 }
1316 break; 1286 break;
1317 } 1287 }
1318 default: UNREACHABLE(); break; 1288 default: UNREACHABLE(); break;
1319 } 1289 }
1320 // No fall-through from this generated code. 1290 // No fall-through from this generated code.
1321 if (FLAG_debug_code) { 1291 if (FLAG_debug_code) {
1322 __ Abort("Unexpected fall-through in " 1292 __ Abort("Unexpected fall-through in "
1323 "BinaryStub::GenerateFloatingPointCode."); 1293 "BinaryStub_GenerateFloatingPointCode.");
1324 } 1294 }
1325 } 1295 }
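
The SHR special case above exists because a logical shift right can produce a uint32 above INT32_MAX (only when shifting by zero with the sign bit set), and such a value does not fit in a 32-bit smi payload as a signed integer; read as int32 it looks negative, which is exactly what the testl/j(negative) check detects. A standalone sketch of the condition (illustrative, not part of the patch):

  #include <cstdint>

  bool ShrResultNeedsHeapNumber(uint32_t value, uint32_t shift) {
    uint32_t result = value >> (shift & 31);     // JS >>> masks the count
    return static_cast<int32_t>(result) < 0;     // i.e. result > INT32_MAX
  }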
1326 1296
1327 1297
1328 void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { 1298 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
1329 ASSERT(op_ == Token::ADD); 1299 ASSERT(op_ == Token::ADD);
1330 Label left_not_string, call_runtime; 1300 Label left_not_string, call_runtime;
1331 1301
1332 // Registers containing left and right operands respectively. 1302 // Registers containing left and right operands respectively.
1333 Register left = rdx; 1303 Register left = rdx;
1334 Register right = rax; 1304 Register right = rax;
1335 1305
1336 // Test if left operand is a string. 1306 // Test if left operand is a string.
1337 __ JumpIfSmi(left, &left_not_string, Label::kNear); 1307 __ JumpIfSmi(left, &left_not_string, Label::kNear);
1338 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); 1308 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
(...skipping 10 matching lines...)
1349 1319
1350 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); 1320 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
1351 GenerateRegisterArgsPush(masm); 1321 GenerateRegisterArgsPush(masm);
1352 __ TailCallStub(&string_add_right_stub); 1322 __ TailCallStub(&string_add_right_stub);
1353 1323
1354 // Neither argument is a string. 1324 // Neither argument is a string.
1355 __ bind(&call_runtime); 1325 __ bind(&call_runtime);
1356 } 1326 }
1357 1327
1358 1328
1359 void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
1360 GenerateRegisterArgsPush(masm);
1361 switch (op_) {
1362 case Token::ADD:
1363 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
1364 break;
1365 case Token::SUB:
1366 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
1367 break;
1368 case Token::MUL:
1369 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
1370 break;
1371 case Token::DIV:
1372 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
1373 break;
1374 case Token::MOD:
1375 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
1376 break;
1377 case Token::BIT_OR:
1378 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
1379 break;
1380 case Token::BIT_AND:
1381 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
1382 break;
1383 case Token::BIT_XOR:
1384 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
1385 break;
1386 case Token::SAR:
1387 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
1388 break;
1389 case Token::SHL:
1390 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
1391 break;
1392 case Token::SHR:
1393 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1394 break;
1395 default:
1396 UNREACHABLE();
1397 }
1398 }
1399
1400
1401 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { 1329 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1402 Label call_runtime; 1330 Label call_runtime;
1403 if (result_type_ == BinaryOpIC::UNINITIALIZED || 1331 if (result_type_ == BinaryOpIC::UNINITIALIZED ||
1404 result_type_ == BinaryOpIC::SMI) { 1332 result_type_ == BinaryOpIC::SMI) {
1405 // Only allow smi results. 1333 // Only allow smi results.
1406 GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS); 1334 BinaryOpStub_GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS, op_);
1407 } else { 1335 } else {
1408 // Allow heap number result and don't make a transition if a heap number 1336 // Allow heap number result and don't make a transition if a heap number
1409 // cannot be allocated. 1337 // cannot be allocated.
1410 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); 1338 BinaryOpStub_GenerateSmiCode(
1339 masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS, op_);
1411 } 1340 }
1412 1341
1413 // Code falls through if the result is not returned as either a smi or heap 1342 // Code falls through if the result is not returned as either a smi or heap
1414 // number. 1343 // number.
1415 GenerateTypeTransition(masm); 1344 GenerateTypeTransition(masm);
1416 1345
1417 if (call_runtime.is_linked()) { 1346 if (call_runtime.is_linked()) {
1418 __ bind(&call_runtime); 1347 __ bind(&call_runtime);
1419 GenerateCallRuntimeCode(masm); 1348 GenerateRegisterArgsPush(masm);
1349 GenerateCallRuntime(masm);
1420 } 1350 }
1421 } 1351 }
1422 1352
1423 1353
1424 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) { 1354 void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
1425 ASSERT(operands_type_ == BinaryOpIC::STRING); 1355 // The int32 case is identical to the Smi case. We avoid creating this
1426 ASSERT(op_ == Token::ADD); 1356 // ic state on x64.
1427 GenerateStringAddCode(masm); 1357 UNREACHABLE();
1428 // Try to add arguments as strings, otherwise, transition to the generic
1429 // BinaryOpIC type.
1430 GenerateTypeTransition(masm);
1431 } 1358 }
1432 1359
1433 1360
1434 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { 1361 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
1435 Label call_runtime; 1362 Label call_runtime;
1436 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING); 1363 ASSERT(left_type_ == BinaryOpIC::STRING && right_type_ == BinaryOpIC::STRING);
1437 ASSERT(op_ == Token::ADD); 1364 ASSERT(op_ == Token::ADD);
1438 // If both arguments are strings, call the string add stub. 1365 // If both arguments are strings, call the string add stub.
1439 // Otherwise, do a transition. 1366 // Otherwise, do a transition.
1440 1367
1441 // Registers containing left and right operands respectively. 1368 // Registers containing left and right operands respectively.
1442 Register left = rdx; 1369 Register left = rdx;
1443 Register right = rax; 1370 Register right = rax;
1444 1371
1445 // Test if left operand is a string. 1372 // Test if left operand is a string.
1446 __ JumpIfSmi(left, &call_runtime); 1373 __ JumpIfSmi(left, &call_runtime);
(...skipping 13 matching lines...)
1460 GenerateTypeTransition(masm); 1387 GenerateTypeTransition(masm);
1461 } 1388 }
1462 1389
1463 1390
1464 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { 1391 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
1465 Label call_runtime; 1392 Label call_runtime;
1466 1393
1467 if (op_ == Token::ADD) { 1394 if (op_ == Token::ADD) {
1468 // Handle string addition here, because it is the only operation 1395 // Handle string addition here, because it is the only operation
1469 // that does not do a ToNumber conversion on the operands. 1396 // that does not do a ToNumber conversion on the operands.
1470 GenerateStringAddCode(masm); 1397 GenerateAddStrings(masm);
1471 } 1398 }
1472 1399
1473 // Convert oddball arguments to numbers. 1400 // Convert oddball arguments to numbers.
1474 Label check, done; 1401 Label check, done;
1475 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); 1402 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
1476 __ j(not_equal, &check, Label::kNear); 1403 __ j(not_equal, &check, Label::kNear);
1477 if (Token::IsBitOp(op_)) { 1404 if (Token::IsBitOp(op_)) {
1478 __ xor_(rdx, rdx); 1405 __ xor_(rdx, rdx);
1479 } else { 1406 } else {
1480 __ LoadRoot(rdx, Heap::kNanValueRootIndex); 1407 __ LoadRoot(rdx, Heap::kNanValueRootIndex);
1481 } 1408 }
1482 __ jmp(&done, Label::kNear); 1409 __ jmp(&done, Label::kNear);
1483 __ bind(&check); 1410 __ bind(&check);
1484 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); 1411 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1485 __ j(not_equal, &done, Label::kNear); 1412 __ j(not_equal, &done, Label::kNear);
1486 if (Token::IsBitOp(op_)) { 1413 if (Token::IsBitOp(op_)) {
1487 __ xor_(rax, rax); 1414 __ xor_(rax, rax);
1488 } else { 1415 } else {
1489 __ LoadRoot(rax, Heap::kNanValueRootIndex); 1416 __ LoadRoot(rax, Heap::kNanValueRootIndex);
1490 } 1417 }
1491 __ bind(&done); 1418 __ bind(&done);
1492 1419
1493 GenerateHeapNumberStub(masm); 1420 GenerateHeapNumberStub(masm);
1494 } 1421 }
1495 1422
1496 1423
1424 static void BinaryOpStub_CheckSmiInput(MacroAssembler* masm,
1425 Register input,
1426 Label* fail) {
1427 Label ok;
1428 __ JumpIfSmi(input, &ok, Label::kNear);
1429 Register heap_number_map = r8;
1430 Register scratch1 = r9;
1431 Register scratch2 = r10;
1432 // HeapNumbers containing 32bit integer values are also allowed.
1433 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
1434 __ cmpq(FieldOperand(input, HeapObject::kMapOffset), heap_number_map);
1435 __ j(not_equal, fail);
1436 __ movsd(xmm0, FieldOperand(input, HeapNumber::kValueOffset));
1437 // Convert, convert back, and compare the two doubles' bits.
1438 __ cvttsd2siq(scratch2, xmm0);
1439 __ cvtlsi2sd(xmm1, scratch2);
1440 __ movq(scratch1, xmm0);
1441 __ movq(scratch2, xmm1);
1442 __ cmpq(scratch1, scratch2);
1443 __ j(not_equal, fail);
1444 __ bind(&ok);
1445 }
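
The helper above, like NumbersToSmis further down, tests whether a heap number holds an exact 32-bit integer by truncating, converting back, and comparing raw bit patterns; the bit comparison rejects fractions, out-of-range values, NaN, and -0.0 in one go. A C++ sketch of the idea (hedged: in portable C++ an out-of-range double-to-integer cast is undefined behavior, whereas cvttsd2si just yields a sentinel):

  #include <cstdint>
  #include <cstring>

  bool IsExactInt32(double value) {
    int64_t truncated = static_cast<int64_t>(value);             // cvttsd2siq
    double round_trip =
        static_cast<double>(static_cast<int32_t>(truncated));    // cvtlsi2sd
    uint64_t a, b;
    std::memcpy(&a, &value, sizeof a);
    std::memcpy(&b, &round_trip, sizeof b);
    return a == b;   // bit-exact match fails for 1.5, -0.0, NaN, 2^40, ...
  }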
1446
1447
1497 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 1448 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
1498 Label gc_required, not_number; 1449 Label gc_required, not_number;
1499 GenerateFloatingPointCode(masm, &gc_required, &not_number); 1450
1451 // It could be that only SMIs have been seen at either the left
1452 // or the right operand. For precise type feedback, patch the IC
1453 // again if this changes.
1454 if (left_type_ == BinaryOpIC::SMI) {
1455 BinaryOpStub_CheckSmiInput(masm, rdx, &not_number);
1456 }
1457 if (right_type_ == BinaryOpIC::SMI) {
1458 BinaryOpStub_CheckSmiInput(masm, rax, &not_number);
1459 }
1460
1461 BinaryOpStub_GenerateFloatingPointCode(
1462 masm, &gc_required, &not_number, op_, mode_);
1500 1463
1501 __ bind(&not_number); 1464 __ bind(&not_number);
1502 GenerateTypeTransition(masm); 1465 GenerateTypeTransition(masm);
1503 1466
1504 __ bind(&gc_required); 1467 __ bind(&gc_required);
1505 GenerateCallRuntimeCode(masm); 1468 GenerateRegisterArgsPush(masm);
1469 GenerateCallRuntime(masm);
1506 } 1470 }
1507 1471
1508 1472
1509 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) { 1473 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
1510 Label call_runtime, call_string_add_or_runtime; 1474 Label call_runtime, call_string_add_or_runtime;
1511 1475
1512 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); 1476 BinaryOpStub_GenerateSmiCode(
1477 masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS, op_);
1513 1478
1514 GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime); 1479 BinaryOpStub_GenerateFloatingPointCode(
1480 masm, &call_runtime, &call_string_add_or_runtime, op_, mode_);
1515 1481
1516 __ bind(&call_string_add_or_runtime); 1482 __ bind(&call_string_add_or_runtime);
1517 if (op_ == Token::ADD) { 1483 if (op_ == Token::ADD) {
1518 GenerateStringAddCode(masm); 1484 GenerateAddStrings(masm);
1519 } 1485 }
1520 1486
1521 __ bind(&call_runtime); 1487 __ bind(&call_runtime);
1522 GenerateCallRuntimeCode(masm); 1488 GenerateRegisterArgsPush(masm);
1489 GenerateCallRuntime(masm);
1523 } 1490 }
1524 1491
1525 1492
1526 void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm, 1493 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm,
1527 Label* alloc_failure) { 1494 Label* alloc_failure,
1495 OverwriteMode mode) {
1528 Label skip_allocation; 1496 Label skip_allocation;
1529 OverwriteMode mode = mode_;
1530 switch (mode) { 1497 switch (mode) {
1531 case OVERWRITE_LEFT: { 1498 case OVERWRITE_LEFT: {
1532 // If the argument in rdx is already an object, we skip the 1499 // If the argument in rdx is already an object, we skip the
1533 // allocation of a heap number. 1500 // allocation of a heap number.
1534 __ JumpIfNotSmi(rdx, &skip_allocation); 1501 __ JumpIfNotSmi(rdx, &skip_allocation);
1535 // Allocate a heap number for the result. Keep rax and rdx intact 1502 // Allocate a heap number for the result. Keep rax and rdx intact
1536 // for the possible runtime call. 1503 // for the possible runtime call.
1537 __ AllocateHeapNumber(rbx, rcx, alloc_failure); 1504 __ AllocateHeapNumber(rbx, rcx, alloc_failure);
1538 // Now rdx can be overwritten losing one of the arguments as we are 1505 // Now rdx can be overwritten losing one of the arguments as we are
1539 // now done and will not need it any more. 1506 // now done and will not need it any more.
(...skipping 475 matching lines...)
2015 } 1982 }
2016 1983
2017 1984
2018 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm, 1985 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
2019 Register first, 1986 Register first,
2020 Register second, 1987 Register second,
2021 Register scratch1, 1988 Register scratch1,
2022 Register scratch2, 1989 Register scratch2,
2023 Register scratch3, 1990 Register scratch3,
2024 Label* on_success, 1991 Label* on_success,
2025 Label* on_not_smis) { 1992 Label* on_not_smis,
1993 ConvertUndefined convert_undefined) {
2026 Register heap_number_map = scratch3; 1994 Register heap_number_map = scratch3;
2027 Register smi_result = scratch1; 1995 Register smi_result = scratch1;
2028 Label done; 1996 Label done, maybe_undefined_first, maybe_undefined_second, first_done;
2029 1997
2030 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); 1998 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
2031 1999
2032 Label first_smi; 2000 Label first_smi;
2033 __ JumpIfSmi(first, &first_smi, Label::kNear); 2001 __ JumpIfSmi(first, &first_smi, Label::kNear);
2034 __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map); 2002 __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map);
2035 __ j(not_equal, on_not_smis); 2003 __ j(not_equal,
2004 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO)
2005 ? &maybe_undefined_first
2006 : on_not_smis);
2036 // Convert HeapNumber to smi if possible. 2007 // Convert HeapNumber to smi if possible.
2037 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); 2008 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset));
2038 __ movq(scratch2, xmm0); 2009 __ movq(scratch2, xmm0);
2039 __ cvttsd2siq(smi_result, xmm0); 2010 __ cvttsd2siq(smi_result, xmm0);
2040 // Check if conversion was successful by converting back and 2011 // Check if conversion was successful by converting back and
2041 // comparing to the original double's bits. 2012 // comparing to the original double's bits.
2042 __ cvtlsi2sd(xmm1, smi_result); 2013 __ cvtlsi2sd(xmm1, smi_result);
2043 __ movq(kScratchRegister, xmm1); 2014 __ movq(kScratchRegister, xmm1);
2044 __ cmpq(scratch2, kScratchRegister); 2015 __ cmpq(scratch2, kScratchRegister);
2045 __ j(not_equal, on_not_smis); 2016 __ j(not_equal, on_not_smis);
2046 __ Integer32ToSmi(first, smi_result); 2017 __ Integer32ToSmi(first, smi_result);
2047 2018
2019 __ bind(&first_done);
2048 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); 2020 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done);
2049 __ bind(&first_smi); 2021 __ bind(&first_smi);
2050 __ AssertNotSmi(second); 2022 __ AssertNotSmi(second);
2051 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); 2023 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map);
2052 __ j(not_equal, on_not_smis); 2024 __ j(not_equal,
2025 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO)
2026 ? &maybe_undefined_second
2027 : on_not_smis);
2053 // Convert second to smi, if possible. 2028 // Convert second to smi, if possible.
2054 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); 2029 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset));
2055 __ movq(scratch2, xmm0); 2030 __ movq(scratch2, xmm0);
2056 __ cvttsd2siq(smi_result, xmm0); 2031 __ cvttsd2siq(smi_result, xmm0);
2057 __ cvtlsi2sd(xmm1, smi_result); 2032 __ cvtlsi2sd(xmm1, smi_result);
2058 __ movq(kScratchRegister, xmm1); 2033 __ movq(kScratchRegister, xmm1);
2059 __ cmpq(scratch2, kScratchRegister); 2034 __ cmpq(scratch2, kScratchRegister);
2060 __ j(not_equal, on_not_smis); 2035 __ j(not_equal, on_not_smis);
2061 __ Integer32ToSmi(second, smi_result); 2036 __ Integer32ToSmi(second, smi_result);
2062 if (on_success != NULL) { 2037 if (on_success != NULL) {
2063 __ jmp(on_success); 2038 __ jmp(on_success);
2064 } else { 2039 } else {
2065 __ bind(&done); 2040 __ jmp(&done);
2066 } 2041 }
2042
2043 __ bind(&maybe_undefined_first);
2044 __ CompareRoot(first, Heap::kUndefinedValueRootIndex);
2045 __ j(not_equal, on_not_smis);
2046 __ xor_(first, first);
2047 __ jmp(&first_done);
2048
2049 __ bind(&maybe_undefined_second);
2050 __ CompareRoot(second, Heap::kUndefinedValueRootIndex);
2051 __ j(not_equal, on_not_smis);
2052 __ xor_(second, second);
2053 if (on_success != NULL) {
2054 __ jmp(on_success);
2055 }
2056 // Else: fall through.
2057
2058 __ bind(&done);
2067 } 2059 }
2068 2060
2069 2061
2070 void MathPowStub::Generate(MacroAssembler* masm) { 2062 void MathPowStub::Generate(MacroAssembler* masm) {
2071 // Choose register conforming to calling convention (when bailing out). 2063 // Choose register conforming to calling convention (when bailing out).
2072 #ifdef _WIN64 2064 #ifdef _WIN64
2073 const Register exponent = rdx; 2065 const Register exponent = rdx;
2074 #else 2066 #else
2075 const Register exponent = rdi; 2067 const Register exponent = rdi;
2076 #endif 2068 #endif
(...skipping 1294 matching lines...)
3371 3363
3372 3364
3373 static int NegativeComparisonResult(Condition cc) { 3365 static int NegativeComparisonResult(Condition cc) {
3374 ASSERT(cc != equal); 3366 ASSERT(cc != equal);
3375 ASSERT((cc == less) || (cc == less_equal) 3367 ASSERT((cc == less) || (cc == less_equal)
3376 || (cc == greater) || (cc == greater_equal)); 3368 || (cc == greater) || (cc == greater_equal));
3377 return (cc == greater || cc == greater_equal) ? LESS : GREATER; 3369 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
3378 } 3370 }
3379 3371
3380 3372
3381 void CompareStub::Generate(MacroAssembler* masm) { 3373 static void CheckInputType(MacroAssembler* masm,
3382 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); 3374 Register input,
3375 CompareIC::State expected,
3376 Label* fail) {
3377 Label ok;
3378 if (expected == CompareIC::SMI) {
3379 __ JumpIfNotSmi(input, fail);
3380 } else if (expected == CompareIC::HEAP_NUMBER) {
3381 __ JumpIfSmi(input, &ok);
3382 __ CompareMap(input, masm->isolate()->factory()->heap_number_map(), NULL);
3383 __ j(not_equal, fail);
3384 }
3385 // We could be strict about symbol/string here, but as long as
3386 // hydrogen doesn't care, the stub doesn't have to care either.
3387 __ bind(&ok);
3388 }
3383 3389
3390
3391 static void BranchIfNonSymbol(MacroAssembler* masm,
3392 Label* label,
3393 Register object,
3394 Register scratch) {
3395 __ JumpIfSmi(object, label);
3396 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
3397 __ movzxbq(scratch,
3398 FieldOperand(scratch, Map::kInstanceTypeOffset));
3399 // Ensure that no non-strings have the symbol bit set.
3400 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
3401 STATIC_ASSERT(kSymbolTag != 0);
3402 __ testb(scratch, Immediate(kIsSymbolMask));
3403 __ j(zero, label);
3404 }
3405
3406
3407 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
3384 Label check_unequal_objects, done; 3408 Label check_unequal_objects, done;
3409 Condition cc = GetCondition();
3385 Factory* factory = masm->isolate()->factory(); 3410 Factory* factory = masm->isolate()->factory();
3386 3411
3387 // Compare two smis if required. 3412 Label miss;
3388 if (include_smi_compare_) { 3413 CheckInputType(masm, rdx, left_, &miss);
3389 Label non_smi, smi_done; 3414 CheckInputType(masm, rax, right_, &miss);
3390 __ JumpIfNotBothSmi(rax, rdx, &non_smi); 3415
3391 __ subq(rdx, rax); 3416 // Compare two smis.
3392 __ j(no_overflow, &smi_done); 3417 Label non_smi, smi_done;
3393 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. 3418 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
3394 __ bind(&smi_done); 3419 __ subq(rdx, rax);
3395 __ movq(rax, rdx); 3420 __ j(no_overflow, &smi_done);
3396 __ ret(0); 3421 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
3397 __ bind(&non_smi); 3422 __ bind(&smi_done);
3398 } else if (FLAG_debug_code) { 3423 __ movq(rax, rdx);
3399 Label ok; 3424 __ ret(0);
3400 __ JumpIfNotSmi(rdx, &ok); 3425 __ bind(&non_smi);
3401 __ JumpIfNotSmi(rax, &ok);
3402 __ Abort("CompareStub: smi operands");
3403 __ bind(&ok);
3404 }
3405 3426
3406 // The compare stub returns a positive, negative, or zero 64-bit integer 3427 // The compare stub returns a positive, negative, or zero 64-bit integer
3407 // value in rax, corresponding to result of comparing the two inputs. 3428 // value in rax, corresponding to result of comparing the two inputs.
3408 // NOTICE! This code is only reached after a smi-fast-case check, so 3429 // NOTICE! This code is only reached after a smi-fast-case check, so
3409 // it is certain that at least one operand isn't a smi. 3430 // it is certain that at least one operand isn't a smi.
3410 3431
3411 // Two identical objects are equal unless they are both NaN or undefined. 3432 // Two identical objects are equal unless they are both NaN or undefined.
3412 { 3433 {
3413 Label not_identical; 3434 Label not_identical;
3414 __ cmpq(rax, rdx); 3435 __ cmpq(rax, rdx);
3415 __ j(not_equal, &not_identical, Label::kNear); 3436 __ j(not_equal, &not_identical, Label::kNear);
3416 3437
3417 if (cc_ != equal) { 3438 if (cc != equal) {
3418 // Check for undefined. undefined OP undefined is false even though 3439 // Check for undefined. undefined OP undefined is false even though
3419 // undefined == undefined. 3440 // undefined == undefined.
3420 Label check_for_nan; 3441 Label check_for_nan;
3421 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); 3442 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
3422 __ j(not_equal, &check_for_nan, Label::kNear); 3443 __ j(not_equal, &check_for_nan, Label::kNear);
3423 __ Set(rax, NegativeComparisonResult(cc_)); 3444 __ Set(rax, NegativeComparisonResult(cc));
3424 __ ret(0); 3445 __ ret(0);
3425 __ bind(&check_for_nan); 3446 __ bind(&check_for_nan);
3426 } 3447 }
3427 3448
3428 // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(), 3449 // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(),
3429 // so we do the second best thing - test it ourselves. 3450 // so we do the second best thing - test it ourselves.
3430 // Note: if cc_ != equal, never_nan_nan_ is not used. 3451 Label heap_number;
3431 // We cannot set rax to EQUAL until just before return because 3452 // If it's not a heap number, then return equal for (in)equality operator.
3432 // rax must be unchanged on jump to not_identical. 3453 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
3433 if (never_nan_nan_ && (cc_ == equal)) { 3454 factory->heap_number_map());
3434 __ Set(rax, EQUAL); 3455 __ j(equal, &heap_number, Label::kNear);
3435 __ ret(0); 3456 if (cc != equal) {
3436 } else { 3457 // Call runtime on identical objects. Otherwise return equal.
3437 Label heap_number; 3458 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
3438 // If it's not a heap number, then return equal for (in)equality operator. 3459 __ j(above_equal, &not_identical, Label::kNear);
3439 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 3460 }
3440 factory->heap_number_map()); 3461 __ Set(rax, EQUAL);
3441 __ j(equal, &heap_number, Label::kNear); 3462 __ ret(0);
3442 if (cc_ != equal) {
3443 // Call runtime on identical objects. Otherwise return equal.
3444 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
3445 __ j(above_equal, &not_identical, Label::kNear);
3446 }
3447 __ Set(rax, EQUAL);
3448 __ ret(0);
3449 3463
3450 __ bind(&heap_number); 3464 __ bind(&heap_number);
3451 // It is a heap number, so return equal if it's not NaN. 3465 // It is a heap number, so return equal if it's not NaN.
3452 // For NaN, return 1 for every condition except greater and 3466 // For NaN, return 1 for every condition except greater and
3453 // greater-equal. Return -1 for them, so the comparison yields 3467 // greater-equal. Return -1 for them, so the comparison yields
3454 // false for all conditions except not-equal. 3468 // false for all conditions except not-equal.
3455 __ Set(rax, EQUAL); 3469 __ Set(rax, EQUAL);
3456 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 3470 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
3457 __ ucomisd(xmm0, xmm0); 3471 __ ucomisd(xmm0, xmm0);
3458 __ setcc(parity_even, rax); 3472 __ setcc(parity_even, rax);
3459 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs. 3473 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
3460 if (cc_ == greater_equal || cc_ == greater) { 3474 if (cc == greater_equal || cc == greater) {
3461 __ neg(rax); 3475 __ neg(rax);
3462 }
3463 __ ret(0);
3464 } 3476 }
3477 __ ret(0);
3465 3478
3466 __ bind(&not_identical); 3479 __ bind(&not_identical);
3467 } 3480 }
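
The identical-operands path above leans on the IEEE-754 rule that NaN is unordered with everything, including itself: ucomisd raises the parity flag for an unordered compare, setcc(parity_even, rax) turns that into 1, and the negation for greater/greater-equal yields -1, so the eventual condition test comes out false either way. The same property in portable form (illustrative only):

  bool IsNaN(double d) { return d != d; }  // only NaN is unordered with itself

  // Result for "x OP x" when x is a heap number: 0 (EQUAL) unless NaN,
  // in which case pick the sign that makes the condition evaluate false.
  int IdenticalResult(double d, bool cc_is_greater_or_greater_equal) {
    if (!IsNaN(d)) return 0;
    return cc_is_greater_or_greater_equal ? -1 : 1;
  }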
3468 3481
3469 if (cc_ == equal) { // Both strict and non-strict. 3482 if (cc == equal) { // Both strict and non-strict.
3470 Label slow; // Fallthrough label. 3483 Label slow; // Fallthrough label.
3471 3484
3472 // If we're doing a strict equality comparison, we don't have to do 3485 // If we're doing a strict equality comparison, we don't have to do
3473 // type conversion, so we generate code to do fast comparison for objects 3486 // type conversion, so we generate code to do fast comparison for objects
3474 // and oddballs. Non-smi numbers and strings still go through the usual 3487 // and oddballs. Non-smi numbers and strings still go through the usual
3475 // slow-case code. 3488 // slow-case code.
3476 if (strict_) { 3489 if (strict()) {
3477 // If either is a Smi (we know that not both are), then they can only 3490 // If either is a Smi (we know that not both are), then they can only
3478 // be equal if the other is a HeapNumber. If so, use the slow case. 3491 // be equal if the other is a HeapNumber. If so, use the slow case.
3479 { 3492 {
3480 Label not_smis; 3493 Label not_smis;
3481 __ SelectNonSmi(rbx, rax, rdx, &not_smis); 3494 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
3482 3495
3483 // Check if the non-smi operand is a heap number. 3496 // Check if the non-smi operand is a heap number.
3484 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), 3497 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
3485 factory->heap_number_map()); 3498 factory->heap_number_map());
3486 // If heap number, handle it in the slow case. 3499 // If heap number, handle it in the slow case.
(...skipping 31 matching lines...)
3518 // Check for oddballs: true, false, null, undefined. 3531 // Check for oddballs: true, false, null, undefined.
3519 __ CmpInstanceType(rcx, ODDBALL_TYPE); 3532 __ CmpInstanceType(rcx, ODDBALL_TYPE);
3520 __ j(equal, &return_not_equal); 3533 __ j(equal, &return_not_equal);
3521 3534
3522 // Fall through to the general case. 3535 // Fall through to the general case.
3523 } 3536 }
3524 __ bind(&slow); 3537 __ bind(&slow);
3525 } 3538 }
3526 3539
3527 // Generate the number comparison code. 3540 // Generate the number comparison code.
3528 if (include_number_compare_) { 3541 Label non_number_comparison;
3529 Label non_number_comparison; 3542 Label unordered;
3530 Label unordered; 3543 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
3531 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison); 3544 __ xorl(rax, rax);
3532 __ xorl(rax, rax); 3545 __ xorl(rcx, rcx);
3533 __ xorl(rcx, rcx); 3546 __ ucomisd(xmm0, xmm1);
3534 __ ucomisd(xmm0, xmm1);
3535 3547
3536 // Don't base result on EFLAGS when a NaN is involved. 3548 // Don't base result on EFLAGS when a NaN is involved.
3537 __ j(parity_even, &unordered, Label::kNear); 3549 __ j(parity_even, &unordered, Label::kNear);
3538 // Return a result of -1, 0, or 1, based on EFLAGS. 3550 // Return a result of -1, 0, or 1, based on EFLAGS.
3539 __ setcc(above, rax); 3551 __ setcc(above, rax);
3540 __ setcc(below, rcx); 3552 __ setcc(below, rcx);
3541 __ subq(rax, rcx); 3553 __ subq(rax, rcx);
3542 __ ret(0); 3554 __ ret(0);
3543 3555
3544 // If one of the numbers was NaN, then the result is always false. 3556 // If one of the numbers was NaN, then the result is always false.
3545 // The cc is never not-equal. 3557 // The cc is never not-equal.
3546 __ bind(&unordered); 3558 __ bind(&unordered);
3547 ASSERT(cc_ != not_equal); 3559 ASSERT(cc != not_equal);
3548 if (cc_ == less || cc_ == less_equal) { 3560 if (cc == less || cc == less_equal) {
3549 __ Set(rax, 1); 3561 __ Set(rax, 1);
3550 } else { 3562 } else {
3551 __ Set(rax, -1); 3563 __ Set(rax, -1);
3552 } 3564 }
3553 __ ret(0); 3565 __ ret(0);
3554 3566
3555 // The number comparison code did not provide a valid result. 3567 // The number comparison code did not provide a valid result.
3556 __ bind(&non_number_comparison); 3568 __ bind(&non_number_comparison);
3557 }
3558 3569
3559 // Fast negative check for symbol-to-symbol equality. 3570 // Fast negative check for symbol-to-symbol equality.
3560 Label check_for_strings; 3571 Label check_for_strings;
3561 if (cc_ == equal) { 3572 if (cc == equal) {
3562 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister); 3573 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
3563 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister); 3574 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);
3564 3575
3565 // We've already checked for object identity, so if both operands 3576 // We've already checked for object identity, so if both operands
3566 // are symbols they aren't equal. Register eax (not rax) already holds a 3577 // are symbols they aren't equal. Register eax (not rax) already holds a
3567 // non-zero value, which indicates not equal, so just return. 3578 // non-zero value, which indicates not equal, so just return.
3568 __ ret(0); 3579 __ ret(0);
3569 } 3580 }
3570 3581
3571 __ bind(&check_for_strings); 3582 __ bind(&check_for_strings);
3572 3583
3573 __ JumpIfNotBothSequentialAsciiStrings( 3584 __ JumpIfNotBothSequentialAsciiStrings(
3574 rdx, rax, rcx, rbx, &check_unequal_objects); 3585 rdx, rax, rcx, rbx, &check_unequal_objects);
3575 3586
3576 // Inline comparison of ASCII strings. 3587 // Inline comparison of ASCII strings.
3577 if (cc_ == equal) { 3588 if (cc == equal) {
3578 StringCompareStub::GenerateFlatAsciiStringEquals(masm, 3589 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
3579 rdx, 3590 rdx,
3580 rax, 3591 rax,
3581 rcx, 3592 rcx,
3582 rbx); 3593 rbx);
3583 } else { 3594 } else {
3584 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, 3595 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
3585 rdx, 3596 rdx,
3586 rax, 3597 rax,
3587 rcx, 3598 rcx,
3588 rbx, 3599 rbx,
3589 rdi, 3600 rdi,
3590 r8); 3601 r8);
3591 } 3602 }
3592 3603
3593 #ifdef DEBUG 3604 #ifdef DEBUG
3594 __ Abort("Unexpected fall-through from string comparison"); 3605 __ Abort("Unexpected fall-through from string comparison");
3595 #endif 3606 #endif
3596 3607
3597 __ bind(&check_unequal_objects); 3608 __ bind(&check_unequal_objects);
3598 if (cc_ == equal && !strict_) { 3609 if (cc == equal && !strict()) {
3599 // Not strict equality. Objects are unequal if 3610 // Not strict equality. Objects are unequal if
3600 // they are both JSObjects and not undetectable, 3611 // they are both JSObjects and not undetectable,
3601 // and their pointers are different. 3612 // and their pointers are different.
3602 Label not_both_objects, return_unequal; 3613 Label not_both_objects, return_unequal;
3603 // At most one is a smi, so we can test for smi by adding the two. 3614 // At most one is a smi, so we can test for smi by adding the two.
3604 // A smi plus a heap object has the low bit set, a heap object plus 3615 // A smi plus a heap object has the low bit set, a heap object plus
3605 // a heap object has the low bit clear. 3616 // a heap object has the low bit clear.
3606 STATIC_ASSERT(kSmiTag == 0); 3617 STATIC_ASSERT(kSmiTag == 0);
3607 STATIC_ASSERT(kSmiTagMask == 1); 3618 STATIC_ASSERT(kSmiTagMask == 1);
3608 __ lea(rcx, Operand(rax, rdx, times_1, 0)); 3619 __ lea(rcx, Operand(rax, rdx, times_1, 0));
(...skipping 19 matching lines...)
3628 __ bind(&not_both_objects); 3639 __ bind(&not_both_objects);
3629 } 3640 }
3630 3641
3631 // Push arguments below the return address to prepare jump to builtin. 3642 // Push arguments below the return address to prepare jump to builtin.
3632 __ pop(rcx); 3643 __ pop(rcx);
3633 __ push(rdx); 3644 __ push(rdx);
3634 __ push(rax); 3645 __ push(rax);
3635 3646
3636 // Figure out which native to call and setup the arguments. 3647 // Figure out which native to call and setup the arguments.
3637 Builtins::JavaScript builtin; 3648 Builtins::JavaScript builtin;
3638 if (cc_ == equal) { 3649 if (cc == equal) {
3639 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS; 3650 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
3640 } else { 3651 } else {
3641 builtin = Builtins::COMPARE; 3652 builtin = Builtins::COMPARE;
3642 __ Push(Smi::FromInt(NegativeComparisonResult(cc_))); 3653 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
3643 } 3654 }
3644 3655
3645 // Restore return address on the stack. 3656 // Restore return address on the stack.
3646 __ push(rcx); 3657 __ push(rcx);
3647 3658
3648 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 3659 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
3649 // tagged as a small integer. 3660 // tagged as a small integer.
3650 __ InvokeBuiltin(builtin, JUMP_FUNCTION); 3661 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
3662
3663 __ bind(&miss);
3664 GenerateMiss(masm);
3651 } 3665 }
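
One more tagging trick worth spelling out from the unequal-objects path above: with kSmiTag == 0 and kSmiTagMask == 1, the sum of two tagged words has its low bit set exactly when one operand is a smi and the other a heap pointer, which is why a single lea plus a bit test suffices. Sketch (illustrative, not part of the patch):

  #include <cstdint>

  bool ExactlyOneIsSmi(uint64_t a, uint64_t b) {
    return ((a + b) & 1) != 0;   // smi low bit 0, heap pointer low bit 1
  }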
3652 3666
3653 3667
3654 void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
3655 Label* label,
3656 Register object,
3657 Register scratch) {
3658 __ JumpIfSmi(object, label);
3659 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
3660 __ movzxbq(scratch,
3661 FieldOperand(scratch, Map::kInstanceTypeOffset));
3662 // Ensure that no non-strings have the symbol bit set.
3663 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
3664 STATIC_ASSERT(kSymbolTag != 0);
3665 __ testb(scratch, Immediate(kIsSymbolMask));
3666 __ j(zero, label);
3667 }
3668
3669
3670 void StackCheckStub::Generate(MacroAssembler* masm) { 3668 void StackCheckStub::Generate(MacroAssembler* masm) {
3671 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); 3669 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
3672 } 3670 }
3673 3671
3674 3672
3675 void InterruptStub::Generate(MacroAssembler* masm) { 3673 void InterruptStub::Generate(MacroAssembler* masm) {
3676 __ TailCallRuntime(Runtime::kInterrupt, 0, 1); 3674 __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
3677 } 3675 }
3678 3676
3679 3677
(...skipping 734 matching lines...)
4414 } 4412 }
4415 4413
4416 4414
4417 // Passing arguments in registers is not supported. 4415 // Passing arguments in registers is not supported.
4418 Register InstanceofStub::left() { return no_reg; } 4416 Register InstanceofStub::left() { return no_reg; }
4419 4417
4420 4418
4421 Register InstanceofStub::right() { return no_reg; } 4419 Register InstanceofStub::right() { return no_reg; }
4422 4420
4423 4421
4424 int CompareStub::MinorKey() {
 4425   // Encode the parameters in a unique 16-bit value. To avoid duplicate
 4426   // stubs, the never-NaN-NaN condition is only taken into account if the
 4427   // condition is equal.
4428 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
4429 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4430 return ConditionField::encode(static_cast<unsigned>(cc_))
4431 | RegisterField::encode(false) // lhs_ and rhs_ are not used
4432 | StrictField::encode(strict_)
4433 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
4434 | IncludeNumberCompareField::encode(include_number_compare_)
4435 | IncludeSmiCompareField::encode(include_smi_compare_);
4436 }
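
MinorKey packs the stub's flags with V8's BitField templates; a minimal sketch of that packing scheme, where the field widths and offsets are illustrative rather than the exact layout from code-stubs.h:

#include <cassert>
#include <cstdint>

// Each field owns a disjoint bit range of the key; encode() shifts a value
// into place and decode() masks it back out.
template <class T, int shift, int size>
struct BitField {
  static uint32_t encode(T value) {
    assert(static_cast<uint32_t>(value) < (1u << size));
    return static_cast<uint32_t>(value) << shift;
  }
  static T decode(uint32_t key) {
    return static_cast<T>((key >> shift) & ((1u << size) - 1));
  }
};

using ConditionField = BitField<unsigned, 0, 12>;  // assumed width
using StrictField = BitField<bool, 12, 1>;         // assumed offset

int main() {
  uint32_t key = ConditionField::encode(7) | StrictField::encode(true);
  assert(ConditionField::decode(key) == 7);
  assert(StrictField::decode(key));
  return 0;
}
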
4437
4438
 4439 // Unfortunately you have to run without snapshots to see most of these
 4440 // names in the profile, since most compare stubs end up in the snapshot.
4441 void CompareStub::PrintName(StringStream* stream) {
4442 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4443 const char* cc_name;
4444 switch (cc_) {
4445 case less: cc_name = "LT"; break;
4446 case greater: cc_name = "GT"; break;
4447 case less_equal: cc_name = "LE"; break;
4448 case greater_equal: cc_name = "GE"; break;
4449 case equal: cc_name = "EQ"; break;
4450 case not_equal: cc_name = "NE"; break;
4451 default: cc_name = "UnknownCondition"; break;
4452 }
4453 bool is_equality = cc_ == equal || cc_ == not_equal;
4454 stream->Add("CompareStub_%s", cc_name);
4455 if (strict_ && is_equality) stream->Add("_STRICT");
4456 if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
4457 if (!include_number_compare_) stream->Add("_NO_NUMBER");
4458 if (!include_smi_compare_) stream->Add("_NO_SMI");
4459 }
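
As a concrete example of the names this produces: a strict equality stub built without the smi and number fast paths prints as CompareStub_EQ_STRICT_NO_NUMBER_NO_SMI, which is the string to search for in a profile.
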
4460
4461
4462 // ------------------------------------------------------------------------- 4422 // -------------------------------------------------------------------------
4463 // StringCharCodeAtGenerator 4423 // StringCharCodeAtGenerator
4464 4424
4465 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 4425 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4466 Label flat_string; 4426 Label flat_string;
4467 Label ascii_string; 4427 Label ascii_string;
4468 Label got_char_code; 4428 Label got_char_code;
4469 Label sliced_string; 4429 Label sliced_string;
4470 4430
 4471   // If the receiver is a smi, trigger the non-string case. 4431   // If the receiver is a smi, trigger the non-string case.
(...skipping 1089 matching lines...)
5561 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); 5521 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
5562 5522
5563 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 5523 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
5564 // tagged as a small integer. 5524 // tagged as a small integer.
5565 __ bind(&runtime); 5525 __ bind(&runtime);
5566 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 5526 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
5567 } 5527 }
5568 5528
5569 5529
5570 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { 5530 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
5571 ASSERT(state_ == CompareIC::SMIS); 5531 ASSERT(state_ == CompareIC::SMI);
5572 Label miss; 5532 Label miss;
5573 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); 5533 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
5574 5534
5575 if (GetCondition() == equal) { 5535 if (GetCondition() == equal) {
5576 // For equality we do not care about the sign of the result. 5536 // For equality we do not care about the sign of the result.
5577 __ subq(rax, rdx); 5537 __ subq(rax, rdx);
5578 } else { 5538 } else {
5579 Label done; 5539 Label done;
5580 __ subq(rdx, rax); 5540 __ subq(rdx, rax);
5581 __ j(no_overflow, &done, Label::kNear); 5541 __ j(no_overflow, &done, Label::kNear);
5582 // Correct sign of result in case of overflow. 5542 // Correct sign of result in case of overflow.
5583 __ SmiNot(rdx, rdx); 5543 __ SmiNot(rdx, rdx);
5584 __ bind(&done); 5544 __ bind(&done);
5585 __ movq(rax, rdx); 5545 __ movq(rax, rdx);
5586 } 5546 }
5587 __ ret(0); 5547 __ ret(0);
5588 5548
5589 __ bind(&miss); 5549 __ bind(&miss);
5590 GenerateMiss(masm); 5550 GenerateMiss(masm);
5591 } 5551 }
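
The ordered path above leans on a subtle trick: only the sign of rdx - rax matters, and when the subtraction overflows that sign comes out exactly inverted, which SmiNot repairs. A sketch under ia32-style tagging (value * 2; x64 keeps the payload in the upper 32 bits, but the reasoning is the same); because tagged smis are even, their difference is even, and NOT of an even number is odd and therefore never zero:

#include <cassert>
#include <cstdint>

int32_t CompareSmis(int32_t a, int32_t b) {  // a, b: untagged values
  int32_t lhs = a * 2, rhs = b * 2;          // tag as smis (assumed scheme)
  int32_t diff = static_cast<int32_t>(
      static_cast<uint32_t>(lhs) - static_cast<uint32_t>(rhs));
  bool overflow =
      (static_cast<int64_t>(lhs) - static_cast<int64_t>(rhs)) != diff;
  return overflow ? ~diff : diff;            // ~ mirrors __ SmiNot
}

int main() {
  assert(CompareSmis(5, 3) > 0);
  assert(CompareSmis(3, 3) == 0);
  assert(CompareSmis(-(1 << 30), 1) < 0);     // subtraction overflows
  assert(CompareSmis((1 << 30) - 1, -5) > 0); // overflows the other way
  return 0;
}
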
5592 5552
5593 5553
5594 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { 5554 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
5595 ASSERT(state_ == CompareIC::HEAP_NUMBERS); 5555 ASSERT(state_ == CompareIC::HEAP_NUMBER);
5596 5556
5597 Label generic_stub; 5557 Label generic_stub;
5598 Label unordered, maybe_undefined1, maybe_undefined2; 5558 Label unordered, maybe_undefined1, maybe_undefined2;
5599 Label miss; 5559 Label miss;
5600 Condition either_smi = masm->CheckEitherSmi(rax, rdx);
5601 __ j(either_smi, &generic_stub, Label::kNear);
5602 5560
5603 __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx); 5561 if (left_ == CompareIC::SMI) {
5562 __ JumpIfNotSmi(rdx, &miss);
5563 }
5564 if (right_ == CompareIC::SMI) {
5565 __ JumpIfNotSmi(rax, &miss);
5566 }
5567
5568 // Load left and right operand.
5569 Label done, left, left_smi, right_smi;
5570 __ JumpIfSmi(rax, &right_smi, Label::kNear);
5571 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map(), NULL);
5604 __ j(not_equal, &maybe_undefined1, Label::kNear); 5572 __ j(not_equal, &maybe_undefined1, Label::kNear);
5605 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); 5573 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
5574 __ jmp(&left, Label::kNear);
5575 __ bind(&right_smi);
5576 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet.
5577 __ cvtlsi2sd(xmm1, rcx);
5578
5579 __ bind(&left);
5580 __ JumpIfSmi(rdx, &left_smi, Label::kNear);
5581 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map(), NULL);
5606 __ j(not_equal, &maybe_undefined2, Label::kNear); 5582 __ j(not_equal, &maybe_undefined2, Label::kNear);
5583 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
5584 __ jmp(&done);
5585 __ bind(&left_smi);
5586 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet.
5587 __ cvtlsi2sd(xmm0, rcx);
5607 5588
5608 // Load left and right operand 5589 __ bind(&done);
5609 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
5610 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
5611
5612 // Compare operands 5590 // Compare operands
5613 __ ucomisd(xmm0, xmm1); 5591 __ ucomisd(xmm0, xmm1);
5614 5592
5615 // Don't base result on EFLAGS when a NaN is involved. 5593 // Don't base result on EFLAGS when a NaN is involved.
5616 __ j(parity_even, &unordered, Label::kNear); 5594 __ j(parity_even, &unordered, Label::kNear);
5617 5595
5618 // Return a result of -1, 0, or 1, based on EFLAGS. 5596 // Return a result of -1, 0, or 1, based on EFLAGS.
5619 // Performing mov, because xor would destroy the flag register. 5597 // Performing mov, because xor would destroy the flag register.
5620 __ movl(rax, Immediate(0)); 5598 __ movl(rax, Immediate(0));
5621 __ movl(rcx, Immediate(0)); 5599 __ movl(rcx, Immediate(0));
5622 __ setcc(above, rax); // Add one to zero if carry clear and not equal. 5600 __ setcc(above, rax); // Add one to zero if carry clear and not equal.
 5623   __ setcc(above, rax);  // Add one to zero if carry clear and not equal. 5601   __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
 5624   __ sbbq(rax, rcx);  // Subtract one if below (i.e. carry set). 5602   __ sbbq(rax, rcx);  // Subtract one if below (i.e. carry set).
5624 __ ret(0); 5602 __ ret(0);
5625 5603
5626 __ bind(&unordered); 5604 __ bind(&unordered);
5627 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
5628 __ bind(&generic_stub); 5605 __ bind(&generic_stub);
5606 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
5607 CompareIC::GENERIC);
5629 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); 5608 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
5630 5609
5631 __ bind(&maybe_undefined1); 5610 __ bind(&maybe_undefined1);
5632 if (Token::IsOrderedRelationalCompareOp(op_)) { 5611 if (Token::IsOrderedRelationalCompareOp(op_)) {
5633 __ Cmp(rax, masm->isolate()->factory()->undefined_value()); 5612 __ Cmp(rax, masm->isolate()->factory()->undefined_value());
5634 __ j(not_equal, &miss); 5613 __ j(not_equal, &miss);
5614 __ JumpIfSmi(rdx, &unordered);
5635 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); 5615 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
5636 __ j(not_equal, &maybe_undefined2, Label::kNear); 5616 __ j(not_equal, &maybe_undefined2, Label::kNear);
5637 __ jmp(&unordered); 5617 __ jmp(&unordered);
5638 } 5618 }
5639 5619
5640 __ bind(&maybe_undefined2); 5620 __ bind(&maybe_undefined2);
5641 if (Token::IsOrderedRelationalCompareOp(op_)) { 5621 if (Token::IsOrderedRelationalCompareOp(op_)) {
5642 __ Cmp(rdx, masm->isolate()->factory()->undefined_value()); 5622 __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
5643 __ j(equal, &unordered); 5623 __ j(equal, &unordered);
5644 } 5624 }
5645 5625
5646 __ bind(&miss); 5626 __ bind(&miss);
5647 GenerateMiss(masm); 5627 GenerateMiss(masm);
5648 } 5628 }
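
Two things are worth noting in the new heap-number path: the left_/right_ states let the stub accept a smi on either side (converted with cvtlsi2sd) instead of bailing out, and the setcc/sbbq pair turns the EFLAGS left by ucomisd into -1, 0, or 1 without branching. That flag trick is the classic three-way compare; a minimal C++ sketch:

#include <cassert>

// Branch-free mirror of setcc(above)/sbbq: add one when the left operand
// is above, subtract one when it is below (carry set), i.e. the classic
// (a > b) - (a < b). NaN never reaches this point in the stub because
// the parity_even jump peels it off first.
int ThreeWayCompare(double a, double b) {
  return (a > b) - (a < b);
}

int main() {
  assert(ThreeWayCompare(1.0, 2.0) == -1);
  assert(ThreeWayCompare(2.0, 2.0) == 0);
  assert(ThreeWayCompare(3.0, 2.0) == 1);
  return 0;
}
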
5649 5629
5650 5630
5651 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) { 5631 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
5652 ASSERT(state_ == CompareIC::SYMBOLS); 5632 ASSERT(state_ == CompareIC::SYMBOL);
5653 ASSERT(GetCondition() == equal); 5633 ASSERT(GetCondition() == equal);
5654 5634
 5655   // Registers containing left and right operands, respectively. 5635   // Registers containing left and right operands, respectively.
5656 Register left = rdx; 5636 Register left = rdx;
5657 Register right = rax; 5637 Register right = rax;
5658 Register tmp1 = rcx; 5638 Register tmp1 = rcx;
5659 Register tmp2 = rbx; 5639 Register tmp2 = rbx;
5660 5640
5661 // Check that both operands are heap objects. 5641 // Check that both operands are heap objects.
5662 Label miss; 5642 Label miss;
(...skipping 22 matching lines...)
5685 __ Move(rax, Smi::FromInt(EQUAL)); 5665 __ Move(rax, Smi::FromInt(EQUAL));
5686 __ bind(&done); 5666 __ bind(&done);
5687 __ ret(0); 5667 __ ret(0);
5688 5668
5689 __ bind(&miss); 5669 __ bind(&miss);
5690 GenerateMiss(masm); 5670 GenerateMiss(masm);
5691 } 5671 }
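
The symbol path can answer equality with a single pointer compare because symbols are interned: each distinct string has exactly one canonical object. A hedged sketch of that property, with std::unordered_set standing in for V8's symbol table:

#include <cassert>
#include <string>
#include <unordered_set>

// Interning maps every distinct string to one canonical object, so
// equality degenerates to address identity. Pointers to elements of an
// unordered_set stay valid across rehashing, so handing them out is safe.
const std::string* Intern(const std::string& s) {
  static std::unordered_set<std::string> table;
  return &*table.insert(s).first;
}

int main() {
  assert(Intern("foo") == Intern("foo"));  // same canonical object
  assert(Intern("foo") != Intern("bar"));
  return 0;
}
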
5692 5672
5693 5673
5694 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { 5674 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
5695 ASSERT(state_ == CompareIC::STRINGS); 5675 ASSERT(state_ == CompareIC::STRING);
5696 Label miss; 5676 Label miss;
5697 5677
5698 bool equality = Token::IsEqualityOp(op_); 5678 bool equality = Token::IsEqualityOp(op_);
5699 5679
 5700   // Registers containing left and right operands, respectively. 5680   // Registers containing left and right operands, respectively.
5701 Register left = rdx; 5681 Register left = rdx;
5702 Register right = rax; 5682 Register right = rax;
5703 Register tmp1 = rcx; 5683 Register tmp1 = rcx;
5704 Register tmp2 = rbx; 5684 Register tmp2 = rbx;
5705 Register tmp3 = rdi; 5685 Register tmp3 = rdi;
(...skipping 65 matching lines...)
5771 } else { 5751 } else {
5772 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 5752 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
5773 } 5753 }
5774 5754
5775 __ bind(&miss); 5755 __ bind(&miss);
5776 GenerateMiss(masm); 5756 GenerateMiss(masm);
5777 } 5757 }
5778 5758
5779 5759
5780 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { 5760 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
5781 ASSERT(state_ == CompareIC::OBJECTS); 5761 ASSERT(state_ == CompareIC::OBJECT);
5782 Label miss; 5762 Label miss;
5783 Condition either_smi = masm->CheckEitherSmi(rdx, rax); 5763 Condition either_smi = masm->CheckEitherSmi(rdx, rax);
5784 __ j(either_smi, &miss, Label::kNear); 5764 __ j(either_smi, &miss, Label::kNear);
5785 5765
5786 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx); 5766 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
5787 __ j(not_equal, &miss, Label::kNear); 5767 __ j(not_equal, &miss, Label::kNear);
5788 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); 5768 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
5789 __ j(not_equal, &miss, Label::kNear); 5769 __ j(not_equal, &miss, Label::kNear);
5790 5770
5791 ASSERT(GetCondition() == equal); 5771 ASSERT(GetCondition() == equal);
(...skipping 707 matching lines...)
6499 #endif 6479 #endif
6500 6480
6501 __ Ret(); 6481 __ Ret();
6502 } 6482 }
6503 6483
6504 #undef __ 6484 #undef __
6505 6485
6506 } } // namespace v8::internal 6486 } } // namespace v8::internal
6507 6487
6508 #endif // V8_TARGET_ARCH_X64 6488 #endif // V8_TARGET_ARCH_X64