Chromium Code Reviews

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 15085026: ARM: Smi refactoring and improvements. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 7 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1430 matching lines...)
1441 DeoptimizeIf(ne, instr->environment()); 1441 DeoptimizeIf(ne, instr->environment());
1442 __ mov(dividend, Operand(dividend, ASR, power)); 1442 __ mov(dividend, Operand(dividend, ASR, power));
1443 } 1443 }
1444 if (divisor < 0) __ rsb(dividend, dividend, Operand(0)); 1444 if (divisor < 0) __ rsb(dividend, dividend, Operand(0));
1445 1445
1446 return; 1446 return;
1447 } 1447 }
1448 1448
1449 const Register left = ToRegister(instr->left()); 1449 const Register left = ToRegister(instr->left());
1450 const Register right = ToRegister(instr->right()); 1450 const Register right = ToRegister(instr->right());
1451 const Register scratch = scratch0();
1452 const Register result = ToRegister(instr->result()); 1451 const Register result = ToRegister(instr->result());
1453 1452
1454 // Check for x / 0. 1453 // Check for x / 0.
1455 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { 1454 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
1456 __ cmp(right, Operand::Zero()); 1455 __ cmp(right, Operand::Zero());
1457 DeoptimizeIf(eq, instr->environment()); 1456 DeoptimizeIf(eq, instr->environment());
1458 } 1457 }
1459 1458
1460 // Check for (0 / -x) that will produce negative zero. 1459 // Check for (0 / -x) that will produce negative zero.
1461 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 1460 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
(...skipping 28 matching lines...)
1490 1489
1491 __ cmp(right, Operand(4)); 1490 __ cmp(right, Operand(4));
1492 __ tst(left, Operand(3), eq); 1491 __ tst(left, Operand(3), eq);
1493 __ mov(result, Operand(left, ASR, 2), LeaveCC, eq); 1492 __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
1494 __ b(eq, &done); 1493 __ b(eq, &done);
1495 1494
1496 // Call the stub. The numbers in r0 and r1 have 1495 // Call the stub. The numbers in r0 and r1 have
1497 // to be tagged to Smis. If that is not possible, deoptimize. 1496 // to be tagged to Smis. If that is not possible, deoptimize.
1498 DeferredDivI* deferred = new(zone()) DeferredDivI(this, instr); 1497 DeferredDivI* deferred = new(zone()) DeferredDivI(this, instr);
1499 1498
1500 __ TrySmiTag(left, &deoptimize, scratch); 1499 __ TrySmiTag(left, &deoptimize);
1501 __ TrySmiTag(right, &deoptimize, scratch); 1500 __ TrySmiTag(right, &deoptimize);
1502 1501
1503 __ b(al, deferred->entry()); 1502 __ b(al, deferred->entry());
1504 __ bind(deferred->exit()); 1503 __ bind(deferred->exit());
1505 1504
1506 // If the result in r0 is a Smi, untag it, else deoptimize. 1505 // If the result in r0 is a Smi, untag it, else deoptimize.
1507 __ JumpIfNotSmi(result, &deoptimize); 1506 __ JumpIfNotSmi(result, &deoptimize);
1508 __ SmiUntag(result); 1507 __ SmiUntag(result);
1509 __ b(&done); 1508 __ b(&done);
1510 1509
1511 __ bind(&deoptimize); 1510 __ bind(&deoptimize);
(...skipping 431 matching lines...)
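
Note on the TrySmiTag calls in the hunk above: the scratch register argument is gone from the new call sites. One way a scratch-free version can work is to tag into the assembler's ip scratch register with the condition flags set and only commit on success, so the untagged input survives for the bailout path. A minimal sketch under that assumption (an illustration, not necessarily the actual new helper):

    void MacroAssembler::TrySmiTag(Register reg, Label* not_a_smi) {
      SmiTag(ip, reg, SetCC);   // ip = reg << kSmiTagSize, V flag set on overflow
      b(vs, not_a_smi);         // value does not fit in a Smi -> bail out
      mov(reg, ip);             // commit the tagged value, reg untouched on failure
    }
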
1943 } 1942 }
1944 1943
1945 1944
1946 void LCodeGen::DoValueOf(LValueOf* instr) { 1945 void LCodeGen::DoValueOf(LValueOf* instr) {
1947 Register input = ToRegister(instr->value()); 1946 Register input = ToRegister(instr->value());
1948 Register result = ToRegister(instr->result()); 1947 Register result = ToRegister(instr->result());
1949 Register map = ToRegister(instr->temp()); 1948 Register map = ToRegister(instr->temp());
1950 Label done; 1949 Label done;
1951 1950
1952 // If the object is a smi return the object. 1951 // If the object is a smi return the object.
1953 __ tst(input, Operand(kSmiTagMask)); 1952 __ SmiTst(input);
1954 __ Move(result, input, eq); 1953 __ Move(result, input, eq);
1955 __ b(eq, &done); 1954 __ b(eq, &done);
1956 1955
1957 // If the object is not a value type, return the object. 1956 // If the object is not a value type, return the object.
1958 __ CompareObjectType(input, map, map, JS_VALUE_TYPE); 1957 __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
1959 __ Move(result, input, ne); 1958 __ Move(result, input, ne);
1960 __ b(ne, &done); 1959 __ b(ne, &done);
1961 __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset)); 1960 __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));
1962 1961
1963 __ bind(&done); 1962 __ bind(&done);
1964 } 1963 }
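
Note on the SmiTst helper introduced above: the left column shows exactly what it replaces, a tst against kSmiTagMask. A minimal sketch of the helper, assuming it sits in macro-assembler-arm.h next to the other Smi macros (the placement is an assumption; the expansion matches the old call sites):

    void MacroAssembler::SmiTst(Register value) {
      // Smis carry kSmiTag (0) in their low kSmiTagSize bits, so this sets
      // the Z flag exactly when value holds a Smi; callers branch or
      // deoptimize on eq/ne.
      tst(value, Operand(kSmiTagMask));
    }
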
1965 1964
1966 1965
1967 void LCodeGen::DoDateField(LDateField* instr) { 1966 void LCodeGen::DoDateField(LDateField* instr) {
1968 Register object = ToRegister(instr->date()); 1967 Register object = ToRegister(instr->date());
1969 Register result = ToRegister(instr->result()); 1968 Register result = ToRegister(instr->result());
1970 Register scratch = ToRegister(instr->temp()); 1969 Register scratch = ToRegister(instr->temp());
1971 Smi* index = instr->index(); 1970 Smi* index = instr->index();
1972 Label runtime, done; 1971 Label runtime, done;
1973 ASSERT(object.is(result)); 1972 ASSERT(object.is(result));
1974 ASSERT(object.is(r0)); 1973 ASSERT(object.is(r0));
1975 ASSERT(!scratch.is(scratch0())); 1974 ASSERT(!scratch.is(scratch0()));
1976 ASSERT(!scratch.is(object)); 1975 ASSERT(!scratch.is(object));
1977 1976
1978 __ tst(object, Operand(kSmiTagMask)); 1977 __ SmiTst(object);
1979 DeoptimizeIf(eq, instr->environment()); 1978 DeoptimizeIf(eq, instr->environment());
1980 __ CompareObjectType(object, scratch, scratch, JS_DATE_TYPE); 1979 __ CompareObjectType(object, scratch, scratch, JS_DATE_TYPE);
1981 DeoptimizeIf(ne, instr->environment()); 1980 DeoptimizeIf(ne, instr->environment());
1982 1981
1983 if (index->value() == 0) { 1982 if (index->value() == 0) {
1984 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); 1983 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
1985 } else { 1984 } else {
1986 if (index->value() < JSDate::kFirstUncachedField) { 1985 if (index->value() < JSDate::kFirstUncachedField) {
1987 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 1986 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1988 __ mov(scratch, Operand(stamp)); 1987 __ mov(scratch, Operand(stamp));
(...skipping 265 matching lines...)
2254 __ b(eq, false_label); 2253 __ b(eq, false_label);
2255 } 2254 }
2256 2255
2257 if (expected.Contains(ToBooleanStub::SMI)) { 2256 if (expected.Contains(ToBooleanStub::SMI)) {
2258 // Smis: 0 -> false, all other -> true. 2257 // Smis: 0 -> false, all other -> true.
2259 __ cmp(reg, Operand::Zero()); 2258 __ cmp(reg, Operand::Zero());
2260 __ b(eq, false_label); 2259 __ b(eq, false_label);
2261 __ JumpIfSmi(reg, true_label); 2260 __ JumpIfSmi(reg, true_label);
2262 } else if (expected.NeedsMap()) { 2261 } else if (expected.NeedsMap()) {
2263 // If we need a map later and have a Smi -> deopt. 2262 // If we need a map later and have a Smi -> deopt.
2264 __ tst(reg, Operand(kSmiTagMask)); 2263 __ SmiTst(reg);
2265 DeoptimizeIf(eq, instr->environment()); 2264 DeoptimizeIf(eq, instr->environment());
2266 } 2265 }
2267 2266
2268 const Register map = scratch0(); 2267 const Register map = scratch0();
2269 if (expected.NeedsMap()) { 2268 if (expected.NeedsMap()) {
2270 __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset)); 2269 __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset));
2271 2270
2272 if (expected.CanBeUndetectable()) { 2271 if (expected.CanBeUndetectable()) {
2273 // Undetectable -> false. 2272 // Undetectable -> false.
2274 __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset)); 2273 __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
(...skipping 215 matching lines...)
2490 2489
2491 EmitBranch(true_block, false_block, true_cond); 2490 EmitBranch(true_block, false_block, true_cond);
2492 } 2491 }
2493 2492
2494 2493
2495 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { 2494 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
2496 int true_block = chunk_->LookupDestination(instr->true_block_id()); 2495 int true_block = chunk_->LookupDestination(instr->true_block_id());
2497 int false_block = chunk_->LookupDestination(instr->false_block_id()); 2496 int false_block = chunk_->LookupDestination(instr->false_block_id());
2498 2497
2499 Register input_reg = EmitLoadRegister(instr->value(), ip); 2498 Register input_reg = EmitLoadRegister(instr->value(), ip);
2500 __ tst(input_reg, Operand(kSmiTagMask)); 2499 __ SmiTst(input_reg);
2501 EmitBranch(true_block, false_block, eq); 2500 EmitBranch(true_block, false_block, eq);
2502 } 2501 }
2503 2502
2504 2503
2505 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) { 2504 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
2506 Register input = ToRegister(instr->value()); 2505 Register input = ToRegister(instr->value());
2507 Register temp = ToRegister(instr->temp()); 2506 Register temp = ToRegister(instr->temp());
2508 2507
2509 int true_block = chunk_->LookupDestination(instr->true_block_id()); 2508 int true_block = chunk_->LookupDestination(instr->true_block_id());
2510 int false_block = chunk_->LookupDestination(instr->false_block_id()); 2509 int false_block = chunk_->LookupDestination(instr->false_block_id());
(...skipping 850 matching lines...)
3361 offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + 3360 offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
3362 instr->additional_index()); 3361 instr->additional_index());
3363 store_base = elements; 3362 store_base = elements;
3364 } else { 3363 } else {
3365 Register key = EmitLoadRegister(instr->key(), scratch0()); 3364 Register key = EmitLoadRegister(instr->key(), scratch0());
3366 // Even though the HLoadKeyed instruction forces the input 3365 // Even though the HLoadKeyed instruction forces the input
3367 // representation for the key to be an integer, the input gets replaced 3366 // representation for the key to be an integer, the input gets replaced
3368 // during bound check elimination with the index argument to the bounds 3367 // during bound check elimination with the index argument to the bounds
3369 // check, which can be tagged, so that case must be handled here, too. 3368 // check, which can be tagged, so that case must be handled here, too.
3370 if (instr->hydrogen()->key()->representation().IsTagged()) { 3369 if (instr->hydrogen()->key()->representation().IsTagged()) {
3371 __ add(scratch, elements, 3370 __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key));
3372 Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
3373 } else { 3371 } else {
3374 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2)); 3372 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
3375 } 3373 }
3376 offset = FixedArray::OffsetOfElementAt(instr->additional_index()); 3374 offset = FixedArray::OffsetOfElementAt(instr->additional_index());
3377 } 3375 }
3378 __ ldr(result, FieldMemOperand(store_base, offset)); 3376 __ ldr(result, FieldMemOperand(store_base, offset));
3379 3377
3380 // Check for the hole value. 3378 // Check for the hole value.
3381 if (instr->hydrogen()->RequiresHoleCheck()) { 3379 if (instr->hydrogen()->RequiresHoleCheck()) {
3382 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { 3380 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
3383 __ tst(result, Operand(kSmiTagMask)); 3381 __ SmiTst(result);
3384 DeoptimizeIf(ne, instr->environment()); 3382 DeoptimizeIf(ne, instr->environment());
3385 } else { 3383 } else {
3386 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); 3384 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
3387 __ cmp(result, scratch); 3385 __ cmp(result, scratch);
3388 DeoptimizeIf(eq, instr->environment()); 3386 DeoptimizeIf(eq, instr->environment());
3389 } 3387 }
3390 } 3388 }
3391 } 3389 }
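
Note on Operand::PointerOffsetFromSmiKey used above: the left column shows the expansion it replaces. A Smi already carries the integer shifted left by kSmiTagSize, so scaling it to a byte offset of key * kPointerSize only needs the remaining shift. A sketch of what the helper appears to compute (the exact signature and location are assumptions):

    // Presumably a static helper on Operand in the ARM assembler:
    static Operand PointerOffsetFromSmiKey(Register key) {
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
      // Smi(key) == key << kSmiTagSize, so
      // key * kPointerSize == Smi(key) << (kPointerSizeLog2 - kSmiTagSize).
      return Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize);
    }
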
3392 3390
3393 3391
(...skipping 122 matching lines...)
3516 3514
3517 // Normal function. Replace undefined or null with global receiver. 3515 // Normal function. Replace undefined or null with global receiver.
3518 __ LoadRoot(scratch, Heap::kNullValueRootIndex); 3516 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
3519 __ cmp(receiver, scratch); 3517 __ cmp(receiver, scratch);
3520 __ b(eq, &global_object); 3518 __ b(eq, &global_object);
3521 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); 3519 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
3522 __ cmp(receiver, scratch); 3520 __ cmp(receiver, scratch);
3523 __ b(eq, &global_object); 3521 __ b(eq, &global_object);
3524 3522
3525 // Deoptimize if the receiver is not a JS object. 3523 // Deoptimize if the receiver is not a JS object.
3526 __ tst(receiver, Operand(kSmiTagMask)); 3524 __ SmiTst(receiver);
3527 DeoptimizeIf(eq, instr->environment()); 3525 DeoptimizeIf(eq, instr->environment());
3528 __ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE); 3526 __ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE);
3529 DeoptimizeIf(lt, instr->environment()); 3527 DeoptimizeIf(lt, instr->environment());
3530 __ jmp(&receiver_ok); 3528 __ jmp(&receiver_ok);
3531 3529
3532 __ bind(&global_object); 3530 __ bind(&global_object);
3533 __ ldr(receiver, GlobalObjectOperand()); 3531 __ ldr(receiver, GlobalObjectOperand());
3534 __ ldr(receiver, 3532 __ ldr(receiver,
3535 FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset)); 3533 FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
3536 __ bind(&receiver_ok); 3534 __ bind(&receiver_ok);
(...skipping 677 matching lines...)
4214 4212
4215 if (FLAG_track_fields && representation.IsSmi()) { 4213 if (FLAG_track_fields && representation.IsSmi()) {
4216 Register value = ToRegister(instr->value()); 4214 Register value = ToRegister(instr->value());
4217 __ SmiTag(value, value, SetCC); 4215 __ SmiTag(value, value, SetCC);
4218 if (!instr->hydrogen()->value()->range()->IsInSmiRange()) { 4216 if (!instr->hydrogen()->value()->range()->IsInSmiRange()) {
4219 DeoptimizeIf(vs, instr->environment()); 4217 DeoptimizeIf(vs, instr->environment());
4220 } 4218 }
4221 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { 4219 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
4222 Register value = ToRegister(instr->value()); 4220 Register value = ToRegister(instr->value());
4223 if (!instr->hydrogen()->value()->type().IsHeapObject()) { 4221 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
4224 __ tst(value, Operand(kSmiTagMask)); 4222 __ SmiTst(value);
4225 DeoptimizeIf(eq, instr->environment()); 4223 DeoptimizeIf(eq, instr->environment());
4226 } 4224 }
4227 } else if (FLAG_track_double_fields && representation.IsDouble()) { 4225 } else if (FLAG_track_double_fields && representation.IsDouble()) {
4228 ASSERT(transition.is_null()); 4226 ASSERT(transition.is_null());
4229 ASSERT(instr->is_in_object()); 4227 ASSERT(instr->is_in_object());
4230 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); 4228 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
4231 DwVfpRegister value = ToDoubleRegister(instr->value()); 4229 DwVfpRegister value = ToDoubleRegister(instr->value());
4232 __ vstr(value, FieldMemOperand(object, offset)); 4230 __ vstr(value, FieldMemOperand(object, offset));
4233 return; 4231 return;
4234 } 4232 }
(...skipping 216 matching lines...)
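
Note on the SmiTag(value, value, SetCC) / DeoptimizeIf(vs, ...) pair in the hunk above: tagging doubles the value, and a plain LSL #1 would not set the V flag, so the overflow-based deoptimization only works if the tag is emitted as an add of the register to itself. A sketch of that pattern, assuming this is how the helper sets the flags (an assumption, though the vs check above requires something equivalent):

    void MacroAssembler::SmiTag(Register dst, Register src, SBit s) {
      // dst = src + src == src << 1; with s == SetCC the ARM V flag is set
      // when the result overflows 32 bits, i.e. when src is outside Smi range.
      add(dst, src, Operand(src), s);
    }
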
4451 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); 4449 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
4452 offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + 4450 offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
4453 instr->additional_index()); 4451 instr->additional_index());
4454 store_base = elements; 4452 store_base = elements;
4455 } else { 4453 } else {
4456 // Even though the HLoadKeyed instruction forces the input 4454 // Even though the HLoadKeyed instruction forces the input
4457 // representation for the key to be an integer, the input gets replaced 4455 // representation for the key to be an integer, the input gets replaced
4458 // during bound check elimination with the index argument to the bounds 4456 // during bound check elimination with the index argument to the bounds
4459 // check, which can be tagged, so that case must be handled here, too. 4457 // check, which can be tagged, so that case must be handled here, too.
4460 if (instr->hydrogen()->key()->representation().IsTagged()) { 4458 if (instr->hydrogen()->key()->representation().IsTagged()) {
4461 __ add(scratch, elements, 4459 __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key));
4462 Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
4463 } else { 4460 } else {
4464 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2)); 4461 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
4465 } 4462 }
4466 offset = FixedArray::OffsetOfElementAt(instr->additional_index()); 4463 offset = FixedArray::OffsetOfElementAt(instr->additional_index());
4467 } 4464 }
4468 __ str(value, FieldMemOperand(store_base, offset)); 4465 __ str(value, FieldMemOperand(store_base, offset));
4469 4466
4470 if (instr->hydrogen()->NeedsWriteBarrier()) { 4467 if (instr->hydrogen()->NeedsWriteBarrier()) {
4471 HType type = instr->hydrogen()->value()->type(); 4468 HType type = instr->hydrogen()->value()->type();
4472 SmiCheck check_needed = 4469 SmiCheck check_needed =
(...skipping 664 matching lines...)
5137 __ TryDoubleToInt32Exact(result_reg, double_input, double_scratch); 5134 __ TryDoubleToInt32Exact(result_reg, double_input, double_scratch);
5138 // Deoptimize if the input wasn't a int32 (inside a double). 5135 // Deoptimize if the input wasn't a int32 (inside a double).
5139 DeoptimizeIf(ne, instr->environment()); 5136 DeoptimizeIf(ne, instr->environment());
5140 } 5137 }
5141 __ bind(&done); 5138 __ bind(&done);
5142 } 5139 }
5143 5140
5144 5141
5145 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { 5142 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
5146 LOperand* input = instr->value(); 5143 LOperand* input = instr->value();
5147 __ tst(ToRegister(input), Operand(kSmiTagMask)); 5144 __ SmiTst(ToRegister(input));
5148 DeoptimizeIf(ne, instr->environment()); 5145 DeoptimizeIf(ne, instr->environment());
5149 } 5146 }
5150 5147
5151 5148
5152 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { 5149 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
5153 LOperand* input = instr->value(); 5150 LOperand* input = instr->value();
5154 __ tst(ToRegister(input), Operand(kSmiTagMask)); 5151 __ SmiTst(ToRegister(input));
5155 DeoptimizeIf(eq, instr->environment()); 5152 DeoptimizeIf(eq, instr->environment());
5156 } 5153 }
5157 5154
5158 5155
5159 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { 5156 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
5160 Register input = ToRegister(instr->value()); 5157 Register input = ToRegister(instr->value());
5161 Register scratch = scratch0(); 5158 Register scratch = scratch0();
5162 5159
5163 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); 5160 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
5164 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 5161 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
(...skipping 658 matching lines...)
5823 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { 5820 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5824 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 5821 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
5825 __ cmp(r0, ip); 5822 __ cmp(r0, ip);
5826 DeoptimizeIf(eq, instr->environment()); 5823 DeoptimizeIf(eq, instr->environment());
5827 5824
5828 Register null_value = r5; 5825 Register null_value = r5;
5829 __ LoadRoot(null_value, Heap::kNullValueRootIndex); 5826 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
5830 __ cmp(r0, null_value); 5827 __ cmp(r0, null_value);
5831 DeoptimizeIf(eq, instr->environment()); 5828 DeoptimizeIf(eq, instr->environment());
5832 5829
5833 __ tst(r0, Operand(kSmiTagMask)); 5830 __ SmiTst(r0);
5834 DeoptimizeIf(eq, instr->environment()); 5831 DeoptimizeIf(eq, instr->environment());
5835 5832
5836 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 5833 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
5837 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE); 5834 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
5838 DeoptimizeIf(le, instr->environment()); 5835 DeoptimizeIf(le, instr->environment());
5839 5836
5840 Label use_cache, call_runtime; 5837 Label use_cache, call_runtime;
5841 __ CheckEnumCache(null_value, &call_runtime); 5838 __ CheckEnumCache(null_value, &call_runtime);
5842 5839
5843 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 5840 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
(...skipping 47 matching lines...)
5891 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { 5888 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5892 Register object = ToRegister(instr->object()); 5889 Register object = ToRegister(instr->object());
5893 Register index = ToRegister(instr->index()); 5890 Register index = ToRegister(instr->index());
5894 Register result = ToRegister(instr->result()); 5891 Register result = ToRegister(instr->result());
5895 Register scratch = scratch0(); 5892 Register scratch = scratch0();
5896 5893
5897 Label out_of_object, done; 5894 Label out_of_object, done;
5898 __ cmp(index, Operand::Zero()); 5895 __ cmp(index, Operand::Zero());
5899 __ b(lt, &out_of_object); 5896 __ b(lt, &out_of_object);
5900 5897
5901 STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize); 5898 __ add(scratch, object, Operand::PointerOffsetFromSmiKey(index));
5902 __ add(scratch, object, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
5903 __ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize)); 5899 __ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize));
5904 5900
5905 __ b(&done); 5901 __ b(&done);
5906 5902
5907 __ bind(&out_of_object); 5903 __ bind(&out_of_object);
5908 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); 5904 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
5909 // Index is equal to negated out of object property index plus 1. 5905 // Index is equal to negated out of object property index plus 1.
5910 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); 5906 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
5907 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
5911 __ ldr(result, FieldMemOperand(scratch, 5908 __ ldr(result, FieldMemOperand(scratch,
5912 FixedArray::kHeaderSize - kPointerSize)); 5909 FixedArray::kHeaderSize - kPointerSize));
5913 __ bind(&done); 5910 __ bind(&done);
5914 } 5911 }
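
Note on the out-of-object branch above: as the in-code comment says, a negative index encodes out-of-object slot k as -(k + 1), so subtracting the scaled (negative) Smi offset from the properties pointer walks forward by (k + 1) slots, and loading at FixedArray::kHeaderSize - kPointerSize backs up by one slot to land on element k. A worked example, assuming 32-bit pointers (kPointerSize == 4, kPointerSizeLog2 == 2), a one-bit Smi tag, and ignoring the heap-object tag adjustment that FieldMemOperand applies:

    // Hypothetical slot k = 2, so index holds Smi(-(k + 1)) = Smi(-3) = -6.
    //   PointerOffsetFromSmiKey(index) = -6 << (2 - 1) = -12 = -(k + 1) * 4
    //   scratch  = properties - (-12)  = properties + 12
    //   load at    scratch + FixedArray::kHeaderSize - 4
    //            = properties + FixedArray::kHeaderSize + 8   // element k = 2
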
5915 5912
5916 5913
5917 #undef __ 5914 #undef __
5918 5915
5919 } } // namespace v8::internal 5916 } } // namespace v8::internal