| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3448 matching lines...) |
| 3459 ZoneList<Expression*>* args = expr->arguments(); | 3459 ZoneList<Expression*>* args = expr->arguments(); |
| 3460 ASSERT(args->length() == 3); | 3460 ASSERT(args->length() == 3); |
| 3461 VisitForStackValue(args->at(0)); | 3461 VisitForStackValue(args->at(0)); |
| 3462 VisitForStackValue(args->at(1)); | 3462 VisitForStackValue(args->at(1)); |
| 3463 VisitForStackValue(args->at(2)); | 3463 VisitForStackValue(args->at(2)); |
| 3464 __ CallStub(&stub); | 3464 __ CallStub(&stub); |
| 3465 context()->Plug(r0); | 3465 context()->Plug(r0); |
| 3466 } | 3466 } |
| 3467 | 3467 |
| 3468 | 3468 |
| 3469 void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) { | |
| 3470 ZoneList<Expression*>* args = expr->arguments(); | |
| 3471 ASSERT(args->length() == 3); | |
| 3472 VisitForStackValue(args->at(0)); | |
| 3473 VisitForStackValue(args->at(1)); | |
| 3474 VisitForStackValue(args->at(2)); | |
| 3475 Label done; | |
| 3476 Label slow_case; | |
| 3477 Register object = r0; | |
| 3478 Register index1 = r1; | |
| 3479 Register index2 = r2; | |
| 3480 Register elements = r3; | |
| 3481 Register scratch1 = r4; | |
| 3482 Register scratch2 = r5; | |
| 3483 | |
| 3484 __ ldr(object, MemOperand(sp, 2 * kPointerSize)); | |
| 3485 // Fetch the map and check if array is in fast case. | |
| 3486 // Check that object doesn't require security checks and | |
| 3487 // has no indexed interceptor. | |
| 3488 __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE); | |
| 3489 __ b(ne, &slow_case); | |
| 3490 // Map is now in scratch1. | |
| 3491 | |
| 3492 __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset)); | |
| 3493 __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); | |
| 3494 __ b(ne, &slow_case); | |
| 3495 | |
| 3496 // Check the object's elements are in fast case and writable. | |
| 3497 __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset)); | |
| 3498 __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset)); | |
| 3499 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); | |
| 3500 __ cmp(scratch1, ip); | |
| 3501 __ b(ne, &slow_case); | |
| 3502 | |
| 3503 // Check that both indices are smis. | |
| 3504 __ ldr(index1, MemOperand(sp, 1 * kPointerSize)); | |
| 3505 __ ldr(index2, MemOperand(sp, 0)); | |
| 3506 __ JumpIfNotBothSmi(index1, index2, &slow_case); | |
| 3507 | |
| 3508 // Check that both indices are valid. | |
| 3509 __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset)); | |
| 3510 __ cmp(scratch1, index1); | |
| 3511 __ cmp(scratch1, index2, hi); | |
| 3512 __ b(ls, &slow_case); | |
| 3513 | |
| 3514 // Bring the address of the elements into index1 and index2. | |
| 3515 __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 3516 __ add(index1, | |
| 3517 scratch1, | |
| 3518 Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize)); | |
| 3519 __ add(index2, | |
| 3520 scratch1, | |
| 3521 Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize)); | |
| 3522 | |
| 3523 // Swap elements. | |
| 3524 __ ldr(scratch1, MemOperand(index1, 0)); | |
| 3525 __ ldr(scratch2, MemOperand(index2, 0)); | |
| 3526 __ str(scratch1, MemOperand(index2, 0)); | |
| 3527 __ str(scratch2, MemOperand(index1, 0)); | |
| 3528 | |
| 3529 Label no_remembered_set; | |
| 3530 __ CheckPageFlag(elements, | |
| 3531 scratch1, | |
| 3532 1 << MemoryChunk::SCAN_ON_SCAVENGE, | |
| 3533 ne, | |
| 3534 &no_remembered_set); | |
| 3535 // Possible optimization: do a check that both values are Smis | |
| 3536 // (or them and test against Smi mask.) | |
| 3537 | |
| 3538 // We are swapping two objects in an array and the incremental marker never | |
| 3539 // pauses in the middle of scanning a single object. Therefore the | |
| 3540 // incremental marker is not disturbed, so we don't need to call the | |
| 3541 // RecordWrite stub that notifies the incremental marker. | |
| 3542 __ RememberedSetHelper(elements, | |
| 3543 index1, | |
| 3544 scratch2, | |
| 3545 kDontSaveFPRegs, | |
| 3546 MacroAssembler::kFallThroughAtEnd); | |
| 3547 __ RememberedSetHelper(elements, | |
| 3548 index2, | |
| 3549 scratch2, | |
| 3550 kDontSaveFPRegs, | |
| 3551 MacroAssembler::kFallThroughAtEnd); | |
| 3552 | |
| 3553 __ bind(&no_remembered_set); | |
| 3554 // We are done. Drop elements from the stack, and return undefined. | |
| 3555 __ Drop(3); | |
| 3556 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | |
| 3557 __ jmp(&done); | |
| 3558 | |
| 3559 __ bind(&slow_case); | |
| 3560 __ CallRuntime(Runtime::kSwapElements, 3); | |
| 3561 | |
| 3562 __ bind(&done); | |
| 3563 context()->Plug(r0); | |
| 3564 } | |
| 3565 | |
| 3566 | |
| 3567 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { | 3469 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { |
| 3568 ZoneList<Expression*>* args = expr->arguments(); | 3470 ZoneList<Expression*>* args = expr->arguments(); |
| 3569 ASSERT_EQ(2, args->length()); | 3471 ASSERT_EQ(2, args->length()); |
| 3570 ASSERT_NE(NULL, args->at(0)->AsLiteral()); | 3472 ASSERT_NE(NULL, args->at(0)->AsLiteral()); |
| 3571 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); | 3473 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); |
| 3572 | 3474 |
| 3573 Handle<FixedArray> jsfunction_result_caches( | 3475 Handle<FixedArray> jsfunction_result_caches( |
| 3574 isolate()->global_context()->jsfunction_result_caches()); | 3476 isolate()->global_context()->jsfunction_result_caches()); |
| 3575 if (jsfunction_result_caches->length() <= cache_id) { | 3477 if (jsfunction_result_caches->length() <= cache_id) { |
| 3576 __ Abort("Attempt to use undefined cache."); | 3478 __ Abort("Attempt to use undefined cache."); |
| (...skipping 1056 matching lines...) |
| 4633 *context_length = 0; | 4535 *context_length = 0; |
| 4634 return previous_; | 4536 return previous_; |
| 4635 } | 4537 } |
| 4636 | 4538 |
| 4637 | 4539 |
| 4638 #undef __ | 4540 #undef __ |
| 4639 | 4541 |
| 4640 } } // namespace v8::internal | 4542 } } // namespace v8::internal |
| 4641 | 4543 |
| 4642 #endif // V8_TARGET_ARCH_ARM | 4544 #endif // V8_TARGET_ARCH_ARM |
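
The EmitSwapElements fast path removed above boils down to: verify the receiver is a fast-case JSArray with no indexed interceptor, check that both indices are smis within the array length, swap the two element slots in place, record the writes in the remembered set unless the elements page is already scan-on-scavenge, and otherwise fall back to Runtime::kSwapElements. A minimal sketch of the equivalent bounds-checked swap in plain C++ (illustrative only; FastSwapElements, the int element type, and the bool return are stand-ins, not V8 code):

    #include <cstddef>
    #include <utility>

    // Hypothetical stand-in for the generated fast path: bounds-check both
    // indices, then swap the two slots in place. Returning false here plays
    // the role of branching to the &slow_case label, where the generated
    // code calls Runtime::kSwapElements instead.
    bool FastSwapElements(int* elements, std::size_t length,
                          std::size_t index1, std::size_t index2) {
      if (index1 >= length || index2 >= length) {
        return false;  // out of range: take the slow case
      }
      std::swap(elements[index1], elements[index2]);
      return true;  // swap done: the generated code then returns undefined
    }

The write-barrier bookkeeping has no analogue in the sketch: the generated code additionally calls RememberedSetHelper for both stores so the collector's remembered set covers them, which is safe without the RecordWrite stub because, as the comment in the deleted code notes, the incremental marker never pauses in the middle of scanning a single object.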