OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3482 matching lines...)
3493 ZoneList<Expression*>* args = expr->arguments(); | 3493 ZoneList<Expression*>* args = expr->arguments(); |
3494 ASSERT(args->length() == 3); | 3494 ASSERT(args->length() == 3); |
3495 VisitForStackValue(args->at(0)); | 3495 VisitForStackValue(args->at(0)); |
3496 VisitForStackValue(args->at(1)); | 3496 VisitForStackValue(args->at(1)); |
3497 VisitForStackValue(args->at(2)); | 3497 VisitForStackValue(args->at(2)); |
3498 __ CallStub(&stub); | 3498 __ CallStub(&stub); |
3499 context()->Plug(v0); | 3499 context()->Plug(v0); |
3500 } | 3500 } |
3501 | 3501 |
3502 | 3502 |
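Note on the fragment above: it is the tail of one of full-codegen's three-argument runtime helpers. Each VisitForStackValue() emits code that evaluates an argument and pushes it, CallStub() consumes the stacked values, and context()->Plug(v0) hands the result register to whatever expression context surrounds the call. A minimal toy model of that flow, with illustrative class and method names rather than the real V8 types:

#include <iostream>
#include <stack>
#include <string>

// Toy stand-in for full-codegen's visitation pattern; names are
// illustrative only and do not match the real V8 classes.
struct ToyCodegen {
  std::stack<std::string> operand_stack;

  // Models VisitForStackValue(): evaluate an argument, leave it on the stack.
  void VisitForStackValue(const std::string& expr) {
    operand_stack.push(expr);  // real codegen emits a push of the value
  }

  // Models context()->Plug(v0): route the call result to where the
  // enclosing expression expects it (accumulator, stack, or a branch).
  void Plug(const std::string& result) {
    std::cout << "result in v0: " << result << "\n";
  }

  void EmitThreeArgHelper() {
    VisitForStackValue("arg0");
    VisitForStackValue("arg1");
    VisitForStackValue("arg2");
    // __ CallStub(&stub) would consume the three stacked values here.
    operand_stack.pop(); operand_stack.pop(); operand_stack.pop();
    Plug("stub result");
  }
};

int main() {
  ToyCodegen cg;
  cg.EmitThreeArgHelper();
}

The same push-arguments, call, Plug shape recurs in the deleted EmitSwapElements below, which is why only the body between the pushes and the final Plug differs between these helpers.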
3503 void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) { | |
3504 ZoneList<Expression*>* args = expr->arguments(); | |
3505 ASSERT(args->length() == 3); | |
3506 VisitForStackValue(args->at(0)); | |
3507 VisitForStackValue(args->at(1)); | |
3508 VisitForStackValue(args->at(2)); | |
3509 Label done; | |
3510 Label slow_case; | |
3511 Register object = a0; | |
3512 Register index1 = a1; | |
3513 Register index2 = a2; | |
3514 Register elements = a3; | |
3515 Register scratch1 = t0; | |
3516 Register scratch2 = t1; | |
3517 | |
3518 __ lw(object, MemOperand(sp, 2 * kPointerSize)); | |
3519 // Fetch the map and check if array is in fast case. | |
3520 // Check that object doesn't require security checks and | |
3521 // has no indexed interceptor. | |
3522 __ GetObjectType(object, scratch1, scratch2); | |
3523 __ Branch(&slow_case, ne, scratch2, Operand(JS_ARRAY_TYPE)); | |
3524 // Map is now in scratch1. | |
3525 | |
3526 __ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset)); | |
3527 __ And(scratch2, scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); | |
3528 __ Branch(&slow_case, ne, scratch2, Operand(zero_reg)); | |
3529 | |
3530 // Check the object's elements are in fast case and writable. | |
3531 __ lw(elements, FieldMemOperand(object, JSObject::kElementsOffset)); | |
3532 __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset)); | |
3533 __ LoadRoot(scratch2, Heap::kFixedArrayMapRootIndex); | |
3534 __ Branch(&slow_case, ne, scratch1, Operand(scratch2)); | |
3535 | |
3536 // Check that both indices are smis. | |
3537 __ lw(index1, MemOperand(sp, 1 * kPointerSize)); | |
3538 __ lw(index2, MemOperand(sp, 0)); | |
3539 __ JumpIfNotBothSmi(index1, index2, &slow_case); | |
3540 | |
3541 // Check that both indices are valid. | |
3542 Label not_hi; | |
3543 __ lw(scratch1, FieldMemOperand(object, JSArray::kLengthOffset)); | |
3544 __ Branch(&slow_case, ls, scratch1, Operand(index1)); | |
3545 __ Branch(&not_hi, NegateCondition(hi), scratch1, Operand(index1)); | |
3546 __ Branch(&slow_case, ls, scratch1, Operand(index2)); | |
3547 __ bind(&not_hi); | |
3548 | |
3549 // Bring the address of the elements into index1 and index2. | |
3550 __ Addu(scratch1, elements, | |
3551 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
3552 __ sll(index1, index1, kPointerSizeLog2 - kSmiTagSize); | |
3553 __ Addu(index1, scratch1, index1); | |
3554 __ sll(index2, index2, kPointerSizeLog2 - kSmiTagSize); | |
3555 __ Addu(index2, scratch1, index2); | |
3556 | |
3557 // Swap elements. | |
3558 __ lw(scratch1, MemOperand(index1, 0)); | |
3559 __ lw(scratch2, MemOperand(index2, 0)); | |
3560 __ sw(scratch1, MemOperand(index2, 0)); | |
3561 __ sw(scratch2, MemOperand(index1, 0)); | |
3562 | |
3563 Label no_remembered_set; | |
3564 __ CheckPageFlag(elements, | |
3565 scratch1, | |
3566 1 << MemoryChunk::SCAN_ON_SCAVENGE, | |
3567 ne, | |
3568 &no_remembered_set); | |
3569 // Possible optimization: do a check that both values are Smis | |
3570 // (OR them together and test against the Smi mask). | |
3571 | |
3572 // We are swapping two objects in an array and the incremental marker never | |
3573 // pauses in the middle of scanning a single object. Therefore the | |
3574 // incremental marker is not disturbed, so we don't need to call the | |
3575 // RecordWrite stub that notifies the incremental marker. | |
3576 __ RememberedSetHelper(elements, | |
3577 index1, | |
3578 scratch2, | |
3579 kDontSaveFPRegs, | |
3580 MacroAssembler::kFallThroughAtEnd); | |
3581 __ RememberedSetHelper(elements, | |
3582 index2, | |
3583 scratch2, | |
3584 kDontSaveFPRegs, | |
3585 MacroAssembler::kFallThroughAtEnd); | |
3586 | |
3587 __ bind(&no_remembered_set); | |
3588 // We are done. Drop elements from the stack, and return undefined. | |
3589 __ Drop(3); | |
3590 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); | |
3591 __ jmp(&done); | |
3592 | |
3593 __ bind(&slow_case); | |
3594 __ CallRuntime(Runtime::kSwapElements, 3); | |
3595 | |
3596 __ bind(&done); | |
3597 context()->Plug(v0); | |
3598 } | |
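Two details of the deleted body are easy to miss. The bounds check at old lines 3541-3547 uses unsigned comparisons against the smi-tagged array length, which rejects an out-of-range index and a negative index in the same branch, since a negative smi has its sign bit set and compares as a huge unsigned value. The address computation at old lines 3549-3555 converts a smi-tagged index straight into a byte offset: on 32-bit MIPS a smi is the value shifted left by kSmiTagSize == 1 and an element offset is the value shifted left by kPointerSizeLog2 == 2, so a single shift by the difference converts one into the other. A standalone sketch of both calculations, with the constants assumed from 32-bit V8 of this era:

#include <cassert>
#include <cstdint>
#include <cstdio>

// Constants as in 32-bit V8 of this vintage (assumed, not imported).
const int kSmiTagSize = 1;            // smis are value << 1, low bit 0
const int kPointerSizeLog2 = 2;       // 4-byte pointers on MIPS32
const int kHeapObjectTag = 1;         // heap pointers have low bit 1
const int kFixedArrayHeaderSize = 8;  // map word + length word

// A smi-tagged value: the payload shifted up by one bit.
uint32_t SmiTag(int32_t value) {
  return static_cast<uint32_t>(value) << kSmiTagSize;
}

// The check from old lines 3541-3547: one unsigned compare rejects both
// too-large and negative indices, because a negative smi's sign bit
// makes it compare as a huge unsigned number.
bool InBounds(uint32_t length_smi, uint32_t index_smi) {
  return index_smi < length_smi;  // unsigned 'hi' on the length
}

// The address math from old lines 3549-3555: shifting the smi left by
// kPointerSizeLog2 - kSmiTagSize yields value * 4, the byte offset of
// the element within the FixedArray's payload.
uint32_t ElementAddress(uint32_t elements_tagged, uint32_t index_smi) {
  uint32_t base = elements_tagged + kFixedArrayHeaderSize - kHeapObjectTag;
  return base + (index_smi << (kPointerSizeLog2 - kSmiTagSize));
}

int main() {
  uint32_t len = SmiTag(10);
  assert(InBounds(len, SmiTag(3)));
  assert(!InBounds(len, SmiTag(10)));  // too large
  assert(!InBounds(len, SmiTag(-1)));  // negative looks huge unsigned
  // Element 3 of a FixedArray whose tagged pointer is 0x1001:
  std::printf("address: 0x%x\n", ElementAddress(0x1001, SmiTag(3)));
  return 0;
}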
3599 | |
3600 | |
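The remembered-set block at old lines 3563-3587 is the subtle part of the deletion. After the two stores into the elements backing store, generational GC needs both slots recorded unless the page is already flagged SCAN_ON_SCAVENGE, in which case the scavenger will rescan the whole page anyway; and, as the comment argues, the incremental-marking barrier can be skipped because the marker never pauses mid-object, so swapping two values within a single FixedArray cannot hide either of them from it. A toy model of the guard, with hypothetical Page and RememberedSet types standing in for the real heap machinery:

#include <cstdint>
#include <unordered_set>

// Hypothetical stand-ins for V8's heap types; illustrative only.
struct Page {
  static constexpr uint32_t SCAN_ON_SCAVENGE = 1u << 0;  // flag bit (assumed)
  uint32_t flags = 0;
};

struct RememberedSet {
  std::unordered_set<uintptr_t> slots;  // addresses the scavenger revisits
  void Insert(uintptr_t slot) { slots.insert(slot); }
};

// Models the CheckPageFlag + RememberedSetHelper sequence: if the page
// is already scanned wholesale on scavenge, recording individual slots
// would be redundant, so the stores skip straight past the helpers.
void RecordSwappedSlots(Page& page, RememberedSet& rs,
                        uintptr_t slot1, uintptr_t slot2) {
  if (page.flags & Page::SCAN_ON_SCAVENGE) return;  // -> &no_remembered_set
  rs.Insert(slot1);  // RememberedSetHelper(elements, index1, ...)
  rs.Insert(slot2);  // RememberedSetHelper(elements, index2, ...)
}

int main() {
  Page page;
  RememberedSet rs;
  RecordSwappedSlots(page, rs, 0x1014, 0x1018);
  return rs.slots.size() == 2 ? 0 : 1;
}

Both helper calls in the original are emitted with MacroAssembler::kFallThroughAtEnd, which is why the assembler code simply falls through to the shared no_remembered_set join point instead of returning.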
3601 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { | 3503 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { |
3602 ZoneList<Expression*>* args = expr->arguments(); | 3504 ZoneList<Expression*>* args = expr->arguments(); |
3603 ASSERT_EQ(2, args->length()); | 3505 ASSERT_EQ(2, args->length()); |
3604 | 3506 |
3605 ASSERT_NE(NULL, args->at(0)->AsLiteral()); | 3507 ASSERT_NE(NULL, args->at(0)->AsLiteral()); |
3606 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); | 3508 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); |
3607 | 3509 |
3608 Handle<FixedArray> jsfunction_result_caches( | 3510 Handle<FixedArray> jsfunction_result_caches( |
3609 isolate()->global_context()->jsfunction_result_caches()); | 3511 isolate()->global_context()->jsfunction_result_caches()); |
3610 if (jsfunction_result_caches->length() <= cache_id) { | 3512 if (jsfunction_result_caches->length() <= cache_id) { |
(...skipping 1059 matching lines...)
4670 *context_length = 0; | 4572 *context_length = 0; |
4671 return previous_; | 4573 return previous_; |
4672 } | 4574 } |
4673 | 4575 |
4674 | 4576 |
4675 #undef __ | 4577 #undef __ |
4676 | 4578 |
4677 } } // namespace v8::internal | 4579 } } // namespace v8::internal |
4678 | 4580 |
4679 #endif // V8_TARGET_ARCH_MIPS | 4581 #endif // V8_TARGET_ARCH_MIPS |