Chromium Code Reviews

Unified Diff: src/mips/stub-cache-mips.cc

Issue 10105026: Version 3.10.3 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 8 years, 8 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 547 matching lines...)
                                      Register name,
                                      Handle<JSObject> holder_obj) {
   __ push(name);
   Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
   ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
   Register scratch = name;
   __ li(scratch, Operand(interceptor));
   __ Push(scratch, receiver, holder);
   __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
   __ push(scratch);
+  __ li(scratch, Operand(ExternalReference::isolate_address()));
+  __ push(scratch);
 }


 static void CompileCallLoadPropertyWithInterceptor(
     MacroAssembler* masm,
     Register receiver,
     Register holder,
     Register name,
     Handle<JSObject> holder_obj) {
   PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

   ExternalReference ref =
       ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                         masm->isolate());
-  __ PrepareCEntryArgs(5);
+  __ PrepareCEntryArgs(6);
   __ PrepareCEntryFunction(ref);

   CEntryStub stub(1);
   __ CallStub(&stub);
 }


-static const int kFastApiCallArguments = 3;
+static const int kFastApiCallArguments = 4;


-// Reserves space for the extra arguments to FastHandleApiCall in the
+// Reserves space for the extra arguments to API function in the
 // caller's frame.
 //
 // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
 static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                        Register scratch) {
   ASSERT(Smi::FromInt(0) == 0);
   for (int i = 0; i < kFastApiCallArguments; i++) {
     __ push(zero_reg);
   }
 }


 // Undoes the effects of ReserveSpaceForFastApiCall.
 static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
   __ Drop(kFastApiCallArguments);
 }


 static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       int argc) {
   // ----------- S t a t e -------------
   // -- sp[0] : holder (set by CheckPrototypes)
   // -- sp[4] : callee JS function
   // -- sp[8] : call data
-  // -- sp[12] : last JS argument
+  // -- sp[12] : isolate
+  // -- sp[16] : last JS argument
   // -- ...
   // -- sp[(argc + 3) * 4] : first JS argument
   // -- sp[(argc + 4) * 4] : receiver
   // -----------------------------------
   // Get the function and setup the context.
   Handle<JSFunction> function = optimization.constant_function();
   __ LoadHeapObject(t1, function);
   __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));

-  // Pass the additional arguments FastHandleApiCall expects.
+  // Pass the additional arguments.
   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
   Handle<Object> call_data(api_call_info->data());
   if (masm->isolate()->heap()->InNewSpace(*call_data)) {
     __ li(a0, api_call_info);
     __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
   } else {
     __ li(t2, call_data);
   }

-  // Store JS function and call data.
+  __ li(t3, Operand(ExternalReference::isolate_address()));
+  // Store JS function, call data and isolate.
   __ sw(t1, MemOperand(sp, 1 * kPointerSize));
   __ sw(t2, MemOperand(sp, 2 * kPointerSize));
+  __ sw(t3, MemOperand(sp, 3 * kPointerSize));

-  // a2 points to call data as expected by Arguments
-  // (refer to layout above).
-  __ Addu(a2, sp, Operand(2 * kPointerSize));
+  // Prepare arguments.
+  __ Addu(a2, sp, Operand(3 * kPointerSize));

+  // Allocate the v8::Arguments structure in the arguments' space since
+  // it's not controlled by GC.
   const int kApiStackSpace = 4;

   FrameScope frame_scope(masm, StackFrame::MANUAL);
   __ EnterExitFrame(false, kApiStackSpace);

   // NOTE: the O32 abi requires a0 to hold a special pointer when returning a
   // struct from the function (which is currently the case). This means we pass
   // the first argument in a1 instead of a0. TryCallApiFunctionAndReturn
   // will handle setting up a0.

   // a1 = v8::Arguments&
   // Arguments is built at sp + 1 (sp is a reserved spot for ra).
   __ Addu(a1, sp, kPointerSize);

-  // v8::Arguments::implicit_args = data
+  // v8::Arguments::implicit_args_
   __ sw(a2, MemOperand(a1, 0 * kPointerSize));
-  // v8::Arguments::values = last argument
+  // v8::Arguments::values_
   __ Addu(t0, a2, Operand(argc * kPointerSize));
   __ sw(t0, MemOperand(a1, 1 * kPointerSize));
   // v8::Arguments::length_ = argc
   __ li(t0, Operand(argc));
   __ sw(t0, MemOperand(a1, 2 * kPointerSize));
   // v8::Arguments::is_construct_call = 0
   __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize));

   const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
   Address function_address = v8::ToCData<Address>(api_call_info->callback());
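Note on this hunk: the recurring change in this patch is that generated stubs now pass the current isolate's address explicitly, so the related counts each grow by one. PrepareCEntryArgs and the interceptor runtime calls go from 5 to 6 arguments, and kFastApiCallArguments goes from 3 to 4 because the reserved fast-API words are now holder, callee JS function, call data and isolate. The unwind count after the call keeps the form kStackUnwindSpace = argc + kFastApiCallArguments + 1. A minimal standalone sketch of that arithmetic follows; the argc value is picked only for illustration and is not from the patch.

    // Sketch of the stack accounting in GenerateFastApiDirectCall above.
    #include <cassert>

    int main() {
      const int kFastApiCallArguments = 4;  // holder, callee JS function, call data, isolate
      const int argc = 2;                   // example JS argument count (assumed)
      // +1 covers the receiver slot beyond the JS arguments.
      const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
      assert(kStackUnwindSpace == 7);
      return 0;
    }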
(...skipping 157 matching lines...)
     FrameScope scope(masm, StackFrame::INTERNAL);
     // Save the name_ register across the call.
     __ push(name_);

     PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);

     __ CallExternalReference(
         ExternalReference(
             IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
             masm->isolate()),
-        5);
+        6);
     // Restore the name_ register.
     __ pop(name_);
     // Leave the internal frame.
   }

   void LoadWithInterceptor(MacroAssembler* masm,
                            Register receiver,
                            Register holder,
                            Handle<JSObject> holder_obj,
                            Register scratch,
(...skipping 348 matching lines...)
   // Build AccessorInfo::args_ list on the stack and push property name below
   // the exit frame to make GC aware of them and store pointers to them.
   __ push(receiver);
   __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
   if (heap()->InNewSpace(callback->data())) {
     __ li(scratch3, callback);
     __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
   } else {
     __ li(scratch3, Handle<Object>(callback->data()));
   }
-  __ Push(reg, scratch3, name_reg);
+  __ Subu(sp, sp, 4 * kPointerSize);
+  __ sw(reg, MemOperand(sp, 3 * kPointerSize));
+  __ sw(scratch3, MemOperand(sp, 2 * kPointerSize));
+  __ li(scratch3, Operand(ExternalReference::isolate_address()));
+  __ sw(scratch3, MemOperand(sp, 1 * kPointerSize));
+  __ sw(name_reg, MemOperand(sp, 0 * kPointerSize));
+
   __ mov(a2, scratch2);  // Saved in case scratch2 == a1.
   __ mov(a1, sp);  // a1 (first argument - see note below) = Handle<String>

   // NOTE: the O32 abi requires a0 to hold a special pointer when returning a
   // struct from the function (which is currently the case). This means we pass
   // the arguments in a1-a2 instead of a0-a1. TryCallApiFunctionAndReturn
   // will handle setting up a0.

   const int kApiStackSpace = 1;
   FrameScope frame_scope(masm(), StackFrame::MANUAL);
   __ EnterExitFrame(false, kApiStackSpace);

   // Create AccessorInfo instance on the stack above the exit frame with
   // scratch2 (internal::Object** args_) as the data.
   __ sw(a2, MemOperand(sp, kPointerSize));
   // a2 (second argument - see note above) = AccessorInfo&
   __ Addu(a2, sp, kPointerSize);

-  const int kStackUnwindSpace = 4;
+  const int kStackUnwindSpace = 5;
   Address getter_address = v8::ToCData<Address>(callback->getter());
   ApiFunction fun(getter_address);
   ExternalReference ref =
       ExternalReference(&fun,
                         ExternalReference::DIRECT_GETTER_CALL,
                         masm()->isolate());
   __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
 }


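The getter path above now builds the AccessorInfo::args_ block with explicit stores rather than a single Push, so the isolate address can be slotted between the callback data and the property name, and kStackUnwindSpace rises from 4 to 5 to drop the extra word on return. A compilable sketch of the resulting five-word layout, lowest address first, is below; the struct and its field names are mine, inferred from the stores above, not V8 declarations.

    #include <cstdint>

    struct AccessorArgsWords {  // illustrative model only
      uintptr_t name;           // sp + 0 * kPointerSize
      uintptr_t isolate;        // sp + 1 * kPointerSize  (new slot)
      uintptr_t data;           // sp + 2 * kPointerSize
      uintptr_t holder;         // sp + 3 * kPointerSize
      uintptr_t receiver;       // pushed earlier; scratch2/a2 points at this word
    };

    static_assert(sizeof(AccessorArgsWords) == 5 * sizeof(uintptr_t),
                  "kStackUnwindSpace == 5: five words are dropped after the getter");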
(...skipping 95 matching lines...)
       // holder.
       ASSERT(lookup->type() == CALLBACKS);
       Handle<AccessorInfo> callback(
           AccessorInfo::cast(lookup->GetCallbackObject()));
       ASSERT(callback->getter() != NULL);

       // Tail call to runtime.
       // Important invariant in CALLBACKS case: the code above must be
       // structured to never clobber |receiver| register.
       __ li(scratch2, callback);
-      // holder_reg is either receiver or scratch1.
-      if (!receiver.is(holder_reg)) {
-        ASSERT(scratch1.is(holder_reg));
-        __ Push(receiver, holder_reg);
-        __ lw(scratch3,
-              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
-        __ Push(scratch3, scratch2, name_reg);
-      } else {
-        __ push(receiver);
-        __ lw(scratch3,
-              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
-        __ Push(holder_reg, scratch3, scratch2, name_reg);
-      }
+
+      __ Push(receiver, holder_reg);
+      __ lw(scratch3,
+            FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
+      __ li(scratch1, Operand(ExternalReference::isolate_address()));
+      __ Push(scratch3, scratch1, scratch2, name_reg);

       ExternalReference ref =
           ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                             masm()->isolate());
-      __ TailCallExternalReference(ref, 5, 1);
+      __ TailCallExternalReference(ref, 6, 1);
     }
   } else {  // !compile_followup_inline
     // Call the runtime system to load the interceptor.
     // Check that the maps haven't changed.
     Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                           scratch1, scratch2, scratch3,
                                           name, miss);
     PushInterceptorArguments(masm(), receiver, holder_reg,
                              name_reg, interceptor_holder);

     ExternalReference ref = ExternalReference(
         IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
-    __ TailCallExternalReference(ref, 5, 1);
+    __ TailCallExternalReference(ref, 6, 1);
   }
 }


 void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
     __ Branch(miss, ne, a2, Operand(name));
   }
 }

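Same change in this hunk: the CALLBACKS tail call and both interceptor tail calls now pass 6 arguments instead of 5 because the isolate address is pushed alongside the callback data. For reference, a small compilable sketch of the order in which the six words land on the stack for IC::kLoadCallbackProperty, deepest first, as inferred from the Push sequence above; the names are descriptive only, not V8 identifiers.

    #include <array>
    #include <string_view>

    // receiver and holder are pushed first, then the callback data,
    // the isolate, the AccessorInfo object and the property name.
    constexpr std::array<std::string_view, 6> kLoadCallbackPropertyArgs = {
        "receiver", "holder", "callback data", "isolate", "AccessorInfo", "name"};

    static_assert(kLoadCallbackPropertyArgs.size() == 6,
                  "TailCallExternalReference(ref, 6, 1) consumes six words");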
(...skipping 1980 matching lines...)
     case FAST_DOUBLE_ELEMENTS:
     case DICTIONARY_ELEMENTS:
     case NON_STRICT_ARGUMENTS_ELEMENTS:
       UNREACHABLE();
       return false;
   }
   return false;
 }


+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+                                Register key,
+                                Register scratch0,
+                                Register scratch1,
+                                FPURegister double_scratch0,
+                                Label* fail) {
+  if (CpuFeatures::IsSupported(FPU)) {
+    CpuFeatures::Scope scope(FPU);
+    Label key_ok;
+    // Check for smi or a smi inside a heap number. We convert the heap
+    // number and check if the conversion is exact and fits into the smi
+    // range.
+    __ JumpIfSmi(key, &key_ok);
+    __ CheckMap(key,
+                scratch0,
+                Heap::kHeapNumberMapRootIndex,
+                fail,
+                DONT_DO_SMI_CHECK);
+    __ ldc1(double_scratch0, FieldMemOperand(key, HeapNumber::kValueOffset));
+    __ EmitFPUTruncate(kRoundToZero,
+                       double_scratch0,
+                       double_scratch0,
+                       scratch0,
+                       scratch1,
+                       kCheckForInexactConversion);
+
+    __ Branch(fail, ne, scratch1, Operand(zero_reg));
+
+    __ mfc1(scratch0, double_scratch0);
+    __ SmiTagCheckOverflow(key, scratch0, scratch1);
+    __ BranchOnOverflow(fail, scratch1);
+    __ bind(&key_ok);
+  } else {
+    // Check that the key is a smi.
+    __ JumpIfNotSmi(key, fail);
+  }
+}
+
+
 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
     MacroAssembler* masm,
     ElementsKind elements_kind) {
   // ---------- S t a t e --------------
   // -- ra : return address
   // -- a0 : key
   // -- a1 : receiver
   // -----------------------------------
   Label miss_force_generic, slow, failed_allocation;

   Register key = a0;
   Register receiver = a1;

   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.

-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);

   __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   // a3: elements array

   // Check that the index is in range.
   __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
   __ sra(t2, key, kSmiTagSize);
   // Unsigned comparison catches both negative and too-large values.
   __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));

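The new GenerateSmiKeyCheck helper is what every keyed stub below switches to: when the FPU is available it also accepts a heap-number key whose value converts exactly to a smi (for example a key of 3.0), and otherwise it falls back to the old smi-only check. A rough C++ model of the condition the emitted FPU path enforces follows; the smi bounds assume 31-bit smis on 32-bit MIPS, and the sketch is mine, not code from the patch.

    #include <cmath>

    // True when GenerateSmiKeyCheck would fall through to key_ok for a
    // heap-number key holding this double value.
    bool HeapNumberKeyConvertsToSmi(double key) {
      if (!std::isfinite(key)) return false;   // NaN/Infinity never truncate exactly
      double truncated = std::trunc(key);
      if (truncated != key) return false;      // kCheckForInexactConversion
      const double kSmiMin = -1073741824.0;    // -2^30 (assumed smi range)
      const double kSmiMax = 1073741823.0;     //  2^30 - 1
      return truncated >= kSmiMin && truncated <= kSmiMax;  // SmiTagCheckOverflow
    }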
(...skipping 317 matching lines...)

   // Register usage.
   Register value = a0;
   Register key = a1;
   Register receiver = a2;
   // a3 mostly holds the elements array or the destination external array.

   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.

-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);

   __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));

   // Check that the index is in range.
   __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
   // Unsigned comparison catches both negative and too-large values.
   __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));

   // Handle both smis and HeapNumbers in the fast path. Go to the
   // runtime for all other kinds of values.
(...skipping 358 matching lines...)
   // ----------- S t a t e -------------
   // -- ra : return address
   // -- a0 : key
   // -- a1 : receiver
   // -----------------------------------
   Label miss_force_generic;

   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.

-  // Check that the key is a smi.
-  __ JumpIfNotSmi(a0, &miss_force_generic, at, USE_DELAY_SLOT);
-  // The delay slot can be safely used here, a1 is an object pointer.
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, a0, t0, t1, f2, &miss_force_generic);

   // Get the elements array.
   __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
   __ AssertFastElements(a2);

   // Check that the key is within bounds.
   __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
   __ Branch(USE_DELAY_SLOT, &miss_force_generic, hs, a0, Operand(a3));

   // Load the result and make sure it's not the hole.
(...skipping 29 matching lines...)
   Register heap_number_reg = a2;
   Register indexed_double_offset = a3;
   Register scratch = t0;
   Register scratch2 = t1;
   Register scratch3 = t2;
   Register heap_number_map = t3;

   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.

-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);

   // Get the elements array.
   __ lw(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));

   // Check that the key is within bounds.
   __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
   __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));

   // Load the upper word of the double in the fixed array and test for NaN.
(...skipping 53 matching lines...)
   Register receiver_reg = a2;
   Register scratch = t0;
   Register elements_reg = a3;
   Register length_reg = t1;
   Register scratch2 = t2;
   Register scratch3 = t3;

   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.

-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);

   if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
     __ JumpIfNotSmi(value_reg, &transition_elements_kind);
   }

   // Check that the key is within bounds.
   __ lw(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   if (is_js_array) {
     __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
(...skipping 145 matching lines...)
   Register receiver_reg = a2;
   Register elements_reg = a3;
   Register scratch1 = t0;
   Register scratch2 = t1;
   Register scratch3 = t2;
   Register scratch4 = t3;
   Register length_reg = t3;

   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);

   __ lw(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));

   // Check that the key is within bounds.
   if (is_js_array) {
     __ lw(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
   } else {
     __ lw(scratch1,
           FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
(...skipping 96 matching lines...)
     __ Jump(ic_slow, RelocInfo::CODE_TARGET);
   }
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_MIPS