OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 485 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
496 Immediate(reinterpret_cast<int>(masm->isolate()))); | 496 Immediate(reinterpret_cast<int>(masm->isolate()))); |
497 __ mov(Operand(esp, 5 * kPointerSize), | 497 __ mov(Operand(esp, 5 * kPointerSize), |
498 masm->isolate()->factory()->undefined_value()); | 498 masm->isolate()->factory()->undefined_value()); |
499 __ mov(Operand(esp, 6 * kPointerSize), | 499 __ mov(Operand(esp, 6 * kPointerSize), |
500 masm->isolate()->factory()->undefined_value()); | 500 masm->isolate()->factory()->undefined_value()); |
501 | 501 |
502 // Prepare arguments. | 502 // Prepare arguments. |
503 STATIC_ASSERT(kFastApiCallArguments == 6); | 503 STATIC_ASSERT(kFastApiCallArguments == 6); |
504 __ lea(eax, Operand(esp, kFastApiCallArguments * kPointerSize)); | 504 __ lea(eax, Operand(esp, kFastApiCallArguments * kPointerSize)); |
505 | 505 |
506 | 506 const int kApiArgc = 1; // API function gets reference to the v8::Arguments. |
507 // API function gets reference to the v8::Arguments. If CPU profiler | |
508 // is enabled wrapper function will be called and we need to pass | |
509 // address of the callback as additional parameter, always allocate | |
510 // space for it. | |
511 const int kApiArgc = 1 + 1; | |
512 | 507 |
513 // Allocate the v8::Arguments structure in the arguments' space since | 508 // Allocate the v8::Arguments structure in the arguments' space since |
514 // it's not controlled by GC. | 509 // it's not controlled by GC. |
515 const int kApiStackSpace = 4; | 510 const int kApiStackSpace = 4; |
516 | 511 |
517 // Function address is a foreign pointer outside V8's heap. | 512 // Function address is a foreign pointer outside V8's heap. |
518 Address function_address = v8::ToCData<Address>(api_call_info->callback()); | 513 Address function_address = v8::ToCData<Address>(api_call_info->callback()); |
519 bool returns_handle = | 514 bool returns_handle = |
520 !CallbackTable::ReturnsVoid(masm->isolate(), | 515 !CallbackTable::ReturnsVoid(masm->isolate(), |
521 reinterpret_cast<void*>(function_address)); | 516 reinterpret_cast<void*>(function_address)); |
522 __ PrepareCallApiFunction(kApiArgc + kApiStackSpace, returns_handle); | 517 __ PrepareCallApiFunction(kApiArgc + kApiStackSpace, returns_handle); |
523 | 518 |
524 // v8::Arguments::implicit_args_. | 519 // v8::Arguments::implicit_args_. |
525 __ mov(ApiParameterOperand(2, returns_handle), eax); | 520 __ mov(ApiParameterOperand(1, returns_handle), eax); |
526 __ add(eax, Immediate(argc * kPointerSize)); | 521 __ add(eax, Immediate(argc * kPointerSize)); |
527 // v8::Arguments::values_. | 522 // v8::Arguments::values_. |
528 __ mov(ApiParameterOperand(3, returns_handle), eax); | 523 __ mov(ApiParameterOperand(2, returns_handle), eax); |
529 // v8::Arguments::length_. | 524 // v8::Arguments::length_. |
530 __ Set(ApiParameterOperand(4, returns_handle), Immediate(argc)); | 525 __ Set(ApiParameterOperand(3, returns_handle), Immediate(argc)); |
531 // v8::Arguments::is_construct_call_. | 526 // v8::Arguments::is_construct_call_. |
532 __ Set(ApiParameterOperand(5, returns_handle), Immediate(0)); | 527 __ Set(ApiParameterOperand(4, returns_handle), Immediate(0)); |
533 | 528 |
534 // v8::InvocationCallback's argument. | 529 // v8::InvocationCallback's argument. |
535 __ lea(eax, ApiParameterOperand(2, returns_handle)); | 530 __ lea(eax, ApiParameterOperand(1, returns_handle)); |
536 __ mov(ApiParameterOperand(0, returns_handle), eax); | 531 __ mov(ApiParameterOperand(0, returns_handle), eax); |
537 | 532 |
538 Address thunk_address = returns_handle | |
539 ? FUNCTION_ADDR(&InvokeInvocationCallback) | |
540 : FUNCTION_ADDR(&InvokeFunctionCallback); | |
541 | |
542 __ CallApiFunctionAndReturn(function_address, | 533 __ CallApiFunctionAndReturn(function_address, |
543 thunk_address, | |
544 ApiParameterOperand(1, returns_handle), | |
545 argc + kFastApiCallArguments + 1, | 534 argc + kFastApiCallArguments + 1, |
546 returns_handle, | 535 returns_handle, |
547 kFastApiCallArguments + 1); | 536 kFastApiCallArguments + 1); |
548 } | 537 } |
549 | 538 |
550 | 539 |
551 class CallInterceptorCompiler BASE_EMBEDDED { | 540 class CallInterceptorCompiler BASE_EMBEDDED { |
552 public: | 541 public: |
553 CallInterceptorCompiler(StubCompiler* stub_compiler, | 542 CallInterceptorCompiler(StubCompiler* stub_compiler, |
554 const ParameterCount& arguments, | 543 const ParameterCount& arguments, |
(...skipping 288 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
843 miss_restore_name); | 832 miss_restore_name); |
844 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) { | 833 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) { |
845 GenerateDictionaryNegativeLookup( | 834 GenerateDictionaryNegativeLookup( |
846 masm, miss_restore_name, holder_reg, name, scratch1, scratch2); | 835 masm, miss_restore_name, holder_reg, name, scratch1, scratch2); |
847 } | 836 } |
848 } | 837 } |
849 } | 838 } |
850 | 839 |
851 Register storage_reg = name_reg; | 840 Register storage_reg = name_reg; |
852 | 841 |
853 if (details.type() == CONSTANT_FUNCTION) { | 842 if (FLAG_track_fields && representation.IsSmi()) { |
854 Handle<HeapObject> constant( | 843 __ JumpIfNotSmi(value_reg, miss_restore_name); |
855 HeapObject::cast(descriptors->GetValue(descriptor))); | |
856 __ LoadHeapObject(scratch1, constant); | |
857 __ cmp(value_reg, scratch1); | |
858 __ j(not_equal, miss_restore_name); | |
859 } else if (FLAG_track_fields && representation.IsSmi()) { | |
860 __ JumpIfNotSmi(value_reg, miss_restore_name); | |
861 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { | 844 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { |
862 __ JumpIfSmi(value_reg, miss_restore_name); | 845 __ JumpIfSmi(value_reg, miss_restore_name); |
863 } else if (FLAG_track_double_fields && representation.IsDouble()) { | 846 } else if (FLAG_track_double_fields && representation.IsDouble()) { |
864 Label do_store, heap_number; | 847 Label do_store, heap_number; |
865 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow); | 848 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow); |
866 | 849 |
867 __ JumpIfNotSmi(value_reg, &heap_number); | 850 __ JumpIfNotSmi(value_reg, &heap_number); |
868 __ SmiUntag(value_reg); | 851 __ SmiUntag(value_reg); |
869 if (CpuFeatures::IsSupported(SSE2)) { | 852 if (CpuFeatures::IsSupported(SSE2)) { |
870 CpuFeatureScope use_sse2(masm, SSE2); | 853 CpuFeatureScope use_sse2(masm, SSE2); |
(...skipping 23 matching lines...) Expand all Loading... |
894 } else { | 877 } else { |
895 __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset)); | 878 __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset)); |
896 } | 879 } |
897 } | 880 } |
898 | 881 |
899 // Stub never generated for non-global objects that require access | 882 // Stub never generated for non-global objects that require access |
900 // checks. | 883 // checks. |
901 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); | 884 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); |
902 | 885 |
903 // Perform map transition for the receiver if necessary. | 886 // Perform map transition for the receiver if necessary. |
904 if (details.type() == FIELD && | 887 if (object->map()->unused_property_fields() == 0) { |
905 object->map()->unused_property_fields() == 0) { | |
906 // The properties must be extended before we can store the value. | 888 // The properties must be extended before we can store the value. |
907 // We jump to a runtime call that extends the properties array. | 889 // We jump to a runtime call that extends the properties array. |
908 __ pop(scratch1); // Return address. | 890 __ pop(scratch1); // Return address. |
909 __ push(receiver_reg); | 891 __ push(receiver_reg); |
910 __ push(Immediate(transition)); | 892 __ push(Immediate(transition)); |
911 __ push(value_reg); | 893 __ push(value_reg); |
912 __ push(scratch1); | 894 __ push(scratch1); |
913 __ TailCallExternalReference( | 895 __ TailCallExternalReference( |
914 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), | 896 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), |
915 masm->isolate()), | 897 masm->isolate()), |
916 3, | 898 3, |
917 1); | 899 1); |
918 return; | 900 return; |
919 } | 901 } |
920 | 902 |
921 // Update the map of the object. | 903 // Update the map of the object. |
922 __ mov(scratch1, Immediate(transition)); | 904 __ mov(scratch1, Immediate(transition)); |
923 __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); | 905 __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); |
924 | 906 |
925 // Update the write barrier for the map field. | 907 // Update the write barrier for the map field. |
926 __ RecordWriteField(receiver_reg, | 908 __ RecordWriteField(receiver_reg, |
927 HeapObject::kMapOffset, | 909 HeapObject::kMapOffset, |
928 scratch1, | 910 scratch1, |
929 scratch2, | 911 scratch2, |
930 kDontSaveFPRegs, | 912 kDontSaveFPRegs, |
931 OMIT_REMEMBERED_SET, | 913 OMIT_REMEMBERED_SET, |
932 OMIT_SMI_CHECK); | 914 OMIT_SMI_CHECK); |
933 | 915 |
934 if (details.type() == CONSTANT_FUNCTION) return; | |
935 | |
936 int index = transition->instance_descriptors()->GetFieldIndex( | 916 int index = transition->instance_descriptors()->GetFieldIndex( |
937 transition->LastAdded()); | 917 transition->LastAdded()); |
938 | 918 |
939 // Adjust for the number of properties stored in the object. Even in the | 919 // Adjust for the number of properties stored in the object. Even in the |
940 // face of a transition we can use the old map here because the size of the | 920 // face of a transition we can use the old map here because the size of the |
941 // object and the number of in-object properties is not going to change. | 921 // object and the number of in-object properties is not going to change. |
942 index -= object->map()->inobject_properties(); | 922 index -= object->map()->inobject_properties(); |
943 | 923 |
944 SmiCheck smi_check = representation.IsTagged() | 924 SmiCheck smi_check = representation.IsTagged() |
945 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 925 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
(...skipping 473 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1419 __ push(scratch2()); | 1399 __ push(scratch2()); |
1420 | 1400 |
1421 __ push(name()); // name | 1401 __ push(name()); // name |
1422 __ mov(ebx, esp); // esp points to reference to name (handler). | 1402 __ mov(ebx, esp); // esp points to reference to name (handler). |
1423 | 1403 |
1424 __ push(scratch3()); // Restore return address. | 1404 __ push(scratch3()); // Restore return address. |
1425 | 1405 |
1426 // array for v8::Arguments::values_, handler for name and pointer | 1406 // array for v8::Arguments::values_, handler for name and pointer |
1427 // to the values (it is considered as smi in GC). | 1407 // to the values (it is considered as smi in GC). |
1428 const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2; | 1408 const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2; |
1429 // Allocate space for optional callback address parameter in case | 1409 const int kApiArgc = 2; |
1430 // CPU profiler is active. | |
1431 const int kApiArgc = 2 + 1; | |
1432 | 1410 |
1433 Address getter_address = v8::ToCData<Address>(callback->getter()); | 1411 Address getter_address = v8::ToCData<Address>(callback->getter()); |
1434 bool returns_handle = | 1412 bool returns_handle = |
1435 !CallbackTable::ReturnsVoid(isolate(), | 1413 !CallbackTable::ReturnsVoid(isolate(), |
1436 reinterpret_cast<void*>(getter_address)); | 1414 reinterpret_cast<void*>(getter_address)); |
1437 __ PrepareCallApiFunction(kApiArgc, returns_handle); | 1415 __ PrepareCallApiFunction(kApiArgc, returns_handle); |
1438 __ mov(ApiParameterOperand(0, returns_handle), ebx); // name. | 1416 __ mov(ApiParameterOperand(0, returns_handle), ebx); // name. |
1439 __ add(ebx, Immediate(kPointerSize)); | 1417 __ add(ebx, Immediate(kPointerSize)); |
1440 __ mov(ApiParameterOperand(1, returns_handle), ebx); // arguments pointer. | 1418 __ mov(ApiParameterOperand(1, returns_handle), ebx); // arguments pointer. |
1441 | 1419 |
1442 // Emitting a stub call may try to allocate (if the code is not | 1420 // Emitting a stub call may try to allocate (if the code is not |
1443 // already generated). Do not allow the assembler to perform a | 1421 // already generated). Do not allow the assembler to perform a |
1444 // garbage collection but instead return the allocation failure | 1422 // garbage collection but instead return the allocation failure |
1445 // object. | 1423 // object. |
1446 | 1424 |
1447 Address thunk_address = returns_handle | |
1448 ? FUNCTION_ADDR(&InvokeAccessorGetter) | |
1449 : FUNCTION_ADDR(&InvokeAccessorGetterCallback); | |
1450 | |
1451 __ CallApiFunctionAndReturn(getter_address, | 1425 __ CallApiFunctionAndReturn(getter_address, |
1452 thunk_address, | |
1453 ApiParameterOperand(2, returns_handle), | |
1454 kStackSpace, | 1426 kStackSpace, |
1455 returns_handle, | 1427 returns_handle, |
1456 6); | 1428 6); |
1457 } | 1429 } |
1458 | 1430 |
1459 | 1431 |
1460 void BaseLoadStubCompiler::GenerateLoadConstant(Handle<JSFunction> value) { | 1432 void BaseLoadStubCompiler::GenerateLoadConstant(Handle<JSFunction> value) { |
1461 // Return the constant value. | 1433 // Return the constant value. |
1462 __ LoadHeapObject(eax, value); | 1434 __ LoadHeapObject(eax, value); |
1463 __ ret(0); | 1435 __ ret(0); |
(...skipping 2307 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3771 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); | 3743 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); |
3772 } | 3744 } |
3773 } | 3745 } |
3774 | 3746 |
3775 | 3747 |
3776 #undef __ | 3748 #undef __ |
3777 | 3749 |
3778 } } // namespace v8::internal | 3750 } } // namespace v8::internal |
3779 | 3751 |
3780 #endif // V8_TARGET_ARCH_IA32 | 3752 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |