| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 15 matching lines...) |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #if V8_TARGET_ARCH_IA32 | 30 #if V8_TARGET_ARCH_IA32 |
| 31 | 31 |
| 32 #include "bootstrapper.h" | 32 #include "bootstrapper.h" |
| 33 #include "codegen.h" | 33 #include "codegen.h" |
| 34 #include "cpu-profiler.h" | 34 #include "cpu-profiler.h" |
| 35 #include "debug.h" | 35 #include "debug.h" |
| 36 #include "isolate-inl.h" |
| 36 #include "runtime.h" | 37 #include "runtime.h" |
| 37 #include "serialize.h" | 38 #include "serialize.h" |
| 38 | 39 |
| 39 namespace v8 { | 40 namespace v8 { |
| 40 namespace internal { | 41 namespace internal { |
| 41 | 42 |
| 42 // ------------------------------------------------------------------------- | 43 // ------------------------------------------------------------------------- |
| 43 // MacroAssembler implementation. | 44 // MacroAssembler implementation. |
| 44 | 45 |
| 45 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) | 46 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) |
| 46 : Assembler(arg_isolate, buffer, size), | 47 : Assembler(arg_isolate, buffer, size), |
| 47 generating_stub_(false), | 48 generating_stub_(false), |
| 48 allow_stub_calls_(true), | 49 allow_stub_calls_(true), |
| 49 has_frame_(false) { | 50 has_frame_(false) { |
| 50 if (isolate() != NULL) { | 51 if (isolate() != NULL) { |
| 51 // TODO(titzer): should we just use a null handle here instead? | 52 // TODO(titzer): should we just use a null handle here instead? |
| 52 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), | 53 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), |
| 53 isolate()); | 54 isolate()); |
| 54 } | 55 } |
| 55 } | 56 } |
| 56 | 57 |
| 57 | 58 |
| 59 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) { |
| 60 ASSERT(!r.IsDouble()); |
| 61 if (r.IsInteger8()) { |
| 62 movsx_b(dst, src); |
| 63 } else if (r.IsUInteger8()) { |
| 64 movzx_b(dst, src); |
| 65 } else if (r.IsInteger16()) { |
| 66 movsx_w(dst, src); |
| 67 } else if (r.IsUInteger16()) { |
| 68 movzx_w(dst, src); |
| 69 } else { |
| 70 mov(dst, src); |
| 71 } |
| 72 } |
| 73 |
| 74 |
| 75 void MacroAssembler::Store(Register src, const Operand& dst, Representation r) { |
| 76 ASSERT(!r.IsDouble()); |
| 77 if (r.IsInteger8() || r.IsUInteger8()) { |
| 78 mov_b(dst, src); |
| 79 } else if (r.IsInteger16() || r.IsUInteger16()) { |
| 80 mov_w(dst, src); |
| 81 } else { |
| 82 mov(dst, src); |
| 83 } |
| 84 } |
| 85 |
| 86 |
| 58 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { | 87 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { |
| 59 if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) { | 88 if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) { |
| 60 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]); | 89 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]); |
| 61 mov(destination, value); | 90 mov(destination, value); |
| 62 return; | 91 return; |
| 63 } | 92 } |
| 64 ExternalReference roots_array_start = | 93 ExternalReference roots_array_start = |
| 65 ExternalReference::roots_array_start(isolate()); | 94 ExternalReference::roots_array_start(isolate()); |
| 66 mov(destination, Immediate(index)); | 95 mov(destination, Immediate(index)); |
| 67 mov(destination, Operand::StaticArray(destination, | 96 mov(destination, Operand::StaticArray(destination, |
| (...skipping 791 matching lines...) |
| 859 push(scratch1); | 888 push(scratch1); |
| 860 fild_s(Operand(esp, 0)); | 889 fild_s(Operand(esp, 0)); |
| 861 pop(scratch1); | 890 pop(scratch1); |
| 862 fstp_d(FieldOperand(elements, key, times_4, | 891 fstp_d(FieldOperand(elements, key, times_4, |
| 863 FixedDoubleArray::kHeaderSize - elements_offset)); | 892 FixedDoubleArray::kHeaderSize - elements_offset)); |
| 864 } | 893 } |
| 865 bind(&done); | 894 bind(&done); |
| 866 } | 895 } |
| 867 | 896 |
| 868 | 897 |
| 869 void MacroAssembler::CompareMap(Register obj, | 898 void MacroAssembler::CompareMap(Register obj, Handle<Map> map) { |
| 870 Handle<Map> map, | |
| 871 Label* early_success) { | |
| 872 cmp(FieldOperand(obj, HeapObject::kMapOffset), map); | 899 cmp(FieldOperand(obj, HeapObject::kMapOffset), map); |
| 873 } | 900 } |
| 874 | 901 |
| 875 | 902 |
| 876 void MacroAssembler::CheckMap(Register obj, | 903 void MacroAssembler::CheckMap(Register obj, |
| 877 Handle<Map> map, | 904 Handle<Map> map, |
| 878 Label* fail, | 905 Label* fail, |
| 879 SmiCheckType smi_check_type) { | 906 SmiCheckType smi_check_type) { |
| 880 if (smi_check_type == DO_SMI_CHECK) { | 907 if (smi_check_type == DO_SMI_CHECK) { |
| 881 JumpIfSmi(obj, fail); | 908 JumpIfSmi(obj, fail); |
| 882 } | 909 } |
| 883 | 910 |
| 884 Label success; | 911 CompareMap(obj, map); |
| 885 CompareMap(obj, map, &success); | |
| 886 j(not_equal, fail); | 912 j(not_equal, fail); |
| 887 bind(&success); | |
| 888 } | 913 } |
| 889 | 914 |
| 890 | 915 |
| 891 void MacroAssembler::DispatchMap(Register obj, | 916 void MacroAssembler::DispatchMap(Register obj, |
| 892 Register unused, | 917 Register unused, |
| 893 Handle<Map> map, | 918 Handle<Map> map, |
| 894 Handle<Code> success, | 919 Handle<Code> success, |
| 895 SmiCheckType smi_check_type) { | 920 SmiCheckType smi_check_type) { |
| 896 Label fail; | 921 Label fail; |
| 897 if (smi_check_type == DO_SMI_CHECK) { | 922 if (smi_check_type == DO_SMI_CHECK) { |
| (...skipping 117 matching lines...) |
| 1015 | 1040 |
| 1016 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { | 1041 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { |
| 1017 if (frame_mode == BUILD_STUB_FRAME) { | 1042 if (frame_mode == BUILD_STUB_FRAME) { |
| 1018 push(ebp); // Caller's frame pointer. | 1043 push(ebp); // Caller's frame pointer. |
| 1019 mov(ebp, esp); | 1044 mov(ebp, esp); |
| 1020 push(esi); // Callee's context. | 1045 push(esi); // Callee's context. |
| 1021 push(Immediate(Smi::FromInt(StackFrame::STUB))); | 1046 push(Immediate(Smi::FromInt(StackFrame::STUB))); |
| 1022 } else { | 1047 } else { |
| 1023 PredictableCodeSizeScope predictible_code_size_scope(this, | 1048 PredictableCodeSizeScope predictible_code_size_scope(this, |
| 1024 kNoCodeAgeSequenceLength); | 1049 kNoCodeAgeSequenceLength); |
| 1025 if (FLAG_optimize_for_size && FLAG_age_code) { | 1050 if (isolate()->IsCodePreAgingActive()) { |
| 1026 // Pre-age the code. | 1051 // Pre-age the code. |
| 1027 call(isolate()->builtins()->MarkCodeAsExecutedOnce(), | 1052 call(isolate()->builtins()->MarkCodeAsExecutedOnce(), |
| 1028 RelocInfo::CODE_AGE_SEQUENCE); | 1053 RelocInfo::CODE_AGE_SEQUENCE); |
| 1029 Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength); | 1054 Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength); |
| 1030 } else { | 1055 } else { |
| 1031 push(ebp); // Caller's frame pointer. | 1056 push(ebp); // Caller's frame pointer. |
| 1032 mov(ebp, esp); | 1057 mov(ebp, esp); |
| 1033 push(esi); // Callee's context. | 1058 push(esi); // Callee's context. |
| 1034 push(edi); // Callee's JS function. | 1059 push(edi); // Callee's JS function. |
| 1035 } | 1060 } |
| (...skipping 942 matching lines...) |
| 1978 // Source and destination are incremented by length. | 2003 // Source and destination are incremented by length. |
| 1979 // Many variants of movsb, loop unrolling, word moves, and indexed operands | 2004 // Many variants of movsb, loop unrolling, word moves, and indexed operands |
| 1980 // have been tried here already, and this is fastest. | 2005 // have been tried here already, and this is fastest. |
| 1981 // A simpler loop is faster on small copies, but 30% slower on large ones. | 2006 // A simpler loop is faster on small copies, but 30% slower on large ones. |
| 1982 // The cld() instruction must have been emitted, to set the direction flag(), | 2007 // The cld() instruction must have been emitted, to set the direction flag(), |
| 1983 // before calling this function. | 2008 // before calling this function. |
| 1984 void MacroAssembler::CopyBytes(Register source, | 2009 void MacroAssembler::CopyBytes(Register source, |
| 1985 Register destination, | 2010 Register destination, |
| 1986 Register length, | 2011 Register length, |
| 1987 Register scratch) { | 2012 Register scratch) { |
| 1988 Label loop, done, short_string, short_loop; | 2013 Label short_loop, len4, len8, len12, done, short_string; |
| 1989 // Experimentation shows that the short string loop is faster if length < 10. | |
| 1990 cmp(length, Immediate(10)); | |
| 1991 j(less_equal, &short_string); | |
| 1992 | |
| 1993 ASSERT(source.is(esi)); | 2014 ASSERT(source.is(esi)); |
| 1994 ASSERT(destination.is(edi)); | 2015 ASSERT(destination.is(edi)); |
| 1995 ASSERT(length.is(ecx)); | 2016 ASSERT(length.is(ecx)); |
| 2017 cmp(length, Immediate(4)); |
| 2018 j(below, &short_string, Label::kNear); |
| 1996 | 2019 |
| 1997 // Because source is 4-byte aligned in our uses of this function, | 2020 // Because source is 4-byte aligned in our uses of this function, |
| 1998 // we keep source aligned for the rep_movs call by copying the odd bytes | 2021 // we keep source aligned for the rep_movs call by copying the odd bytes |
| 1999 // at the end of the ranges. | 2022 // at the end of the ranges. |
| 2000 mov(scratch, Operand(source, length, times_1, -4)); | 2023 mov(scratch, Operand(source, length, times_1, -4)); |
| 2001 mov(Operand(destination, length, times_1, -4), scratch); | 2024 mov(Operand(destination, length, times_1, -4), scratch); |
| 2025 |
| 2026 cmp(length, Immediate(8)); |
| 2027 j(below_equal, &len4, Label::kNear); |
| 2028 cmp(length, Immediate(12)); |
| 2029 j(below_equal, &len8, Label::kNear); |
| 2030 cmp(length, Immediate(16)); |
| 2031 j(below_equal, &len12, Label::kNear); |
| 2032 |
| 2002 mov(scratch, ecx); | 2033 mov(scratch, ecx); |
| 2003 shr(ecx, 2); | 2034 shr(ecx, 2); |
| 2004 rep_movs(); | 2035 rep_movs(); |
| 2005 and_(scratch, Immediate(0x3)); | 2036 and_(scratch, Immediate(0x3)); |
| 2006 add(destination, scratch); | 2037 add(destination, scratch); |
| 2007 jmp(&done); | 2038 jmp(&done, Label::kNear); |
| 2039 |
| 2040 bind(&len12); |
| 2041 mov(scratch, Operand(source, 8)); |
| 2042 mov(Operand(destination, 8), scratch); |
| 2043 bind(&len8); |
| 2044 mov(scratch, Operand(source, 4)); |
| 2045 mov(Operand(destination, 4), scratch); |
| 2046 bind(&len4); |
| 2047 mov(scratch, Operand(source, 0)); |
| 2048 mov(Operand(destination, 0), scratch); |
| 2049 add(destination, length); |
| 2050 jmp(&done, Label::kNear); |
| 2008 | 2051 |
| 2009 bind(&short_string); | 2052 bind(&short_string); |
| 2010 test(length, length); | 2053 test(length, length); |
| 2011 j(zero, &done); | 2054 j(zero, &done, Label::kNear); |
| 2012 | 2055 |
| 2013 bind(&short_loop); | 2056 bind(&short_loop); |
| 2014 mov_b(scratch, Operand(source, 0)); | 2057 mov_b(scratch, Operand(source, 0)); |
| 2015 mov_b(Operand(destination, 0), scratch); | 2058 mov_b(Operand(destination, 0), scratch); |
| 2016 inc(source); | 2059 inc(source); |
| 2017 inc(destination); | 2060 inc(destination); |
| 2018 dec(length); | 2061 dec(length); |
| 2019 j(not_zero, &short_loop); | 2062 j(not_zero, &short_loop); |
| 2020 | 2063 |
| 2021 bind(&done); | 2064 bind(&done); |
| (...skipping 1521 matching lines...) |
| 3543 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag)); | 3586 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag)); |
| 3544 cmp(scratch_reg, Immediate(new_space_start)); | 3587 cmp(scratch_reg, Immediate(new_space_start)); |
| 3545 j(less, no_memento_found); | 3588 j(less, no_memento_found); |
| 3546 cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top)); | 3589 cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top)); |
| 3547 j(greater, no_memento_found); | 3590 j(greater, no_memento_found); |
| 3548 cmp(MemOperand(scratch_reg, -AllocationMemento::kSize), | 3591 cmp(MemOperand(scratch_reg, -AllocationMemento::kSize), |
| 3549 Immediate(isolate()->factory()->allocation_memento_map())); | 3592 Immediate(isolate()->factory()->allocation_memento_map())); |
| 3550 } | 3593 } |
| 3551 | 3594 |
| 3552 | 3595 |
| 3596 void MacroAssembler::JumpIfDictionaryInPrototypeChain( |
| 3597 Register object, |
| 3598 Register scratch0, |
| 3599 Register scratch1, |
| 3600 Label* found) { |
| 3601 ASSERT(!scratch1.is(scratch0)); |
| 3602 Factory* factory = isolate()->factory(); |
| 3603 Register current = scratch0; |
| 3604 Label loop_again; |
| 3605 |
| 3606 // scratch contained elements pointer. |
| 3607 mov(current, object); |
| 3608 |
| 3609 // Loop based on the map going up the prototype chain. |
| 3610 bind(&loop_again); |
| 3611 mov(current, FieldOperand(current, HeapObject::kMapOffset)); |
| 3612 mov(scratch1, FieldOperand(current, Map::kBitField2Offset)); |
| 3613 and_(scratch1, Map::kElementsKindMask); |
| 3614 shr(scratch1, Map::kElementsKindShift); |
| 3615 cmp(scratch1, Immediate(DICTIONARY_ELEMENTS)); |
| 3616 j(equal, found); |
| 3617 mov(current, FieldOperand(current, Map::kPrototypeOffset)); |
| 3618 cmp(current, Immediate(factory->null_value())); |
| 3619 j(not_equal, &loop_again); |
| 3620 } |
| 3621 |
| 3553 } } // namespace v8::internal | 3622 } } // namespace v8::internal |
| 3554 | 3623 |
| 3555 #endif // V8_TARGET_ARCH_IA32 | 3624 #endif // V8_TARGET_ARCH_IA32 |
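
For reference, the revised `CopyBytes` in the NEW column drops the old `length < 10` byte loop in favour of word-sized fast paths for lengths 4-16 plus a `rep_movs` path for longer copies. Below is a minimal sketch of that control flow in plain C++, assuming non-overlapping buffers as the assembly does; the names are illustrative model code, not V8 API, and the real routine additionally leaves `source`/`destination` advanced by `length`.

```cpp
// Minimal sketch of the control flow emitted by the revised CopyBytes.
// Model code only: not V8 API, and boundary values are read off the hunk above.
#include <cstddef>
#include <cstdint>
#include <cstring>

void CopyBytesModel(const uint8_t* src, uint8_t* dst, size_t len) {
  if (len < 4) {                        // j(below, &short_string): byte-by-byte loop
    for (size_t i = 0; i < len; ++i) dst[i] = src[i];
    return;
  }
  // Copy the last (possibly unaligned) 4 bytes first, mirroring
  // mov(scratch, Operand(source, length, times_1, -4)).
  std::memcpy(dst + len - 4, src + len - 4, 4);

  if (len <= 16) {                      // &len4 / &len8 / &len12 fast paths
    if (len > 12) std::memcpy(dst + 8, src + 8, 4);
    if (len > 8)  std::memcpy(dst + 4, src + 4, 4);
    std::memcpy(dst, src, 4);
    return;
  }
  // Long case: word-sized copy stands in for rep_movs; the remaining
  // 0-3 bytes are already covered by the tail copy above.
  std::memcpy(dst, src, (len / 4) * 4);
}
```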
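Likewise, the new `JumpIfDictionaryInPrototypeChain` helper added at the end of the file walks the prototype chain through each object's map and jumps to `found` as soon as a map's elements kind is `DICTIONARY_ELEMENTS`. The sketch below models that walk with stand-in structs and placeholder bit-field constants rather than real V8 types.

```cpp
// Sketch only: stand-in types and placeholder constants, not V8's layout.
struct MapModel;
struct ObjectModel { MapModel* map; };
struct MapModel {
  unsigned bit_field2;      // elements kind packed here, as with Map::kBitField2Offset
  ObjectModel* prototype;   // factory->null_value() modelled as nullptr
};

constexpr unsigned kElementsKindShiftModel = 3;                     // placeholder for Map::kElementsKindShift
constexpr unsigned kElementsKindMaskModel = 0x1fu << kElementsKindShiftModel;  // placeholder mask
constexpr unsigned kDictionaryElementsModel = 6;                    // placeholder for DICTIONARY_ELEMENTS

bool HasDictionaryElementsInPrototypeChain(ObjectModel* object) {
  // mov(current, object); then loop via current->map->prototype until null.
  for (ObjectModel* current = object; current != nullptr;
       current = current->map->prototype) {
    unsigned kind =
        (current->map->bit_field2 & kElementsKindMaskModel) >> kElementsKindShiftModel;
    if (kind == kDictionaryElementsModel) return true;  // j(equal, found)
  }
  return false;
}
```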