OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
11 // with the distribution. | 11 // with the distribution. |
(...skipping 10 matching lines...) | |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #ifndef V8_IC_INL_H_ | 28 #ifndef V8_IC_INL_H_ |
29 #define V8_IC_INL_H_ | 29 #define V8_IC_INL_H_ |
30 | 30 |
31 #include "ic.h" | 31 #include "ic.h" |
32 | |
33 #include "compiler.h" | |
32 #include "debug.h" | 34 #include "debug.h" |
33 #include "macro-assembler.h" | 35 #include "macro-assembler.h" |
34 | 36 |
35 namespace v8 { | 37 namespace v8 { |
36 namespace internal { | 38 namespace internal { |
37 | 39 |
38 | 40 |
39 Address IC::address() const { | 41 Address IC::address() const { |
40 // Get the address of the call. | 42 // Get the address of the call. |
41 Address result = pc() - Assembler::kCallTargetAddressOffset; | 43 Address result = pc() - Assembler::kCallTargetAddressOffset; |
(...skipping 40 matching lines...) | |
82 // ICs as strict mode. The strict-ness of the IC must be preserved. | 84 // ICs as strict mode. The strict-ness of the IC must be preserved. |
83 Code* old_target = GetTargetAtAddress(address); | 85 Code* old_target = GetTargetAtAddress(address); |
84 if (old_target->kind() == Code::STORE_IC || | 86 if (old_target->kind() == Code::STORE_IC || |
85 old_target->kind() == Code::KEYED_STORE_IC) { | 87 old_target->kind() == Code::KEYED_STORE_IC) { |
86 ASSERT(old_target->extra_ic_state() == target->extra_ic_state()); | 88 ASSERT(old_target->extra_ic_state() == target->extra_ic_state()); |
87 } | 89 } |
88 #endif | 90 #endif |
89 Assembler::set_target_address_at(address, target->instruction_start()); | 91 Assembler::set_target_address_at(address, target->instruction_start()); |
90 target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address, | 92 target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address, |
91 target); | 93 target); |
94 if (FLAG_counting_profiler) { | |
Erik Corry 2012/02/08 14:09:23
Suggested comment:
// We do not want to optimize
Jakob Kummerow 2012/02/08 15:24:08
Done.
95 Isolate::Current()->runtime_profiler()->NotifyICChanged(); | |
96 StackFrameIterator it; | |
97 if (it.done()) return; | |
98 it.Advance(); | |
99 static const int kStackFramesToMark = Compiler::kMaxInliningLevels - 1; | |
100 for (int i = 0; i < kStackFramesToMark; ++i) { | |
101 if (it.done()) return; | |
102 StackFrame* raw_frame = it.frame(); | |
103 if (raw_frame->is_java_script()) { | |
104 JSFunction* function = | |
105 JSFunction::cast(JavaScriptFrame::cast(raw_frame)->function()); | |
106 function->shared()->set_profiler_ticks(0); | |
107 } | |
108 it.Advance(); | |
109 } | |
110 } | |
92 } | 111 } |
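For context on the new counting-profiler hunk above: when FLAG_counting_profiler is on, patching an IC target also notifies the runtime profiler and clears the profiler tick counts of the next few JavaScript frames on the stack, so that callers whose type feedback is still changing are not optimized on stale counts (presumably what the truncated suggestion above, "We do not want to optimize", is getting at). Below is a minimal standalone sketch of that frame-walking pattern; Frame, FunctionInfo and ResetTicksAfterICChange are hypothetical stand-ins for illustration, not V8 classes.

#include <cstddef>
#include <cstdio>
#include <vector>

// Hypothetical stand-ins, not V8 types.
struct FunctionInfo {
  const char* name;
  int profiler_ticks;  // bumped by a (hypothetical) counting profiler
};

struct Frame {
  bool is_java_script;
  FunctionInfo* function;  // NULL for non-JavaScript frames
};

// Mirrors the shape of the patch: skip the topmost frame (it.Advance()),
// then clear the tick counts of up to frames_to_mark JavaScript frames.
static void ResetTicksAfterICChange(std::vector<Frame>* stack,
                                    int frames_to_mark) {
  std::size_t index = 1;  // Skip the current frame.
  for (int marked = 0; marked < frames_to_mark; ++marked, ++index) {
    if (index >= stack->size()) return;
    Frame& frame = (*stack)[index];
    if (frame.is_java_script && frame.function != NULL) {
      frame.function->profiler_ticks = 0;
    }
  }
}

int main() {
  FunctionInfo callee = { "callee", 7 };
  FunctionInfo caller = { "caller", 12 };
  Frame f0 = { true, &callee };
  Frame f1 = { true, &caller };
  Frame f2 = { false, NULL };
  std::vector<Frame> stack;
  stack.push_back(f0);
  stack.push_back(f1);
  stack.push_back(f2);
  ResetTicksAfterICChange(&stack, 2);
  // The topmost frame keeps its ticks; the caller frame is reset to 0.
  std::printf("callee=%d caller=%d\n",
              callee.profiler_ticks, caller.profiler_ticks);
  return 0;
}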
93 | 112 |
94 | 113 |
95 InlineCacheHolderFlag IC::GetCodeCacheForObject(Object* object, | 114 InlineCacheHolderFlag IC::GetCodeCacheForObject(Object* object, |
96 JSObject* holder) { | 115 JSObject* holder) { |
97 if (object->IsJSObject()) { | 116 if (object->IsJSObject()) { |
98 return GetCodeCacheForObject(JSObject::cast(object), holder); | 117 return GetCodeCacheForObject(JSObject::cast(object), holder); |
99 } | 118 } |
100 // If the object is a value, we use the prototype map for the cache. | 119 // If the object is a value, we use the prototype map for the cache. |
101 ASSERT(object->IsString() || object->IsNumber() || object->IsBoolean()); | 120 ASSERT(object->IsString() || object->IsNumber() || object->IsBoolean()); |
(...skipping 21 matching lines...) | |
123 JSObject* IC::GetCodeCacheHolder(Object* object, InlineCacheHolderFlag holder) { | 142 JSObject* IC::GetCodeCacheHolder(Object* object, InlineCacheHolderFlag holder) { |
124 Object* map_owner = (holder == OWN_MAP ? object : object->GetPrototype()); | 143 Object* map_owner = (holder == OWN_MAP ? object : object->GetPrototype()); |
125 ASSERT(map_owner->IsJSObject()); | 144 ASSERT(map_owner->IsJSObject()); |
126 return JSObject::cast(map_owner); | 145 return JSObject::cast(map_owner); |
127 } | 146 } |
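A side note on the value case handled by GetCodeCacheForObject and GetCodeCacheHolder above: as the comment in the code says, when the receiver is a value (string, number, boolean) the IC keys its cache on the map of the corresponding prototype object rather than on the value itself. A tiny standalone sketch of that decision, using hypothetical Receiver and Map types rather than V8's, and modelling only the receiver == holder fast path:

#include <cstddef>
#include <cstdio>

// Hypothetical stand-ins, not V8 types.
struct Map { const char* name; };

struct Receiver {
  bool is_js_object;
  Map* own_map;        // used when the receiver is a real JS object
  Map* prototype_map;  // map of the value's prototype, e.g. String.prototype
};

// JS objects cache on their own map; value receivers cache on the map of
// their prototype object.
static Map* CacheKeyMap(const Receiver& receiver) {
  if (receiver.is_js_object) return receiver.own_map;
  return receiver.prototype_map;
}

int main() {
  Map object_map = { "plain JSObject map" };
  Map string_proto_map = { "String.prototype map" };
  Receiver js_object = { true, &object_map, NULL };
  Receiver string_value = { false, NULL, &string_proto_map };
  std::printf("%s\n", CacheKeyMap(js_object)->name);
  std::printf("%s\n", CacheKeyMap(string_value)->name);
  return 0;
}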
128 | 147 |
129 | 148 |
130 } } // namespace v8::internal | 149 } } // namespace v8::internal |
131 | 150 |
132 #endif // V8_IC_INL_H_ | 151 #endif // V8_IC_INL_H_ |