Chromium Code Reviews

Diff: src/mips/code-stubs-mips.cc

Issue 2325083003: Record call counts also for megamorphic calls. (Closed)
Patch Set: Code comments. Created 4 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_MIPS

 #include "src/code-stubs.h"
 #include "src/api-arguments.h"
 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
(...skipping 1899 matching lines...)
   __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
   __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ Jump(at);

   __ bind(&non_function);
   __ mov(a3, a1);
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }

+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot) {
+  __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize);
+  __ lw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+  __ Addu(slot, slot, Operand(Smi::FromInt(1)));
+  __ sw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+}

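For readers new to V8's tagging scheme, here is a plain-C++ model of what the sequence emitted by IncrementCallCount does at run time. It is a sketch under assumptions that hold for 32-bit MIPS builds (4-byte pointers, so kPointerSizeLog2 == 2, and one-bit Smi tagging, so kSmiTagSize == 1); SmiFromInt, SmiToInt, and IncrementCallCountModel are hypothetical names, not V8 API.

#include <cstdint>

// Hypothetical Smi helpers (not V8 API): on 32-bit targets a small integer n
// is stored as n << 1, so its low tag bit is 0 and it cannot be mistaken for
// a heap pointer.
static intptr_t SmiFromInt(int n) { return static_cast<intptr_t>(n) << 1; }
static int SmiToInt(intptr_t smi) { return static_cast<int>(smi >> 1); }

// The slot register arrives Smi-tagged (index << 1), so Lsa's shift amount of
// kPointerSizeLog2 - kSmiTagSize == 2 - 1 == 1 turns it into a byte offset:
// (index << 1) << 1 == index * 4, i.e. exactly 'index' pointer-sized words.
// The count itself lives one word past the feedback entry, which is what
// FixedArray::kHeaderSize + kPointerSize addresses in the real code.
void IncrementCallCountModel(intptr_t* vector_data, intptr_t slot_as_smi) {
  intptr_t* entry = vector_data + SmiToInt(slot_as_smi);
  intptr_t count = entry[1];         // Smi-tagged call count
  entry[1] = count + SmiFromInt(1);  // tagged add: (n<<1) + (1<<1) == (n+1)<<1
}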
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // a1 - function
   // a3 - slot id
   // a2 - vector
   // t0 - loaded from vector[slot]
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at);
   __ Branch(miss, ne, a1, Operand(at));

   __ li(a0, Operand(arg_count()));

   // Increment the call count for monomorphic function calls.
-  __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-  __ Addu(a3, a3, Operand(Smi::FromInt(1)));
-  __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+  IncrementCallCount(masm, a2, a3);

   __ mov(a2, t0);
   __ mov(a3, a1);
   ArrayConstructorStub stub(masm->isolate(), arg_count());
   __ TailCallStub(&stub);
 }


 void CallICStub::Generate(MacroAssembler* masm) {
   // a1 - function
   // a3 - slot id (Smi)
   // a2 - vector
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);

   // The checks. First, does a1 match the recorded monomorphic target?
   __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize);
   __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));

   // We don't know that we have a weak cell. We might have a private symbol
   // or an AllocationSite, but the memory is safe to examine.
   // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
   // FixedArray.
   // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
   // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
   // computed, meaning that it can't appear to be a pointer. If the low bit
   // is 0, then the hash is computed, but the 0 bit prevents the field from
   // appearing to be a pointer.
   STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
   STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                     WeakCell::kValueOffset &&
                 WeakCell::kValueOffset == Symbol::kHashFieldSlot);

   __ lw(t1, FieldMemOperand(t0, WeakCell::kValueOffset));
   __ Branch(&extra_checks_or_miss, ne, a1, Operand(t1));

   // The compare above could have been a Smi/Smi comparison. Guard against
   // this convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(a1, &extra_checks_or_miss);

+  __ bind(&call_function);
+
   // Increment the call count for monomorphic function calls.
-  __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-  __ Addu(a3, a3, Operand(Smi::FromInt(1)));
-  __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+  IncrementCallCount(masm, a2, a3);

-  __ bind(&call_function);
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
           RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
           USE_DELAY_SLOT);
   __ li(a0, Operand(argc));  // In delay slot.

   __ bind(&extra_checks_or_miss);
   Label uninitialized, miss, not_allocation_site;

   __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
(...skipping 20 matching lines...)
   // We are going megamorphic. If the feedback is a JSFunction, it is fine
   // to handle it here. More complex cases are dealt with in the runtime.
   __ AssertNotSmi(t0);
   __ GetObjectType(t0, t1, t1);
   __ Branch(&miss, ne, t1, Operand(JS_FUNCTION_TYPE));
   __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize);
   __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
   __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));

   __ bind(&call);
+  IncrementCallCount(masm, a2, a3);
+
+  __ bind(&call_count_incremented);
+
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
           USE_DELAY_SLOT);
   __ li(a0, Operand(argc));  // In delay slot.

   __ bind(&uninitialized);

   // We are going monomorphic, provided we actually have a JSFunction.
   __ JumpIfSmi(a1, &miss);

   // Go to the miss case if we do not have a function.
   __ GetObjectType(a1, t0, t0);
   __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE));

   // Make sure the function is not the Array() function, which requires
   // special behavior on MISS.
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t0);
   __ Branch(&miss, eq, a1, Operand(t0));

   // Make sure the function belongs to the same native context.
   __ lw(t0, FieldMemOperand(a1, JSFunction::kContextOffset));
   __ lw(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX));
   __ lw(t1, NativeContextMemOperand());
   __ Branch(&miss, ne, t0, Operand(t1));

-  // Initialize the call counter.
-  __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ li(t0, Operand(Smi::FromInt(1)));
-  __ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-
   // Store the function. Use a stub since we need a frame for allocation.
   // a2 - vector
   // a3 - slot
   // a1 - function
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(a2, a3);
     __ Push(cp, a1);
     __ CallStub(&create_stub);
     __ Pop(cp, a1);
+    __ Pop(a2, a3);
   }

   __ Branch(&call_function);

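A note on the Push(a2, a3)/Pop(a2, a3) pair added above, going only by what the diff shows: before this patch the call counter was seeded to Smi(1) right here, before the CreateWeakCellStub call, so the vector (a2) and slot (a3) registers were dead afterwards. Now the first increment happens at call_function, after the stub call, and a called stub may clobber any non-preserved register, so both values must be saved across the call.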
   // We are here because tracing is on or we encountered a MISS case we can't
   // handle here.
   __ bind(&miss);
   GenerateMiss(masm);

-  __ Branch(&call);
+  __ Branch(&call_count_incremented);
 }


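Putting the pieces together, here is a plain-C++ sketch of the state machine this stub now implements, showing where the call count is bumped on each path. Feedback, Slot, and CallICModel are hypothetical names, and the assumption that the runtime miss handler performs its own increment (which would explain why the miss path now branches to call_count_incremented instead of call) is inferred from the label name and the patch title, not shown in this file.

// Hypothetical stand-ins for V8's feedback sentinels and slot layout.
enum class Feedback { kUninitialized, kMonomorphic, kMegamorphic };

struct Slot {
  Feedback state = Feedback::kUninitialized;
  const void* target = nullptr;  // weak reference to the cached JSFunction
  int call_count = 0;            // stored as a Smi in the real feedback vector
};

// Every path through the stub now counts the call exactly once: the
// monomorphic fast path increments at call_function, the megamorphic path at
// call (the increment this patch adds), and the miss path leaves it to the
// runtime, re-entering the stub's tail call at call_count_incremented.
void CallICModel(Slot* slot, const void* function) {
  if (slot->state == Feedback::kMonomorphic && slot->target == function) {
    slot->call_count++;  // label call_function
    return;              // ...tail-call the CallFunction builtin
  }
  if (slot->state == Feedback::kMegamorphic) {
    slot->call_count++;  // label call
    return;              // label call_count_incremented; generic Call builtin
  }
  if (slot->state == Feedback::kUninitialized) {
    // Go monomorphic. The old code seeded the counter to 1 here; the new code
    // instead branches to call_function, whose increment has the same effect.
    slot->state = Feedback::kMonomorphic;
    slot->target = function;
    slot->call_count++;  // via the branch to call_function
    return;
  }
  // Monomorphic for a different target: generalize to megamorphic, then count
  // the call at 'call', just as the real stub falls through to that label.
  slot->state = Feedback::kMegamorphic;
  slot->call_count++;
}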
 void CallICStub::GenerateMiss(MacroAssembler* masm) {
   FrameScope scope(masm, StackFrame::INTERNAL);

   // Push the function and the feedback info (vector and slot).
   __ Push(a1, a2, a3);

   // Call the entry.
(...skipping 3286 matching lines...)
                               kStackUnwindSpace, kInvalidStackOffset,
                               return_value_operand, NULL);
 }

 #undef __

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_MIPS
