Chromium Code Reviews

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 9496010: Fix secondary stub cache and add a test for the stub cache lookups. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 8 years, 9 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 25 matching lines...)
36 namespace v8 { 36 namespace v8 {
37 namespace internal { 37 namespace internal {
38 38
39 #define __ ACCESS_MASM(masm) 39 #define __ ACCESS_MASM(masm)
40 40
41 41
42 static void ProbeTable(Isolate* isolate, 42 static void ProbeTable(Isolate* isolate,
43 MacroAssembler* masm, 43 MacroAssembler* masm,
44 Code::Flags flags, 44 Code::Flags flags,
45 StubCache::Table table, 45 StubCache::Table table,
46 Register receiver,
46 Register name, 47 Register name,
47 Register offset) { 48 Register offset) {
48 ASSERT_EQ(8, kPointerSize); 49 ASSERT_EQ(8, kPointerSize);
49 ASSERT_EQ(16, sizeof(StubCache::Entry)); 50 ASSERT_EQ(24, sizeof(StubCache::Entry));
50 // The offset register holds the entry offset times four (due to masking 51 // The offset register holds the entry offset times four (due to masking
51 // and shifting optimizations). 52 // and shifting optimizations).
52 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); 53 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
54 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
53 Label miss; 55 Label miss;
54 56
57 // Multiply by 3.
58 __ lea(offset, Operand(offset, offset, times_2, 0));
59
55 __ LoadAddress(kScratchRegister, key_offset); 60 __ LoadAddress(kScratchRegister, key_offset);
61
56 // Check that the key in the entry matches the name. 62 // Check that the key in the entry matches the name.
57 // Multiply entry offset by 16 to get the entry address. Since the 63 // Multiply entry offset by 24 to get the entry address. The offset
58 // offset register already holds the entry offset times four, multiply 64 // register already holds the entry offset times twelve (times four from
59 // by a further four. 65 // the masking, times three from the lea above), so scale by a further two.
60 __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0)); 66 __ cmpl(name, Operand(kScratchRegister, offset, times_2, 0));
61 __ j(not_equal, &miss); 67 __ j(not_equal, &miss);
68
69 // Get the map entry from the cache.
70 // Use key_offset + kPointerSize * 2, rather than loading map_offset.
71 __ movq(kScratchRegister,
72 Operand(kScratchRegister, offset, times_2, kPointerSize * 2));
73 __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
74 __ j(not_equal, &miss);
75
62 // Get the code entry from the cache. 76 // Get the code entry from the cache.
63 // Use key_offset + kPointerSize, rather than loading value_offset. 77 __ LoadAddress(kScratchRegister, value_offset);
64 __ movq(kScratchRegister, 78 __ movq(kScratchRegister,
65 Operand(kScratchRegister, offset, times_4, kPointerSize)); 79 Operand(kScratchRegister, offset, times_2, 0));
80
66 // Check that the flags match what we're looking for. 81 // Check that the flags match what we're looking for.
67 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset)); 82 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
68 __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup)); 83 __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
69 __ cmpl(offset, Immediate(flags)); 84 __ cmpl(offset, Immediate(flags));
70 __ j(not_equal, &miss); 85 __ j(not_equal, &miss);
71 86
87 #ifdef DEBUG
88 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
89 __ jmp(&miss);
90 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
91 __ jmp(&miss);
92 }
93 #endif
94
72 // Jump to the first instruction in the code stub. 95 // Jump to the first instruction in the code stub.
73 __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag)); 96 __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
74 __ jmp(kScratchRegister); 97 __ jmp(kScratchRegister);
75 98
76 __ bind(&miss); 99 __ bind(&miss);
77 } 100 }
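For readers following the new indexing arithmetic in ProbeTable above, here is a small stand-alone C++ sketch (my own illustration, not part of the patch; the Entry struct and helper name are hypothetical) of how the "multiply by 3" lea and the times_2 operand scale combine with the pre-scaled offset register to address the new 24-byte {key, value, map} entries:

    #include <cassert>
    #include <cstdint>

    // Hypothetical stand-in for StubCache::Entry after this patch:
    // three pointers, 24 bytes on x64 (key, code value, receiver map).
    struct Entry { void* key; void* value; void* map; };

    // The offset register arrives holding entry_index * 4 (a by-product of the
    // hash masking). ProbeTable first triples it with lea, and every memory
    // operand then applies a further times_2 scale, for a net factor of 24.
    uint64_t EntryByteOffset(uint64_t offset_times_four) {
      uint64_t tripled = offset_times_four + offset_times_four * 2;  // lea ..., times_2
      return tripled * 2;                                            // Operand scale times_2
    }

    int main() {
      static_assert(sizeof(Entry) == 24, "three pointers on x64");
      assert(EntryByteOffset(7 * 4) == 7 * sizeof(Entry));  // entry index 7
      return 0;
    }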
78 101
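The #ifdef DEBUG block added above is what the new stub-cache test relies on: when one of the test flags is set, probes of the other table are forced onto the miss path, so only the table under test can satisfy a lookup. A rough C++ rendering of that decision (an illustration only; the helper name is made up):

    // Sketch of the debug-only force-miss logic; 'is_primary' corresponds to
    // table == StubCache::kPrimary in the generated code.
    bool ForceMiss(bool test_secondary_flag, bool test_primary_flag, bool is_primary) {
      if (test_secondary_flag && is_primary) return true;   // skip the primary table
      if (test_primary_flag && !is_primary) return true;    // skip the secondary table
      return false;                                         // normal probing
    }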
79 102
80 // Helper function used to check that the dictionary doesn't contain 103 // Helper function used to check that the dictionary doesn't contain
81 // the property. This function may return false negatives, so miss_label 104 // the property. This function may return false negatives, so miss_label
(...skipping 45 matching lines...)
127 __ DecrementCounter(counters->negative_lookups_miss(), 1); 150 __ DecrementCounter(counters->negative_lookups_miss(), 1);
128 } 151 }
129 152
130 153
131 void StubCache::GenerateProbe(MacroAssembler* masm, 154 void StubCache::GenerateProbe(MacroAssembler* masm,
132 Code::Flags flags, 155 Code::Flags flags,
133 Register receiver, 156 Register receiver,
134 Register name, 157 Register name,
135 Register scratch, 158 Register scratch,
136 Register extra, 159 Register extra,
137 Register extra2) { 160 Register extra2,
161 Register extra3) {
138 Isolate* isolate = masm->isolate(); 162 Isolate* isolate = masm->isolate();
139 Label miss; 163 Label miss;
140 USE(extra); // The register extra is not used on the X64 platform. 164 USE(extra); // The register extra is not used on the X64 platform.
141 USE(extra2); // The register extra2 is not used on the X64 platform. 165 USE(extra2); // The register extra2 is not used on the X64 platform.
142 // Make sure that code is valid. The shifting code relies on the 166 USE(extra3); // The register extra3 is not used on the X64 platform.
143 // entry size being 16. 167 // Make sure that code is valid. The multiplying code relies on the
144 ASSERT(sizeof(Entry) == 16); 168 // entry size being 24.
169 ASSERT(sizeof(Entry) == 24);
145 170
146 // Make sure the flags do not name a specific type. 171 // Make sure the flags do not name a specific type.
147 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); 172 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
148 173
149 // Make sure that there are no register conflicts. 174 // Make sure that there are no register conflicts.
150 ASSERT(!scratch.is(receiver)); 175 ASSERT(!scratch.is(receiver));
151 ASSERT(!scratch.is(name)); 176 ASSERT(!scratch.is(name));
152 177
153 // Check scratch register is valid, extra and extra2 are unused. 178 // Check scratch register is valid; extra, extra2 and extra3 are unused.
154 ASSERT(!scratch.is(no_reg)); 179 ASSERT(!scratch.is(no_reg));
155 ASSERT(extra2.is(no_reg)); 180 ASSERT(extra2.is(no_reg));
181 ASSERT(extra3.is(no_reg));
182
183 Counters* counters = masm->isolate()->counters();
184 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
156 185
157 // Check that the receiver isn't a smi. 186 // Check that the receiver isn't a smi.
158 __ JumpIfSmi(receiver, &miss); 187 __ JumpIfSmi(receiver, &miss);
159 188
160 // Get the map of the receiver and compute the hash. 189 // Get the map of the receiver and compute the hash.
161 __ movl(scratch, FieldOperand(name, String::kHashFieldOffset)); 190 __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
162 // Use only the low 32 bits of the map pointer. 191 // Use only the low 32 bits of the map pointer.
163 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); 192 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
164 __ xor_(scratch, Immediate(flags)); 193 __ xor_(scratch, Immediate(flags));
165 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); 194 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
166 195
167 // Probe the primary table. 196 // Probe the primary table.
168 ProbeTable(isolate, masm, flags, kPrimary, name, scratch); 197 ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);
169 198
170 // Primary miss: Compute hash for secondary probe. 199 // Primary miss: Compute hash for secondary probe.
171 __ movl(scratch, FieldOperand(name, String::kHashFieldOffset)); 200 __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
172 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); 201 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
173 __ xor_(scratch, Immediate(flags)); 202 __ xor_(scratch, Immediate(flags));
174 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); 203 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
175 __ subl(scratch, name); 204 __ subl(scratch, name);
176 __ addl(scratch, Immediate(flags)); 205 __ addl(scratch, Immediate(flags));
177 __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize)); 206 __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
178 207
179 // Probe the secondary table. 208 // Probe the secondary table.
180 ProbeTable(isolate, masm, flags, kSecondary, name, scratch); 209 ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);
181 210
182 // Cache miss: Fall-through and let caller handle the miss by 211 // Cache miss: Fall-through and let caller handle the miss by
183 // entering the runtime system. 212 // entering the runtime system.
184 __ bind(&miss); 213 __ bind(&miss);
214 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
185 } 215 }
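The two probe offsets computed in GenerateProbe above can be summarised with ordinary integer arithmetic. The following is a sketch under the assumption that name and map contribute only their low 32 bits; the table size and tag-shift parameters stand in for kPrimaryTableSize, kSecondaryTableSize and kHeapObjectTagSize and are not taken from the patch:

    #include <cstdint>

    // Mirrors the movl/addl/xor_/and_ sequence: primary = ((hash + map) ^ flags) & mask.
    uint32_t PrimaryOffset(uint32_t name_hash, uint32_t map_low32, uint32_t flags,
                           uint32_t table_size, uint32_t tag_size) {
      uint32_t scratch = name_hash + map_low32;
      scratch ^= flags;
      return scratch & ((table_size - 1) << tag_size);
    }

    // Mirrors the subl/addl/and_ sequence: secondary = ((primary - name) + flags) & mask.
    uint32_t SecondaryOffset(uint32_t primary, uint32_t name_low32, uint32_t flags,
                             uint32_t table_size, uint32_t tag_size) {
      uint32_t scratch = primary - name_low32;
      scratch += flags;
      return scratch & ((table_size - 1) << tag_size);
    }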
186 216
187 217
188 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, 218 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
189 int index, 219 int index,
190 Register prototype) { 220 Register prototype) {
191 // Load the global or builtins object from the current context. 221 // Load the global or builtins object from the current context.
192 __ movq(prototype, 222 __ movq(prototype,
193 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); 223 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
194 // Load the global context from the global or builtins object. 224 // Load the global context from the global or builtins object.
(...skipping 3564 matching lines...)
3759 __ jmp(ic_slow, RelocInfo::CODE_TARGET); 3789 __ jmp(ic_slow, RelocInfo::CODE_TARGET);
3760 } 3790 }
3761 } 3791 }
3762 3792
3763 3793
3764 #undef __ 3794 #undef __
3765 3795
3766 } } // namespace v8::internal 3796 } } // namespace v8::internal
3767 3797
3768 #endif // V8_TARGET_ARCH_X64 3798 #endif // V8_TARGET_ARCH_X64