Chromium Code Reviews

Unified Diff: src/mips/ic-mips.cc

Issue 9193015: Further robustify the keyed lookup cache against unlucky hash (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 8 years, 11 months ago
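For context on the change below: the stub previously probed two hardcoded entries of the keyed lookup cache, so an unlucky hash collision could always push a hot (map, symbol) pair out; this patch generalizes the generated code to probe every entry of a bucket of KeyedLookupCache::kEntriesPerBucket slots. A minimal C++ model of the scheme the stub implements — the constant values, the array layout, and Lookup() itself are assumptions for illustration, only the constant names mirror the diff:

#include <stdint.h>

struct KeyedLookupCacheModel {
  static const int kEntriesPerBucket = 4;          // Assumed bucket size.
  static const int kCapacity = 256;                // Assumed capacity.
  static const int kCapacityMask = kCapacity - 1;
  static const int kHashMask = ~(kEntriesPerBucket - 1);  // Bucket-align.
  static const int kMapHashShift = 5;              // Assumed shift amounts.
  static const int kStringHashShift = 2;

  // The cache keeps keys as (map, symbol) pairs in one flat array and the
  // cached field offsets in a parallel array.
  uintptr_t keys[kCapacity * 2];
  int field_offsets[kCapacity];

  // Mirrors the hash computation at the top of the hunk: xor the shifted
  // map address with the shifted string hash, then mask so the index lands
  // on the first slot of an aligned bucket.
  int Hash(uintptr_t map, uint32_t hash_field) const {
    int hash = static_cast<int>((map >> kMapHashShift) ^
                                (hash_field >> kStringHashShift));
    return hash & kCapacityMask & kHashMask;
  }

  // Probe all kEntriesPerBucket slots of the bucket in order; returning -1
  // corresponds to the stub branching to its slow path.
  int Lookup(uintptr_t map, uintptr_t symbol, uint32_t hash_field) const {
    int index = Hash(map, hash_field);
    for (int i = 0; i < kEntriesPerBucket; i++) {
      if (keys[(index + i) * 2] == map &&
          keys[(index + i) * 2 + 1] == symbol) {
        return field_offsets[index + i];
      }
    }
    return -1;
  }
};

The two-word key stride is also why the stub scales the bucket index by kPointerSizeLog2 + 1 when addressing cache_keys but only by kPointerSizeLog2 when addressing cache_field_offsets.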
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -1,10 +1,10 @@
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1020 matching lines...)
@@ -1031,64 +1031,65 @@
   __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
   __ sra(a3, a2, KeyedLookupCache::kMapHashShift);
   __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset));
   __ sra(at, t0, String::kHashShift);
   __ xor_(a3, a3, at);
   int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
   __ And(a3, a3, Operand(mask));
 
   // Load the key (consisting of map and symbol) from the cache and
   // check for match.
-  Label try_second_entry, hit_on_first_entry, load_in_object_property;
+  Label load_in_object_property;
+  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
+  Label hit_on_nth_entry[kEntriesPerBucket];
   ExternalReference cache_keys =
       ExternalReference::keyed_lookup_cache_keys(isolate);
   __ li(t0, Operand(cache_keys));
   __ sll(at, a3, kPointerSizeLog2 + 1);
   __ addu(t0, t0, at);
-  __ lw(t1, MemOperand(t0));
-  __ Branch(&try_second_entry, ne, a2, Operand(t1));
-  __ lw(t1, MemOperand(t0, kPointerSize));
-  __ Branch(&hit_on_first_entry, eq, a0, Operand(t1));
 
-  __ bind(&try_second_entry);
-  __ lw(t1, MemOperand(t0, kPointerSize * 2));
+  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
+    Label try_next_entry;
+    __ lw(t1, MemOperand(t0, kPointerSize * i * 2));
+    __ Branch(&try_next_entry, ne, a2, Operand(t1));
+    __ lw(t1, MemOperand(t0, kPointerSize * (i * 2 + 1)));
+    __ Branch(&hit_on_nth_entry[i], eq, a0, Operand(t1));
+    __ bind(&try_next_entry);
+  }
+
+  __ lw(t1, MemOperand(t0, kPointerSize * (kEntriesPerBucket - 1) * 2));
   __ Branch(&slow, ne, a2, Operand(t1));
-  __ lw(t1, MemOperand(t0, kPointerSize * 3));
+  __ lw(t1, MemOperand(t0, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1)));
   __ Branch(&slow, ne, a0, Operand(t1));
 
   // Get field offset.
   // a0     : key
   // a1     : receiver
   // a2     : receiver's map
   // a3     : lookup cache index
   ExternalReference cache_field_offsets =
       ExternalReference::keyed_lookup_cache_field_offsets(isolate);
 
-  // Hit on second entry.
-  __ li(t0, Operand(cache_field_offsets));
-  __ sll(at, a3, kPointerSizeLog2);
-  __ addu(at, t0, at);
-  __ lw(t1, MemOperand(at, kPointerSize));
-  __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
-  __ Subu(t1, t1, t2);
-  __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
-  __ Branch(&load_in_object_property);
-
-  // Hit on first entry.
-  __ bind(&hit_on_first_entry);
-  __ li(t0, Operand(cache_field_offsets));
-  __ sll(at, a3, kPointerSizeLog2);
-  __ addu(at, t0, at);
-  __ lw(t1, MemOperand(at));
-  __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
-  __ Subu(t1, t1, t2);
-  __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
+  // Hit on nth entry.
+  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
+    __ bind(&hit_on_nth_entry[i]);
+    __ li(t0, Operand(cache_field_offsets));
+    __ sll(at, a3, kPointerSizeLog2);
+    __ addu(at, t0, at);
+    __ lw(t1, MemOperand(at, kPointerSize * i));
+    __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
+    __ Subu(t1, t1, t2);  // Offset relative to the in-object slots.
+    __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
+    if (i != 0) {
+      __ Branch(&load_in_object_property);
+    }
+  }
 
   // Load in-object property.
   __ bind(&load_in_object_property);
   __ lbu(t2, FieldMemOperand(a2, Map::kInstanceSizeOffset));
   __ addu(t2, t2, t1);  // Index from start of object.
   __ Subu(a1, a1, Operand(kHeapObjectTag));  // Remove the heap tag.
   __ sll(at, t2, kPointerSizeLog2);
   __ addu(at, a1, at);
   __ lw(v0, MemOperand(at));
   __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
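A note on the tail of the hunk above: the cached value is a field offset counted across all of the object's properties, and the stub splits it into the in-object and property-array cases. A hedged C++ sketch of that arithmetic — the function, parameter names, and word-sized layout are illustrative; t1/t2 refer to the registers in the stub:

#include <stdint.h>

// field_offset arrives from the field-offsets array (t1); the other two
// values are the byte-sized counts the stub loads from the map with lbu.
intptr_t LoadCachedProperty(intptr_t* object_words,    // untagged receiver
                            intptr_t* property_array,  // out-of-object store
                            int field_offset,
                            int inobject_properties,
                            int instance_size_in_words) {
  // Subu(t1, t1, t2): offsets below the in-object count live inside the
  // object itself; anything left over lives in the property array.
  int adjusted = field_offset - inobject_properties;
  if (adjusted >= 0) {
    return property_array[adjusted];  // property_array_property path.
  }
  // load_in_object_property path: adjusted is negative, so the slot sits
  // instance_size + adjusted words from the start of the object, which is
  // the addu(t2, t2, t1) after the lbu of Map::kInstanceSizeOffset.
  return object_words[instance_size_in_words + adjusted];
}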
(...skipping 605 matching lines...)
@@ -1700,10 +1700,10 @@
     Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
     patcher.masm()->andi(at, reg, kSmiTagMask);
     patcher.ChangeBranchCondition(eq);
   }
 }
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_MIPS
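Background for the patching code in the last hunk: an inlined smi check is an andi of the value's low tag bit followed by a conditional branch, so enabling or disabling the check only requires rewriting the immediate and the branch condition in place. A sketch of the predicate being toggled (the kSmiTagMask value reflects V8's usual smi tagging and is stated here as an assumption):

#include <stdint.h>

const intptr_t kSmiTagMask = 1;  // Low bit: 0 for smis, 1 for heap objects.

// What "andi at, reg, kSmiTagMask" plus a branch on at against zero_reg
// computes: the value is a smi iff its tag bit is clear.
bool IsSmi(intptr_t value) {
  return (value & kSmiTagMask) == 0;
}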
