| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 27 matching lines...) |
| 38 | 38 |
| 39 #define __ ACCESS_MASM(masm) | 39 #define __ ACCESS_MASM(masm) |
| 40 | 40 |
| 41 | 41 |
| 42 static void ProbeTable(Isolate* isolate, | 42 static void ProbeTable(Isolate* isolate, |
| 43 MacroAssembler* masm, | 43 MacroAssembler* masm, |
| 44 Code::Flags flags, | 44 Code::Flags flags, |
| 45 StubCache::Table table, | 45 StubCache::Table table, |
| 46 Register name, | 46 Register name, |
| 47 Register offset, | 47 Register offset, |
| 48 int offset_shift_bits, |
| 48 Register scratch, | 49 Register scratch, |
| 49 Register scratch2) { | 50 Register scratch2) { |
| 50 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | 51 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
| 51 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); | 52 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); |
| 52 | 53 |
| 53 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); | 54 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); |
| 54 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); | 55 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); |
| 55 | 56 |
| 56 // Check the relative positions of the address fields. | 57 // Check the relative positions of the address fields. |
| 57 ASSERT(value_off_addr > key_off_addr); | 58 ASSERT(value_off_addr > key_off_addr); |
| 58 ASSERT((value_off_addr - key_off_addr) % 4 == 0); | 59 ASSERT((value_off_addr - key_off_addr) % 4 == 0); |
| 59 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); | 60 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); |
| 60 | 61 |
| 61 Label miss; | 62 Label miss; |
| 62 Register offsets_base_addr = scratch; | 63 Register offsets_base_addr = scratch; |
| 63 | 64 |
| 64 // Check that the key in the entry matches the name. | 65 // Check that the key in the entry matches the name. |
| 65 __ mov(offsets_base_addr, Operand(key_offset)); | 66 __ mov(offsets_base_addr, Operand(key_offset)); |
| 66 __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1)); | 67 __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1 + offset_shift_bits)); |
| 67 __ cmp(name, ip); | 68 __ cmp(name, ip); |
| 68 __ b(ne, &miss); | 69 __ b(ne, &miss); |
| 69 | 70 |
| 70 // Get the code entry from the cache. | 71 // Get the code entry from the cache. |
| 71 __ add(offsets_base_addr, offsets_base_addr, | 72 __ add(offsets_base_addr, offsets_base_addr, |
| 72 Operand(value_off_addr - key_off_addr)); | 73 Operand(value_off_addr - key_off_addr)); |
| 73 __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1)); | 74 __ ldr(scratch2, |
| 75 MemOperand(offsets_base_addr, offset, LSL, 1 + offset_shift_bits)); |
| 74 | 76 |
| 75 // Check that the flags match what we're looking for. | 77 // Check that the flags match what we're looking for. |
| 76 __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset)); | 78 __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset)); |
| 77 __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup)); | 79 __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup)); |
| 78 __ cmp(scratch2, Operand(flags)); | 80 // Using cmn with the negated flags instead of cmp means we can use movw. |
| 81 __ cmn(scratch2, Operand(-flags)); |
| 79 __ b(ne, &miss); | 82 __ b(ne, &miss); |
| 80 | 83 |
| 81 // Re-load code entry from cache. | 84 // Re-load code entry from cache. |
| 82 __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1)); | 85 __ ldr(offset, |
| 86 MemOperand(offsets_base_addr, offset, LSL, 1 + offset_shift_bits)); |
| 83 | 87 |
| 84 // Jump to the first instruction in the code stub. | 88 // Jump to the first instruction in the code stub. |
| 85 __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag)); | 89 __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 86 __ Jump(offset); | 90 __ Jump(offset); |
| 87 | 91 |
| 88 // Miss: fall through. | 92 // Miss: fall through. |
| 89 __ bind(&miss); | 93 __ bind(&miss); |
| 90 } | 94 } |
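
Note on the cmn change above: cmp(x, k) sets the NZCV flags from x - k, while cmn(x, m) sets them from x + m, so with m == -k the two are equivalent in two's-complement arithmetic. The encoding is the win: when -flags can be materialized with a single movw but flags cannot, cmn saves an instruction. A minimal C++ sketch of the identity (names here are illustrative, not V8's):

    #include <cassert>
    #include <cstdint>

    // cmp(x, k) computes x - k; cmn(x, m) computes x + m. With
    // m == -k (mod 2^32) both yield the same value, and hence the
    // same NZCV flags on ARM.
    bool SameFlags(uint32_t x, uint32_t k) {
      uint32_t via_cmp = x - k;
      uint32_t via_cmn = x + (0u - k);  // 0u - k is -k mod 2^32
      return via_cmp == via_cmn;        // holds for all x, k
    }

    int main() {
      for (uint32_t x : {0u, 1u, 0x12345678u, 0xFFFFFFFFu})
        for (uint32_t k : {0u, 7u, 0x10000u, 0x80000000u})
          assert(SameFlags(x, k));
      return 0;
    }

The new MemOperand(..., LSL, 1 + offset_shift_bits) similarly folds the extra scaling into the load itself: the effective address is base + (offset << (1 + offset_shift_bits)), so the caller can pass an untagged index and let the addressing mode do the multiply.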
| 91 | 95 |
| 92 | 96 |
| (...skipping 89 matching lines...) |
| 182 ASSERT(!extra.is(no_reg)); | 186 ASSERT(!extra.is(no_reg)); |
| 183 ASSERT(!extra2.is(no_reg)); | 187 ASSERT(!extra2.is(no_reg)); |
| 184 | 188 |
| 185 // Check that the receiver isn't a smi. | 189 // Check that the receiver isn't a smi. |
| 186 __ JumpIfSmi(receiver, &miss); | 190 __ JumpIfSmi(receiver, &miss); |
| 187 | 191 |
| 188 // Get the map of the receiver and compute the hash. | 192 // Get the map of the receiver and compute the hash. |
| 189 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); | 193 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); |
| 190 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 194 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 191 __ add(scratch, scratch, Operand(ip)); | 195 __ add(scratch, scratch, Operand(ip)); |
| 192 __ eor(scratch, scratch, Operand(flags)); | 196 uint32_t mask = (kPrimaryTableSize - 1) << kHeapObjectTagSize; |
| 193 __ and_(scratch, | 197 // Mask down the eor argument to the minimum to keep the immediate |
| 194 scratch, | 198 // ARM-encodable. |
| 195 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); | 199 __ eor(scratch, scratch, Operand(flags & mask)); |
| 200 // Prefer ubfx to and_ here because the mask is not ARM-encodable. |
| 201 __ Ubfx(scratch, scratch, kHeapObjectTagSize, kPrimaryTableBits); |
| 196 | 202 |
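
Ubfx(dst, src, lsb, width) is an unsigned bit-field extract: dst = (src >> lsb) & ((1 << width) - 1). Here it replaces the and_ whose mask is not ARM-encodable, and as a side effect it shifts out the tag bits, so scratch ends up holding an untagged table index (which is why kHeapObjectTagSize is now passed to ProbeTable as offset_shift_bits). A hedged C++ model of the primary hash, with illustrative constants (kPrimaryTableBits is assumed to be log2 of kPrimaryTableSize):

    #include <cstdint>

    const int kHeapObjectTagSize = 2;   // illustrative value
    const int kPrimaryTableBits = 11;   // illustrative: table size 2048

    uint32_t PrimaryIndex(uint32_t hash_field, uint32_t map_word,
                          uint32_t flags) {
      uint32_t mask = ((1u << kPrimaryTableBits) - 1) << kHeapObjectTagSize;
      uint32_t h = hash_field + map_word;   // add name hash and map
      h ^= (flags & mask);                  // eor with the pre-masked flags
      // Ubfx(h, h, kHeapObjectTagSize, kPrimaryTableBits):
      return (h >> kHeapObjectTagSize) & ((1u << kPrimaryTableBits) - 1);
    }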
| 197 // Probe the primary table. | 203 // Probe the primary table. |
| 198 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2); | 204 ProbeTable(isolate, |
| 205 masm, |
| 206 flags, |
| 207 kPrimary, |
| 208 name, |
| 209 scratch, |
| 210 kHeapObjectTagSize, |
| 211 extra, |
| 212 extra2); |
| 199 | 213 |
| 200 // Primary miss: Compute hash for secondary probe. | 214 // Primary miss: Compute hash for secondary probe. |
| 201 __ sub(scratch, scratch, Operand(name)); | 215 __ rsb(scratch, name, Operand(scratch, LSL, kHeapObjectTagSize)); |
| 202 __ add(scratch, scratch, Operand(flags)); | 216 __ add(scratch, scratch, Operand(flags)); |
| 203 __ and_(scratch, | 217 __ Ubfx(scratch, scratch, kHeapObjectTagSize, kSecondaryTableBits); |
| 204 scratch, | |
| 205 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); | |
| 206 | 218 |
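
rsb (reverse subtract) computes rd = operand2 - rn, so the line above is scratch = (scratch << kHeapObjectTagSize) - name: a single instruction both re-applies the scaling that the primary Ubfx stripped and subtracts the name. Continuing the sketch above, with the same illustrative constants:

    #include <cstdint>

    const int kHeapObjectTagSize = 2;   // illustrative value
    const int kSecondaryTableBits = 9;  // illustrative: table size 512

    uint32_t SecondaryIndex(uint32_t primary_index, uint32_t name,
                            uint32_t flags) {
      // rsb: rescale the untagged index, then subtract the name.
      uint32_t h = (primary_index << kHeapObjectTagSize) - name;
      h += flags;                         // the following add
      // Ubfx again yields an untagged index into the secondary table.
      return (h >> kHeapObjectTagSize) & ((1u << kSecondaryTableBits) - 1);
    }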
| 207 // Probe the secondary table. | 219 // Probe the secondary table. |
| 208 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2); | 220 ProbeTable(isolate, |
| 221 masm, |
| 222 flags, |
| 223 kSecondary, |
| 224 name, |
| 225 scratch, |
| 226 kHeapObjectTagSize, |
| 227 extra, |
| 228 extra2); |
| 209 | 229 |
| 210 // Cache miss: Fall-through and let caller handle the miss by | 230 // Cache miss: Fall-through and let caller handle the miss by |
| 211 // entering the runtime system. | 231 // entering the runtime system. |
| 212 __ bind(&miss); | 232 __ bind(&miss); |
| 213 } | 233 } |
| 214 | 234 |
| 215 | 235 |
| 216 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, | 236 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, |
| 217 int index, | 237 int index, |
| 218 Register prototype) { | 238 Register prototype) { |
| (...skipping 3997 matching lines...) |
| 4216 Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss(); | 4236 Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss(); |
| 4217 __ Jump(ic_miss, RelocInfo::CODE_TARGET); | 4237 __ Jump(ic_miss, RelocInfo::CODE_TARGET); |
| 4218 } | 4238 } |
| 4219 | 4239 |
| 4220 | 4240 |
| 4221 #undef __ | 4241 #undef __ |
| 4222 | 4242 |
| 4223 } } // namespace v8::internal | 4243 } } // namespace v8::internal |
| 4224 | 4244 |
| 4225 #endif // V8_TARGET_ARCH_ARM | 4245 #endif // V8_TARGET_ARCH_ARM |