OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 553 matching lines...)
564 LInstruction* instr) { | 564 LInstruction* instr) { |
565 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); | 565 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); |
566 } | 566 } |
567 | 567 |
568 | 568 |
569 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 569 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
570 RelocInfo::Mode mode, | 570 RelocInfo::Mode mode, |
571 LInstruction* instr, | 571 LInstruction* instr, |
572 SafepointMode safepoint_mode) { | 572 SafepointMode safepoint_mode) { |
573 ASSERT(instr != NULL); | 573 ASSERT(instr != NULL); |
| 574 // Block literal pool emission to ensure the nop indicating no inlined |
| 575 // smi code is in the correct position. |
| 576 Assembler::BlockConstPoolScope block_const_pool(masm()); |
574 LPointerMap* pointers = instr->pointer_map(); | 577 LPointerMap* pointers = instr->pointer_map(); |
575 RecordPosition(pointers->position()); | 578 RecordPosition(pointers->position()); |
576 __ Call(code, mode); | 579 __ Call(code, mode); |
577 RecordSafepointWithLazyDeopt(instr, safepoint_mode); | 580 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
578 | 581 |
579 // Signal that we don't inline smi code before these stubs in the | 582 // Signal that we don't inline smi code before these stubs in the |
580 // optimizing code generator. | 583 // optimizing code generator. |
581 if (code->kind() == Code::BINARY_OP_IC || | 584 if (code->kind() == Code::BINARY_OP_IC || |
582 code->kind() == Code::COMPARE_IC) { | 585 code->kind() == Code::COMPARE_IC) { |
583 __ nop(); | 586 __ nop(); |
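Note on the fix above: Assembler::BlockConstPoolScope is an RAII guard that keeps the ARM assembler from flushing its pending literal pool between the call and the marker nop; without it, pool data could land exactly where the IC patcher expects the nop. A minimal sketch of the blocking mechanism, using a hypothetical Assembler with a nesting counter (not the real V8 class, whose details differ):

    class Assembler {
     public:
      // RAII guard: while at least one scope is alive, the assembler must
      // not flush the constant pool, so no literal data can be emitted
      // between two adjacent instructions.
      class BlockConstPoolScope {
       public:
        explicit BlockConstPoolScope(Assembler* assem) : assem_(assem) {
          assem_->const_pool_blocked_nesting_++;
        }
        ~BlockConstPoolScope() { assem_->const_pool_blocked_nesting_--; }
       private:
        Assembler* assem_;
      };

      bool is_const_pool_blocked() const {
        return const_pool_blocked_nesting_ > 0;
      }

      void MaybeEmitConstPool() {
        if (is_const_pool_blocked()) return;  // deferred, not dropped
        // ... flush pending 32-bit literals here ...
      }

     private:
      int const_pool_blocked_nesting_ = 0;
    };

Because the scopes nest, CallCodeGeneric and its callers (see DoArithmeticT below) can each open one around the same call without conflict.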
(...skipping 1093 matching lines...)
1677 } | 1680 } |
1678 } | 1681 } |
1679 | 1682 |
1680 | 1683 |
1681 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1684 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
1682 ASSERT(ToRegister(instr->InputAt(0)).is(r1)); | 1685 ASSERT(ToRegister(instr->InputAt(0)).is(r1)); |
1683 ASSERT(ToRegister(instr->InputAt(1)).is(r0)); | 1686 ASSERT(ToRegister(instr->InputAt(1)).is(r0)); |
1684 ASSERT(ToRegister(instr->result()).is(r0)); | 1687 ASSERT(ToRegister(instr->result()).is(r0)); |
1685 | 1688 |
1686 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1689 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 1690 // Block literal pool emission to ensure the nop indicating no inlined |
| 1691 // smi code is in the correct position. |
| 1692 Assembler::BlockConstPoolScope block_const_pool(masm()); |
1687 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1693 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
1688 __ nop(); // Signals no inlined code. | 1694 __ nop(); // Signals no inlined code. |
1689 } | 1695 } |
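For context on why the nop must sit immediately after the call: the smi-code patcher inspects the instruction at the call's return address to decide whether there is inlined smi code to toggle. A hedged sketch of that check (kNopInstruction is the standard ARM encoding of mov r0, r0; the helper name is hypothetical):

    #include <cstdint>

    const uint32_t kNopInstruction = 0xE1A00000;  // mov r0, r0

    // If the instruction right after the call is the marker nop, the
    // optimizing compiler emitted no inlined smi code. A constant pool
    // emitted at this position would be misread as an instruction.
    bool HasInlinedSmiCode(const uint32_t* return_address) {
      return *return_address != kNopInstruction;
    }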
1690 | 1696 |
1691 | 1697 |
1692 int LCodeGen::GetNextEmittedBlock(int block) { | 1698 int LCodeGen::GetNextEmittedBlock(int block) { |
1693 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 1699 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
1694 LLabel* label = chunk_->GetLabel(i); | 1700 LLabel* label = chunk_->GetLabel(i); |
1695 if (!label->HasReplacement()) return i; | 1701 if (!label->HasReplacement()) return i; |
1696 } | 1702 } |
(...skipping 611 matching lines...)
2308 | 2314 |
2309 // A Smi is not an instance of anything. | 2315 // A Smi is not an instance of anything. |
2310 __ JumpIfSmi(object, &false_result); | 2316 __ JumpIfSmi(object, &false_result); |
2311 | 2317 |
2312 // This is the inlined call site instanceof cache. The two occurrences of the | 2318 // This is the inlined call site instanceof cache. The two occurrences of the |
2313 // hole value will be patched to the last map/result pair generated by the | 2319 // hole value will be patched to the last map/result pair generated by the |
2314 // instanceof stub. | 2320 // instanceof stub. |
2315 Label cache_miss; | 2321 Label cache_miss; |
2316 Register map = temp; | 2322 Register map = temp; |
2317 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 2323 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
2318 __ bind(deferred->map_check()); // Label for calculating code patching. | 2324 { |
2319 // We use Factory::the_hole_value() on purpose instead of loading from the | 2325 // Block constant pool emission to ensure the positions of instructions are |
2320 // root array to force relocation to be able to later patch with | 2326 // as expected by the patcher. See InstanceofStub::Generate(). |
2321 // the cached map. | 2327 Assembler::BlockConstPoolScope block_const_pool(masm()); |
2322 Handle<JSGlobalPropertyCell> cell = | 2328 __ bind(deferred->map_check()); // Label for calculating code patching. |
2323 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); | 2329 // We use Factory::the_hole_value() on purpose instead of loading from the |
2324 __ mov(ip, Operand(Handle<Object>(cell))); | 2330 // root array to force relocation to be able to later patch with |
2325 __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset)); | 2331 // the cached map. |
2326 __ cmp(map, Operand(ip)); | 2332 Handle<JSGlobalPropertyCell> cell = |
2327 __ b(ne, &cache_miss); | 2333 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); |
2328 // We use Factory::the_hole_value() on purpose instead of loading from the | 2334 __ mov(ip, Operand(Handle<Object>(cell))); |
2329 // root array to force relocation to be able to later patch | 2335 __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset)); |
2330 // with true or false. | 2336 __ cmp(map, Operand(ip)); |
2331 __ mov(result, Operand(factory()->the_hole_value())); | 2337 __ b(ne, &cache_miss); |
| 2338 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 2339 // root array to force relocation to be able to later patch |
| 2340 // with true or false. |
| 2341 __ mov(result, Operand(factory()->the_hole_value())); |
| 2342 } |
2332 __ b(&done); | 2343 __ b(&done); |
2333 | 2344 |
2334 // The inlined call site cache did not match. Check null and string before | 2345 // The inlined call site cache did not match. Check null and string before |
2335 // calling the deferred code. | 2346 // calling the deferred code. |
2336 __ bind(&cache_miss); | 2347 __ bind(&cache_miss); |
2337 // Null is not an instance of anything. | 2348 // Null is not an instance of anything. |
2338 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 2349 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
2339 __ cmp(object, Operand(ip)); | 2350 __ cmp(object, Operand(ip)); |
2340 __ b(eq, &false_result); | 2351 __ b(eq, &false_result); |
2341 | 2352 |
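The new scope around the map check exists because InstanceofStub::Generate() patches the mov of the hole-value cell and the mov of the boolean result at fixed offsets from deferred->map_check(); a constant pool emitted inside that window would shift those offsets. A sketch of the address arithmetic the patcher depends on (the delta constants are made up for illustration; the real values are defined by the stub):

    #include <cstdint>

    const int kInstrSize = 4;
    // Hypothetical deltas from the map_check label, in bytes.
    const int kMapLoadDelta = 1 * kInstrSize;   // mov ip, Operand(cell)
    const int kBoolLoadDelta = 5 * kInstrSize;  // mov result, the_hole

    // With the constant pool blocked, every instruction after map_check
    // sits at a compile-time-known offset, so each patch site is found by
    // pure pointer arithmetic.
    uint8_t* MapLoadSite(uint8_t* map_check_address) {
      return map_check_address + kMapLoadDelta;
    }
    uint8_t* BoolLoadSite(uint8_t* map_check_address) {
      return map_check_address + kBoolLoadDelta;
    }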
(...skipping 2790 matching lines...)
5132 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); | 5143 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); |
5133 } | 5144 } |
5134 | 5145 |
5135 | 5146 |
5136 void LCodeGen::EnsureSpaceForLazyDeopt() { | 5147 void LCodeGen::EnsureSpaceForLazyDeopt() { |
5137 // Ensure that we have enough space after the previous lazy-bailout | 5148 // Ensure that we have enough space after the previous lazy-bailout |
5138 // instruction for patching the code here. | 5149 // instruction for patching the code here. |
5139 int current_pc = masm()->pc_offset(); | 5150 int current_pc = masm()->pc_offset(); |
5140 int patch_size = Deoptimizer::patch_size(); | 5151 int patch_size = Deoptimizer::patch_size(); |
5141 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | 5152 if (current_pc < last_lazy_deopt_pc_ + patch_size) { |
| 5153 // Block literal pool emission for the duration of the padding. |
| 5154 Assembler::BlockConstPoolScope block_const_pool(masm()); |
5142 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; | 5155 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; |
5143 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | 5156 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); |
5144 while (padding_size > 0) { | 5157 while (padding_size > 0) { |
5145 __ nop(); | 5158 __ nop(); |
5146 padding_size -= Assembler::kInstrSize; | 5159 padding_size -= Assembler::kInstrSize; |
5147 } | 5160 } |
5148 } | 5161 } |
5149 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5162 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5150 } | 5163 } |
5151 | 5164 |
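The padding loop above guarantees at least Deoptimizer::patch_size() bytes between consecutive lazy-deopt points, and blocking the constant pool keeps the padding pure nops. The arithmetic, extracted into a standalone sketch (the variable names match the source, but this helper itself is hypothetical):

    #include <cassert>

    const int kInstrSize = 4;  // ARM: every instruction is 4 bytes

    // Number of nops needed so the current pc ends up at least patch_size
    // bytes past the previous lazy-deopt point.
    int PaddingNops(int last_lazy_deopt_pc, int patch_size, int current_pc) {
      if (current_pc >= last_lazy_deopt_pc + patch_size) return 0;
      int padding_size = last_lazy_deopt_pc + patch_size - current_pc;
      assert(padding_size % kInstrSize == 0);
      return padding_size / kInstrSize;
    }

For example, with last_lazy_deopt_pc = 100, patch_size = 16 and current_pc = 108, eight bytes of the patch window remain unreserved, so PaddingNops(100, 16, 108) == 2.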
(...skipping 193 matching lines...)
5345 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); | 5358 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); |
5346 __ ldr(result, FieldMemOperand(scratch, | 5359 __ ldr(result, FieldMemOperand(scratch, |
5347 FixedArray::kHeaderSize - kPointerSize)); | 5360 FixedArray::kHeaderSize - kPointerSize)); |
5348 __ bind(&done); | 5361 __ bind(&done); |
5349 } | 5362 } |
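The sub/ldr pair above turns a smi-tagged index into a byte offset in a single shifted operand: on 32-bit ARM a smi stores value v as v << kSmiTagSize, so shifting it left by kPointerSizeLog2 - kSmiTagSize yields v * kPointerSize directly. A sketch of the tag arithmetic (constants as on 32-bit ARM; the helper is hypothetical):

    #include <cstdint>

    const int kSmiTagSize = 1;       // low bit is the smi tag
    const int kPointerSizeLog2 = 2;  // 4-byte pointers

    // A smi encoding v is the word (v << kSmiTagSize). Scaling it to a
    // byte offset of v * kPointerSize only needs the remaining shift.
    int32_t SmiIndexToByteOffset(int32_t smi) {
      return smi << (kPointerSizeLog2 - kSmiTagSize);
    }

For example, the smi encoding of 3 is 6, and 6 << 1 == 12 == 3 * kPointerSize.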
5350 | 5363 |
5351 | 5364 |
5352 #undef __ | 5365 #undef __ |
5353 | 5366 |
5354 } } // namespace v8::internal | 5367 } } // namespace v8::internal |