OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 66 matching lines...)
77 GenerateDeferredCode() && | 77 GenerateDeferredCode() && |
78 GenerateDeoptJumpTable() && | 78 GenerateDeoptJumpTable() && |
79 GenerateSafepointTable(); | 79 GenerateSafepointTable(); |
80 } | 80 } |
81 | 81 |
82 | 82 |
83 void LCodeGen::FinishCode(Handle<Code> code) { | 83 void LCodeGen::FinishCode(Handle<Code> code) { |
84 ASSERT(is_done()); | 84 ASSERT(is_done()); |
85 code->set_stack_slots(GetStackSlotCount()); | 85 code->set_stack_slots(GetStackSlotCount()); |
86 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 86 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 87 if (FLAG_weak_embedded_maps_in_optimized_code) { |
| 88 RegisterDependentCodeForEmbeddedMaps(code); |
| 89 } |
87 PopulateDeoptimizationData(code); | 90 PopulateDeoptimizationData(code); |
88 } | 91 } |
89 | 92 |
90 | 93 |
91 void LChunkBuilder::Abort(const char* reason) { | 94 void LChunkBuilder::Abort(const char* reason) { |
92 info()->set_bailout_reason(reason); | 95 info()->set_bailout_reason(reason); |
93 status_ = ABORTED; | 96 status_ = ABORTED; |
94 } | 97 } |
95 | 98 |
96 | 99 |
(...skipping 742 matching lines...)
839 (deopt_jump_table_.last().is_lazy_deopt != needs_lazy_deopt) || | 842 (deopt_jump_table_.last().is_lazy_deopt != needs_lazy_deopt) || |
840 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 843 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { |
841 JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt); | 844 JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt); |
842 deopt_jump_table_.Add(table_entry, zone()); | 845 deopt_jump_table_.Add(table_entry, zone()); |
843 } | 846 } |
844 __ Branch(&deopt_jump_table_.last().label, cc, src1, src2); | 847 __ Branch(&deopt_jump_table_.last().label, cc, src1, src2); |
845 } | 848 } |
846 } | 849 } |
847 | 850 |
848 | 851 |
| 852 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { |
| 853 ZoneList<Handle<Map> > maps(1, zone()); |
| 854 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 855 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { |
| 856 RelocInfo::Mode mode = it.rinfo()->rmode(); |
| 857 if (mode == RelocInfo::EMBEDDED_OBJECT && |
| 858 it.rinfo()->target_object()->IsMap()) { |
| 859 Handle<Map> map(Map::cast(it.rinfo()->target_object())); |
| 860 if (map->CanTransition()) { |
| 861 maps.Add(map, zone()); |
| 862 } |
| 863 } |
| 864 } |
| 865 #ifdef VERIFY_HEAP |
| 866 // This disables verification of weak embedded maps after full GC. |
| 867 // AddDependentCode can cause a GC, which would observe the state where |
| 868 // this code is not yet in the depended code lists of the embedded maps. |
| 869 NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps; |
| 870 #endif |
| 871 for (int i = 0; i < maps.length(); i++) { |
| 872 maps.at(i)->AddDependentCode(code); |
| 873 } |
| 874 } |
| 875 |
| 876 |
849 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 877 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
850 int length = deoptimizations_.length(); | 878 int length = deoptimizations_.length(); |
851 if (length == 0) return; | 879 if (length == 0) return; |
852 Handle<DeoptimizationInputData> data = | 880 Handle<DeoptimizationInputData> data = |
853 factory()->NewDeoptimizationInputData(length, TENURED); | 881 factory()->NewDeoptimizationInputData(length, TENURED); |
854 | 882 |
855 Handle<ByteArray> translations = translations_.CreateByteArray(); | 883 Handle<ByteArray> translations = translations_.CreateByteArray(); |
856 data->SetTranslationByteArray(*translations); | 884 data->SetTranslationByteArray(*translations); |
857 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); | 885 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); |
858 | 886 |
(...skipping 5137 matching lines...)
5996 __ Subu(scratch, result, scratch); | 6024 __ Subu(scratch, result, scratch); |
5997 __ lw(result, FieldMemOperand(scratch, | 6025 __ lw(result, FieldMemOperand(scratch, |
5998 FixedArray::kHeaderSize - kPointerSize)); | 6026 FixedArray::kHeaderSize - kPointerSize)); |
5999 __ bind(&done); | 6027 __ bind(&done); |
6000 } | 6028 } |
6001 | 6029 |
6002 | 6030 |
6003 #undef __ | 6031 #undef __ |
6004 | 6032 |
6005 } } // namespace v8::internal | 6033 } } // namespace v8::internal |
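
Note on the hunk adding RegisterDependentCodeForEmbeddedMaps above: when FLAG_weak_embedded_maps_in_optimized_code is on, FinishCode walks the code object's EMBEDDED_OBJECT relocation entries, collects every embedded map that can still transition, and adds the code object to each map's dependent-code list, so the optimized code can be invalidated later if one of those maps changes. Below is a minimal, self-contained sketch of that collect-then-register pattern for illustration only; FakeMap, FakeCode, and NotifyMapChanged are invented stand-ins, not V8 types or APIs, and the real patch iterates relocation info rather than a plain vector.

#include <iostream>
#include <string>
#include <vector>

struct FakeCode;  // forward declaration: maps keep a list of dependent code

// Map-like stand-in: optimized code may embed pointers to it, and it keeps
// a list of dependent code objects to invalidate when it changes.
struct FakeMap {
  std::string name;
  bool can_transition;                    // analogous to Map::CanTransition()
  std::vector<FakeCode*> dependent_code;  // analogous to the dependent-code list
};

// Code-like stand-in: its embedded_maps vector stands in for the
// EMBEDDED_OBJECT relocation entries walked by RelocIterator in the patch.
struct FakeCode {
  std::string name;
  std::vector<FakeMap*> embedded_maps;
  bool deoptimized;
};

// Mirrors the shape of RegisterDependentCodeForEmbeddedMaps: collect every
// embedded map that can still transition, then register the code on each.
void RegisterDependentCodeForEmbeddedMaps(FakeCode* code) {
  std::vector<FakeMap*> maps;
  for (FakeMap* map : code->embedded_maps) {
    if (map->can_transition) maps.push_back(map);
  }
  for (FakeMap* map : maps) {
    map->dependent_code.push_back(code);
  }
}

// Hypothetical trigger: when a weakly embedded map transitions or dies,
// every dependent code object is thrown away (deoptimized).
void NotifyMapChanged(FakeMap* map) {
  for (FakeCode* code : map->dependent_code) {
    code->deoptimized = true;
  }
  map->dependent_code.clear();
}

int main() {
  FakeMap stable{"stable_map", false, {}};
  FakeMap unstable{"transitioning_map", true, {}};
  FakeCode code{"optimized_code", {&stable, &unstable}, false};

  RegisterDependentCodeForEmbeddedMaps(&code);
  NotifyMapChanged(&unstable);  // invalidates code that embeds this map

  std::cout << code.name << " deoptimized: " << std::boolalpha
            << code.deoptimized << std::endl;  // prints: optimized_code deoptimized: true
  return 0;
}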