OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 85 matching lines...)
96 ASSERT(is_done()); | 96 ASSERT(is_done()); |
97 code->set_stack_slots(GetStackSlotCount()); | 97 code->set_stack_slots(GetStackSlotCount()); |
98 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 98 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
99 if (FLAG_weak_embedded_maps_in_optimized_code) { | 99 if (FLAG_weak_embedded_maps_in_optimized_code) { |
100 RegisterDependentCodeForEmbeddedMaps(code); | 100 RegisterDependentCodeForEmbeddedMaps(code); |
101 } | 101 } |
102 PopulateDeoptimizationData(code); | 102 PopulateDeoptimizationData(code); |
103 if (!info()->IsStub()) { | 103 if (!info()->IsStub()) { |
104 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); | 104 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); |
105 } | 105 } |
| 106 for (int i = 0; i < prototype_maps_.length(); i++) {
| 107 prototype_maps_.at(i)->AddDependentCode( |
| 108 DependentCode::kPrototypeCheckGroup, code); |
| 109 } |
106 } | 110 } |
107 | 111 |
108 | 112 |
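
The loop added at new lines 106-109 registers the finished code object with every prototype map collected during code generation (see DoCheckPrototypeMaps further down), so that a later change to any of those maps can invalidate the optimized code. Below is a minimal, self-contained sketch of this collect-then-register pattern; the types and names are hypothetical simplifications, not V8's actual Map, Code, or DependentCode classes.

// Hypothetical, simplified illustration of the pattern above: maps seen
// while emitting individual instructions are remembered, and the finished
// code is registered as dependent on all of them in one pass at the end.
#include <cstdio>
#include <string>
#include <vector>

struct Code;  // forward declaration for the dependency list

struct Map {
  std::string name;
  std::vector<Code*> dependent_code;  // stand-in for DependentCode groups
};

struct Code {
  std::string id;
};

class CodeGenSketch {
 public:
  // Called while generating an instruction that relies on a prototype map.
  void RecordPrototypeMap(Map* map) { prototype_maps_.push_back(map); }

  // Mirrors the added loop in FinishCode(): register the code object
  // with every map collected during generation.
  void FinishCode(Code* code) {
    for (Map* map : prototype_maps_) {
      map->dependent_code.push_back(code);
    }
  }

 private:
  std::vector<Map*> prototype_maps_;
};

int main() {
  Map proto_map{"proto_map", {}};
  Code code{"optimized_code_1"};
  CodeGenSketch gen;
  gen.RecordPrototypeMap(&proto_map);
  gen.FinishCode(&code);
  std::printf("%zu dependent code object(s)\n", proto_map.dependent_code.size());
  return 0;
}
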
109 void LCodeGen::Abort(const char* reason) { | 113 void LCodeGen::Abort(const char* reason) { |
110 info()->set_bailout_reason(reason); | 114 info()->set_bailout_reason(reason); |
111 status_ = ABORTED; | 115 status_ = ABORTED; |
112 } | 116 } |
113 | 117 |
114 | 118 |
115 void LCodeGen::Comment(const char* format, ...) { | 119 void LCodeGen::Comment(const char* format, ...) { |
(...skipping 796 matching lines...)
912 } | 916 } |
913 } | 917 } |
914 } | 918 } |
915 #ifdef VERIFY_HEAP | 919 #ifdef VERIFY_HEAP |
916 // This disables verification of weak embedded maps after full GC. | 920 // This disables verification of weak embedded maps after full GC. |
917 // AddDependentCode can cause a GC, which would observe the state where | 921 // AddDependentCode can cause a GC, which would observe the state where |
918 // this code is not yet in the dependent code lists of the embedded maps. | 922 // this code is not yet in the dependent code lists of the embedded maps. |
919 NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps; | 923 NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps; |
920 #endif | 924 #endif |
921 for (int i = 0; i < maps.length(); i++) { | 925 for (int i = 0; i < maps.length(); i++) { |
922 maps.at(i)->AddDependentCode(code); | 926 maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code); |
923 } | 927 } |
924 } | 928 } |
925 | 929 |
926 | 930 |
927 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 931 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
928 int length = deoptimizations_.length(); | 932 int length = deoptimizations_.length(); |
929 if (length == 0) return; | 933 if (length == 0) return; |
930 Handle<DeoptimizationInputData> data = | 934 Handle<DeoptimizationInputData> data = |
931 factory()->NewDeoptimizationInputData(length, TENURED); | 935 factory()->NewDeoptimizationInputData(length, TENURED); |
932 | 936 |
(...skipping 4385 matching lines...)
5318 | 5322 |
5319 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { | 5323 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
5320 ASSERT(instr->temp()->Equals(instr->result())); | 5324 ASSERT(instr->temp()->Equals(instr->result())); |
5321 Register reg = ToRegister(instr->temp()); | 5325 Register reg = ToRegister(instr->temp()); |
5322 | 5326 |
5323 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); | 5327 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); |
5324 ZoneList<Handle<Map> >* maps = instr->maps(); | 5328 ZoneList<Handle<Map> >* maps = instr->maps(); |
5325 | 5329 |
5326 ASSERT(prototypes->length() == maps->length()); | 5330 ASSERT(prototypes->length() == maps->length()); |
5327 | 5331 |
5328 for (int i = 0; i < prototypes->length(); i++) { | 5332 // TODO(ulan): Move this check to hydrogen and split HCheckPrototypeMaps |
5329 __ LoadHeapObject(reg, prototypes->at(i)); | 5333 // into two instructions: one that checks the prototypes and another that |
5330 DoCheckMapCommon(reg, maps->at(i), ALLOW_ELEMENT_TRANSITION_MAPS, instr); | 5334 // loads the holder (HConstant). Find a way to do it without breaking |
| 5335 // parallel recompilation. |
| 5336 if (instr->hydrogen()->CanOmitPrototypeChecks()) { |
| 5337 for (int i = 0; i < maps->length(); i++) { |
| 5338 prototype_maps_.Add(maps->at(i), info()->zone()); |
| 5339 } |
| 5340 __ LoadHeapObject(reg, prototypes->at(prototypes->length() - 1)); |
| 5341 } else { |
| 5342 for (int i = 0; i < prototypes->length(); i++) { |
| 5343 __ LoadHeapObject(reg, prototypes->at(i)); |
| 5344 DoCheckMapCommon(reg, maps->at(i), ALLOW_ELEMENT_TRANSITION_MAPS, instr); |
| 5345 } |
5331 } | 5346 } |
5332 } | 5347 } |
5333 | 5348 |
5334 | 5349 |
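
The rewritten DoCheckPrototypeMaps above takes one of two paths: when hydrogen reports that the prototype checks can be omitted, it only records the maps (for the dependent-code registration in FinishCode) and loads the last prototype, the holder; otherwise it loads each prototype in the chain and checks its map, deoptimizing on a mismatch. A rough standalone sketch of that branch structure follows, using hypothetical placeholder types instead of V8's registers, maps, and deoptimization machinery.

#include <cassert>
#include <cstddef>
#include <cstdio>
#include <vector>

// Hypothetical placeholders for the real V8 types.
struct Map { int id; };
struct JSObject { Map* map; };

struct PrototypeCheckSketch {
  std::vector<Map*> recorded_maps;  // stands in for prototype_maps_
  JSObject* loaded = nullptr;       // stands in for the result register

  // Returns false where the real code would emit a deoptimization.
  bool Check(const std::vector<JSObject*>& prototypes,
             const std::vector<Map*>& maps,
             bool can_omit_checks) {
    assert(prototypes.size() == maps.size());
    if (can_omit_checks) {
      // Record the maps so the finished code can be invalidated if any of
      // them changes, then just load the holder (the last prototype).
      // Assumes a non-empty prototype chain, as the generated code does.
      for (Map* m : maps) recorded_maps.push_back(m);
      loaded = prototypes.back();
      return true;
    }
    // Otherwise walk the chain, loading each prototype and checking its map.
    for (std::size_t i = 0; i < prototypes.size(); ++i) {
      loaded = prototypes[i];
      if (loaded->map != maps[i]) return false;  // would deoptimize here
    }
    return true;
  }
};

int main() {
  Map m0{0}, m1{1};
  JSObject p0{&m0}, p1{&m1};
  PrototypeCheckSketch sketch;
  bool ok = sketch.Check({&p0, &p1}, {&m0, &m1}, /*can_omit_checks=*/false);
  std::printf("checks %s\n", ok ? "passed" : "failed");
  return 0;
}
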
5335 void LCodeGen::DoAllocateObject(LAllocateObject* instr) { | 5350 void LCodeGen::DoAllocateObject(LAllocateObject* instr) { |
5336 class DeferredAllocateObject: public LDeferredCode { | 5351 class DeferredAllocateObject: public LDeferredCode { |
5337 public: | 5352 public: |
5338 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr) | 5353 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr) |
5339 : LDeferredCode(codegen), instr_(instr) { } | 5354 : LDeferredCode(codegen), instr_(instr) { } |
5340 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); } | 5355 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); } |
(...skipping 830 matching lines...)
6171 FixedArray::kHeaderSize - kPointerSize)); | 6186 FixedArray::kHeaderSize - kPointerSize)); |
6172 __ bind(&done); | 6187 __ bind(&done); |
6173 } | 6188 } |
6174 | 6189 |
6175 | 6190 |
6176 #undef __ | 6191 #undef __ |
6177 | 6192 |
6178 } } // namespace v8::internal | 6193 } } // namespace v8::internal |
6179 | 6194 |
6180 #endif // V8_TARGET_ARCH_IA32 | 6195 #endif // V8_TARGET_ARCH_IA32 |