// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef VM_FLOW_GRAPH_COMPILER_SHARED_H_
#define VM_FLOW_GRAPH_COMPILER_SHARED_H_

#include "vm/allocation.h"
#include "vm/assembler.h"
#include "vm/code_descriptors.h"
#include "vm/code_generator.h"
#include "vm/growable_array.h"

namespace dart {

class BlockEntryInstr;
class ExceptionHandlerList;
class FlowGraphCompilerShared;
class ParsedFunction;
class TargetEntryInstr;

class DeoptimizationStub : public ZoneAllocated {
 public:
  DeoptimizationStub(intptr_t deopt_id,
                     intptr_t deopt_token_index,
                     intptr_t try_index,
                     DeoptReasonId reason)
      : deopt_id_(deopt_id),
        deopt_token_index_(deopt_token_index),
        try_index_(try_index),
        reason_(reason),
        registers_(2),
        entry_label_() {}

  void Push(Register reg) { registers_.Add(reg); }
  Label* entry_label() { return &entry_label_; }

  // The implementation is in the architecture-specific file.
  void GenerateCode(FlowGraphCompilerShared* compiler);

 private:
  const intptr_t deopt_id_;
  const intptr_t deopt_token_index_;
  const intptr_t try_index_;
  const DeoptReasonId reason_;
  GrowableArray<Register> registers_;
  Label entry_label_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizationStub);
};
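
// A rough sketch of the stub's lifecycle, based only on the declarations in
// this header (the call-site code below is hypothetical, not part of the
// interface): a stub is created while compiling a check, live registers are
// recorded with Push(), the failing check jumps to entry_label(), and the
// stub body is emitted out of line later via GenerateCode().
//
//   DeoptimizationStub* stub =
//       new DeoptimizationStub(deopt_id, token_index, try_index, reason);
//   stub->Push(EAX);                      // Register to describe/preserve.
//   __ j(NOT_ZERO, stub->entry_label());  // Deoptimize if the check fails.
//   ...
//   stub->GenerateCode(compiler);         // Emitted later, out of line.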


class FlowGraphCompilerShared : public ValueObject {
 public:
  FlowGraphCompilerShared(Assembler* assembler,
                          const ParsedFunction& parsed_function,
                          const GrowableArray<BlockEntryInstr*>& block_order,
                          bool is_optimizing);

  virtual ~FlowGraphCompilerShared();

  // The constructor is lightweight; the major initialization work should
  // happen here. This makes it easier to measure time spent in the compiler.
  void InitCompiler();
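
  // A minimal usage sketch (hypothetical driver code, assuming an
  // architecture-specific subclass such as FlowGraphCompiler): the
  // constructor stays cheap and the heavy setup happens in InitCompiler(),
  // so the two phases can be timed separately.
  //
  //   FlowGraphCompiler compiler(&assembler, parsed_function,
  //                              block_order, /*is_optimizing=*/false);
  //   compiler.InitCompiler();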
| 65 | |
| 66 Assembler* assembler() const { return assembler_; } | |
| 67 const ParsedFunction& parsed_function() const { return parsed_function_; } | |
| 68 const GrowableArray<BlockEntryInstr*>& block_order() const { | |
| 69 return block_order_; | |
| 70 } | |
| 71 DescriptorList* pc_descriptors_list() const { | |
| 72 return pc_descriptors_list_; | |
| 73 } | |
| 74 BlockEntryInstr* current_block() const { return current_block_; } | |
| 75 void set_current_block(BlockEntryInstr* value) { | |
| 76 current_block_ = value; | |
| 77 } | |
| 78 bool is_optimizing() const { return is_optimizing_; } | |
| 79 | |
| 80 intptr_t StackSize() const; | |
| 81 | |
  // Returns the assembler label associated with the given block entry.
  Label* GetBlockLabel(BlockEntryInstr* block_entry) const;

  // Returns true if the given block immediately follows the current block in
  // the current block order.
  bool IsNextBlock(TargetEntryInstr* block_entry) const;

  void AddExceptionHandler(intptr_t try_index, intptr_t pc_offset);
  void AddCurrentDescriptor(PcDescriptors::Kind kind,
                            intptr_t cid,
                            intptr_t token_index,
                            intptr_t try_index);
  Label* AddDeoptStub(intptr_t deopt_id,
                      intptr_t deopt_token_index,
                      intptr_t try_index,
                      DeoptReasonId reason,
                      Register reg1,
                      Register reg2);
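
  // Typical use at a guarded check site (a sketch; the register names and
  // the failing condition are illustrative assumptions):
  //
  //   Label* deopt = AddDeoptStub(deopt_id, token_index, try_index,
  //                               reason, EAX, EDX);
  //   __ j(NOT_EQUAL, deopt);  // Jump to the deferred deopt stub on failure.
  //
  // The two registers are recorded in the stub for use by its
  // architecture-specific GenerateCode().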

  void FinalizeExceptionHandlers(const Code& code);
  void FinalizePcDescriptors(const Code& code);
  void FinalizeStackmaps(const Code& code);
  void FinalizeVarDescriptors(const Code& code);
  void FinalizeComments(const Code& code);
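
  // Once assembly is done, the collected metadata is attached to the
  // generated Code object. A plausible finalization sequence (sketch only;
  // creating 'code' from the assembler buffer is outside this interface):
  //
  //   FinalizeExceptionHandlers(code);
  //   FinalizePcDescriptors(code);
  //   FinalizeStackmaps(code);
  //   FinalizeVarDescriptors(code);
  //   FinalizeComments(code);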

  void GenerateInstanceCall(intptr_t cid,
                            intptr_t token_index,
                            intptr_t try_index,
                            const String& function_name,
                            intptr_t argument_count,
                            const Array& argument_names,
                            intptr_t checked_argument_count);
  void GenerateStaticCall(intptr_t cid,
                          intptr_t token_index,
                          intptr_t try_index,
                          const Function& function,
                          intptr_t argument_count,
                          const Array& argument_names);
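
  // Example: lowering an instance call "a.add(x)" might look roughly like
  // this (hypothetical values; obtaining the selector symbol is outside this
  // interface):
  //
  //   const String& selector = ...;  // Symbol for "add".
  //   GenerateInstanceCall(cid, token_index, try_index, selector,
  //                        /*argument_count=*/2,         // receiver + x
  //                        Array::ZoneHandle(),          // no named arguments
  //                        /*checked_argument_count=*/1);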

  void GenerateNumberTypeCheck(Register kClassIdReg,
                               const AbstractType& type,
                               Label* is_instance_lbl,
                               Label* is_not_instance_lbl);
  void GenerateStringTypeCheck(Register kClassIdReg,
                               Label* is_instance_lbl,
                               Label* is_not_instance_lbl);
  void GenerateListTypeCheck(Register kClassIdReg,
                             Label* is_instance_lbl);

  void GenerateDeferredCode();

  // Returns 'true' if code generation for this function is complete, i.e.,
  // no fall-through to regular code is needed.
  bool TryIntrinsify();
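
  // A compile driver might use TryIntrinsify() like this (sketch only; the
  // surrounding driver code is not part of this class):
  //
  //   if (TryIntrinsify()) {
  //     return;  // Intrinsic code is complete; skip the regular body.
  //   }
  //   ...compile the regular function body...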

  virtual void GenerateInlinedGetter(intptr_t offset) = 0;
  virtual void GenerateInlinedSetter(intptr_t offset) = 0;
  // Returns the pc offset (in bytes) of the instruction after the call; it
  // can be used to emit pc-descriptor information.
  virtual intptr_t EmitInstanceCall(ExternalLabel* target_label,
                                    const ICData& ic_data,
                                    const Array& arguments_descriptor,
                                    intptr_t argument_count) = 0;
  // Returns the pc offset (in bytes) of the instruction after the call; it
  // can be used to emit pc-descriptor information.
  virtual intptr_t EmitStaticCall(const Function& function,
                                  const Array& arguments_descriptor,
                                  intptr_t argument_count) = 0;
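
  // Sketch of how the returned offset pairs with the descriptor machinery
  // (the descriptor kind here is an illustrative assumption):
  //
  //   const intptr_t offset = EmitStaticCall(function, arguments_descriptor,
  //                                          argument_count);
  //   // Immediately after the call is emitted, the current assembler pc
  //   // equals 'offset', so AddCurrentDescriptor records it for this call.
  //   AddCurrentDescriptor(PcDescriptors::kFuncCall, cid,
  //                        token_index, try_index);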

  virtual void CheckClassIds(Register class_id_reg,
                             const GrowableArray<intptr_t>& class_ids,
                             Label* is_equal_lbl,
                             Label* is_not_equal_lbl) = 0;

  struct BlockInfo : public ZoneAllocated {
   public:
    BlockInfo() : label() { }

    Label label;
  };

  const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }

  // Bail out of the flow graph compiler. Does not return to the caller.
  void Bailout(const char* reason);

 private:
  // Map a block number in a forward iteration into the block number in the
  // corresponding reverse iteration. Used to obtain an index into
  // block_order for reverse iterations.
  intptr_t reverse_index(intptr_t index) const {
    return block_order_.length() - index - 1;
  }
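
  // Worked example: with block_order_.length() == 5, reverse_index(0) == 4,
  // reverse_index(1) == 3, ..., reverse_index(4) == 0.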

  class Assembler* assembler_;
  const ParsedFunction& parsed_function_;
  const GrowableArray<BlockEntryInstr*>& block_order_;

  BlockEntryInstr* current_block_;
  ExceptionHandlerList* exception_handlers_list_;
  DescriptorList* pc_descriptors_list_;
  StackmapBuilder* stackmap_builder_;
  // Compiler-specific per-block state, indexed by postorder block number for
  // convenience. This is not the block's index in the block order, which is
  // reverse postorder.
  GrowableArray<BlockInfo*> block_info_;
  GrowableArray<DeoptimizationStub*> deopt_stubs_;
  const bool is_optimizing_;

  DISALLOW_COPY_AND_ASSIGN(FlowGraphCompilerShared);
};


}  // namespace dart


#endif  // VM_FLOW_GRAPH_COMPILER_SHARED_H_