Index: runtime/vm/flow_graph_compiler_x64.h
===================================================================
--- runtime/vm/flow_graph_compiler_x64.h (revision 8400)
+++ runtime/vm/flow_graph_compiler_x64.h (working copy)
@@ -13,7 +13,6 @@
 #include "vm/assembler_macros.h"
 #include "vm/code_descriptors.h"
 #include "vm/code_generator.h"
-#include "vm/flow_graph_compiler_shared.h"
 #include "vm/intermediate_language.h"
 
 namespace dart {
@@ -24,59 +23,156 @@
 template <typename T> class GrowableArray;
 class ParsedFunction;
 
-class FlowGraphCompiler : public FlowGraphCompilerShared {
+class FlowGraphCompiler : public ValueObject {
+ private:
+  struct BlockInfo : public ZoneAllocated {
+   public:
+    BlockInfo() : label() { }
+    Label label;
+  };
+
  public:
   FlowGraphCompiler(Assembler* assembler,
                     const ParsedFunction& parsed_function,
                     const GrowableArray<BlockEntryInstr*>& block_order,
                     bool is_optimizing);
 
+  virtual ~FlowGraphCompiler();
+
+  // Accessors.
+  Assembler* assembler() const { return assembler_; }
+  const ParsedFunction& parsed_function() const { return parsed_function_; }
+  const GrowableArray<BlockEntryInstr*>& block_order() const {
+    return block_order_;
+  }
+  DescriptorList* pc_descriptors_list() const {
+    return pc_descriptors_list_;
+  }
+  BlockEntryInstr* current_block() const { return current_block_; }
+  void set_current_block(BlockEntryInstr* value) {
+    current_block_ = value;
+  }
+  bool is_optimizing() const { return is_optimizing_; }
+  const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }
+
+  // Constructor is lightweight; major initialization work should occur here.
+  // This makes it easier to measure time spent in the compiler.
+  void InitCompiler();
+
   void CompileGraph();
 
-  void GenerateCallRuntime(intptr_t cid,
-                           intptr_t token_index,
-                           intptr_t try_index,
-                           const RuntimeEntry& entry);
+  void VisitBlocks();
 
- private:
-  friend class DeoptimizationStub;
+  // Bail out of the flow graph compiler. Does not return to the caller.
+  void Bailout(const char* reason);
 
-  // TODO(fschneider): Clean up friend-class declarations once all code
-  // generator templates have been moved to intermediate_language_x64.cc.
-#define DECLARE_FRIEND(ShortName, ClassName) friend class ClassName;
-  FOR_EACH_COMPUTATION(DECLARE_FRIEND)
-#undef DECLARE_FRIEND
 
-  static const int kLocalsOffsetFromFP = (-1 * kWordSize);
+  // Returns 'true' if code generation for this function is complete, i.e.,
+  // no fall-through to regular code is needed.
+  bool TryIntrinsify();
 
-  virtual void VisitBlocks();
+  virtual void GenerateCallRuntime(intptr_t cid,
+                                   intptr_t token_index,
+                                   intptr_t try_index,
+                                   const RuntimeEntry& entry);
 
-  void EmitInstructionPrologue(Instruction* instr);
+  // Infrastructure copied from class CodeGenerator.
+  void GenerateCall(intptr_t token_index,
+                    intptr_t try_index,
+                    const ExternalLabel* label,
+                    PcDescriptors::Kind kind);
 
-  // Emit code to load a Value into register 'dst'.
-  void LoadValue(Register dst, Value* value);
+  void GenerateAssertAssignable(intptr_t cid,
+                                intptr_t token_index,
+                                intptr_t try_index,
+                                const AbstractType& dst_type,
+                                const String& dst_name);
 
+  void GenerateInstanceOf(intptr_t cid,
+                          intptr_t token_index,
+                          intptr_t try_index,
+                          const AbstractType& type,
+                          bool negate_result);
+
+  void GenerateInstanceCall(intptr_t cid,
+                            intptr_t token_index,
+                            intptr_t try_index,
+                            const String& function_name,
+                            intptr_t argument_count,
+                            const Array& argument_names,
+                            intptr_t checked_argument_count);
+
+  void GenerateStaticCall(intptr_t cid,
+                          intptr_t token_index,
+                          intptr_t try_index,
+                          const Function& function,
+                          intptr_t argument_count,
+                          const Array& argument_names);
+
+  void GenerateNumberTypeCheck(Register kClassIdReg,
+                               const AbstractType& type,
+                               Label* is_instance_lbl,
+                               Label* is_not_instance_lbl);
+  void GenerateStringTypeCheck(Register kClassIdReg,
+                               Label* is_instance_lbl,
+                               Label* is_not_instance_lbl);
+  void GenerateListTypeCheck(Register kClassIdReg,
+                             Label* is_instance_lbl);
+
   void EmitComment(Instruction* instr);
 
   // Returns pc-offset (in bytes) of the pc after the call, can be used to emit
   // pc-descriptor information.
-  virtual intptr_t EmitInstanceCall(ExternalLabel* target_label,
-                                    const ICData& ic_data,
-                                    const Array& arguments_descriptor,
-                                    intptr_t argument_count);
+  intptr_t EmitInstanceCall(ExternalLabel* target_label,
+                            const ICData& ic_data,
+                            const Array& arguments_descriptor,
+                            intptr_t argument_count);
 
+  intptr_t StackSize() const;
+
+  // Returns assembler label associated with the given block entry.
+  Label* GetBlockLabel(BlockEntryInstr* block_entry) const;
+
+  // Returns true if the next block after current in the current block order
+  // is the given block.
+  bool IsNextBlock(TargetEntryInstr* block_entry) const;
+
+  void AddExceptionHandler(intptr_t try_index, intptr_t pc_offset);
+  void AddCurrentDescriptor(PcDescriptors::Kind kind,
+                            intptr_t cid,
+                            intptr_t token_index,
+                            intptr_t try_index);
+  Label* AddDeoptStub(intptr_t deopt_id,
+                      intptr_t deopt_token_index,
+                      intptr_t try_index_,
+                      DeoptReasonId reason,
+                      Register reg1,
+                      Register reg2);
+
+  void FinalizeExceptionHandlers(const Code& code);
+  void FinalizePcDescriptors(const Code& code);
+  void FinalizeStackmaps(const Code& code);
+  void FinalizeVarDescriptors(const Code& code);
+  void FinalizeComments(const Code& code);
+
+  static const int kLocalsOffsetFromFP = (-1 * kWordSize);
+
+ private:
+  friend class DeoptimizationStub;
+
+  void GenerateDeferredCode();
+
+  virtual void EmitInstructionPrologue(Instruction* instr);
+
+  // Emit code to load a Value into register 'dst'.
+  void LoadValue(Register dst, Value* value);
+
   // Returns pc-offset (in bytes) of the pc after the call, can be used to emit
   // pc-descriptor information.
   virtual intptr_t EmitStaticCall(const Function& function,
                                   const Array& arguments_descriptor,
                                   intptr_t argument_count);
 
-  // Infrastructure copied from class CodeGenerator.
-  void GenerateCall(intptr_t token_index,
-                    intptr_t try_index,
-                    const ExternalLabel* label,
-                    PcDescriptors::Kind kind);
-
   // Type checking helper methods.
   virtual void CheckClassIds(Register class_id_reg,
                              const GrowableArray<intptr_t>& class_ids,
@@ -116,18 +212,6 @@
                                  Label* is_instance_lbl,
                                  Label* is_not_instance_lbl);
 
-  void GenerateAssertAssignable(intptr_t cid,
-                                intptr_t token_index,
-                                intptr_t try_index,
-                                const AbstractType& dst_type,
-                                const String& dst_name);
-
-  void GenerateInstanceOf(intptr_t cid,
-                          intptr_t token_index,
-                          intptr_t try_index,
-                          const AbstractType& type,
-                          bool negate_result);
-
   enum TypeTestStubKind {
     kTestTypeOneArg,
     kTestTypeTwoArgs,
@@ -148,9 +232,62 @@
   virtual void GenerateInlinedGetter(intptr_t offset);
   virtual void GenerateInlinedSetter(intptr_t offset);
 
+  // Map a block number in a forward iteration into the block number in the
+  // corresponding reverse iteration. Used to obtain an index into
+  // block_order for reverse iterations.
+  intptr_t reverse_index(intptr_t index) const {
+    return block_order_.length() - index - 1;
+  }
+
+  class Assembler* assembler_;
+  const ParsedFunction& parsed_function_;
+  const GrowableArray<BlockEntryInstr*>& block_order_;
+
+  // Compiler specific per-block state. Indexed by postorder block number
+  // for convenience. This is not the block's index in the block order,
+  // which is reverse postorder.
+  BlockEntryInstr* current_block_;
+  ExceptionHandlerList* exception_handlers_list_;
+  DescriptorList* pc_descriptors_list_;
+  StackmapBuilder* stackmap_builder_;
+  GrowableArray<BlockInfo*> block_info_;
+  GrowableArray<DeoptimizationStub*> deopt_stubs_;
+  const bool is_optimizing_;
+
   DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler);
 };
 
+
+class DeoptimizationStub : public ZoneAllocated {
+ public:
+  DeoptimizationStub(intptr_t deopt_id,
+                     intptr_t deopt_token_index,
+                     intptr_t try_index,
+                     DeoptReasonId reason)
+      : deopt_id_(deopt_id),
+        deopt_token_index_(deopt_token_index),
+        try_index_(try_index),
+        reason_(reason),
+        registers_(2),
+        entry_label_() {}
+
+  void Push(Register reg) { registers_.Add(reg); }
+  Label* entry_label() { return &entry_label_; }
+
+  // Implementation is in architecture specific file.
+  void GenerateCode(FlowGraphCompiler* compiler);
+
+ private:
+  const intptr_t deopt_id_;
+  const intptr_t deopt_token_index_;
+  const intptr_t try_index_;
+  const DeoptReasonId reason_;
+  GrowableArray<Register> registers_;
+  Label entry_label_;
+
+  DISALLOW_COPY_AND_ASSIGN(DeoptimizationStub);
+};
+
 }  // namespace dart
 
 #endif  // VM_FLOW_GRAPH_COMPILER_X64_H_
|