| Index: runtime/vm/flow_graph_compiler_ia32.h
|
| ===================================================================
|
| --- runtime/vm/flow_graph_compiler_ia32.h (revision 8400)
|
| +++ runtime/vm/flow_graph_compiler_ia32.h (working copy)
|
| @@ -13,7 +13,6 @@
|
| #include "vm/assembler_macros.h"
|
| #include "vm/code_descriptors.h"
|
| #include "vm/code_generator.h"
|
| -#include "vm/flow_graph_compiler_shared.h"
|
| #include "vm/intermediate_language.h"
|
|
|
| namespace dart {
|
| @@ -28,55 +27,149 @@
|
|
|
| // Stubbed out implementation of graph compiler, bails out immediately if
|
| // CompileGraph is called. The rest of the public API is UNIMPLEMENTED.
|
| -class FlowGraphCompiler : public FlowGraphCompilerShared {
|
| +class FlowGraphCompiler : public ValueObject {
|
| + private:
|
| + struct BlockInfo : public ZoneAllocated {
|
| + public:
|
| + BlockInfo() : label() { }
|
| + Label label;
|
| + };
|
| +
|
| public:
|
| FlowGraphCompiler(Assembler* assembler,
|
| const ParsedFunction& parsed_function,
|
| const GrowableArray<BlockEntryInstr*>& block_order,
|
| bool is_optimizing);
|
|
|
| + virtual ~FlowGraphCompiler();
|
| +
|
| + // Accessors.
|
| + Assembler* assembler() const { return assembler_; }
|
| + const ParsedFunction& parsed_function() const { return parsed_function_; }
|
| + const GrowableArray<BlockEntryInstr*>& block_order() const {
|
| + return block_order_;
|
| + }
|
| + DescriptorList* pc_descriptors_list() const {
|
| + return pc_descriptors_list_;
|
| + }
|
| + BlockEntryInstr* current_block() const { return current_block_; }
|
| + void set_current_block(BlockEntryInstr* value) {
|
| + current_block_ = value;
|
| + }
|
| + bool is_optimizing() const { return is_optimizing_; }
|
| + const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }
|
| +
|
| + // Constructor is lightweight; major initialization work should occur here.
|
| + // This makes it easier to measure time spent in the compiler.
|
| + void InitCompiler();
|
| +
|
| void CompileGraph();
|
|
|
| - void GenerateCallRuntime(intptr_t cid,
|
| - intptr_t token_index,
|
| - intptr_t try_index,
|
| - const RuntimeEntry& entry);
|
| + void VisitBlocks();
|
|
|
| - // Returns pc-offset (in bytes) of the pc after the call, can be used to emit
|
| - // pc-descriptor information.
|
| - virtual intptr_t EmitInstanceCall(ExternalLabel* target_label,
|
| - const ICData& ic_data,
|
| - const Array& arguments_descriptor,
|
| - intptr_t argument_count);
|
| + // Bail out of the flow graph compiler. Does not return to the caller.
|
| + void Bailout(const char* reason);
|
|
|
| + // Returns 'true' if code generation for this function is complete, i.e.,
|
| + // no fall-through to regular code is needed.
|
| + bool TryIntrinsify();
|
| +
|
| + virtual void GenerateCallRuntime(intptr_t cid,
|
| + intptr_t token_index,
|
| + intptr_t try_index,
|
| + const RuntimeEntry& entry);
|
| +
|
| // Returns pc-offset (in bytes) of the pc after the call, can be used to emit
|
| // pc-descriptor information.
|
| virtual intptr_t EmitStaticCall(const Function& function,
|
| const Array& arguments_descriptor,
|
| intptr_t argument_count);
|
|
|
| - void GenerateCall(intptr_t token_index,
|
| - intptr_t try_index,
|
| - const ExternalLabel* label,
|
| - PcDescriptors::Kind kind);
|
| - void GenerateInstanceOf(intptr_t cid,
|
| + virtual void GenerateCall(intptr_t token_index,
|
| + intptr_t try_index,
|
| + const ExternalLabel* label,
|
| + PcDescriptors::Kind kind);
|
| + virtual void GenerateInstanceOf(intptr_t cid,
|
| + intptr_t token_index,
|
| + intptr_t try_index,
|
| + const AbstractType& type,
|
| + bool negate_result);
|
| + virtual void GenerateAssertAssignable(intptr_t cid,
|
| + intptr_t token_index,
|
| + intptr_t try_index,
|
| + const AbstractType& dst_type,
|
| + const String& dst_name);
|
| +
|
| + void GenerateInstanceCall(intptr_t cid,
|
| + intptr_t token_index,
|
| + intptr_t try_index,
|
| + const String& function_name,
|
| + intptr_t argument_count,
|
| + const Array& argument_names,
|
| + intptr_t checked_argument_count);
|
| +
|
| + void GenerateStaticCall(intptr_t cid,
|
| intptr_t token_index,
|
| intptr_t try_index,
|
| - const AbstractType& type,
|
| - bool negate_result);
|
| - void GenerateAssertAssignable(intptr_t cid,
|
| - intptr_t token_index,
|
| - intptr_t try_index,
|
| - const AbstractType& dst_type,
|
| - const String& dst_name);
|
| + const Function& function,
|
| + intptr_t argument_count,
|
| + const Array& argument_names);
|
|
|
| + void GenerateNumberTypeCheck(Register kClassIdReg,
|
| + const AbstractType& type,
|
| + Label* is_instance_lbl,
|
| + Label* is_not_instance_lbl);
|
| + void GenerateStringTypeCheck(Register kClassIdReg,
|
| + Label* is_instance_lbl,
|
| + Label* is_not_instance_lbl);
|
| + void GenerateListTypeCheck(Register kClassIdReg,
|
| + Label* is_instance_lbl);
|
| +
|
| + // Returns pc-offset (in bytes) of the pc after the call, can be used to emit
|
| + // pc-descriptor information.
|
| + intptr_t EmitInstanceCall(ExternalLabel* target_label,
|
| + const ICData& ic_data,
|
| + const Array& arguments_descriptor,
|
| + intptr_t argument_count);
|
| +
|
| + void EmitComment(Instruction* instr);
|
| +
|
| + intptr_t StackSize() const;
|
| +
|
| + // Returns assembler label associated with the given block entry.
|
| + Label* GetBlockLabel(BlockEntryInstr* block_entry) const;
|
| +
|
| + // Returns true if the given block immediately follows the current block in
|
| + // the current block order.
|
| + bool IsNextBlock(TargetEntryInstr* block_entry) const;
|
| +
|
| + void AddExceptionHandler(intptr_t try_index, intptr_t pc_offset);
|
| + void AddCurrentDescriptor(PcDescriptors::Kind kind,
|
| + intptr_t cid,
|
| + intptr_t token_index,
|
| + intptr_t try_index);
|
| + Label* AddDeoptStub(intptr_t deopt_id,
|
| + intptr_t deopt_token_index,
|
| + intptr_t try_index,
|
| + DeoptReasonId reason,
|
| + Register reg1,
|
| + Register reg2);
|
| +
|
| + void FinalizeExceptionHandlers(const Code& code);
|
| + void FinalizePcDescriptors(const Code& code);
|
| + void FinalizeStackmaps(const Code& code);
|
| + void FinalizeVarDescriptors(const Code& code);
|
| + void FinalizeComments(const Code& code);
|
| +
|
| + static const int kLocalsOffsetFromFP = (-1 * kWordSize);
|
| +
|
| private:
|
| friend class DeoptimizationStub;
|
|
|
| - virtual void VisitBlocks();
|
| + void GenerateDeferredCode();
|
|
|
| void CopyParameters();
|
| - void EmitInstructionPrologue(Instruction* instr);
|
| + virtual void EmitInstructionPrologue(Instruction* instr);
|
|
|
| virtual void GenerateInlinedGetter(intptr_t offset);
|
| virtual void GenerateInlinedSetter(intptr_t offset);
|
| @@ -134,12 +227,64 @@
|
| Label* is_equal_lbl,
|
| Label* is_not_equal_lbl);
|
|
|
| - void EmitComment(Instruction* instr);
|
| - void BailoutOnInstruction(Instruction* instr);
|
|
|
| + // Map a block number in a forward iteration into the block number in the
|
| + // corresponding reverse iteration. Used to obtain an index into
|
| + // block_order for reverse iterations.
|
| + intptr_t reverse_index(intptr_t index) const {
|
| + return block_order_.length() - index - 1;
|
| + }
|
| +
|
| + class Assembler* assembler_;
|
| + const ParsedFunction& parsed_function_;
|
| + const GrowableArray<BlockEntryInstr*>& block_order_;
|
| +
|
| + BlockEntryInstr* current_block_;
|
| + ExceptionHandlerList* exception_handlers_list_;
|
| + DescriptorList* pc_descriptors_list_;
|
| + StackmapBuilder* stackmap_builder_;
|
| + // Compiler specific per-block state. Indexed by postorder block number
|
| + // for convenience. This is not the block's index in the block order,
|
| + // which is reverse postorder.
|
| + GrowableArray<BlockInfo*> block_info_;
|
| + GrowableArray<DeoptimizationStub*> deopt_stubs_;
|
| + const bool is_optimizing_;
|
| +
|
| DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler);
|
| };
|
|
|
| +
|
| +class DeoptimizationStub : public ZoneAllocated {
|
| + public:
|
| + DeoptimizationStub(intptr_t deopt_id,
|
| + intptr_t deopt_token_index,
|
| + intptr_t try_index,
|
| + DeoptReasonId reason)
|
| + : deopt_id_(deopt_id),
|
| + deopt_token_index_(deopt_token_index),
|
| + try_index_(try_index),
|
| + reason_(reason),
|
| + registers_(2),
|
| + entry_label_() {}
|
| +
|
| + void Push(Register reg) { registers_.Add(reg); }
|
| + Label* entry_label() { return &entry_label_; }
|
| +
|
| + // Implementation is in the architecture-specific file.
|
| + void GenerateCode(FlowGraphCompiler* compiler);
|
| +
|
| + private:
|
| + const intptr_t deopt_id_;
|
| + const intptr_t deopt_token_index_;
|
| + const intptr_t try_index_;
|
| + const DeoptReasonId reason_;
|
| + GrowableArray<Register> registers_;
|
| + Label entry_label_;
|
| +
|
| + DISALLOW_COPY_AND_ASSIGN(DeoptimizationStub);
|
| +};
|
| +
|
| +
|
| } // namespace dart
|
|
|
| #endif // VM_FLOW_GRAPH_COMPILER_IA32_H_
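A note on the intended flow, since the split between the lightweight constructor and InitCompiler() is the main structural change in the new interface: the constructor only stores its arguments, InitCompiler() does the heavy setup (so time spent in the compiler can be measured around that call), and the Finalize*() methods attach the recorded metadata to the generated Code object. The sketch below is illustrative only; the driver function and its parameters are assumptions, and whether TryIntrinsify() is invoked by the driver or from inside CompileGraph() is not specified by this header.

    // Sketch (not part of the patch): a hypothetical driver showing the
    // intended call order for the API declared above.
    static void CompileWithFlowGraphCompiler(
        Assembler* assembler,
        const ParsedFunction& parsed_function,
        const GrowableArray<BlockEntryInstr*>& block_order,
        const Code& code) {
      FlowGraphCompiler compiler(assembler, parsed_function, block_order,
                                 /* is_optimizing = */ false);
      compiler.InitCompiler();          // Heavy setup happens here, not in the
                                        // constructor, so compile time is easy
                                        // to measure around these calls.
      if (!compiler.TryIntrinsify()) {  // True: no fall-through code needed.
        compiler.CompileGraph();
      }
      // Attach the metadata collected during code generation to the Code.
      compiler.FinalizePcDescriptors(code);
      compiler.FinalizeExceptionHandlers(code);
      compiler.FinalizeStackmaps(code);
      compiler.FinalizeVarDescriptors(code);
      compiler.FinalizeComments(code);
    }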
|
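GetBlockLabel() and IsNextBlock() are the hooks a control-flow emitter uses to target per-block labels while skipping jumps that would only fall through to the next block in the block order. A minimal sketch of that pattern, assuming a helper with access to the compiler (the helper itself is hypothetical):

    // Sketch (not part of the patch): emitting a jump to a successor block,
    // eliding it when the successor is the next block in the block order.
    static void EmitJumpToSuccessor(FlowGraphCompiler* compiler,
                                    TargetEntryInstr* successor) {
      if (!compiler->IsNextBlock(successor)) {
        // The target does not immediately follow the current block, so an
        // explicit jump to its label is needed; otherwise fall through.
        compiler->assembler()->jmp(compiler->GetBlockLabel(successor));
      }
    }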
|
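Finally, AddDeoptStub() and the new DeoptimizationStub class implement out-of-line deoptimization: a code generator requests a deopt entry label up front, naming the registers that must still be live, branches to it on the slow path, and GenerateDeferredCode() later emits the stub bodies after the regular code. The sketch below assumes a helper, its arguments, and a kDeoptCheckSmi reason value that are not part of this patch; only the FlowGraphCompiler and assembler calls are taken from the declarations above.

    // Sketch (not part of the patch): how an instruction's code generator
    // might use a deoptimization stub.
    static void EmitSmiCheckWithDeopt(FlowGraphCompiler* compiler,
                                      intptr_t deopt_id,
                                      intptr_t token_index,
                                      intptr_t try_index) {
      // Request an out-of-line deopt entry; EAX and ECX are the registers
      // that must still be live when the stub runs.
      Label* deopt = compiler->AddDeoptStub(deopt_id, token_index, try_index,
                                            kDeoptCheckSmi, EAX, ECX);
      // Fast path: branch to the deferred stub if the value is not a Smi.
      compiler->assembler()->testl(EAX, Immediate(kSmiTagMask));
      compiler->assembler()->j(NOT_ZERO, deopt);
      // The stub body is emitted after the regular code, when the compiler
      // runs GenerateDeferredCode().
    }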
|