src/share/vm/opto/compile.hpp

rev 1839 : 6961690: load oops from constant table on SPARC
Summary: oops should be loaded from the constant table of an nmethod instead of materializing them with a long code sequence.
Reviewed-by:

--- old/src/share/vm/opto/compile.hpp

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 class Block;
  26 class Bundle;
  27 class C2Compiler;
  28 class CallGenerator;
  29 class ConnectionGraph;
  30 class InlineTree;
  31 class Int_Array;
  32 class Matcher;
  33 class MachNode;
  34 class MachSafePointNode;
  35 class Node;
  36 class Node_Array;
  37 class Node_Notes;
  38 class OptoReg;
  39 class PhaseCFG;
  40 class PhaseGVN;
  41 class PhaseIterGVN;
  42 class PhaseRegAlloc;
  43 class PhaseCCP;
  44 class PhaseCCP_DCE;
  45 class RootNode;
  46 class relocInfo;
  47 class Scope;
  48 class StartNode;
  49 class SafePointNode;
  50 class JVMState;
  51 class TypeData;
  52 class TypePtr;
  53 class TypeFunc;


 104 
 105     void set_rewritable(bool z) { _is_rewritable = z; }
 106     void set_field(ciField* f) {
 107       assert(!_field,"");
 108       _field = f;
 109       if (f->is_final())  _is_rewritable = false;
 110     }
 111 
 112     void print_on(outputStream* st) PRODUCT_RETURN;
 113   };
 114 
 115   enum {
 116     logAliasCacheSize = 6,
 117     AliasCacheSize = (1<<logAliasCacheSize)
 118   };
 119   struct AliasCacheEntry { const TypePtr* _adr_type; int _index; };  // simple duple type
 120   enum {
 121     trapHistLength = methodDataOopDesc::_trap_hist_limit
 122   };
 123 
 124  private:
 125   // Fixed parameters to this compilation.
 126   const int             _compile_id;
 127   const bool            _save_argument_registers; // save/restore arg regs for trampolines
 128   const bool            _subsume_loads;         // Load can be matched as part of a larger op.
 129   const bool            _do_escape_analysis;    // Do escape analysis.
 130   ciMethod*             _method;                // The method being compiled.
 131   int                   _entry_bci;             // entry bci for osr methods.
 132   const TypeFunc*       _tf;                    // My kind of signature
 133   InlineTree*           _ilt;                   // Ditto (temporary).
 134   address               _stub_function;         // VM entry for stub being compiled, or NULL
 135   const char*           _stub_name;             // Name of stub or adapter being compiled, or NULL
 136   address               _stub_entry_point;      // Compile code entry for generated stub, or NULL
 137 
 138   // Control of this compilation.
 139   int                   _num_loop_opts;         // Number of iterations for doing loop optimizations
 140   int                   _max_inline_size;       // Max inline size for this compilation
 141   int                   _freq_inline_size;      // Max hot method inline size for this compilation
 142   int                   _fixed_slots;           // count of frame slots not allocated by the register
 143                                                 // allocator i.e. locks, original deopt pc, etc.


 177   GrowableArray<Node*>* _macro_nodes;           // List of nodes which need to be expanded before matching.
 178   GrowableArray<Node*>* _predicate_opaqs;       // List of Opaque1 nodes for the loop predicates.
 179   ConnectionGraph*      _congraph;
 180 #ifndef PRODUCT
 181   IdealGraphPrinter*    _printer;
 182 #endif
 183 
 184   // Node management
 185   uint                  _unique;                // Counter for unique Node indices
 186   debug_only(static int _debug_idx;)            // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
 187   Arena                 _node_arena;            // Arena for new-space Nodes
 188   Arena                 _old_arena;             // Arena for old-space Nodes, lifetime during xform
 189   RootNode*             _root;                  // Unique root of compilation, or NULL after bail-out.
 190   Node*                 _top;                   // Unique top node.  (Reset by various phases.)
 191 
 192   Node*                 _immutable_memory;      // Initial memory state
 193 
 194   Node*                 _recent_alloc_obj;
 195   Node*                 _recent_alloc_ctl;
 196 
 197   // Blocked array of debugging and profiling information,
 198   // tracked per node.
 199   enum { _log2_node_notes_block_size = 8,
 200          _node_notes_block_size = (1<<_log2_node_notes_block_size)
 201   };
 202   GrowableArray<Node_Notes*>* _node_note_array;
 203   Node_Notes*           _default_node_notes;  // default notes for new nodes
 204 
 205   // After parsing and every bulk phase we hang onto the Root instruction.
 206   // The RootNode instruction is where the whole program begins.  It produces
 207   // the initial Control and BOTTOM for everybody else.
 208 
 209   // Type management
 210   Arena                 _Compile_types;         // Arena for all types
 211   Arena*                _type_arena;            // Alias for _Compile_types except in Initialize_shared()
 212   Dict*                 _type_dict;             // Intern table
 213   void*                 _type_hwm;              // Last allocation (see Type::operator new/delete)
 214   size_t                _type_last_size;        // Last allocation size (see Type::operator new/delete)
 215   ciMethod*             _last_tf_m;             // Cache for
 216   const TypeFunc*       _last_tf;               //  TypeFunc::make


 237   PhaseRegAlloc*        _regalloc;              // Results of register allocation.
 238   int                   _frame_slots;           // Size of total frame in stack slots
 239   CodeOffsets           _code_offsets;          // Offsets into the code for various interesting entries
 240   RegMask               _FIRST_STACK_mask;      // All stack slots usable for spills (depends on frame layout)
 241   Arena*                _indexSet_arena;        // control IndexSet allocation within PhaseChaitin
 242   void*                 _indexSet_free_block_list; // free list of IndexSet bit blocks
 243 
 244   uint                  _node_bundling_limit;
 245   Bundle*               _node_bundling_base;    // Information for instruction bundling
 246 
 247   // Instruction bits passed off to the VM
 248   int                   _method_size;           // Size of nmethod code segment in bytes
 249   CodeBuffer            _code_buffer;           // Where the code is assembled
 250   int                   _first_block_size;      // Size of unvalidated entry point code / OSR poison code
 251   ExceptionHandlerTable _handler_table;         // Table of native-code exception handlers
 252   ImplicitExceptionTable _inc_table;            // Table of implicit null checks in native code
 253   OopMapSet*            _oop_map_set;           // Table of oop maps (one for each safepoint location)
 254   static int            _CompiledZap_count;     // counter compared against CompileZap[First/Last]
 255   BufferBlob*           _scratch_buffer_blob;   // For temporary code buffers.
 256   relocInfo*            _scratch_locs_memory;   // For temporary code buffers.


 257 
 258  public:
 259   // Accessors
 260 
 261   // The Compile instance currently active in this (compiler) thread.
 262   static Compile* current() {
 263     return (Compile*) ciEnv::current()->compiler_data();
 264   }
 265 
 266   // ID for this compilation.  Useful for setting breakpoints in the debugger.
 267   int               compile_id() const          { return _compile_id; }
 268 
 269   // Does this compilation allow instructions to subsume loads?  User
 270   // instructions that subsume a load may result in an unschedulable
 271   // instruction sequence.
 272   bool              subsume_loads() const       { return _subsume_loads; }
 273   // Do escape analysis.
 274   bool              do_escape_analysis() const  { return _do_escape_analysis; }
 275   bool              save_argument_registers() const { return _save_argument_registers; }
 276 


 419   uint              unique() const              { return _unique; }
 420   uint         next_unique()                    { return _unique++; }
 421   void          set_unique(uint i)              { _unique = i; }
 422   static int        debug_idx()                 { return debug_only(_debug_idx)+0; }
 423   static void   set_debug_idx(int i)            { debug_only(_debug_idx = i); }
 424   Arena*            node_arena()                { return &_node_arena; }
 425   Arena*            old_arena()                 { return &_old_arena; }
 426   RootNode*         root() const                { return _root; }
 427   void          set_root(RootNode* r)           { _root = r; }
 428   StartNode*        start() const;              // (Derived from root.)
 429   void         init_start(StartNode* s);
 430   Node*             immutable_memory();
 431 
 432   Node*             recent_alloc_ctl() const    { return _recent_alloc_ctl; }
 433   Node*             recent_alloc_obj() const    { return _recent_alloc_obj; }
 434   void          set_recent_alloc(Node* ctl, Node* obj) {
 435                                                   _recent_alloc_ctl = ctl;
 436                                                   _recent_alloc_obj = obj;
 437                                                 }
 438 
 439   // Handy undefined Node
 440   Node*             top() const                 { return _top; }
 441 
 442   // these are used by guys who need to know about creation and transformation of top:
 443   Node*             cached_top_node()           { return _top; }
 444   void          set_cached_top_node(Node* tn);
 445 
 446   GrowableArray<Node_Notes*>* node_note_array() const { return _node_note_array; }
 447   void set_node_note_array(GrowableArray<Node_Notes*>* arr) { _node_note_array = arr; }
 448   Node_Notes* default_node_notes() const        { return _default_node_notes; }
 449   void    set_default_node_notes(Node_Notes* n) { _default_node_notes = n; }
 450 
 451   Node_Notes*       node_notes_at(int idx) {
 452     return locate_node_notes(_node_note_array, idx, false);
 453   }
 454   inline bool   set_node_notes_at(int idx, Node_Notes* value);
 455 
 456   // Copy notes from source to dest, if they exist.
 457   // Overwrite dest only if source provides something.
 458   // Return true if information was moved.


 570   void set_24_bit_selection_and_mode(bool selection, bool mode) {
 571     _select_24_bit_instr = selection;
 572     _in_24_bit_fp_mode   = mode;
 573   }
 574 
 575   void  set_java_calls(int z) { _java_calls  = z; }
 576   void set_inner_loops(int z) { _inner_loops = z; }
 577 
 578   // Instruction bits passed off to the VM
 579   int               code_size()                 { return _method_size; }
 580   CodeBuffer*       code_buffer()               { return &_code_buffer; }
 581   int               first_block_size()          { return _first_block_size; }
 582   void              set_frame_complete(int off) { _code_offsets.set_value(CodeOffsets::Frame_Complete, off); }
 583   ExceptionHandlerTable*  handler_table()       { return &_handler_table; }
 584   ImplicitExceptionTable* inc_table()           { return &_inc_table; }
 585   OopMapSet*        oop_map_set()               { return _oop_map_set; }
 586   DebugInformationRecorder* debug_info()        { return env()->debug_info(); }
 587   Dependencies*     dependencies()              { return env()->dependencies(); }
 588   static int        CompiledZap_count()         { return _CompiledZap_count; }
 589   BufferBlob*       scratch_buffer_blob()       { return _scratch_buffer_blob; }
 590   void         init_scratch_buffer_blob();

 591   void          set_scratch_buffer_blob(BufferBlob* b) { _scratch_buffer_blob = b; }
 592   relocInfo*        scratch_locs_memory()       { return _scratch_locs_memory; }
 593   void          set_scratch_locs_memory(relocInfo* b)  { _scratch_locs_memory = b; }
 594 
 595   // emit to scratch blob, report resulting size
 596   uint              scratch_emit_size(const Node* n);


 597 
 598   enum ScratchBufferBlob {
 599     MAX_inst_size       = 1024,
 600     MAX_locs_size       = 128, // number of relocInfo elements
 601     MAX_const_size      = 128,
 602     MAX_stubs_size      = 128
 603   };
 604 
 605   // Major entry point.  Given a Scope, compile the associated method.
 606   // For normal compilations, entry_bci is InvocationEntryBci.  For on stack
 607   // replacement, entry_bci indicates the bytecode for which to compile a
 608   // continuation.
 609   Compile(ciEnv* ci_env, C2Compiler* compiler, ciMethod* target,
 610           int entry_bci, bool subsume_loads, bool do_escape_analysis);
 611 
 612   // Second major entry point.  From the TypeFunc signature, generate code
 613   // to pass arguments from the Java calling convention to the C calling
 614   // convention.
 615   Compile(ciEnv* ci_env, const TypeFunc *(*gen)(),
 616           address stub_function, const char *stub_name,


 657   // "array".  May also consult and add to "objs", which describes the
 658   // scalar-replaced objects.
 659   void FillLocArray( int idx, MachSafePointNode* sfpt,
 660                      Node *local, GrowableArray<ScopeValue*> *array,
 661                      GrowableArray<ScopeValue*> *objs );
 662 
 663   // If "objs" contains an ObjectValue whose id is "id", returns it, else NULL.
 664   static ObjectValue* sv_for_node_id(GrowableArray<ScopeValue*> *objs, int id);
 665   // Requires that "objs" does not contain an ObjectValue whose id matches
 666   // that of "sv".  Appends "sv".
 667   static void set_sv_for_object_node(GrowableArray<ScopeValue*> *objs,
 668                                      ObjectValue* sv );
 669 
 670   // Process an OopMap Element while emitting nodes
 671   void Process_OopMap_Node(MachNode *mach, int code_offset);
 672 
 673   // Write out basic block data to code buffer
 674   void Fill_buffer();
 675 
 676   // Determine which variable sized branches can be shortened
 677   void Shorten_branches(Label *labels, int& code_size, int& reloc_size, int& stub_size, int& const_size);
 678 
 679   // Compute the size of first NumberOfLoopInstrToAlign instructions
 680   // at the head of a loop.
 681   void compute_loop_first_inst_sizes();
 682 
 683   // Compute the information for the exception tables
 684   void FillExceptionTables(uint cnt, uint *call_returns, uint *inct_starts, Label *blk_labels);
 685 
 686   // Stack slots that may be unused by the calling convention but must
 687   // otherwise be preserved.  On Intel this includes the return address.
 688   // On PowerPC it includes the 4 words holding the old TOC & LR glue.
 689   uint in_preserve_stack_slots();
 690 
 691   // "Top of Stack" slots that may be unused by the calling convention but must
 692   // otherwise be preserved.
 693   // On Intel these are not necessary and the value can be zero.
 694   // On Sparc this describes the words reserved for storing a register window
 695   // when an interrupt occurs.
 696   static uint out_preserve_stack_slots();
 697 

+++ new/src/share/vm/opto/compile.hpp

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 class Block;
  26 class Bundle;
  27 class C2Compiler;
  28 class CallGenerator;
  29 class ConnectionGraph;
  30 class InlineTree;
  31 class Int_Array;
  32 class Matcher;
  33 class MachConstantNode;
  34 class MachConstantBaseNode;
  35 class MachNode;
  36 class MachOper;
  37 class MachSafePointNode;
  38 class Node;
  39 class Node_Array;
  40 class Node_Notes;
  41 class OptoReg;
  42 class PhaseCFG;
  43 class PhaseGVN;
  44 class PhaseIterGVN;
  45 class PhaseRegAlloc;
  46 class PhaseCCP;
  47 class PhaseCCP_DCE;
  48 class RootNode;
  49 class relocInfo;
  50 class Scope;
  51 class StartNode;
  52 class SafePointNode;
  53 class JVMState;
  54 class TypeData;
  55 class TypePtr;
  56 class TypeFunc;


 107 
 108     void set_rewritable(bool z) { _is_rewritable = z; }
 109     void set_field(ciField* f) {
 110       assert(!_field,"");
 111       _field = f;
 112       if (f->is_final())  _is_rewritable = false;
 113     }
 114 
 115     void print_on(outputStream* st) PRODUCT_RETURN;
 116   };
 117 
 118   enum {
 119     logAliasCacheSize = 6,
 120     AliasCacheSize = (1<<logAliasCacheSize)
 121   };
 122   struct AliasCacheEntry { const TypePtr* _adr_type; int _index; };  // simple duple type
 123   enum {
 124     trapHistLength = methodDataOopDesc::_trap_hist_limit
 125   };
 126 
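An aside on the (unchanged) alias cache above: each AliasCacheEntry is one slot of a small direct-mapped cache that maps an address type to its alias index. As a minimal sketch of how such a cache is typically probed, assuming an `_alias_cache[AliasCacheSize]` member (the real probe lives in compile.cpp and may hash differently):

    // Illustrative direct-mapped probe -- not the actual implementation.
    AliasCacheEntry* probe_alias_cache_sketch(const TypePtr* adr_type) {
      uintptr_t key = (uintptr_t) adr_type;
      key ^= key >> logAliasCacheSize;                   // mix low pointer bits
      return &_alias_cache[key & (AliasCacheSize - 1)];  // AliasCacheSize is a power of two
    }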
 127   // Constant entry of the constant table.
 128   class Constant {
 129   private:
 130     BasicType _type;
 131     jvalue    _value;
 132     int       _offset;         // offset of this constant (in bytes) relative to the constant table base.
 133     bool      _can_be_reused;  // true (default) if the value can be shared with other users.
 134 
 135   public:
 136     Constant() : _type(T_ILLEGAL), _offset(-1), _can_be_reused(true) { _value.l = 0; }
 137     Constant(BasicType type, jvalue value, bool can_be_reused = true) :
 138       _type(type),
 139       _value(value),
 140       _offset(-1),
 141       _can_be_reused(can_be_reused)
 142     {}
 143 
 144     bool operator==(const Constant& other);
 145 
 146     BasicType type()      const    { return _type; }
 147 
 148     jlong   get_jlong()   const    { return _value.j; }
 149     jfloat  get_jfloat()  const    { return _value.f; }
 150     jdouble get_jdouble() const    { return _value.d; }
 151     jobject get_jobject() const    { return _value.l; }
 152 
 153     int         offset()  const    { return _offset; }
 154     void    set_offset(int offset) {        _offset = offset; }
 155 
 156     bool    can_be_reused() const  { return _can_be_reused; }
 157   };
 158 
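To make the new entry type concrete, here is a purely illustrative sketch of building a Constant by hand from a jvalue; in practice the ConstantTable::add() overloads below do this:

    jvalue v;
    v.d = 3.14159;                           // store through the matching union member
    Compile::Constant entry(T_DOUBLE, v);    // _offset stays -1 until the table is laid out
    assert(entry.can_be_reused(), "entries are shareable by default");
    assert(entry.get_jdouble() == 3.14159, "read back through the typed accessor");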
 159   // Constant table.
 160   class ConstantTable {
 161   private:
 162     GrowableArray<Constant> _constants;          // Constants of this table.
 163     int                     _size;               // Size in bytes the emitted constant table takes (including padding).
 164     int                     _table_base_offset;  // Offset of the table base that gets added to the constant offsets.
 165 
 166   public:
 167     ConstantTable() :
 168       _size(-1),
 169       _table_base_offset(-1)  // We can use -1 here since the constant table is always bigger than 2 bytes (-(size / 2), see MachConstantBaseNode::emit).
 170     {}
 171 
 172     int  size() const { assert(_size != -1, "size not yet calculated"); return _size; }
 173 
 174     void set_table_base_offset(int x)  { assert(_table_base_offset == -1, "set only once");                        _table_base_offset = x; }
 175     int      table_base_offset() const { assert(_table_base_offset != -1, "table base offset not yet set"); return _table_base_offset; }
 176 
 177     void emit(CodeBuffer& cb);
 178 
 179     // Returns the offset of the last entry (the top) of the constant table.
 180     int  top_offset() const { assert(_constants.top().offset() != -1, "constant not yet bound"); return _constants.top().offset(); }
 181 
 182     void calculate_offsets_and_size();
 183     int  find_offset(Constant& con) const;
 184 
 185     void     add(Constant& con);
 186     Constant add(BasicType type, jvalue value);
 187     Constant add(MachOper* oper);
 188     Constant add(jfloat f) {
 189       jvalue value; value.f = f;
 190       return add(T_FLOAT, value);
 191     }
 192     Constant add(jdouble d) {
 193       jvalue value; value.d = d;
 194       return add(T_DOUBLE, value);
 195     }
 196 
 197     // Jump table
 198     Constant allocate_jump_table(MachConstantNode* n);
 199     void         fill_jump_table(CodeBuffer& cb, MachConstantNode* n, GrowableArray<Label*> labels) const;
 200   };
 201 
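A hedged usage sketch for the table as a whole: a backend could register a floating-point constant and, once calculate_offsets_and_size() has bound the offsets, ask for the displacement of that entry relative to the table base. `C` is the current Compile by C2 convention; the actual call sites are elsewhere in this change, not in this header:

    Compile* C = Compile::current();
    Compile::ConstantTable& table = C->constant_table();

    Compile::Constant con = table.add(2.5);    // records (or reuses) a T_DOUBLE entry

    // ... later, after calculate_offsets_and_size() and set_table_base_offset() ...
    int disp = table.find_offset(con);         // byte offset of the entry within the table
    int base = table.table_base_offset();      // offset of the table base itself

emit(CodeBuffer&) then writes the entries out, and allocate_jump_table()/fill_jump_table() reuse the same mechanism for switch jump tables.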
 202  private:
 203   // Fixed parameters to this compilation.
 204   const int             _compile_id;
 205   const bool            _save_argument_registers; // save/restore arg regs for trampolines
 206   const bool            _subsume_loads;         // Load can be matched as part of a larger op.
 207   const bool            _do_escape_analysis;    // Do escape analysis.
 208   ciMethod*             _method;                // The method being compiled.
 209   int                   _entry_bci;             // entry bci for osr methods.
 210   const TypeFunc*       _tf;                    // My kind of signature
 211   InlineTree*           _ilt;                   // Ditto (temporary).
 212   address               _stub_function;         // VM entry for stub being compiled, or NULL
 213   const char*           _stub_name;             // Name of stub or adapter being compiled, or NULL
 214   address               _stub_entry_point;      // Compile code entry for generated stub, or NULL
 215 
 216   // Control of this compilation.
 217   int                   _num_loop_opts;         // Number of iterations for doing loop optimizations
 218   int                   _max_inline_size;       // Max inline size for this compilation
 219   int                   _freq_inline_size;      // Max hot method inline size for this compilation
 220   int                   _fixed_slots;           // count of frame slots not allocated by the register
 221                                                 // allocator i.e. locks, original deopt pc, etc.


 255   GrowableArray<Node*>* _macro_nodes;           // List of nodes which need to be expanded before matching.
 256   GrowableArray<Node*>* _predicate_opaqs;       // List of Opaque1 nodes for the loop predicates.
 257   ConnectionGraph*      _congraph;
 258 #ifndef PRODUCT
 259   IdealGraphPrinter*    _printer;
 260 #endif
 261 
 262   // Node management
 263   uint                  _unique;                // Counter for unique Node indices
 264   debug_only(static int _debug_idx;)            // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
 265   Arena                 _node_arena;            // Arena for new-space Nodes
 266   Arena                 _old_arena;             // Arena for old-space Nodes, lifetime during xform
 267   RootNode*             _root;                  // Unique root of compilation, or NULL after bail-out.
 268   Node*                 _top;                   // Unique top node.  (Reset by various phases.)
 269 
 270   Node*                 _immutable_memory;      // Initial memory state
 271 
 272   Node*                 _recent_alloc_obj;
 273   Node*                 _recent_alloc_ctl;
 274 
 275   // Constant table
 276   ConstantTable         _constant_table;        // The constant table for this compile.
 277   MachConstantBaseNode* _mach_constant_base_node;  // Constant table base node singleton.
 278 
 279 
 280   // Blocked array of debugging and profiling information,
 281   // tracked per node.
 282   enum { _log2_node_notes_block_size = 8,
 283          _node_notes_block_size = (1<<_log2_node_notes_block_size)
 284   };
 285   GrowableArray<Node_Notes*>* _node_note_array;
 286   Node_Notes*           _default_node_notes;  // default notes for new nodes
 287 
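The "blocked array" of node notes above is untouched by this patch; the enum fixes a block size of 2^8 = 256 entries, presumably so the outer GrowableArray can grow without relocating existing notes. A lookup would decompose a node index roughly as below; this is only the arithmetic, the real code is locate_node_notes(), and the contiguous-block assumption is mine:

    int block_idx  = idx >> _log2_node_notes_block_size;     // which 256-entry block
    int idx_in_blk = idx & (_node_notes_block_size - 1);     // slot within that block
    Node_Notes* block = _node_note_array->at(block_idx);     // assumed contiguous Node_Notes block
    Node_Notes* notes = block + idx_in_blk;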
 288   // After parsing and every bulk phase we hang onto the Root instruction.
 289   // The RootNode instruction is where the whole program begins.  It produces
 290   // the initial Control and BOTTOM for everybody else.
 291 
 292   // Type management
 293   Arena                 _Compile_types;         // Arena for all types
 294   Arena*                _type_arena;            // Alias for _Compile_types except in Initialize_shared()
 295   Dict*                 _type_dict;             // Intern table
 296   void*                 _type_hwm;              // Last allocation (see Type::operator new/delete)
 297   size_t                _type_last_size;        // Last allocation size (see Type::operator new/delete)
 298   ciMethod*             _last_tf_m;             // Cache for
 299   const TypeFunc*       _last_tf;               //  TypeFunc::make


 320   PhaseRegAlloc*        _regalloc;              // Results of register allocation.
 321   int                   _frame_slots;           // Size of total frame in stack slots
 322   CodeOffsets           _code_offsets;          // Offsets into the code for various interesting entries
 323   RegMask               _FIRST_STACK_mask;      // All stack slots usable for spills (depends on frame layout)
 324   Arena*                _indexSet_arena;        // control IndexSet allocation within PhaseChaitin
 325   void*                 _indexSet_free_block_list; // free list of IndexSet bit blocks
 326 
 327   uint                  _node_bundling_limit;
 328   Bundle*               _node_bundling_base;    // Information for instruction bundling
 329 
 330   // Instruction bits passed off to the VM
 331   int                   _method_size;           // Size of nmethod code segment in bytes
 332   CodeBuffer            _code_buffer;           // Where the code is assembled
 333   int                   _first_block_size;      // Size of unvalidated entry point code / OSR poison code
 334   ExceptionHandlerTable _handler_table;         // Table of native-code exception handlers
 335   ImplicitExceptionTable _inc_table;            // Table of implicit null checks in native code
 336   OopMapSet*            _oop_map_set;           // Table of oop maps (one for each safepoint location)
 337   static int            _CompiledZap_count;     // counter compared against CompileZap[First/Last]
 338   BufferBlob*           _scratch_buffer_blob;   // For temporary code buffers.
 339   relocInfo*            _scratch_locs_memory;   // For temporary code buffers.
 340   int                   _scratch_const_size;    // For temporary code buffers.
 341   bool                  _in_scratch_emit_size;  // true when in scratch_emit_size.
 342 
 343  public:
 344   // Accessors
 345 
 346   // The Compile instance currently active in this (compiler) thread.
 347   static Compile* current() {
 348     return (Compile*) ciEnv::current()->compiler_data();
 349   }
 350 
 351   // ID for this compilation.  Useful for setting breakpoints in the debugger.
 352   int               compile_id() const          { return _compile_id; }
 353 
 354   // Does this compilation allow instructions to subsume loads?  User
 355   // instructions that subsume a load may result in an unschedulable
 356   // instruction sequence.
 357   bool              subsume_loads() const       { return _subsume_loads; }
 358   // Do escape analysis.
 359   bool              do_escape_analysis() const  { return _do_escape_analysis; }
 360   bool              save_argument_registers() const { return _save_argument_registers; }
 361 


 504   uint              unique() const              { return _unique; }
 505   uint         next_unique()                    { return _unique++; }
 506   void          set_unique(uint i)              { _unique = i; }
 507   static int        debug_idx()                 { return debug_only(_debug_idx)+0; }
 508   static void   set_debug_idx(int i)            { debug_only(_debug_idx = i); }
 509   Arena*            node_arena()                { return &_node_arena; }
 510   Arena*            old_arena()                 { return &_old_arena; }
 511   RootNode*         root() const                { return _root; }
 512   void          set_root(RootNode* r)           { _root = r; }
 513   StartNode*        start() const;              // (Derived from root.)
 514   void         init_start(StartNode* s);
 515   Node*             immutable_memory();
 516 
 517   Node*             recent_alloc_ctl() const    { return _recent_alloc_ctl; }
 518   Node*             recent_alloc_obj() const    { return _recent_alloc_obj; }
 519   void          set_recent_alloc(Node* ctl, Node* obj) {
 520                                                   _recent_alloc_ctl = ctl;
 521                                                   _recent_alloc_obj = obj;
 522                                                 }
 523 
 524   // Constant table
 525   ConstantTable&   constant_table() { return _constant_table; }
 526 
 527   MachConstantBaseNode*     mach_constant_base_node();
 528   bool                  has_mach_constant_base_node() const { return _mach_constant_base_node != NULL; }
 529 
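The accessor pair above suggests the MachConstantBaseNode singleton is created lazily on first request, with has_mach_constant_base_node() available to check without forcing creation. A hypothetical caller (uses_constant_table_node is a placeholder; the real wiring happens elsewhere in this change):

    Compile* C = Compile::current();
    Node* base = C->mach_constant_base_node();        // presumably creates the singleton on first use
    assert(C->has_mach_constant_base_node(), "exists after first request");
    uses_constant_table_node->add_req(base);          // give a constant-using node the shared base input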
 530   // Handy undefined Node
 531   Node*             top() const                 { return _top; }
 532 
 533   // these are used by guys who need to know about creation and transformation of top:
 534   Node*             cached_top_node()           { return _top; }
 535   void          set_cached_top_node(Node* tn);
 536 
 537   GrowableArray<Node_Notes*>* node_note_array() const { return _node_note_array; }
 538   void set_node_note_array(GrowableArray<Node_Notes*>* arr) { _node_note_array = arr; }
 539   Node_Notes* default_node_notes() const        { return _default_node_notes; }
 540   void    set_default_node_notes(Node_Notes* n) { _default_node_notes = n; }
 541 
 542   Node_Notes*       node_notes_at(int idx) {
 543     return locate_node_notes(_node_note_array, idx, false);
 544   }
 545   inline bool   set_node_notes_at(int idx, Node_Notes* value);
 546 
 547   // Copy notes from source to dest, if they exist.
 548   // Overwrite dest only if source provides something.
 549   // Return true if information was moved.


 661   void set_24_bit_selection_and_mode(bool selection, bool mode) {
 662     _select_24_bit_instr = selection;
 663     _in_24_bit_fp_mode   = mode;
 664   }
 665 
 666   void  set_java_calls(int z) { _java_calls  = z; }
 667   void set_inner_loops(int z) { _inner_loops = z; }
 668 
 669   // Instruction bits passed off to the VM
 670   int               code_size()                 { return _method_size; }
 671   CodeBuffer*       code_buffer()               { return &_code_buffer; }
 672   int               first_block_size()          { return _first_block_size; }
 673   void              set_frame_complete(int off) { _code_offsets.set_value(CodeOffsets::Frame_Complete, off); }
 674   ExceptionHandlerTable*  handler_table()       { return &_handler_table; }
 675   ImplicitExceptionTable* inc_table()           { return &_inc_table; }
 676   OopMapSet*        oop_map_set()               { return _oop_map_set; }
 677   DebugInformationRecorder* debug_info()        { return env()->debug_info(); }
 678   Dependencies*     dependencies()              { return env()->dependencies(); }
 679   static int        CompiledZap_count()         { return _CompiledZap_count; }
 680   BufferBlob*       scratch_buffer_blob()       { return _scratch_buffer_blob; }
 681   void         init_scratch_buffer_blob(int const_size);
 682   void        clear_scratch_buffer_blob();
 683   void          set_scratch_buffer_blob(BufferBlob* b) { _scratch_buffer_blob = b; }
 684   relocInfo*        scratch_locs_memory()       { return _scratch_locs_memory; }
 685   void          set_scratch_locs_memory(relocInfo* b)  { _scratch_locs_memory = b; }
 686 
 687   // emit to scratch blob, report resulting size
 688   uint              scratch_emit_size(const Node* n);
 689   void       set_in_scratch_emit_size(bool x)   {        _in_scratch_emit_size = x; }
 690   bool           in_scratch_emit_size() const   { return _in_scratch_emit_size;     }
 691 
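scratch_emit_size() emits a node into the temporary scratch blob only to measure its size, and the new _in_scratch_emit_size flag is, per its comment, true for the duration of that call. A hedged sketch of a caller and of the kind of guard the flag enables inside emission code (mach_node is a placeholder):

    // Caller: size an instruction without touching the real CodeBuffer.
    Compile* C = Compile::current();
    uint sz = C->scratch_emit_size(mach_node);

    // Inside emission code: skip one-time side effects while only sizing.
    if (Compile::current()->in_scratch_emit_size()) {
      // measuring only -- e.g. avoid recording data that must be recorded exactly once
    }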
 692   enum ScratchBufferBlob {
 693     MAX_inst_size       = 1024,
 694     MAX_locs_size       = 128, // number of relocInfo elements
 695     MAX_const_size      = 128,
 696     MAX_stubs_size      = 128
 697   };
 698 
 699   // Major entry point.  Given a Scope, compile the associated method.
 700   // For normal compilations, entry_bci is InvocationEntryBci.  For on stack
 701   // replacement, entry_bci indicates the bytecode for which to compile a
 702   // continuation.
 703   Compile(ciEnv* ci_env, C2Compiler* compiler, ciMethod* target,
 704           int entry_bci, bool subsume_loads, bool do_escape_analysis);
 705 
 706   // Second major entry point.  From the TypeFunc signature, generate code
 707   // to pass arguments from the Java calling convention to the C calling
 708   // convention.
 709   Compile(ciEnv* ci_env, const TypeFunc *(*gen)(),
 710           address stub_function, const char *stub_name,


 751   // "array".  May also consult and add to "objs", which describes the
 752   // scalar-replaced objects.
 753   void FillLocArray( int idx, MachSafePointNode* sfpt,
 754                      Node *local, GrowableArray<ScopeValue*> *array,
 755                      GrowableArray<ScopeValue*> *objs );
 756 
 757   // If "objs" contains an ObjectValue whose id is "id", returns it, else NULL.
 758   static ObjectValue* sv_for_node_id(GrowableArray<ScopeValue*> *objs, int id);
 759   // Requires that "objs" does not contain an ObjectValue whose id matches
 760   // that of "sv".  Appends "sv".
 761   static void set_sv_for_object_node(GrowableArray<ScopeValue*> *objs,
 762                                      ObjectValue* sv );
 763 
 764   // Process an OopMap Element while emitting nodes
 765   void Process_OopMap_Node(MachNode *mach, int code_offset);
 766 
 767   // Write out basic block data to code buffer
 768   void Fill_buffer();
 769 
 770   // Determine which variable sized branches can be shortened
 771   void Shorten_branches(Label *labels, int& code_size, int& reloc_size, int& stub_size);
 772 
 773   // Compute the size of first NumberOfLoopInstrToAlign instructions
 774   // at the head of a loop.
 775   void compute_loop_first_inst_sizes();
 776 
 777   // Compute the information for the exception tables
 778   void FillExceptionTables(uint cnt, uint *call_returns, uint *inct_starts, Label *blk_labels);
 779 
 780   // Stack slots that may be unused by the calling convention but must
 781   // otherwise be preserved.  On Intel this includes the return address.
 782   // On PowerPC it includes the 4 words holding the old TOC & LR glue.
 783   uint in_preserve_stack_slots();
 784 
 785   // "Top of Stack" slots that may be unused by the calling convention but must
 786   // otherwise be preserved.
 787   // On Intel these are not necessary and the value can be zero.
 788   // On Sparc this describes the words reserved for storing a register window
 789   // when an interrupt occurs.
 790   static uint out_preserve_stack_slots();
 791 

