src/share/vm/opto/compile.hpp


rev 1838 : 6961690: load oops from constant table on SPARC
Summary: oops should be loaded from the constant table of an nmethod instead of materializing them with a long code sequence.
Reviewed-by:
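
For illustration only (this sketch is not part of the webrev): on SPARC, materializing a 64-bit oop inline costs a multi-instruction sequence (roughly a sethi/or pair for each half plus a shift and a final or), while an oop kept in a per-nmethod constant table is reached with a single load off a cached base register, e.g. "ldx [Rconst_base + offset], Rdst". The minimal C++ model below shows the table-with-offsets idea; every name in it is invented for the example.

    #include <cstddef>
    #include <vector>

    typedef void* oop_t;                          // stand-in for a HotSpot oop

    // Toy model of a per-nmethod constant table: oops are appended once and
    // later fetched by byte offset from the table base, the moral equivalent
    // of a single load in the generated code.
    class ConstantTableSketch {
      std::vector<oop_t> _entries;
     public:
      size_t add(oop_t o) {                       // returns offset from base
        _entries.push_back(o);
        return (_entries.size() - 1) * sizeof(oop_t);
      }
      oop_t load(size_t byte_offset) const {      // one "load" at each use site
        return _entries[byte_offset / sizeof(oop_t)];
      }
    };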


  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 class Block;
  26 class Bundle;
  27 class C2Compiler;
  28 class CallGenerator;
  29 class ConnectionGraph;
  30 class InlineTree;
  31 class Int_Array;
  32 class Matcher;

  33 class MachNode;
  34 class MachSafePointNode;
  35 class Node;
  36 class Node_Array;
  37 class Node_Notes;
  38 class OptoReg;
  39 class PhaseCFG;
  40 class PhaseGVN;
  41 class PhaseIterGVN;
  42 class PhaseRegAlloc;
  43 class PhaseCCP;
  44 class PhaseCCP_DCE;
  45 class RootNode;
  46 class relocInfo;
  47 class Scope;
  48 class StartNode;
  49 class SafePointNode;
  50 class JVMState;
  51 class TypeData;
  52 class TypePtr;


 177   GrowableArray<Node*>* _macro_nodes;           // List of nodes which need to be expanded before matching.
 178   GrowableArray<Node*>* _predicate_opaqs;       // List of Opaque1 nodes for the loop predicates.
 179   ConnectionGraph*      _congraph;
 180 #ifndef PRODUCT
 181   IdealGraphPrinter*    _printer;
 182 #endif
 183 
 184   // Node management
 185   uint                  _unique;                // Counter for unique Node indices
 186   debug_only(static int _debug_idx;)            // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
 187   Arena                 _node_arena;            // Arena for new-space Nodes
 188   Arena                 _old_arena;             // Arena for old-space Nodes, lifetime during xform
 189   RootNode*             _root;                  // Unique root of compilation, or NULL after bail-out.
 190   Node*                 _top;                   // Unique top node.  (Reset by various phases.)
 191 
 192   Node*                 _immutable_memory;      // Initial memory state
 193 
 194   Node*                 _recent_alloc_obj;
 195   Node*                 _recent_alloc_ctl;
 196 



 197   // Blocked array of debugging and profiling information,
 198   // tracked per node.
 199   enum { _log2_node_notes_block_size = 8,
 200          _node_notes_block_size = (1<<_log2_node_notes_block_size)
 201   };
 202   GrowableArray<Node_Notes*>* _node_note_array;
 203   Node_Notes*           _default_node_notes;  // default notes for new nodes
 204 
 205   // After parsing and every bulk phase we hang onto the Root instruction.
 206   // The RootNode instruction is where the whole program begins.  It produces
 207   // the initial Control and BOTTOM for everybody else.
 208 
 209   // Type management
 210   Arena                 _Compile_types;         // Arena for all types
 211   Arena*                _type_arena;            // Alias for _Compile_types except in Initialize_shared()
 212   Dict*                 _type_dict;             // Intern table
 213   void*                 _type_hwm;              // Last allocation (see Type::operator new/delete)
 214   size_t                _type_last_size;        // Last allocation size (see Type::operator new/delete)
 215   ciMethod*             _last_tf_m;             // Cache for
 216   const TypeFunc*       _last_tf;               //  TypeFunc::make


 237   PhaseRegAlloc*        _regalloc;              // Results of register allocation.
 238   int                   _frame_slots;           // Size of total frame in stack slots
 239   CodeOffsets           _code_offsets;          // Offsets into the code for various interesting entries
 240   RegMask               _FIRST_STACK_mask;      // All stack slots usable for spills (depends on frame layout)
 241   Arena*                _indexSet_arena;        // control IndexSet allocation within PhaseChaitin
 242   void*                 _indexSet_free_block_list; // free list of IndexSet bit blocks
 243 
 244   uint                  _node_bundling_limit;
 245   Bundle*               _node_bundling_base;    // Information for instruction bundling
 246 
 247   // Instruction bits passed off to the VM
 248   int                   _method_size;           // Size of nmethod code segment in bytes
 249   CodeBuffer            _code_buffer;           // Where the code is assembled
 250   int                   _first_block_size;      // Size of unvalidated entry point code / OSR poison code
 251   ExceptionHandlerTable _handler_table;         // Table of native-code exception handlers
 252   ImplicitExceptionTable _inc_table;            // Table of implicit null checks in native code
 253   OopMapSet*            _oop_map_set;           // Table of oop maps (one for each safepoint location)
 254   static int            _CompiledZap_count;     // counter compared against CompileZap[First/Last]
 255   BufferBlob*           _scratch_buffer_blob;   // For temporary code buffers.
 256   relocInfo*            _scratch_locs_memory;   // For temporary code buffers.


 257 
 258  public:
 259   // Accessors
 260 
 261   // The Compile instance currently active in this (compiler) thread.
 262   static Compile* current() {
 263     return (Compile*) ciEnv::current()->compiler_data();
 264   }
 265 
 266   // ID for this compilation.  Useful for setting breakpoints in the debugger.
 267   int               compile_id() const          { return _compile_id; }
 268 
 269   // Does this compilation allow instructions to subsume loads?  User
 270   // instructions that subsume a load may result in an unschedulable
 271   // instruction sequence.
 272   bool              subsume_loads() const       { return _subsume_loads; }
 273   // Do escape analysis.
 274   bool              do_escape_analysis() const  { return _do_escape_analysis; }
 275   bool              save_argument_registers() const { return _save_argument_registers; }
 276 


 419   uint              unique() const              { return _unique; }
 420   uint         next_unique()                    { return _unique++; }
 421   void          set_unique(uint i)              { _unique = i; }
 422   static int        debug_idx()                 { return debug_only(_debug_idx)+0; }
 423   static void   set_debug_idx(int i)            { debug_only(_debug_idx = i); }
 424   Arena*            node_arena()                { return &_node_arena; }
 425   Arena*            old_arena()                 { return &_old_arena; }
 426   RootNode*         root() const                { return _root; }
 427   void          set_root(RootNode* r)           { _root = r; }
 428   StartNode*        start() const;              // (Derived from root.)
 429   void         init_start(StartNode* s);
 430   Node*             immutable_memory();
 431 
 432   Node*             recent_alloc_ctl() const    { return _recent_alloc_ctl; }
 433   Node*             recent_alloc_obj() const    { return _recent_alloc_obj; }
 434   void          set_recent_alloc(Node* ctl, Node* obj) {
 435                                                   _recent_alloc_ctl = ctl;
 436                                                   _recent_alloc_obj = obj;
 437                                                 }
 438 



 439   // Handy undefined Node
 440   Node*             top() const                 { return _top; }
 441 
 442   // these are used by guys who need to know about creation and transformation of top:
 443   Node*             cached_top_node()           { return _top; }
 444   void          set_cached_top_node(Node* tn);
 445 
 446   GrowableArray<Node_Notes*>* node_note_array() const { return _node_note_array; }
 447   void set_node_note_array(GrowableArray<Node_Notes*>* arr) { _node_note_array = arr; }
 448   Node_Notes* default_node_notes() const        { return _default_node_notes; }
 449   void    set_default_node_notes(Node_Notes* n) { _default_node_notes = n; }
 450 
 451   Node_Notes*       node_notes_at(int idx) {
 452     return locate_node_notes(_node_note_array, idx, false);
 453   }
 454   inline bool   set_node_notes_at(int idx, Node_Notes* value);
 455 
 456   // Copy notes from source to dest, if they exist.
 457   // Overwrite dest only if source provides something.
 458   // Return true if information was moved.


 570   void set_24_bit_selection_and_mode(bool selection, bool mode) {
 571     _select_24_bit_instr = selection;
 572     _in_24_bit_fp_mode   = mode;
 573   }
 574 
 575   void  set_java_calls(int z) { _java_calls  = z; }
 576   void set_inner_loops(int z) { _inner_loops = z; }
 577 
 578   // Instruction bits passed off to the VM
 579   int               code_size()                 { return _method_size; }
 580   CodeBuffer*       code_buffer()               { return &_code_buffer; }
 581   int               first_block_size()          { return _first_block_size; }
 582   void              set_frame_complete(int off) { _code_offsets.set_value(CodeOffsets::Frame_Complete, off); }
 583   ExceptionHandlerTable*  handler_table()       { return &_handler_table; }
 584   ImplicitExceptionTable* inc_table()           { return &_inc_table; }
 585   OopMapSet*        oop_map_set()               { return _oop_map_set; }
 586   DebugInformationRecorder* debug_info()        { return env()->debug_info(); }
 587   Dependencies*     dependencies()              { return env()->dependencies(); }
 588   static int        CompiledZap_count()         { return _CompiledZap_count; }
 589   BufferBlob*       scratch_buffer_blob()       { return _scratch_buffer_blob; }
 590   void         init_scratch_buffer_blob();

 591   void          set_scratch_buffer_blob(BufferBlob* b) { _scratch_buffer_blob = b; }
 592   relocInfo*        scratch_locs_memory()       { return _scratch_locs_memory; }
 593   void          set_scratch_locs_memory(relocInfo* b)  { _scratch_locs_memory = b; }
 594 
 595   // emit to scratch blob, report resulting size
 596   uint              scratch_emit_size(const Node* n);


 597 
 598   enum ScratchBufferBlob {
 599     MAX_inst_size       = 1024,
 600     MAX_locs_size       = 128, // number of relocInfo elements
 601     MAX_const_size      = 128,
 602     MAX_stubs_size      = 128
 603   };
 604 
 605   // Major entry point.  Given a Scope, compile the associated method.
 606   // For normal compilations, entry_bci is InvocationEntryBci.  For on stack
 607   // replacement, entry_bci indicates the bytecode for which to compile a
 608   // continuation.
 609   Compile(ciEnv* ci_env, C2Compiler* compiler, ciMethod* target,
 610           int entry_bci, bool subsume_loads, bool do_escape_analysis);
 611 
 612   // Second major entry point.  From the TypeFunc signature, generate code
 613   // to pass arguments from the Java calling convention to the C calling
 614   // convention.
 615   Compile(ciEnv* ci_env, const TypeFunc *(*gen)(),
 616           address stub_function, const char *stub_name,


 657   // "array".  May also consult and add to "objs", which describes the
 658   // scalar-replaced objects.
 659   void FillLocArray( int idx, MachSafePointNode* sfpt,
 660                      Node *local, GrowableArray<ScopeValue*> *array,
 661                      GrowableArray<ScopeValue*> *objs );
 662 
 663   // If "objs" contains an ObjectValue whose id is "id", returns it, else NULL.
 664   static ObjectValue* sv_for_node_id(GrowableArray<ScopeValue*> *objs, int id);
 665   // Requires that "objs" does not contain an ObjectValue whose id matches
 666   // that of "sv".  Appends "sv".
 667   static void set_sv_for_object_node(GrowableArray<ScopeValue*> *objs,
 668                                      ObjectValue* sv );
 669 
 670   // Process an OopMap Element while emitting nodes
 671   void Process_OopMap_Node(MachNode *mach, int code_offset);
 672 
 673   // Write out basic block data to code buffer
 674   void Fill_buffer();
 675 
 676   // Determine which variable sized branches can be shortened
 677   void Shorten_branches(Label *labels, int& code_size, int& reloc_size, int& stub_size, int& const_size);
 678 
 679   // Compute the size of first NumberOfLoopInstrToAlign instructions
 680   // at the head of a loop.
 681   void compute_loop_first_inst_sizes();
 682 
 683   // Compute the information for the exception tables
 684   void FillExceptionTables(uint cnt, uint *call_returns, uint *inct_starts, Label *blk_labels);
 685 
 686   // Stack slots that may be unused by the calling convention but must
 687   // otherwise be preserved.  On Intel this includes the return address.
 688   // On PowerPC it includes the 4 words holding the old TOC & LR glue.
 689   uint in_preserve_stack_slots();
 690 
 691   // "Top of Stack" slots that may be unused by the calling convention but must
 692   // otherwise be preserved.
 693   // On Intel these are not necessary and the value can be zero.
 694   // On Sparc this describes the words reserved for storing a register window
 695   // when an interrupt occurs.
 696   static uint out_preserve_stack_slots();
 697 




  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 class Block;
  26 class Bundle;
  27 class C2Compiler;
  28 class CallGenerator;
  29 class ConnectionGraph;
  30 class InlineTree;
  31 class Int_Array;
  32 class Matcher;
  33 class MachConstantBaseNode;
  34 class MachNode;
  35 class MachSafePointNode;
  36 class Node;
  37 class Node_Array;
  38 class Node_Notes;
  39 class OptoReg;
  40 class PhaseCFG;
  41 class PhaseGVN;
  42 class PhaseIterGVN;
  43 class PhaseRegAlloc;
  44 class PhaseCCP;
  45 class PhaseCCP_DCE;
  46 class RootNode;
  47 class relocInfo;
  48 class Scope;
  49 class StartNode;
  50 class SafePointNode;
  51 class JVMState;
  52 class TypeData;
  53 class TypePtr;


 178   GrowableArray<Node*>* _macro_nodes;           // List of nodes which need to be expanded before matching.
 179   GrowableArray<Node*>* _predicate_opaqs;       // List of Opaque1 nodes for the loop predicates.
 180   ConnectionGraph*      _congraph;
 181 #ifndef PRODUCT
 182   IdealGraphPrinter*    _printer;
 183 #endif
 184 
 185   // Node management
 186   uint                  _unique;                // Counter for unique Node indices
 187   debug_only(static int _debug_idx;)            // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
 188   Arena                 _node_arena;            // Arena for new-space Nodes
 189   Arena                 _old_arena;             // Arena for old-space Nodes, lifetime during xform
 190   RootNode*             _root;                  // Unique root of compilation, or NULL after bail-out.
 191   Node*                 _top;                   // Unique top node.  (Reset by various phases.)
 192 
 193   Node*                 _immutable_memory;      // Initial memory state
 194 
 195   Node*                 _recent_alloc_obj;
 196   Node*                 _recent_alloc_ctl;
 197 
 198   MachConstantBaseNode* _mach_constant_base_node;  // Constant table base node singleton.
 199 
 200 
 201   // Blocked array of debugging and profiling information,
 202   // tracked per node.
 203   enum { _log2_node_notes_block_size = 8,
 204          _node_notes_block_size = (1<<_log2_node_notes_block_size)
 205   };
 206   GrowableArray<Node_Notes*>* _node_note_array;
 207   Node_Notes*           _default_node_notes;  // default notes for new nodes
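
      // Illustrative sketch (not part of this changeset): the note array above
      // is "blocked" -- the high bits of a node index select a block in the
      // GrowableArray and the low _log2_node_notes_block_size bits select the
      // slot inside that block.  The helper names below are hypothetical and
      // shown only to make the indexing concrete:
      //
      //   static int nn_block_index (int idx) { return idx >> _log2_node_notes_block_size; }
      //   static int nn_block_offset(int idx) { return idx & (_node_notes_block_size - 1); }
      //
      //   // the block holding notes for node idx would be
      //   //   _node_note_array->at(nn_block_index(idx))
      //   // and the note itself sits nn_block_offset(idx) entries into that block.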
 208 
 209   // After parsing and every bulk phase we hang onto the Root instruction.
 210   // The RootNode instruction is where the whole program begins.  It produces
 211   // the initial Control and BOTTOM for everybody else.
 212 
 213   // Type management
 214   Arena                 _Compile_types;         // Arena for all types
 215   Arena*                _type_arena;            // Alias for _Compile_types except in Initialize_shared()
 216   Dict*                 _type_dict;             // Intern table
 217   void*                 _type_hwm;              // Last allocation (see Type::operator new/delete)
 218   size_t                _type_last_size;        // Last allocation size (see Type::operator new/delete)
 219   ciMethod*             _last_tf_m;             // Cache for
 220   const TypeFunc*       _last_tf;               //  TypeFunc::make


 241   PhaseRegAlloc*        _regalloc;              // Results of register allocation.
 242   int                   _frame_slots;           // Size of total frame in stack slots
 243   CodeOffsets           _code_offsets;          // Offsets into the code for various interesting entries
 244   RegMask               _FIRST_STACK_mask;      // All stack slots usable for spills (depends on frame layout)
 245   Arena*                _indexSet_arena;        // control IndexSet allocation within PhaseChaitin
 246   void*                 _indexSet_free_block_list; // free list of IndexSet bit blocks
 247 
 248   uint                  _node_bundling_limit;
 249   Bundle*               _node_bundling_base;    // Information for instruction bundling
 250 
 251   // Instruction bits passed off to the VM
 252   int                   _method_size;           // Size of nmethod code segment in bytes
 253   CodeBuffer            _code_buffer;           // Where the code is assembled
 254   int                   _first_block_size;      // Size of unvalidated entry point code / OSR poison code
 255   ExceptionHandlerTable _handler_table;         // Table of native-code exception handlers
 256   ImplicitExceptionTable _inc_table;            // Table of implicit null checks in native code
 257   OopMapSet*            _oop_map_set;           // Table of oop maps (one for each safepoint location)
 258   static int            _CompiledZap_count;     // counter compared against CompileZap[First/Last]
 259   BufferBlob*           _scratch_buffer_blob;   // For temporary code buffers.
 260   relocInfo*            _scratch_locs_memory;   // For temporary code buffers.
 261   int                   _scratch_const_size;    // For temporary code buffers.
 262   bool                  _in_scratch_emit_size;  // true when in scratch_emit_size.
 263 
 264  public:
 265   // Accessors
 266 
 267   // The Compile instance currently active in this (compiler) thread.
 268   static Compile* current() {
 269     return (Compile*) ciEnv::current()->compiler_data();
 270   }
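      // Typical usage (illustrative, not from this webrev): code running on the
      // compiler thread can reach the active compilation through this accessor,
      // e.g.
      //   Compile* C = Compile::current();
      //   uint idx  = C->next_unique();   // hand out a fresh node index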
 271 
 272   // ID for this compilation.  Useful for setting breakpoints in the debugger.
 273   int               compile_id() const          { return _compile_id; }
 274 
 275   // Does this compilation allow instructions to subsume loads?  User
 276   // instructions that subsume a load may result in an unschedulable
 277   // instruction sequence.
 278   bool              subsume_loads() const       { return _subsume_loads; }
 279   // Do escape analysis.
 280   bool              do_escape_analysis() const  { return _do_escape_analysis; }
 281   bool              save_argument_registers() const { return _save_argument_registers; }
 282 


 425   uint              unique() const              { return _unique; }
 426   uint         next_unique()                    { return _unique++; }
 427   void          set_unique(uint i)              { _unique = i; }
 428   static int        debug_idx()                 { return debug_only(_debug_idx)+0; }
 429   static void   set_debug_idx(int i)            { debug_only(_debug_idx = i); }
 430   Arena*            node_arena()                { return &_node_arena; }
 431   Arena*            old_arena()                 { return &_old_arena; }
 432   RootNode*         root() const                { return _root; }
 433   void          set_root(RootNode* r)           { _root = r; }
 434   StartNode*        start() const;              // (Derived from root.)
 435   void         init_start(StartNode* s);
 436   Node*             immutable_memory();
 437 
 438   Node*             recent_alloc_ctl() const    { return _recent_alloc_ctl; }
 439   Node*             recent_alloc_obj() const    { return _recent_alloc_obj; }
 440   void          set_recent_alloc(Node* ctl, Node* obj) {
 441                                                   _recent_alloc_ctl = ctl;
 442                                                   _recent_alloc_obj = obj;
 443                                                 }
 444 
 445   MachConstantBaseNode*     mach_constant_base_node();
 446   bool                  has_mach_constant_base_node() const { return _mach_constant_base_node != NULL; }
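      // Plausible shape of the accessor's out-of-line definition (a sketch, not
      // the body from this webrev): the MachConstantBaseNode singleton would be
      // created lazily on first request and cached in _mach_constant_base_node,
      // which is why has_mach_constant_base_node() can simply test for NULL:
      //
      //   MachConstantBaseNode* Compile::mach_constant_base_node() {
      //     if (_mach_constant_base_node == NULL) {
      //       _mach_constant_base_node = /* allocate the singleton node */;
      //     }
      //     return _mach_constant_base_node;
      //   }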
 447 
 448   // Handy undefined Node
 449   Node*             top() const                 { return _top; }
 450 
 451   // these are used by guys who need to know about creation and transformation of top:
 452   Node*             cached_top_node()           { return _top; }
 453   void          set_cached_top_node(Node* tn);
 454 
 455   GrowableArray<Node_Notes*>* node_note_array() const { return _node_note_array; }
 456   void set_node_note_array(GrowableArray<Node_Notes*>* arr) { _node_note_array = arr; }
 457   Node_Notes* default_node_notes() const        { return _default_node_notes; }
 458   void    set_default_node_notes(Node_Notes* n) { _default_node_notes = n; }
 459 
 460   Node_Notes*       node_notes_at(int idx) {
 461     return locate_node_notes(_node_note_array, idx, false);
 462   }
 463   inline bool   set_node_notes_at(int idx, Node_Notes* value);
 464 
 465   // Copy notes from source to dest, if they exist.
 466   // Overwrite dest only if source provides something.
 467   // Return true if information was moved.


 579   void set_24_bit_selection_and_mode(bool selection, bool mode) {
 580     _select_24_bit_instr = selection;
 581     _in_24_bit_fp_mode   = mode;
 582   }
 583 
 584   void  set_java_calls(int z) { _java_calls  = z; }
 585   void set_inner_loops(int z) { _inner_loops = z; }
 586 
 587   // Instruction bits passed off to the VM
 588   int               code_size()                 { return _method_size; }
 589   CodeBuffer*       code_buffer()               { return &_code_buffer; }
 590   int               first_block_size()          { return _first_block_size; }
 591   void              set_frame_complete(int off) { _code_offsets.set_value(CodeOffsets::Frame_Complete, off); }
 592   ExceptionHandlerTable*  handler_table()       { return &_handler_table; }
 593   ImplicitExceptionTable* inc_table()           { return &_inc_table; }
 594   OopMapSet*        oop_map_set()               { return _oop_map_set; }
 595   DebugInformationRecorder* debug_info()        { return env()->debug_info(); }
 596   Dependencies*     dependencies()              { return env()->dependencies(); }
 597   static int        CompiledZap_count()         { return _CompiledZap_count; }
 598   BufferBlob*       scratch_buffer_blob()       { return _scratch_buffer_blob; }
 599   void         init_scratch_buffer_blob(int const_size);
 600   void        clear_scratch_buffer_blob();
 601   void          set_scratch_buffer_blob(BufferBlob* b) { _scratch_buffer_blob = b; }
 602   relocInfo*        scratch_locs_memory()       { return _scratch_locs_memory; }
 603   void          set_scratch_locs_memory(relocInfo* b)  { _scratch_locs_memory = b; }
 604 
 605   // emit to scratch blob, report resulting size
 606   uint              scratch_emit_size(const Node* n);
 607   void       set_in_scratch_emit_size(bool x)   {        _in_scratch_emit_size = x; }
 608   bool           in_scratch_emit_size() const   { return _in_scratch_emit_size;     }
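      // Presumed usage pattern (an assumption, not code shown in this webrev):
      // scratch_emit_size() would bracket the trial emission with this flag so
      // that emit routines can tell they are only sizing into the scratch blob:
      //
      //   set_in_scratch_emit_size(true);
      //   /* emit n into the scratch CodeBuffer */
      //   set_in_scratch_emit_size(false);
      //
      // Node emit code could then query Compile::current()->in_scratch_emit_size()
      // to skip one-time side effects (e.g. filling the real constant table twice).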
 609 
 610   enum ScratchBufferBlob {
 611     MAX_inst_size       = 1024,
 612     MAX_locs_size       = 128, // number of relocInfo elements
 613     MAX_const_size      = 128,
 614     MAX_stubs_size      = 128
 615   };
 616 
 617   // Major entry point.  Given a Scope, compile the associated method.
 618   // For normal compilations, entry_bci is InvocationEntryBci.  For on stack
 619   // replacement, entry_bci indicates the bytecode for which to compile a
 620   // continuation.
 621   Compile(ciEnv* ci_env, C2Compiler* compiler, ciMethod* target,
 622           int entry_bci, bool subsume_loads, bool do_escape_analysis);
 623 
 624   // Second major entry point.  From the TypeFunc signature, generate code
 625   // to pass arguments from the Java calling convention to the C calling
 626   // convention.
 627   Compile(ciEnv* ci_env, const TypeFunc *(*gen)(),
 628           address stub_function, const char *stub_name,


 669   // "array".  May also consult and add to "objs", which describes the
 670   // scalar-replaced objects.
 671   void FillLocArray( int idx, MachSafePointNode* sfpt,
 672                      Node *local, GrowableArray<ScopeValue*> *array,
 673                      GrowableArray<ScopeValue*> *objs );
 674 
 675   // If "objs" contains an ObjectValue whose id is "id", returns it, else NULL.
 676   static ObjectValue* sv_for_node_id(GrowableArray<ScopeValue*> *objs, int id);
 677   // Requires that "objs" does not contain an ObjectValue whose id matches
 678   // that of "sv".  Appends "sv".
 679   static void set_sv_for_object_node(GrowableArray<ScopeValue*> *objs,
 680                                      ObjectValue* sv );
 681 
 682   // Process an OopMap Element while emitting nodes
 683   void Process_OopMap_Node(MachNode *mach, int code_offset);
 684 
 685   // Write out basic block data to code buffer
 686   void Fill_buffer();
 687 
 688   // Determine which variable sized branches can be shortened
 689   void Shorten_branches(Label *labels, int& code_size, int& reloc_size, int& stub_size);
 690 
 691   // Compute the size of first NumberOfLoopInstrToAlign instructions
 692   // at the head of a loop.
 693   void compute_loop_first_inst_sizes();
 694 
 695   // Compute the information for the exception tables
 696   void FillExceptionTables(uint cnt, uint *call_returns, uint *inct_starts, Label *blk_labels);
 697 
 698   // Stack slots that may be unused by the calling convention but must
 699   // otherwise be preserved.  On Intel this includes the return address.
 700   // On PowerPC it includes the 4 words holding the old TOC & LR glue.
 701   uint in_preserve_stack_slots();
 702 
 703   // "Top of Stack" slots that may be unused by the calling convention but must
 704   // otherwise be preserved.
 705   // On Intel these are not necessary and the value can be zero.
 706   // On Sparc this describes the words reserved for storing a register window
 707   // when an interrupt occurs.
 708   static uint out_preserve_stack_slots();
 709 

