src/share/vm/opto/compile.hpp
Sdiff for 7092905: the old version of the file appears first, followed by the new version.

  58 class Node_Notes;
  59 class OptoReg;
  60 class PhaseCFG;
  61 class PhaseGVN;
  62 class PhaseIterGVN;
  63 class PhaseRegAlloc;
  64 class PhaseCCP;
  65 class PhaseCCP_DCE;
  66 class RootNode;
  67 class relocInfo;
  68 class Scope;
  69 class StartNode;
  70 class SafePointNode;
  71 class JVMState;
  72 class TypeData;
  73 class TypePtr;
  74 class TypeFunc;
  75 class Unique_Node_List;
  76 class nmethod;
  77 class WarmCallInfo;
  78 
  79 //------------------------------Compile----------------------------------------
  80 // This class defines a top-level Compiler invocation.
  81 
  82 class Compile : public Phase {
  83   friend class VMStructs;
  84 
  85  public:
  86   // Fixed alias indexes.  (See also MergeMemNode.)
  87   enum {
  88     AliasIdxTop = 1,  // pseudo-index, aliases to nothing (used as sentinel value)
  89     AliasIdxBot = 2,  // pseudo-index, aliases to everything
  90     AliasIdxRaw = 3   // hard-wired index for TypeRawPtr::BOTTOM
  91   };
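
For orientation, a hedged sketch of how these fixed indexes are typically consulted. get_alias_index() is the query Compile declares elsewhere; the dispatch around it is illustrative only.

    // Illustrative only: dispatch on the fixed alias indexes.
    int idx = C->get_alias_index(adr_type);
    if (idx == Compile::AliasIdxTop) {
      // Sentinel: the address type aliases nothing.
    } else if (idx == Compile::AliasIdxBot) {
      // Flattened to "everything": use the bottom (wide) memory slice.
    } else if (idx == Compile::AliasIdxRaw) {
      // Raw memory (TypeRawPtr::BOTTOM), e.g. object header writes.
    } else {
      // Indexes above AliasIdxRaw are ordinary per-type memory slices.
    }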
  92 
  93   // Variant of TraceTime(NULL, &_t_accumulator, TimeCompiler);
  94   // Integrated with logging.  If logging is turned on, and dolog is true,
  95   // then brackets are put into the log, with time stamps and node counts.
  96   // (The time collection itself is always conditionalized on TimeCompiler.)
  97   class TracePhase : public TraceTime {
  98    private:
  99     Compile*    C;
 100     CompileLog* _log;
 101    public:
 102     TracePhase(const char* name, elapsedTimer* accumulator, bool dolog);
 103     ~TracePhase();
 104   };
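
TracePhase is an RAII guard, so a phase is timed (and bracketed in the log) for exactly the lifetime of a stack object. A hedged usage sketch; the phase name and timer are illustrative, real call sites live in compile.cpp.

    {
      // Ctor starts the timer and, when dolog is true and a CompileLog is
      // attached, opens a log bracket with a time stamp and node count.
      TracePhase t("optimizer", &_t_optimizer, true /*dolog*/);
      // ... run the phase ...
    }   // dtor stops the timer and closes the bracket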
 105 
 106   // Information per category of alias (memory slice)
 107   class AliasType {
 108    private:
 109     friend class Compile;
 110 
 111     int             _index;         // unique index, used with MergeMemNode
 112     const TypePtr*  _adr_type;      // normalized address type
 113     ciField*        _field;         // relevant instance field, or null if none
 114     bool            _is_rewritable; // false if the memory is write-once only
 115     int             _general_index; // if this type is an instance, the general
 116                                     // type that it is an instance of
 117 
 118     void Init(int i, const TypePtr* at);
 119 
 120    public:


 296 #endif
 297 
 298   // JSR 292
 299   bool                  _has_method_handle_invokes; // True if this method has MethodHandle invokes.
 300 
 301   // Compilation environment.
 302   Arena                 _comp_arena;            // Arena with lifetime equivalent to Compile
 303   ciEnv*                _env;                   // CI interface
 304   CompileLog*           _log;                   // from CompilerThread
 305   const char*           _failure_reason;        // for record_failure/failing pattern
 306   GrowableArray<CallGenerator*>* _intrinsics;   // List of intrinsics.
 307   GrowableArray<Node*>* _macro_nodes;           // List of nodes which need to be expanded before matching.
 308   GrowableArray<Node*>* _predicate_opaqs;       // List of Opaque1 nodes for the loop predicates.
 309   ConnectionGraph*      _congraph;
 310 #ifndef PRODUCT
 311   IdealGraphPrinter*    _printer;
 312 #endif
 313 
 314   // Node management
 315   uint                  _unique;                // Counter for unique Node indices
 316   debug_only(static int _debug_idx;)            // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
 317   Arena                 _node_arena;            // Arena for new-space Nodes
 318   Arena                 _old_arena;             // Arena for old-space Nodes, lifetime during xform
 319   RootNode*             _root;                  // Unique root of compilation, or NULL after bail-out.
 320   Node*                 _top;                   // Unique top node.  (Reset by various phases.)
 321 
 322   Node*                 _immutable_memory;      // Initial memory state
 323 
 324   Node*                 _recent_alloc_obj;
 325   Node*                 _recent_alloc_ctl;
 326 
 327   // Constant table
 328   ConstantTable         _constant_table;        // The constant table for this compile.
 329   MachConstantBaseNode* _mach_constant_base_node;  // Constant table base node singleton.
 330 
 331 
 332   // Blocked array of debugging and profiling information,
 333   // tracked per node.
 334   enum { _log2_node_notes_block_size = 8,
 335          _node_notes_block_size = (1<<_log2_node_notes_block_size)
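
The two constants imply a two-level lookup: the high bits of a node index select a 256-entry block, the low bits select a slot within it. A minimal sketch of that indexing, with growth and NULL handling omitted; the helper name is hypothetical.

    // Sketch only: two-level lookup into the blocked Node_Notes array.
    Node_Notes* notes_for(int idx) {                    // hypothetical helper
      int block = idx >> _log2_node_notes_block_size;   // which block
      int slot  = idx &  (_node_notes_block_size - 1);  // offset within block
      Node_Notes* base = _node_note_array->at(block);   // may need growth in reality
      return base + slot;
    }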


 532   // Compilation environment.
 533   Arena*            comp_arena()                { return &_comp_arena; }
 534   ciEnv*            env() const                 { return _env; }
 535   CompileLog*       log() const                 { return _log; }
 536   bool              failing() const             { return _env->failing() || _failure_reason != NULL; }
 537   const char* failure_reason() { return _failure_reason; }
 538   bool              failure_reason_is(const char* r) { return (r==_failure_reason) || (r!=NULL && _failure_reason!=NULL && strcmp(r, _failure_reason)==0); }
 539 
 540   void record_failure(const char* reason);
 541   void record_method_not_compilable(const char* reason, bool all_tiers = false) {
 542     // All bailouts cover "all_tiers" when TieredCompilation is off.
 543     if (!TieredCompilation) all_tiers = true;
 544     env()->record_method_not_compilable(reason, all_tiers);
 545     // Record failure reason.
 546     record_failure(reason);
 547   }
 548   void record_method_not_compilable_all_tiers(const char* reason) {
 549     record_method_not_compilable(reason, true);
 550   }
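
These helpers support the bailout discipline used throughout C2: a phase that hits an unsupported case records a reason, and drivers poll failing() between phases instead of unwinding. A hedged sketch; the phase and predicate are hypothetical.

    // Hypothetical phase illustrating the record/poll protocol.
    void some_phase(Compile* C) {
      if (unsupported_shape()) {                 // hypothetical predicate
        C->record_method_not_compilable("unsupported shape");
        return;                                  // caller will see failing()
      }
      // ... do work ...
    }
    // Driver side:  some_phase(C);  if (C->failing()) return;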
 551   bool check_node_count(uint margin, const char* reason) {
 552     if (unique() + margin > (uint)MaxNodeLimit) {
 553       record_method_not_compilable(reason);
 554       return true;
 555     } else {
 556       return false;
 557     }
 558   }
 559 
 560   // Node management
 561   uint              unique() const              { return _unique; }
 562   uint         next_unique()                    { return _unique++; }
 563   void          set_unique(uint i)              { _unique = i; }
 564   static int        debug_idx()                 { return debug_only(_debug_idx)+0; }
 565   static void   set_debug_idx(int i)            { debug_only(_debug_idx = i); }
 566   Arena*            node_arena()                { return &_node_arena; }
 567   Arena*            old_arena()                 { return &_old_arena; }
 568   RootNode*         root() const                { return _root; }
 569   void          set_root(RootNode* r)           { _root = r; }
 570   StartNode*        start() const;              // (Derived from root.)
 571   void         init_start(StartNode* s);
 572   Node*             immutable_memory();
 573 
 574   Node*             recent_alloc_ctl() const    { return _recent_alloc_ctl; }
 575   Node*             recent_alloc_obj() const    { return _recent_alloc_obj; }
 576   void          set_recent_alloc(Node* ctl, Node* obj) {
 577                                                   _recent_alloc_ctl = ctl;
 578                                                   _recent_alloc_obj = obj;
 579                                                 }

 580 
 581   // Constant table
 582   ConstantTable&   constant_table() { return _constant_table; }
 583 
 584   MachConstantBaseNode*     mach_constant_base_node();
 585   bool                  has_mach_constant_base_node() const { return _mach_constant_base_node != NULL; }
 586 
 587   // Handy undefined Node
 588   Node*             top() const                 { return _top; }
 589 
 590   // These are used by code that needs to know about the creation and transformation of top:
 591   Node*             cached_top_node()           { return _top; }
 592   void          set_cached_top_node(Node* tn);
 593 
 594   GrowableArray<Node_Notes*>* node_note_array() const { return _node_note_array; }
 595   void set_node_note_array(GrowableArray<Node_Notes*>* arr) { _node_note_array = arr; }
 596   Node_Notes* default_node_notes() const        { return _default_node_notes; }
 597   void    set_default_node_notes(Node_Notes* n) { _default_node_notes = n; }
 598 
 599   Node_Notes*       node_notes_at(int idx) {


 661   // PerMethodTrapLimit was exceeded for all inlined methods seen so far.
 662   bool too_many_traps(Deoptimization::DeoptReason reason,
 663                       // Privately used parameter for logging:
 664                       ciMethodData* logmd = NULL);
 665   // Report if there were too many recompiles at a method and bci.
 666   bool too_many_recompiles(ciMethod* method, int bci, Deoptimization::DeoptReason reason);
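
A hedged sketch of how these heuristics gate speculative code: before emitting an uncommon trap, ask whether that trap reason has already fired too often (the guard shape is illustrative; real call sites are in the parser).

    if (!C->too_many_traps(Deoptimization::Reason_null_check)) {
      // Still profitable: emit the fast path, trap on the slow one.
    } else {
      // This trap keeps firing here: generate conservative code instead.
    }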
 667 
 668   // Parsing, optimization
 669   PhaseGVN*         initial_gvn()               { return _initial_gvn; }
 670   Unique_Node_List* for_igvn()                  { return _for_igvn; }
 671   inline void       record_for_igvn(Node* n);   // Body is after class Unique_Node_List.
 672   void          set_initial_gvn(PhaseGVN *gvn)           { _initial_gvn = gvn; }
 673   void          set_for_igvn(Unique_Node_List *for_igvn) { _for_igvn = for_igvn; }
 674 
 675   // Replace n by nn using initial_gvn, calling hash_delete and
 676   // record_for_igvn as needed.
 677   void gvn_replace_by(Node* n, Node* nn);
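
Together with record_for_igvn() above, this is the usual replace-and-requeue idiom for parse-time graph surgery. A hedged sketch; the node names are illustrative.

    // Splice 'better' in where 'n' was, keeping GVN state sane:
    // uses of n are re-pointed at better, with hash_delete() and
    // record_for_igvn() applied to each affected user.
    C->gvn_replace_by(n, better);

    // Nodes mutated by hand are queued explicitly for the IGVN pass:
    C->record_for_igvn(changed_node);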
 678 
 679 
 680   void              identify_useful_nodes(Unique_Node_List &useful);
 681   void              remove_useless_nodes  (Unique_Node_List &useful);
 682 
 683   WarmCallInfo*     warm_calls() const          { return _warm_calls; }
 684   void          set_warm_calls(WarmCallInfo* l) { _warm_calls = l; }
 685   WarmCallInfo* pop_warm_call();
 686 
 687   // Record this CallGenerator for inlining at the end of parsing.
 688   void              add_late_inline(CallGenerator* cg) { _late_inlines.push(cg); }
 689 
 690   // Matching, CFG layout, allocation, code generation
 691   PhaseCFG*         cfg()                       { return _cfg; }
 692   bool              select_24_bit_instr() const { return _select_24_bit_instr; }
 693   bool              in_24_bit_fp_mode() const   { return _in_24_bit_fp_mode; }
 694   bool              has_java_calls() const      { return _java_calls > 0; }
 695   int               java_calls() const          { return _java_calls; }
 696   int               inner_loops() const         { return _inner_loops; }
 697   Matcher*          matcher()                   { return _matcher; }
 698   PhaseRegAlloc*    regalloc()                  { return _regalloc; }
 699   int               frame_slots() const         { return _frame_slots; }
 700   int               frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'


 875 
 876   // Management of the AliasType table.
 877   void grow_alias_types();
 878   AliasCacheEntry* probe_alias_cache(const TypePtr* adr_type);
 879   const TypePtr *flatten_alias_type(const TypePtr* adr_type) const;
 880   AliasType* find_alias_type(const TypePtr* adr_type, bool no_create, ciField* field);
 881 
 882   void verify_top(Node*) const PRODUCT_RETURN;
 883 
 884   // Intrinsic setup.
 885   void           register_library_intrinsics();                            // initializer
 886   CallGenerator* make_vm_intrinsic(ciMethod* m, bool is_virtual);          // constructor
 887   int            intrinsic_insertion_index(ciMethod* m, bool is_virtual);  // helper
 888   CallGenerator* find_intrinsic(ciMethod* m, bool is_virtual);             // query fn
 889   void           register_intrinsic(CallGenerator* cg);                    // update fn
 890 
 891 #ifndef PRODUCT
 892   static juint  _intrinsic_hist_count[vmIntrinsics::ID_LIMIT];
 893   static jubyte _intrinsic_hist_flags[vmIntrinsics::ID_LIMIT];
 894 #endif
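
Taken together, the four functions suggest a lazily populated, sorted registry keyed by (method, is_virtual). A hedged sketch of the protocol they imply, ignoring access control; the wrapper name is hypothetical.

    // Hypothetical wrapper showing the intended lookup protocol.
    CallGenerator* intrinsic_for(Compile* C, ciMethod* m, bool is_virtual) {
      CallGenerator* cg = C->find_intrinsic(m, is_virtual);  // query sorted list
      if (cg == NULL) {
        cg = C->make_vm_intrinsic(m, is_virtual);            // construct (may fail)
        if (cg != NULL)  C->register_intrinsic(cg);          // cache, keeping order
      }
      return cg;
    }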
 895 
 896  public:
 897 
 898   // Note:  Histogram array size is about 1 Kb.
 899   enum {                        // flag bits:
 900     _intrinsic_worked = 1,      // succeeded at least once
 901     _intrinsic_failed = 2,      // tried it but it failed
 902     _intrinsic_disabled = 4,    // was requested but disabled (e.g., -XX:-InlineUnsafeOps)
 903     _intrinsic_virtual = 8,     // was seen in the virtual form (rare)
 904     _intrinsic_both = 16        // was seen in the non-virtual form (usual)
 905   };
 906   // Update histogram.  Return true if this is a first-time occurrence.
 907   static bool gather_intrinsic_statistics(vmIntrinsics::ID id,
 908                                           bool is_virtual, int flags) PRODUCT_RETURN0;
 909   static void print_intrinsic_statistics() PRODUCT_RETURN;
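
A hedged sketch of recording one outcome; the id variable is illustrative, and in product builds the call compiles away (PRODUCT_RETURN0).

    // Record that intrinsic 'id' succeeded in its non-virtual form.
    bool first_time =
        Compile::gather_intrinsic_statistics(id, /*is_virtual*/ false,
                                             Compile::_intrinsic_worked);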
 910 
 911   // Graph verification code
 912   // Walk the node list, verifying that there is a one-to-one
 913   // correspondence between Use-Def edges and Def-Use edges
 914   // The option no_dead_code enables stronger checks that the

New version of src/share/vm/opto/compile.hpp:

  58 class Node_Notes;
  59 class OptoReg;
  60 class PhaseCFG;
  61 class PhaseGVN;
  62 class PhaseIterGVN;
  63 class PhaseRegAlloc;
  64 class PhaseCCP;
  65 class PhaseCCP_DCE;
  66 class RootNode;
  67 class relocInfo;
  68 class Scope;
  69 class StartNode;
  70 class SafePointNode;
  71 class JVMState;
  72 class TypeData;
  73 class TypePtr;
  74 class TypeFunc;
  75 class Unique_Node_List;
  76 class nmethod;
  77 class WarmCallInfo;
  78 class Node_Stack;
  79 struct Final_Reshape_Counts;
  80 
  81 //------------------------------Compile----------------------------------------
  82 // This class defines a top-level Compiler invocation.
  83 
  84 class Compile : public Phase {
  85   friend class VMStructs;
  86 
  87  public:
  88   // Fixed alias indexes.  (See also MergeMemNode.)
  89   enum {
  90     AliasIdxTop = 1,  // pseudo-index, aliases to nothing (used as sentinel value)
  91     AliasIdxBot = 2,  // pseudo-index, aliases to everything
  92     AliasIdxRaw = 3   // hard-wired index for TypeRawPtr::BOTTOM
  93   };
  94 
  95   // Variant of TraceTime(NULL, &_t_accumulator, TimeCompiler);
  96   // Integrated with logging.  If logging is turned on, and dolog is true,
  97   // then brackets are put into the log, with time stamps and node counts.
  98   // (The time collection itself is always conditionalized on TimeCompiler.)
  99   class TracePhase : public TraceTime {
 100    private:
 101     Compile*    C;
 102     CompileLog* _log;
 103     const char* _phase_name;
 104     bool _dolog;
 105    public:
 106     TracePhase(const char* name, elapsedTimer* accumulator, bool dolog);
 107     ~TracePhase();
 108   };
 109 
 110   // Information per category of alias (memory slice)
 111   class AliasType {
 112    private:
 113     friend class Compile;
 114 
 115     int             _index;         // unique index, used with MergeMemNode
 116     const TypePtr*  _adr_type;      // normalized address type
 117     ciField*        _field;         // relevant instance field, or null if none
 118     bool            _is_rewritable; // false if the memory is write-once only
 119     int             _general_index; // if this type is an instance, the general
 120                                     // type that it is an instance of
 121 
 122     void Init(int i, const TypePtr* at);
 123 
 124    public:


 300 #endif
 301 
 302   // JSR 292
 303   bool                  _has_method_handle_invokes; // True if this method has MethodHandle invokes.
 304 
 305   // Compilation environment.
 306   Arena                 _comp_arena;            // Arena with lifetime equivalent to Compile
 307   ciEnv*                _env;                   // CI interface
 308   CompileLog*           _log;                   // from CompilerThread
 309   const char*           _failure_reason;        // for record_failure/failing pattern
 310   GrowableArray<CallGenerator*>* _intrinsics;   // List of intrinsics.
 311   GrowableArray<Node*>* _macro_nodes;           // List of nodes which need to be expanded before matching.
 312   GrowableArray<Node*>* _predicate_opaqs;       // List of Opaque1 nodes for the loop predicates.
 313   ConnectionGraph*      _congraph;
 314 #ifndef PRODUCT
 315   IdealGraphPrinter*    _printer;
 316 #endif
 317 
 318   // Node management
 319   uint                  _unique;                // Counter for unique Node indices
 320   VectorSet             _dead_node_list;        // Set of dead nodes
 321   uint                  _dead_node_count;       // Number of dead nodes; VectorSet::Size() is O(N).
 322                                                 // So use this to keep count and make the call O(1).
 323   debug_only(static int _debug_idx;)            // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
 324   Arena                 _node_arena;            // Arena for new-space Nodes
 325   Arena                 _old_arena;             // Arena for old-space Nodes, lifetime during xform
 326   RootNode*             _root;                  // Unique root of compilation, or NULL after bail-out.
 327   Node*                 _top;                   // Unique top node.  (Reset by various phases.)
 328 
 329   Node*                 _immutable_memory;      // Initial memory state
 330 
 331   Node*                 _recent_alloc_obj;
 332   Node*                 _recent_alloc_ctl;
 333 
 334   // Constant table
 335   ConstantTable         _constant_table;        // The constant table for this compile.
 336   MachConstantBaseNode* _mach_constant_base_node;  // Constant table base node singleton.
 337 
 338 
 339   // Blocked array of debugging and profiling information,
 340   // tracked per node.
 341   enum { _log2_node_notes_block_size = 8,
 342          _node_notes_block_size = (1<<_log2_node_notes_block_size)


 539   // Compilation environment.
 540   Arena*            comp_arena()                { return &_comp_arena; }
 541   ciEnv*            env() const                 { return _env; }
 542   CompileLog*       log() const                 { return _log; }
 543   bool              failing() const             { return _env->failing() || _failure_reason != NULL; }
 544   const char*       failure_reason() { return _failure_reason; }
 545   bool              failure_reason_is(const char* r) { return (r==_failure_reason) || (r!=NULL && _failure_reason!=NULL && strcmp(r, _failure_reason)==0); }
 546 
 547   void record_failure(const char* reason);
 548   void record_method_not_compilable(const char* reason, bool all_tiers = false) {
 549     // All bailouts cover "all_tiers" when TieredCompilation is off.
 550     if (!TieredCompilation) all_tiers = true;
 551     env()->record_method_not_compilable(reason, all_tiers);
 552     // Record failure reason.
 553     record_failure(reason);
 554   }
 555   void record_method_not_compilable_all_tiers(const char* reason) {
 556     record_method_not_compilable(reason, true);
 557   }
 558   bool check_node_count(uint margin, const char* reason) {
 559     if (live_nodes() + margin > (uint)MaxNodeLimit) {
 560       record_method_not_compilable(reason);
 561       return true;
 562     } else {
 563       return false;
 564     }
 565   }
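
Note that this version budgets against live_nodes() rather than unique(): node indexes are never reused, so unique() overstates the graph size once many nodes have died. A hedged usage sketch; the margin and reason are illustrative, NodeLimitFudgeFactor being the conventional slack used elsewhere in C2.

    // Bail out before a transformation that may create many nodes.
    if (C->check_node_count(NodeLimitFudgeFactor, "out of nodes during expansion")) {
      return;   // compilation is now marked as failing
    }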
 566 
 567   // Node management
 568   uint         unique() const              { return _unique; }
 569   uint         next_unique()               { return _unique++; }
 570   void         set_unique(uint i)          { _unique = i; }
 571   static int   debug_idx()                 { return debug_only(_debug_idx)+0; }
 572   static void  set_debug_idx(int i)        { debug_only(_debug_idx = i); }
 573   Arena*       node_arena()                { return &_node_arena; }
 574   Arena*       old_arena()                 { return &_old_arena; }
 575   RootNode*    root() const                { return _root; }
 576   void         set_root(RootNode* r)       { _root = r; }
 577   StartNode*   start() const;              // (Derived from root.)
 578   void         init_start(StartNode* s);
 579   Node*        immutable_memory();
 580 
 581   Node*        recent_alloc_ctl() const    { return _recent_alloc_ctl; }
 582   Node*        recent_alloc_obj() const    { return _recent_alloc_obj; }
 583   void         set_recent_alloc(Node* ctl, Node* obj) {
 584                                                   _recent_alloc_ctl = ctl;
 585                                                   _recent_alloc_obj = obj;
 586                                            }
 587   void         record_dead_node(uint idx)  { if (_dead_node_list.test_set(idx)) return;
 588                                              _dead_node_count++;
 589                                            }
 590   uint         dead_node_count()           { return _dead_node_count; }
 591   void         reset_dead_node_list()      { _dead_node_list.Reset();
 592                                              _dead_node_count = 0;
 593                                            }
 594   uint         live_nodes()               {
 595     int val = _unique - _dead_node_count;
 596     assert(val >= 0, err_msg_res("number of tracked dead nodes %d more than created nodes %d", _dead_node_count, _unique));
 597     return (uint) val;
 598   }
 599 #ifdef ASSERT
 600   uint         count_live_nodes_by_graph_walk();
 601   void         print_missing_nodes();
 602 #endif
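
The bookkeeping above pairs a membership bitset with a running counter: test_set() makes record_dead_node() idempotent, and the counter makes dead_node_count() O(1) where VectorSet::Size() would scan. A minimal, self-contained sketch of the same pattern outside the VM (std::vector<bool> stands in for VectorSet):

    #include <vector>
    #include <cassert>

    class DeadNodeTracker {                 // standalone sketch, not HotSpot code
      std::vector<bool> _dead;              // membership: has idx been recorded?
      unsigned          _count;             // running total, so counting is O(1)
     public:
      explicit DeadNodeTracker(unsigned max) : _dead(max, false), _count(0) {}
      void record_dead(unsigned idx) {
        if (_dead[idx]) return;             // idempotent, like VectorSet::test_set()
        _dead[idx] = true;
        _count++;
      }
      unsigned dead_count() const { return _count; }   // O(1), no scan
      unsigned live_count(unsigned unique) const {     // mirrors Compile::live_nodes()
        assert(unique >= _count);
        return unique - _count;
      }
    };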
 603 
 604   // Constant table
 605   ConstantTable&   constant_table() { return _constant_table; }
 606 
 607   MachConstantBaseNode*     mach_constant_base_node();
 608   bool                  has_mach_constant_base_node() const { return _mach_constant_base_node != NULL; }
 609 
 610   // Handy undefined Node
 611   Node*             top() const                 { return _top; }
 612 
 613   // These are used by code that needs to know about the creation and transformation of top:
 614   Node*             cached_top_node()           { return _top; }
 615   void          set_cached_top_node(Node* tn);
 616 
 617   GrowableArray<Node_Notes*>* node_note_array() const { return _node_note_array; }
 618   void set_node_note_array(GrowableArray<Node_Notes*>* arr) { _node_note_array = arr; }
 619   Node_Notes* default_node_notes() const        { return _default_node_notes; }
 620   void    set_default_node_notes(Node_Notes* n) { _default_node_notes = n; }
 621 
 622   Node_Notes*       node_notes_at(int idx) {


 684   // PerMethodTrapLimit was exceeded for all inlined methods seen so far.
 685   bool too_many_traps(Deoptimization::DeoptReason reason,
 686                       // Privately used parameter for logging:
 687                       ciMethodData* logmd = NULL);
 688   // Report if there were too many recompiles at a method and bci.
 689   bool too_many_recompiles(ciMethod* method, int bci, Deoptimization::DeoptReason reason);
 690 
 691   // Parsing, optimization
 692   PhaseGVN*         initial_gvn()               { return _initial_gvn; }
 693   Unique_Node_List* for_igvn()                  { return _for_igvn; }
 694   inline void       record_for_igvn(Node* n);   // Body is after class Unique_Node_List.
 695   void          set_initial_gvn(PhaseGVN *gvn)           { _initial_gvn = gvn; }
 696   void          set_for_igvn(Unique_Node_List *for_igvn) { _for_igvn = for_igvn; }
 697 
 698   // Replace n by nn using initial_gvn, calling hash_delete and
 699   // record_for_igvn as needed.
 700   void gvn_replace_by(Node* n, Node* nn);
 701 
 702 
 703   void              identify_useful_nodes(Unique_Node_List &useful);
 704   void              update_dead_node_list(Unique_Node_List &useful);
 705   void              remove_useless_nodes  (Unique_Node_List &useful);
 706 
 707   WarmCallInfo*     warm_calls() const          { return _warm_calls; }
 708   void          set_warm_calls(WarmCallInfo* l) { _warm_calls = l; }
 709   WarmCallInfo* pop_warm_call();
 710 
 711   // Record this CallGenerator for inlining at the end of parsing.
 712   void              add_late_inline(CallGenerator* cg) { _late_inlines.push(cg); }
 713 
 714   // Matching, CFG layout, allocation, code generation
 715   PhaseCFG*         cfg()                       { return _cfg; }
 716   bool              select_24_bit_instr() const { return _select_24_bit_instr; }
 717   bool              in_24_bit_fp_mode() const   { return _in_24_bit_fp_mode; }
 718   bool              has_java_calls() const      { return _java_calls > 0; }
 719   int               java_calls() const          { return _java_calls; }
 720   int               inner_loops() const         { return _inner_loops; }
 721   Matcher*          matcher()                   { return _matcher; }
 722   PhaseRegAlloc*    regalloc()                  { return _regalloc; }
 723   int               frame_slots() const         { return _frame_slots; }
 724   int               frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'


 899 
 900   // Management of the AliasType table.
 901   void grow_alias_types();
 902   AliasCacheEntry* probe_alias_cache(const TypePtr* adr_type);
 903   const TypePtr *flatten_alias_type(const TypePtr* adr_type) const;
 904   AliasType* find_alias_type(const TypePtr* adr_type, bool no_create, ciField* field);
 905 
 906   void verify_top(Node*) const PRODUCT_RETURN;
 907 
 908   // Intrinsic setup.
 909   void           register_library_intrinsics();                            // initializer
 910   CallGenerator* make_vm_intrinsic(ciMethod* m, bool is_virtual);          // constructor
 911   int            intrinsic_insertion_index(ciMethod* m, bool is_virtual);  // helper
 912   CallGenerator* find_intrinsic(ciMethod* m, bool is_virtual);             // query fn
 913   void           register_intrinsic(CallGenerator* cg);                    // update fn
 914 
 915 #ifndef PRODUCT
 916   static juint  _intrinsic_hist_count[vmIntrinsics::ID_LIMIT];
 917   static jubyte _intrinsic_hist_flags[vmIntrinsics::ID_LIMIT];
 918 #endif
 919   // Function calls made by the public function final_graph_reshaping.
 920   // No need to be made public as they are not called elsewhere.
 921   void final_graph_reshaping_impl( Node *n, Final_Reshape_Counts &frc);
 922   void final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc );
 923   void eliminate_redundant_card_marks(Node* n);
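
A hedged sketch of the iterative depth-first walk these declarations suggest: Node_Stack carries (node, next-input-index) pairs so the reshaping visit runs post-order without native recursion. Visited-set and safepoint bookkeeping are omitted; the body is illustrative, not the real implementation.

    void walk_sketch(Node_Stack& nstack, Node* root, Final_Reshape_Counts& frc) {
      Node* n = root;
      uint  i = 0;                          // next input of n to visit
      while (true) {
        if (i < n->req()) {
          Node* m = n->in(i++);
          if (m != NULL /* && not visited yet */) {
            nstack.push(n, i);              // remember where to resume in n
            n = m;  i = 0;                  // descend into the input
          }
        } else {
          // all inputs visited: post-order work on n would happen here,
          // i.e. final_graph_reshaping_impl(n, frc)
          if (nstack.is_empty())  break;
          n = nstack.node();                // resume the parent ...
          i = nstack.index();               // ... at the saved input index
          nstack.pop();
        }
      }
    }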
 924 
 925  public:
 926 
 927   // Note:  Histogram array size is about 1 Kb.
 928   enum {                        // flag bits:
 929     _intrinsic_worked = 1,      // succeeded at least once
 930     _intrinsic_failed = 2,      // tried it but it failed
 931     _intrinsic_disabled = 4,    // was requested but disabled (e.g., -XX:-InlineUnsafeOps)
 932     _intrinsic_virtual = 8,     // was seen in the virtual form (rare)
 933     _intrinsic_both = 16        // was seen in the non-virtual form (usual)
 934   };
 935   // Update histogram.  Return true if this is a first-time occurrence.
 936   static bool gather_intrinsic_statistics(vmIntrinsics::ID id,
 937                                           bool is_virtual, int flags) PRODUCT_RETURN0;
 938   static void print_intrinsic_statistics() PRODUCT_RETURN;
 939 
 940   // Graph verification code
 941   // Walk the node list, verifying that there is a one-to-one
 942   // correspondence between Use-Def edges and Def-Use edges
 943   // The option no_dead_code enables stronger checks that the