src/share/vm/opto/phaseX.hpp

rev 3904 : 8005071: Incremental inlining for JSR 292
Summary: post parse inlining driven by number of live nodes.
Reviewed-by:

--- old/src/share/vm/opto/phaseX.hpp

  75     _inserts++;
  76     if( _inserts == _insert_limit ) { grow(); }
  77     assert( _inserts <= _insert_limit, "hash table overflow");
  78     assert( _inserts < _max, "hash table overflow" );
  79   }
  80   static uint round_up(uint);   // Round up to nearest power of 2
  81   void   grow();                // Grow _table to next power of 2 and rehash
  82   // Return 75% of _max, rounded up.
  83   uint   insert_limit() const { return _max - (_max>>2); }
  84 
  85   void   clear();               // Set all entries to NULL, keep storage.
  86   // Size of hash table
  87   uint   size()         const { return _max; }
  88   // Return Node* at index in table
  89   Node  *at(uint table_index) {
  90     assert(table_index < _max, "Must be within table");
  91     return _table[table_index];
  92   }
  93 
  94   void   remove_useless_nodes(VectorSet &useful); // replace with sentinel
  95 
  96   Node  *sentinel() { return _sentinel; }
  97 
  98 #ifndef PRODUCT
  99   Node  *find_index(uint idx);  // For debugging
 100   void   dump();                // For debugging, dump statistics
 101 #endif
 102   uint   _grows;                // For debugging, count of table grow()s
 103   uint   _look_probes;          // For debugging, count of hash probes
  104   uint   _lookup_hits;          // For debugging, count of hash_finds that hit
  105   uint   _lookup_misses;        // For debugging, count of hash_finds that missed
  106   uint   _insert_probes;        // For debugging, count of hash probes
  107   uint   _delete_probes;        // For debugging, count of hash probes for deletes
  108   uint   _delete_hits;          // For debugging, count of deletes that found the node
  109   uint   _delete_misses;        // For debugging, count of deletes that missed
 110   uint   _total_inserts;        // For debugging, total inserts into hash table
 111   uint   _total_insert_probes;  // For debugging, total probes while inserting
 112 };
 113 
 114 


 368   uint   _count_new_values;     // For profiling, count new values produced
 369   void    inc_new_values()        { ++_count_new_values; }
 370   void    clear_new_values()      { _count_new_values = 0; }
 371   uint    made_new_values() const { return _count_new_values; }
 372 #endif
 373 };
 374 
 375 
 376 //------------------------------PhaseGVN---------------------------------------
 377 // Phase for performing local, pessimistic GVN-style optimizations.
 378 class PhaseGVN : public PhaseValues {
 379 public:
 380   PhaseGVN( Arena *arena, uint est_max_size ) : PhaseValues( arena, est_max_size ) {}
 381   PhaseGVN( PhaseGVN *gvn ) : PhaseValues( gvn ) {}
 382   PhaseGVN( PhaseGVN *gvn, const char *dummy ) : PhaseValues( gvn, dummy ) {}
 383 
 384   // Return a node which computes the same function as this node, but
 385   // in a faster or cheaper fashion.
 386   Node  *transform( Node *n );
  387   Node  *transform_no_reclaim( Node *n );
 388 
 389   // Check for a simple dead loop when a data node references itself.
 390   DEBUG_ONLY(void dead_loop_check(Node *n);)
 391 };
 392 
 393 //------------------------------PhaseIterGVN-----------------------------------
  394 // Phase for iteratively performing local, pessimistic GVN-style optimizations
  395 // and ideal transformations on the graph.
 396 class PhaseIterGVN : public PhaseGVN {
 397  private:
  398   bool _delay_transform;  // When true, simply register the node when transform
  399                           // is called instead of actually optimizing it
 400 
 401   // Idealize old Node 'n' with respect to its inputs and its value
 402   virtual Node *transform_old( Node *a_node );
 403 
 404   // Subsume users of node 'old' into node 'nn'
 405   void subsume_node( Node *old, Node *nn );
 406 
  407   Node_Stack _stack;      // Stack used to avoid recursion

+++ new/src/share/vm/opto/phaseX.hpp

  75     _inserts++;
  76     if( _inserts == _insert_limit ) { grow(); }
  77     assert( _inserts <= _insert_limit, "hash table overflow");
  78     assert( _inserts < _max, "hash table overflow" );
  79   }
  80   static uint round_up(uint);   // Round up to nearest power of 2
  81   void   grow();                // Grow _table to next power of 2 and rehash
  82   // Return 75% of _max, rounded up.
  83   uint   insert_limit() const { return _max - (_max>>2); }
  84 
  85   void   clear();               // Set all entries to NULL, keep storage.
  86   // Size of hash table
  87   uint   size()         const { return _max; }
  88   // Return Node* at index in table
  89   Node  *at(uint table_index) {
  90     assert(table_index < _max, "Must be within table");
  91     return _table[table_index];
  92   }
  93 
  94   void   remove_useless_nodes(VectorSet &useful); // replace with sentinel
  95   void update_with(NodeHash* nh);
  96 
  97   Node  *sentinel() { return _sentinel; }
  98 
  99 #ifndef PRODUCT
 100   Node  *find_index(uint idx);  // For debugging
 101   void   dump();                // For debugging, dump statistics
 102 #endif
 103   uint   _grows;                // For debugging, count of table grow()s
 104   uint   _look_probes;          // For debugging, count of hash probes
  105   uint   _lookup_hits;          // For debugging, count of hash_finds that hit
  106   uint   _lookup_misses;        // For debugging, count of hash_finds that missed
  107   uint   _insert_probes;        // For debugging, count of hash probes
  108   uint   _delete_probes;        // For debugging, count of hash probes for deletes
  109   uint   _delete_hits;          // For debugging, count of deletes that found the node
  110   uint   _delete_misses;        // For debugging, count of deletes that missed
 111   uint   _total_inserts;        // For debugging, total inserts into hash table
 112   uint   _total_insert_probes;  // For debugging, total probes while inserting
 113 };
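
The growth policy in this table is worth unpacking: insert_limit() returns _max - (_max>>2), i.e. _max minus a quarter of _max, so a 256-slot table grows once 192 slots are consumed; and because removal replaces an entry with _sentinel rather than NULL, probe chains that pass through deleted slots stay walkable. Below is a minimal standalone sketch of the same scheme, including a merge in the spirit of the update_with(NodeHash*) this change adds at line 95. SketchHash and all of its details are assumptions of mine for illustration, not the HotSpot implementation.

    #include <cstdint>
    #include <cstdlib>

    class SketchHash {
    public:
      void**   _table;      // open-addressed slots; NULL means never used
      unsigned _max;        // always a power of 2, so (_max - 1) is the probe mask
      unsigned _inserts;    // slots consumed, sentinel slots included
      void*    _sentinel;   // stands in for deleted entries

      SketchHash(unsigned size, void* sentinel)
        : _table((void**)calloc(size, sizeof(void*))),
          _max(size), _inserts(0), _sentinel(sentinel) {}
      ~SketchHash() { free(_table); }

      // 75% of _max: _max - _max/4, exact for any power-of-2 _max >= 4.
      unsigned insert_limit() const { return _max - (_max >> 2); }

      unsigned hash(void* p) const {
        return (unsigned)((uintptr_t)p >> 3) & (_max - 1);
      }

      void insert(void* p) {
        if (_inserts + 1 >= insert_limit()) grow();
        unsigned i = hash(p);
        while (_table[i] != NULL)          // linear probe past live and sentinel slots
          i = (i + 1) & (_max - 1);
        _table[i] = p;
        _inserts++;
      }

      void remove(void* p) {
        for (unsigned i = hash(p); _table[i] != NULL; i = (i + 1) & (_max - 1)) {
          if (_table[i] == p) {
            _table[i] = _sentinel;         // chain stays walkable; the slot stays
            return;                        // "consumed" until the next grow()
          }
        }
      }

      void grow() {                        // double to the next power of 2 and rehash
        void**   old     = _table;
        unsigned old_max = _max;
        _max   <<= 1;
        _table   = (void**)calloc(_max, sizeof(void*));
        _inserts = 0;
        for (unsigned i = 0; i < old_max; i++)
          if (old[i] != NULL && old[i] != _sentinel)
            insert(old[i]);                // sentinels are dropped here
        free(old);
      }

      // Analogue of the update_with(NodeHash*) added above: fold another
      // table's live entries into this one in a single pass.
      void update_with(SketchHash* other) {
        for (unsigned i = 0; i < other->_max; i++) {
          void* p = other->_table[i];
          if (p != NULL && p != other->_sentinel)
            insert(p);
        }
      }
    };
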
 114 
 115 


 369   uint   _count_new_values;     // For profiling, count new values produced
 370   void    inc_new_values()        { ++_count_new_values; }
 371   void    clear_new_values()      { _count_new_values = 0; }
 372   uint    made_new_values() const { return _count_new_values; }
 373 #endif
 374 };
 375 
 376 
 377 //------------------------------PhaseGVN---------------------------------------
 378 // Phase for performing local, pessimistic GVN-style optimizations.
 379 class PhaseGVN : public PhaseValues {
 380 public:
 381   PhaseGVN( Arena *arena, uint est_max_size ) : PhaseValues( arena, est_max_size ) {}
 382   PhaseGVN( PhaseGVN *gvn ) : PhaseValues( gvn ) {}
 383   PhaseGVN( PhaseGVN *gvn, const char *dummy ) : PhaseValues( gvn, dummy ) {}
 384 
 385   // Return a node which computes the same function as this node, but
 386   // in a faster or cheaper fashion.
 387   Node  *transform( Node *n );
 388   Node  *transform_no_reclaim( Node *n );
 389 
 390   void update_with(PhaseGVN* gvn) {
 391     _table.update_with(&gvn->_table);
 392     _types = gvn->_types;
 393   }
 394 
 395   // Check for a simple dead loop when a data node references itself.
 396   DEBUG_ONLY(void dead_loop_check(Node *n);)
 397 };
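
This update_with is the hook the incremental-inlining work needs: nodes created while a call site is inlined after the main parse can be value-numbered in a scratch PhaseGVN, and merging its hash table back into the long-lived phase (while also adopting its type array, the _types = gvn->_types assignment above) keeps those nodes findable afterward. A toy usage, continuing the hypothetical SketchHash from the earlier sketch:

    int main() {
      int a = 1, b = 2, dead = 0;
      SketchHash main_tbl(16, &dead);    // long-lived table
      SketchHash scratch(16, &dead);     // filled while "inlining"
      main_tbl.insert(&a);
      scratch.insert(&b);
      main_tbl.update_with(&scratch);    // one-way merge; scratch is unchanged
      // main_tbl now holds both &a and &b, so &b stays reachable after
      // the scratch table is thrown away.
      return 0;
    }
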
 398 
 399 //------------------------------PhaseIterGVN-----------------------------------
  400 // Phase for iteratively performing local, pessimistic GVN-style optimizations
  401 // and ideal transformations on the graph.
 402 class PhaseIterGVN : public PhaseGVN {
 403  private:
  404   bool _delay_transform;  // When true, simply register the node when transform
  405                           // is called instead of actually optimizing it
 406 
 407   // Idealize old Node 'n' with respect to its inputs and its value
 408   virtual Node *transform_old( Node *a_node );
 409 
 410   // Subsume users of node 'old' into node 'nn'
 411   void subsume_node( Node *old, Node *nn );
 412 
 413   Node_Stack _stack;      // Stack used to avoid recursion
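
Two details in this fragment carry the design. _delay_transform lets incremental inlining build new subgraphs without rewriting them mid-construction: when the flag is set, transform only registers the node. And _stack exists because transform_old must follow input chains of unbounded depth, which would exhaust the native stack if done recursively. A standalone sketch of that second trick, with a hypothetical node type (not the HotSpot Node or Node_Stack):

    #include <cstddef>
    #include <utility>
    #include <vector>

    struct N {
      std::vector<N*> in;                  // input edges
      bool visited;
      N() : visited(false) {}
    };

    // Post-order walk over the inputs of 'root', driven by an explicit
    // stack of (node, next-input-index) pairs instead of recursion.
    void walk(N* root) {
      std::vector<std::pair<N*, size_t> > stack;
      root->visited = true;
      stack.push_back(std::make_pair(root, (size_t)0));
      while (!stack.empty()) {
        N*     n = stack.back().first;
        size_t i = stack.back().second;
        if (i < n->in.size()) {
          stack.back().second = i + 1;     // advance before descending
          N* m = n->in[i];
          // Visited inputs are not re-pushed, so even a node that
          // references itself (the simple dead loop) cannot loop here.
          if (m != NULL && !m->visited) {
            m->visited = true;
            stack.push_back(std::make_pair(m, (size_t)0));
          }
        } else {
          // Every input of n has been visited; transform_old-style work
          // would happen here, in post-order, with bounded native stack.
          stack.pop_back();
        }
      }
    }
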