src/share/vm/opto/matcher.hpp

rev 2570 : imported patch membar2


 424 
 425   // Are floats converted to doubles when stored to the stack during deoptimization?
 426   static bool float_in_double();
 427   // Do ints take an entire long register or just half?
 428   static const bool int_in_long;
 429 
 430   // Do the processor's shift instructions only use the low 5/6 bits
 431   // of the count for 32/64 bit ints? If not we need to do the masking
 432   // ourselves.
 433   static const bool need_masked_shift_count;
 434 
 435   // This routine is run whenever a graph fails to match.
 436   // If it returns, the compiler should bail out to the interpreter without error.
 437   // In non-product mode, SoftMatchFailure is false so that non-canonical
 438   // graphs are detected: fatal() prints a message and exits.
 439   static void soft_match_failure() {
 440     if( SoftMatchFailure ) return;
 441     else { fatal("SoftMatchFailure is not allowed except in product"); }
 442   }
 443 
 444   // Used by the DFA in dfa_sparc.cpp.  Check for a prior FastLock
 445   // acting as an Acquire, in which case we don't need an Acquire here.
 446   // We retain the Node to act as a compiler ordering barrier.
 447   static bool prior_fast_lock( const Node *acq );
 448 
 449   // Used by the DFA in dfa_sparc.cpp.  Check for a following
 450   // FastUnlock acting as a Release, in which case we don't need a
 451   // Release here.  We retain the Node to act as a compiler ordering barrier.
 452   static bool post_fast_unlock( const Node *rel );
 453 
 454   // Check for a following volatile memory barrier without an
 455   // intervening load, in which case we don't need a barrier here.
 456   // We retain the Node to act as a compiler ordering barrier.
 457   static bool post_store_load_barrier(const Node* mb);
 458 
 459 
 460 #ifdef ASSERT
 461   void dump_old2new_map();      // machine-independent to machine-dependent
 462 
 463   Node* find_old_node(Node* new_node) {
 464     return _new2old_map[new_node->_idx];
 465   }
 466 #endif
 467 };
 468 
 469 #endif // SHARE_VM_OPTO_MATCHER_HPP


 424 
 425   // Are floats converted to doubles when stored to the stack during deoptimization?
 426   static bool float_in_double();
 427   // Do ints take an entire long register or just half?
 428   static const bool int_in_long;
 429 
 430   // Do the processor's shift instructions only use the low 5/6 bits
 431   // of the count for 32/64 bit ints? If not we need to do the masking
 432   // ourselves.
 433   static const bool need_masked_shift_count;
 434 
 435   // This routine is run whenever a graph fails to match.
 436   // If it returns, the compiler should bail out to the interpreter without error.
 437   // In non-product mode, SoftMatchFailure is false so that non-canonical
 438   // graphs are detected: fatal() prints a message and exits.
 439   static void soft_match_failure() {
 440     if( SoftMatchFailure ) return;
 441     else { fatal("SoftMatchFailure is not allowed except in product"); }
 442   }
 443 
 444   // Check for a following volatile memory barrier without an
 445   // intervening load, in which case we don't need a barrier here.
 446   // We retain the Node to act as a compiler ordering barrier.
 447   static bool post_store_load_barrier(const Node* mb);
 448 
 449 
 450 #ifdef ASSERT
 451   void dump_old2new_map();      // machine-independent to machine-dependent
 452 
 453   Node* find_old_node(Node* new_node) {
 454     return _new2old_map[new_node->_idx];
 455   }
 456 #endif
 457 };
 458 
 459 #endif // SHARE_VM_OPTO_MATCHER_HPP
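
A note on need_masked_shift_count: Java defines shift counts to use only the
low 5 bits for 32-bit operands and the low 6 bits for 64-bit operands. On
processors whose shift instructions already ignore the high bits of the count
(x86, for example) the flag is false and no extra code is needed; otherwise
the matcher must emit the mask itself. A minimal standalone sketch of the
required semantics (toy code, not HotSpot source):

#include <stdint.h>

// Java's '<<' on int: only the low 5 bits of the count take effect.
int32_t java_ishl(int32_t value, int32_t count) {
  // Shift as unsigned so the operation is well defined for negative values.
  return (int32_t)((uint32_t)value << (count & 0x1f));
}

// Java's '<<' on long: only the low 6 bits of the count take effect.
int64_t java_lshl(int64_t value, int32_t count) {
  return (int64_t)((uint64_t)value << (count & 0x3f));
}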
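
prior_fast_lock, post_fast_unlock, and post_store_load_barrier all implement
the same peephole idea: an ordering barrier can be elided when an adjacent
node already provides the required memory semantics and nothing that needs
ordering intervenes, while the Node itself is kept as a compiler ordering
barrier. A hedged sketch of the shape of such a check over a toy
straight-line IR (the ToyNode type and its kinds are stand-ins for C2's Node
graph, not the real matcher.cpp code):

#include <stddef.h>

// Toy node kinds standing in for C2 opcodes -- illustration only.
enum ToyKind { TK_LOAD, TK_STORE, TK_MEMBAR_VOLATILE, TK_OTHER };

struct ToyNode {
  ToyKind  kind;
  ToyNode* next;  // next node in program order (simplified straight-line code)
};

// Returns true if the StoreLoad barrier at 'mb' is redundant because a
// following volatile membar covers it with no intervening load.
bool post_store_load_barrier_sketch(const ToyNode* mb) {
  for (const ToyNode* n = mb->next; n != NULL; n = n->next) {
    if (n->kind == TK_LOAD)            return false; // a load still needs it
    if (n->kind == TK_MEMBAR_VOLATILE) return true;  // later barrier covers us
  }
  return false;  // end of block reached: be conservative, keep the barrier
}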