< prev index next >

src/hotspot/share/opto/graphKit.hpp

Print this page




 843   Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = NULL);
 844   void insert_store_load_for_barrier();
 845   // Optional 'precedent' is appended as an extra edge, to force ordering.
 846   FastLockNode* shared_lock(Node* obj);
 847   void shared_unlock(Node* box, Node* obj);
 848 
 849   // helper functions for the fast path/slow path idioms
 850   Node* fast_and_slow(Node* in, const Type *result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc *slow_call_type, Node* slow_arg, Klass* ex_klass, Node* slow_result);
 851 
 852   // Generate an instance-of idiom.  Used by both the instance-of bytecode
 853   // and the reflective instance-of call.
 854   Node* gen_instanceof(Node *subobj, Node* superkls, bool safe_for_replace = false);
 855 
 856   // Generate a check-cast idiom.  Used by both the check-cast bytecode
 857   // and the array-store bytecode
 858   Node* gen_checkcast(Node *subobj, Node* superkls, Node* *failure_control = NULL, bool never_null = false);
 859 
       // Value-type (Valhalla) helpers.
       // NOTE(review): bodies live in graphKit.cpp and are not visible here; the
       // notes below are inferred from names/signatures -- confirm against the .cpp.
 860   Node* is_always_locked(Node* obj);   // presumably tests the "always locked" mark-word pattern of value objects -- confirm
 861   Node* is_value_mirror(Node* mirror); // presumably tests whether 'mirror' is the java.lang.Class of a value type -- confirm
 862   void gen_value_type_guard(Node* obj, int nargs = 0); // 'nargs' looks like the argument count to restore on deopt -- confirm
 
 863   void gen_value_array_null_guard(Node* ary, Node* val, int nargs); // presumably guards storing null 'val' into a null-free value array -- confirm
 864   Node* load_lh_array_tag(Node* kls);  // presumably extracts the array tag bits from kls's layout helper -- confirm
 865   Node* gen_lh_array_test(Node* kls, unsigned int lh_value); // presumably compares the layout-helper tag against 'lh_value' -- confirm
 866 
 867   Node* gen_subtype_check(Node* subklass, Node* superklass) {
       // Is 'subklass' a subtype of 'superklass'?  Thin wrapper over
       // Phase::gen_subtype_check: hands it the kit's current merged memory and
       // control, then re-installs the (possibly updated) control edge before
       // returning the resulting check node.
 868     MergeMemNode* mem = merged_memory();
 869     Node* ctrl = control();  // passed by address below, so the helper may update it
 870     Node* n = Phase::gen_subtype_check(subklass, superklass, &ctrl, mem, &_gvn);
 871     set_control(ctrl);       // adopt whatever control the check produced
 872     return n;
 873   }
 874 
 875   // Exact type check used for predicted calls and casts.
 876   // Rewrites (*casted_receiver) to be casted to the stronger type.
 877   // (Caller is responsible for doing replace_in_map.)
 878   Node* type_check_receiver(Node* receiver, ciKlass* klass, float prob,
 879                             Node* *casted_receiver);
       // NOTE(review): presumably compares a loaded klass 'recv_klass' against
       // 'tklass' with branch probability 'prob'; body not visible -- confirm.
 880   Node* type_check(Node* recv_klass, const TypeKlassPtr* tklass, float prob);
 881 
 882   // Inexact type check used for predicted calls.




 843   Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = NULL);
 844   void insert_store_load_for_barrier();
 845   // Optional 'precedent' is appended as an extra edge, to force ordering.
 846   FastLockNode* shared_lock(Node* obj);
 847   void shared_unlock(Node* box, Node* obj);
 848 
 849   // helper functions for the fast path/slow path idioms
 850   Node* fast_and_slow(Node* in, const Type *result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc *slow_call_type, Node* slow_arg, Klass* ex_klass, Node* slow_result);
 851 
 852   // Generate an instance-of idiom.  Used by both the instance-of bytecode
 853   // and the reflective instance-of call.
 854   Node* gen_instanceof(Node *subobj, Node* superkls, bool safe_for_replace = false);
 855 
 856   // Generate a check-cast idiom.  Used by both the check-cast bytecode
 857   // and the array-store bytecode
 858   Node* gen_checkcast(Node *subobj, Node* superkls, Node* *failure_control = NULL, bool never_null = false);
 859 
       // Value-type (Valhalla) helpers.
       // NOTE(review): bodies live in graphKit.cpp and are not visible here; the
       // notes below are inferred from names/signatures -- confirm against the .cpp.
 860   Node* is_always_locked(Node* obj);   // presumably tests the "always locked" mark-word pattern of value objects -- confirm
 861   Node* is_value_mirror(Node* mirror); // presumably tests whether 'mirror' is the java.lang.Class of a value type -- confirm
 862   void gen_value_type_guard(Node* obj, int nargs = 0); // 'nargs' looks like the argument count to restore on deopt -- confirm
 863   Node* gen_null_free_array_check(Node* ary); // new in this change: presumably emits a check that 'ary' is a null-free array -- confirm
 864   void gen_value_array_null_guard(Node* ary, Node* val, int nargs); // presumably guards storing null 'val' into a null-free value array -- confirm
 865   Node* load_lh_array_tag(Node* kls);  // presumably extracts the array tag bits from kls's layout helper -- confirm
 866   Node* gen_lh_array_test(Node* kls, unsigned int lh_value); // presumably compares the layout-helper tag against 'lh_value' -- confirm
 867 
 868   Node* gen_subtype_check(Node* subklass, Node* superklass) {
       // Is 'subklass' a subtype of 'superklass'?  Thin wrapper over
       // Phase::gen_subtype_check: hands it the kit's current merged memory and
       // control, then re-installs the (possibly updated) control edge before
       // returning the resulting check node.
 869     MergeMemNode* mem = merged_memory();
 870     Node* ctrl = control();  // passed by address below, so the helper may update it
 871     Node* n = Phase::gen_subtype_check(subklass, superklass, &ctrl, mem, &_gvn);
 872     set_control(ctrl);       // adopt whatever control the check produced
 873     return n;
 874   }
 875 
 876   // Exact type check used for predicted calls and casts.
 877   // Rewrites (*casted_receiver) to be casted to the stronger type.
 878   // (Caller is responsible for doing replace_in_map.)
 879   Node* type_check_receiver(Node* receiver, ciKlass* klass, float prob,
 880                             Node* *casted_receiver);
       // NOTE(review): presumably compares a loaded klass 'recv_klass' against
       // 'tklass' with branch probability 'prob'; body not visible -- confirm.
 881   Node* type_check(Node* recv_klass, const TypeKlassPtr* tklass, float prob);
 882 
 883   // Inexact type check used for predicted calls.


< prev index next >