
src/share/vm/opto/callnode.hpp

 693 // Make a direct subroutine call using Java calling convention (for static
 694 // calls and optimized virtual calls, plus calls to wrappers for run-time
 695 // routines); generates static stub.
 696 class CallStaticJavaNode : public CallJavaNode {
 697   virtual uint cmp( const Node &n ) const;
 698   virtual uint size_of() const; // Size is bigger
 699 public:
 700   CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
 701     : CallJavaNode(tf, addr, method, bci) {
 702     init_class_id(Class_CallStaticJava);
 703     if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
 704       init_flags(Flag_is_macro);
 705       C->add_macro_node(this);
 706     }
 707     const TypeTuple *r = tf->range_sig();
 708     if (ValueTypeReturnedAsFields &&
 709         method != NULL &&
 710         method->is_method_handle_intrinsic() &&
 711         r->cnt() > TypeFunc::Parms &&
 712         r->field_at(TypeFunc::Parms)->isa_valuetypeptr() &&
 713         r->field_at(TypeFunc::Parms)->is_valuetypeptr()->value_type()->value_klass() == C->env()->___Value_klass()) {
 714       init_flags(Flag_is_macro);
 715       C->add_macro_node(this);
 716     }
 717 
 718     _is_scalar_replaceable = false;
 719     _is_non_escaping = false;
 720   }
 721   CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
 722                      const TypePtr* adr_type)
 723     : CallJavaNode(tf, addr, NULL, bci) {
 724     init_class_id(Class_CallStaticJava);
 725     // This node calls a runtime stub, which often has narrow memory effects.
 726     _adr_type = adr_type;
 727     _is_scalar_replaceable = false;
 728     _is_non_escaping = false;
 729     _name = name;
 730   }
 731 
 732   // Result of Escape Analysis
 733   bool _is_scalar_replaceable;
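
A hedged usage sketch for the two constructors above (not part of this file; the variable names, stub name, and addresses are illustrative). The first form is used when the target ciMethod is known and, as its body shows, registers boxing calls and __Value-returning method handle intrinsics as macro nodes for later expansion; the second form is used for named run-time routines and carries an explicit address type for the stub's memory effects:

    // Static call or optimized virtual call: the target method is known.
    CallStaticJavaNode* java_call =
        new CallStaticJavaNode(C, tf, target_entry, callee_method, bci);

    // Call to a named run-time routine: no ciMethod, but a stub name and an
    // explicit (often narrow) address type describing its memory effects.
    CallStaticJavaNode* stub_call =
        new CallStaticJavaNode(tf, stub_entry, "illustrative_stub", bci, TypeRawPtr::BOTTOM);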


 862 
 863     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
 864 
 865     // create result type (range)
 866     fields = TypeTuple::fields(1);
 867     fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop
 868 
 869     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
 870 
 871     return TypeFunc::make(domain, range);
 872   }
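
The lines above follow the usual TypeFunc construction pattern: allocate a fields array, wrap it into domain and range tuples, then combine the two. A minimal illustrative sketch of the same pattern, not part of this file (the function name and argument type are made up; only the calls mirror the code above):

    static const TypeFunc* example_type() {
      // Domain: a single int argument after the fixed Parms slots.
      const Type** fields = TypeTuple::fields(1);
      fields[TypeFunc::Parms+0] = TypeInt::INT;
      const TypeTuple* domain = TypeTuple::make(TypeFunc::Parms+1, fields);

      // Range: one raw oop result, as in the allocation signature above.
      fields = TypeTuple::fields(1);
      fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL;
      const TypeTuple* range = TypeTuple::make(TypeFunc::Parms+1, fields);

      return TypeFunc::make(domain, range);
    }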
 873 
 874   // Result of Escape Analysis
 875   bool _is_scalar_replaceable;
 876   bool _is_non_escaping;
 877   // True when MemBar for new is redundant with MemBar at initializer exit
 878   bool _is_allocation_MemBar_redundant;
 879 
 880   virtual uint size_of() const; // Size is bigger
 881   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
 882                Node *size, Node *klass_node, Node *initial_test, ValueTypeNode* value_node = NULL);
 883   // Expansion modifies the JVMState, so we need to clone it
 884   virtual void  clone_jvms(Compile* C) {
 885     if (jvms() != NULL) {
 886       set_jvms(jvms()->clone_deep(C));
 887       jvms()->set_map_deep(this);
 888     }
 889   }
 890   virtual int Opcode() const;
 891   virtual uint ideal_reg() const { return Op_RegP; }
 892   virtual bool        guaranteed_safepoint()  { return false; }
 893 
 894   // allocations do not modify their arguments
 895   virtual bool        may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
 896 
 897   // Pattern-match a possible usage of AllocateNode.
 898   // Return null if no allocation is recognized.
 899   // The operand is the pointer produced by the (possible) allocation.
 900   // It must be a projection of the Allocate or its subsequent CastPP.
 901   // (Note:  This function is defined in file graphKit.cpp, near
 902   // GraphKit::new_instance/new_array, whose output it recognizes.)
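
A hedged sketch of how callers typically use the pattern-matching helper this comment documents (the declaration itself falls just below this hunk; the name Ideal_allocation and the surrounding code are assumptions based on the stock HotSpot sources, not on this patch):

    // ptr should be a projection of an Allocate, or of its subsequent CastPP.
    AllocateNode* alloc = AllocateNode::Ideal_allocation(ptr, phase);
    if (alloc != NULL) {
      // ptr is the oop produced by this allocation; callers use this e.g. to
      // fold initializing stores into the allocation or to skip re-zeroing.
    }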


(A second rendering of the same hunks follows, showing the patched side of the diff; it differs in the is__Value() check in the CallStaticJavaNode constructor and in the ValueTypeBaseNode* parameter of the AllocateNode constructor.)

 693 // Make a direct subroutine call using Java calling convention (for static
 694 // calls and optimized virtual calls, plus calls to wrappers for run-time
 695 // routines); generates static stub.
 696 class CallStaticJavaNode : public CallJavaNode {
 697   virtual uint cmp( const Node &n ) const;
 698   virtual uint size_of() const; // Size is bigger
 699 public:
 700   CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
 701     : CallJavaNode(tf, addr, method, bci) {
 702     init_class_id(Class_CallStaticJava);
 703     if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
 704       init_flags(Flag_is_macro);
 705       C->add_macro_node(this);
 706     }
 707     const TypeTuple *r = tf->range_sig();
 708     if (ValueTypeReturnedAsFields &&
 709         method != NULL &&
 710         method->is_method_handle_intrinsic() &&
 711         r->cnt() > TypeFunc::Parms &&
 712         r->field_at(TypeFunc::Parms)->isa_valuetypeptr() &&
 713         r->field_at(TypeFunc::Parms)->is_valuetypeptr()->is__Value()) {
 714       init_flags(Flag_is_macro);
 715       C->add_macro_node(this);
 716     }
 717 
 718     _is_scalar_replaceable = false;
 719     _is_non_escaping = false;
 720   }
 721   CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
 722                      const TypePtr* adr_type)
 723     : CallJavaNode(tf, addr, NULL, bci) {
 724     init_class_id(Class_CallStaticJava);
 725     // This node calls a runtime stub, which often has narrow memory effects.
 726     _adr_type = adr_type;
 727     _is_scalar_replaceable = false;
 728     _is_non_escaping = false;
 729     _name = name;
 730   }
 731 
 732   // Result of Escape Analysis
 733   bool _is_scalar_replaceable;
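
In this patched rendering, the inline klass comparison from the other side of the diff (value_type()->value_klass() == C->env()->___Value_klass()) has been folded into an is__Value() convenience call. A hedged sketch of what that predicate presumably reduces to; the receiver type name and the use of Compile::current() are assumptions, only the comparison itself is taken from the replaced code:

    // Hypothetical shape of the helper: true when the pointed-to value type
    // is the canonical __Value klass.
    bool TypeValueTypePtr::is__Value() const {
      return value_type()->value_klass() == Compile::current()->env()->___Value_klass();
    }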


 862 
 863     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
 864 
 865     // create result type (range)
 866     fields = TypeTuple::fields(1);
 867     fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop
 868 
 869     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
 870 
 871     return TypeFunc::make(domain, range);
 872   }
 873 
 874   // Result of Escape Analysis
 875   bool _is_scalar_replaceable;
 876   bool _is_non_escaping;
 877   // True when MemBar for new is redundant with MemBar at initializer exit
 878   bool _is_allocation_MemBar_redundant;
 879 
 880   virtual uint size_of() const; // Size is bigger
 881   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
 882                Node *size, Node *klass_node, Node *initial_test, ValueTypeBaseNode* value_node = NULL);
 883   // Expansion modifies the JVMState, so we need to clone it
 884   virtual void  clone_jvms(Compile* C) {
 885     if (jvms() != NULL) {
 886       set_jvms(jvms()->clone_deep(C));
 887       jvms()->set_map_deep(this);
 888     }
 889   }
 890   virtual int Opcode() const;
 891   virtual uint ideal_reg() const { return Op_RegP; }
 892   virtual bool        guaranteed_safepoint()  { return false; }
 893 
 894   // allocations do not modify their arguments
 895   virtual bool        may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
 896 
 897   // Pattern-match a possible usage of AllocateNode.
 898   // Return null if no allocation is recognized.
 899   // The operand is the pointer produced by the (possible) allocation.
 900   // It must be a projection of the Allocate or its subsequent CastPP.
 901   // (Note:  This function is defined in file graphKit.cpp, near
 902   // GraphKit::new_instance/new_array, whose output it recognizes.)

