src/share/vm/opto/callnode.hpp (context diff, bug 6934604)

*** 47,56 ****
--- 47,57 ----
  class CallRuntimeNode;
  class CallLeafNode;
  class CallLeafNoFPNode;
  class AllocateNode;
  class AllocateArrayNode;
+ class BoxLockNode;
  class LockNode;
  class UnlockNode;
  class JVMState;
  class OopMap;
  class State;
*** 233,243 ****
  uint              endoff() const { return _endoff; }
  uint              oopoff() const { return debug_end(); }

  int            loc_size() const { return stkoff() - locoff(); }
  int            stk_size() const { return monoff() - stkoff(); }
- int            arg_size() const { return monoff() - argoff(); }
  int            mon_size() const { return scloff() - monoff(); }
  int            scl_size() const { return endoff() - scloff(); }

  bool        is_loc(uint i) const { return locoff() <= i && i < stkoff(); }
  bool        is_stk(uint i) const { return stkoff() <= i && i < monoff(); }
--- 234,243 ----
*** 296,305 ****
--- 296,306 ----
  void set_should_reexecute(bool reexec) {_reexecute = reexec ? Reexecute_True : Reexecute_False;}

  // Miscellaneous utility functions
  JVMState* clone_deep(Compile* C) const;    // recursively clones caller chain
  JVMState* clone_shallow(Compile* C) const; // retains uncloned caller
+ void      set_map_deep(SafePointNode *map);// reset map for all callers

  #ifndef PRODUCT
  void      format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const;
  void      dump_spec(outputStream *st) const;
  void      dump_on(outputStream* st) const;
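
The new set_map_deep complements clone_deep: once a macro node's JVMState chain has been deep-cloned, every frame in the cloned chain must name the cloned node as its map, otherwise the debug information still points at the original SafePointNode. The header only declares the method; the sketch below is an assumption about its shape, based on the "reset map for all callers" comment, not the body from callnode.cpp:

    // Hedged sketch: walk this JVMState and every caller JVMState and make
    // each of them refer to 'map' as its SafePointNode.
    void JVMState::set_map_deep(SafePointNode* map) {
      for (JVMState* p = this; p != NULL; p = p->caller()) {
        p->set_map(map);
      }
    }
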
*** 552,565 ****
  // Are we guaranteed that this node is a safepoint? Not true for leaf calls and
  // for some macro nodes whose expansion does not have a safepoint on the fast path.
  virtual bool        guaranteed_safepoint()  { return true; }
  // For macro nodes, the JVMState gets modified during expansion, so when cloning
  // the node the JVMState must be cloned.
! virtual void        clone_jvms() { }   // default is not to clone

  // Returns true if the call may modify n
! virtual bool        may_modify(const TypePtr *addr_t, PhaseTransform *phase);
  // Does this node have a use of n other than in debug information?
  bool                has_non_debug_use(Node *n);
  // Returns the unique CheckCastPP of a call
  // or result projection is there are several CheckCastPP
  // or returns NULL if there is no one.
--- 553,566 ----
  // Are we guaranteed that this node is a safepoint? Not true for leaf calls and
  // for some macro nodes whose expansion does not have a safepoint on the fast path.
  virtual bool        guaranteed_safepoint()  { return true; }
  // For macro nodes, the JVMState gets modified during expansion, so when cloning
  // the node the JVMState must be cloned.
! virtual void        clone_jvms(Compile* C) { }   // default is not to clone

  // Returns true if the call may modify n
! virtual bool        may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase);
  // Does this node have a use of n other than in debug information?
  bool                has_non_debug_use(Node *n);
  // Returns the unique CheckCastPP of a call
  // or result projection is there are several CheckCastPP
  // or returns NULL if there is no one.
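
Two CallNode interface changes ripple through the rest of this file: clone_jvms now receives the active Compile* instead of each override calling Compile::current(), and may_modify is narrowed from TypePtr to TypeOopPtr, since only oop memory slices can alias a call's heap effects. As an illustration of the new clone_jvms contract (the caller shown is assumed, not code from this change), a pass that duplicates a call node threads its Compile object through:

    // Hedged sketch: duplicating a call node inside some optimization pass.
    // 'C' is the Compile* the pass already has in hand.
    CallNode* copy = call->clone()->as_Call();  // copy the node itself
    copy->clone_jvms(C);                        // give the copy its own JVMState
                                                // chain (a no-op for calls that
                                                // do not override clone_jvms)
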
*** 628,654 ****
  // routines); generates static stub.
  class CallStaticJavaNode : public CallJavaNode {
    virtual uint cmp( const Node &n ) const;
    virtual uint size_of() const; // Size is bigger
  public:
!   CallStaticJavaNode(const TypeFunc* tf, address addr, ciMethod* method, int bci)
      : CallJavaNode(tf, addr, method, bci), _name(NULL) {
      init_class_id(Class_CallStaticJava);
    }
    CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
                       const TypePtr* adr_type)
      : CallJavaNode(tf, addr, NULL, bci), _name(name) {
      init_class_id(Class_CallStaticJava);
      // This node calls a runtime stub, which often has narrow memory effects.
      _adr_type = adr_type;
    }
    const char *_name;      // Runtime wrapper name

    // If this is an uncommon trap, return the request code, else zero.
    int uncommon_trap_request() const;
    static int extract_uncommon_trap_request(const Node* call);

    virtual int         Opcode() const;
  #ifndef PRODUCT
    virtual void        dump_spec(outputStream *st) const;
  #endif
  };
--- 629,679 ----
  // routines); generates static stub.
  class CallStaticJavaNode : public CallJavaNode {
    virtual uint cmp( const Node &n ) const;
    virtual uint size_of() const; // Size is bigger
  public:
!   CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
      : CallJavaNode(tf, addr, method, bci), _name(NULL) {
      init_class_id(Class_CallStaticJava);
+     if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
+       init_flags(Flag_is_macro);
+       C->add_macro_node(this);
+     }
+     _is_scalar_replaceable = false;
+     _is_non_escaping = false;
    }
    CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
                       const TypePtr* adr_type)
      : CallJavaNode(tf, addr, NULL, bci), _name(name) {
      init_class_id(Class_CallStaticJava);
      // This node calls a runtime stub, which often has narrow memory effects.
      _adr_type = adr_type;
+     _is_scalar_replaceable = false;
+     _is_non_escaping = false;
    }
    const char *_name;      // Runtime wrapper name

+   // Result of Escape Analysis
+   bool _is_scalar_replaceable;
+   bool _is_non_escaping;
+
    // If this is an uncommon trap, return the request code, else zero.
    int uncommon_trap_request() const;
    static int extract_uncommon_trap_request(const Node* call);

+   bool is_boxing_method() const {
+     return is_macro() && (method() != NULL) && method()->is_boxing_method();
+   }
+   // Later inlining modifies the JVMState, so we need to clone it
+   // when the call node is cloned (because it is macro node).
+   virtual void  clone_jvms(Compile* C) {
+     if ((jvms() != NULL) && is_boxing_method()) {
+       set_jvms(jvms()->clone_deep(C));
+       jvms()->set_map_deep(this);
+     }
+   }
+
    virtual int         Opcode() const;
  #ifndef PRODUCT
    virtual void        dump_spec(outputStream *st) const;
  #endif
  };
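
With boxing elimination enabled, a call to a boxing method such as Integer.valueOf is registered as a macro node at construction time, so escape analysis can record its verdict in _is_scalar_replaceable and _is_non_escaping, and macro expansion can later eliminate the call or turn it back into a plain invocation. A hedged sketch of the kind of scan a later phase might run over the macro list (the loop is illustrative, not code from this change):

    // Hedged sketch: find boxing calls whose boxed value never escapes.
    for (int i = 0; i < C->macro_count(); i++) {
      Node* n = C->macro_node(i);
      if (n->is_CallStaticJava() &&
          n->as_CallStaticJava()->is_boxing_method() &&
          n->as_CallStaticJava()->_is_non_escaping) {
        // candidate: the boxed object can be scalar replaced and the
        // call removed by the boxing-elimination pass
      }
    }
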
*** 746,761 ****
    InitialTest,                  // slow-path test (may be constant)
    ALength,                      // array length (or TOP if none)
    ParmLimit
  };

! static const TypeFunc* alloc_type() {
    const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
    fields[AllocSize]   = TypeInt::POS;
    fields[KlassNode]   = TypeInstPtr::NOTNULL;
    fields[InitialTest] = TypeInt::BOOL;
!   fields[ALength]     = TypeInt::INT;  // length (can be a bad length)
    const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);

    // create result type (range)
    fields = TypeTuple::fields(1);
--- 771,786 ----
    InitialTest,                  // slow-path test (may be constant)
    ALength,                      // array length (or TOP if none)
    ParmLimit
  };

! static const TypeFunc* alloc_type(const Type* t) {
    const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
    fields[AllocSize]   = TypeInt::POS;
    fields[KlassNode]   = TypeInstPtr::NOTNULL;
    fields[InitialTest] = TypeInt::BOOL;
!   fields[ALength]     = t;  // length (can be a bad length)
    const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);

    // create result type (range)
    fields = TypeTuple::fields(1);
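
alloc_type used to hard-code TypeInt::INT for the ALength slot; it is now parameterized so that instance and array allocations can build their TypeFunc from the same helper. A hedged usage sketch follows (the real call sites live elsewhere, e.g. in graphKit.cpp; passing Type::TOP for a non-array allocation is an assumption that matches the "array length (or TOP if none)" comment above):

    // Hedged sketch of the two expected uses of the parameterized helper.
    const TypeFunc* obj_tf   = AllocateNode::alloc_type(Type::TOP);         // no length
    const TypeFunc* array_tf = AllocateArrayNode::alloc_type(TypeInt::INT); // real length
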
*** 764,788 ****
    const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);

    return TypeFunc::make(domain, range);
  }

! bool _is_scalar_replaceable;  // Result of Escape Analysis

  virtual uint size_of() const; // Size is bigger
  AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
               Node *size, Node *klass_node, Node *initial_test);

  // Expansion modifies the JVMState, so we need to clone it
! virtual void  clone_jvms() {
!   set_jvms(jvms()->clone_deep(Compile::current()));
  }

  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegP; }
  virtual bool        guaranteed_safepoint()  { return false; }

  // allocations do not modify their arguments
! virtual bool        may_modify(const TypePtr *addr_t, PhaseTransform *phase) { return false;}

  // Pattern-match a possible usage of AllocateNode.
  // Return null if no allocation is recognized.
  // The operand is the pointer produced by the (possible) allocation.
  // It must be a projection of the Allocate or its subsequent CastPP.
--- 789,818 ----
    const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);

    return TypeFunc::make(domain, range);
  }

! // Result of Escape Analysis
! bool _is_scalar_replaceable;
! bool _is_non_escaping;

  virtual uint size_of() const; // Size is bigger
  AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
               Node *size, Node *klass_node, Node *initial_test);

  // Expansion modifies the JVMState, so we need to clone it
! virtual void  clone_jvms(Compile* C) {
!   if (jvms() != NULL) {
!     set_jvms(jvms()->clone_deep(C));
!     jvms()->set_map_deep(this);
!   }
  }

  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegP; }
  virtual bool        guaranteed_safepoint()  { return false; }

  // allocations do not modify their arguments
! virtual bool        may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}

  // Pattern-match a possible usage of AllocateNode.
  // Return null if no allocation is recognized.
  // The operand is the pointer produced by the (possible) allocation.
  // It must be a projection of the Allocate or its subsequent CastPP.
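
may_modify now takes the oop-typed memory slice directly, and for allocations it still returns false unconditionally: an AllocateNode produces a fresh object and never writes a pre-existing oop field that a nearby load could depend on. A hedged sketch of a memory-disambiguation query using the new signature ('call', 'load_adr_type' and 'phase' are assumed names, not taken from this change):

    // Hedged sketch: deciding whether a load can bypass a call.
    const TypeOopPtr* t_oop = load_adr_type->isa_oopptr();
    if (t_oop != NULL && !call->may_modify(t_oop, phase)) {
      // the call leaves this memory slice untouched, so the load may be
      // hoisted past it (trivially true when the call is an AllocateNode)
    }
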
*** 813,826 ****
  // Walks out edges to find it...
  // (Note: Both InitializeNode::allocation and AllocateNode::initialization
  // are defined in graphKit.cpp, which sets up the bidirectional relation.)
  InitializeNode* initialization();

- // Return the corresponding storestore barrier (or null if none).
- // Walks out edges to find it...
- MemBarStoreStoreNode* storestore();
-
  // Convenience for initialization->maybe_set_complete(phase)
  bool maybe_set_complete(PhaseGVN* phase);
  };

  //------------------------------AllocateArray---------------------------------
--- 843,852 ----
*** 838,848 ****
  {
    init_class_id(Class_AllocateArray);
    set_req(AllocateNode::ALength, count_val);
  }
  virtual int Opcode() const;
- virtual uint size_of() const; // Size is bigger
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Dig the length operand out of a array allocation site.
  Node* Ideal_length() {
    return in(AllocateNode::ALength);
--- 864,873 ----
*** 916,926 ****
  void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
  void set_coarsened()   { _kind = Coarsened; set_eliminated_lock_counter(); }
  void set_nested()      { _kind = Nested; set_eliminated_lock_counter(); }

  // locking does not modify its arguments
! virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase){ return false;}

  #ifndef PRODUCT
  void create_lock_counter(JVMState* s);
  NamedCounter* counter() const { return _counter; }
  #endif
--- 941,951 ----
  void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
  void set_coarsened()   { _kind = Coarsened; set_eliminated_lock_counter(); }
  void set_nested()      { _kind = Nested; set_eliminated_lock_counter(); }

  // locking does not modify its arguments
! virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase){ return false;}

  #ifndef PRODUCT
  void create_lock_counter(JVMState* s);
  NamedCounter* counter() const { return _counter; }
  #endif
*** 963,974 ****
  }
  virtual bool        guaranteed_safepoint()  { return false; }

  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Expansion modifies the JVMState, so we need to clone it
! virtual void  clone_jvms() {
!   set_jvms(jvms()->clone_deep(Compile::current()));
  }

  bool is_nested_lock_region(); // Is this Lock nested?
  };
--- 988,1002 ----
  }
  virtual bool        guaranteed_safepoint()  { return false; }

  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Expansion modifies the JVMState, so we need to clone it
! virtual void  clone_jvms(Compile* C) {
!   if (jvms() != NULL) {
!     set_jvms(jvms()->clone_deep(C));
!     jvms()->set_map_deep(this);
!   }
  }

  bool is_nested_lock_region(); // Is this Lock nested?
  };