/* NOTE(review): mangled side-by-side diff of HotSpot C2 memnode.cpp; this physical line is the OLD column, mid-body of LoadNode::Value (orig lines 1754-1791): constant-field folding for exact instance oops, the KlassPtr precision assert, and the start of the exact-klass constant folds (Klass::_super_check_offset). Embedded 17xx numbers are the original file's line numbers, not code. */ 1754 // For oop loads, we expect the _type to be precise. 1755 1756 // Optimize loads from constant fields. 1757 const TypeInstPtr* tinst = tp->is_instptr(); 1758 ciObject* const_oop = tinst->const_oop(); 1759 if (!is_mismatched_access() && off != Type::OffsetBot && const_oop != NULL && const_oop->is_instance()) { 1760 const Type* con_type = Type::make_constant_from_field(const_oop->as_instance(), off, is_unsigned(), memory_type()); 1761 if (con_type != NULL) { 1762 return con_type; 1763 } 1764 } 1765 } else if (tp->base() == Type::KlassPtr) { 1766 assert( off != Type::OffsetBot || 1767 // arrays can be cast to Objects 1768 tp->is_klassptr()->klass()->is_java_lang_Object() || 1769 // also allow array-loading from the primary supertype 1770 // array during subtype checks 1771 Opcode() == Op_LoadKlass, 1772 "Field accesses must be precise" ); 1773 // For klass/static loads, we expect the _type to be precise 1774 } 1775 1776 const TypeKlassPtr *tkls = tp->isa_klassptr(); 1777 if (tkls != NULL && !StressReflectiveCode) { 1778 ciKlass* klass = tkls->klass(); 1779 if (klass->is_loaded() && tkls->klass_is_exact()) { 1780 // We are loading a field from a Klass metaobject whose identity 1781 // is known at compile time (the type is "exact" or "precise"). 1782 // Check for fields we know are maintained as constants by the VM. 1783 if (tkls->offset() == in_bytes(Klass::super_check_offset_offset())) { 1784 // The field is Klass::_super_check_offset. Return its (constant) value. 1785 // (Folds up type checking code.) 1786 assert(Opcode() == Op_LoadI, "must load an int from _super_check_offset"); 1787 return TypeInt::make(klass->super_check_offset()); 1788 } 1789 // Compute index into primary_supers array 1790 juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*); 1791 // Check for overflowing; use unsigned compare to handle the negative case. 
/* NOTE(review): OLD column, LoadNode::Value continued (orig 1792-1824): folds _primary_supers element loads to constants and, for EXACT klasses, folds the Klass::_java_mirror load straight to the constant mirror (TypeInstPtr::make(klass->java_mirror())) — compare the NEW column later in this dump, where that direct fold is gone because the mirror is reached through an extra indirection. */ 1792 if( depth < ciKlass::primary_super_limit() ) { 1793 // The field is an element of Klass::_primary_supers. Return its (constant) value. 1794 // (Folds up type checking code.) 1795 assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers"); 1796 ciKlass *ss = klass->super_of_depth(depth); 1797 return ss ? TypeKlassPtr::make(ss) : TypePtr::NULL_PTR; 1798 } 1799 const Type* aift = load_array_final_field(tkls, klass); 1800 if (aift != NULL) return aift; 1801 if (tkls->offset() == in_bytes(Klass::java_mirror_offset())) { 1802 // The field is Klass::_java_mirror. Return its (constant) value. 1803 // (Folds up the 2nd indirection in anObjConstant.getClass().) 1804 assert(Opcode() == Op_LoadP, "must load an oop from _java_mirror"); 1805 return TypeInstPtr::make(klass->java_mirror()); 1806 } 1807 } 1808 1809 // We can still check if we are loading from the primary_supers array at a 1810 // shallow enough depth. Even though the klass is not exact, entries less 1811 // than or equal to its super depth are correct. 1812 if (klass->is_loaded() ) { 1813 ciType *inner = klass; 1814 while( inner->is_obj_array_klass() ) 1815 inner = inner->as_obj_array_klass()->base_element_type(); 1816 if( inner->is_instance_klass() && 1817 !inner->as_instance_klass()->flags().is_interface() ) { 1818 // Compute index into primary_supers array 1819 juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*); 1820 // Check for overflowing; use unsigned compare to handle the negative case. 1821 if( depth < ciKlass::primary_super_limit() && 1822 depth <= klass->super_depth() ) { // allow self-depth checks to handle self-check case 1823 // The field is an element of Klass::_primary_supers. Return its (constant) value. 1824 // (Folds up type checking code.) 
/* NOTE(review): boundary of two OLD-column fragments: tail of LoadNode::Value's inexact-klass _primary_supers fold (orig 1825-1826), then the dump jumps to orig 2186 — mid-body of the klass-identity simplification (presumably LoadNode::klass_identity_common; the enclosing signature is outside this chunk — confirm against memnode.cpp): klass fetched directly through an AllocateNode, then the k.java_mirror.as_klass => k pattern setup. */ 1825 assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers"); 1826 ciKlass *ss = klass->super_of_depth(depth); 2186 if (base == NULL) return this; 2187 const TypeOopPtr* toop = phase->type(adr)->isa_oopptr(); 2188 if (toop == NULL) return this; 2189 2190 // We can fetch the klass directly through an AllocateNode. 2191 // This works even if the klass is not constant (clone or newArray). 2192 if (offset == oopDesc::klass_offset_in_bytes()) { 2193 Node* allocated_klass = AllocateNode::Ideal_klass(base, phase); 2194 if (allocated_klass != NULL) { 2195 return allocated_klass; 2196 } 2197 } 2198 2199 // Simplify k.java_mirror.as_klass to plain k, where k is a Klass*. 2200 // See inline_native_Class_query for occurrences of these patterns. 2201 // Java Example: x.getClass().isAssignableFrom(y) 2202 // 2203 // This improves reflective code, often making the Class 2204 // mirror go completely dead. (Current exception: Class 2205 // mirrors may appear in debug info, but we could clean them out by 2206 // introducing a new debug info operator for Klass*.java_mirror). 2207 if (toop->isa_instptr() && toop->klass() == phase->C->env()->Class_klass() 2208 && offset == java_lang_Class::klass_offset_in_bytes()) { 2209 // We are loading a special hidden field from a Class mirror, 2210 // the field which points to its Klass or ArrayKlass metaobject. 
/* NOTE(review): OLD column ends on this physical line: the mirror->klass simplification strips ONE load (base->is_Load() with adr2 typed as a klassptr at Klass::java_mirror_offset()), followed by the complete LoadNKlassNode::Value (orig 2231-2238) and the header comment of the next Identity function. The bare '|' near the end is the side-by-side diff column separator; after it the NEW column restarts at orig 1754. */ 2211 if (base->is_Load()) { 2212 Node* adr2 = base->in(MemNode::Address); 2213 const TypeKlassPtr* tkls = phase->type(adr2)->isa_klassptr(); 2214 if (tkls != NULL && !tkls->empty() 2215 && (tkls->klass()->is_instance_klass() || 2216 tkls->klass()->is_array_klass()) 2217 && adr2->is_AddP() 2218 ) { 2219 int mirror_field = in_bytes(Klass::java_mirror_offset()); 2220 if (tkls->offset() == mirror_field) { 2221 return adr2->in(AddPNode::Base); 2222 } 2223 } 2224 } 2225 } 2226 2227 return this; 2228 } 2229 2230 2231 //------------------------------Value------------------------------------------ 2232 const Type* LoadNKlassNode::Value(PhaseGVN* phase) const { 2233 const Type *t = klass_value_common(phase); 2234 if (t == Type::TOP) 2235 return t; 2236 2237 return t->make_narrowklass(); 2238 } 2239 2240 //------------------------------Identity--------------------------------------- 2241 // To clean up reflective code, simplify k.java_mirror.as_klass to narrow k. | 1754 // For oop loads, we expect the _type to be precise. 1755 1756 // Optimize loads from constant fields. 
/* NOTE(review): NEW column, LoadNode::Value (orig 1757-1777): constant-field folding and the KlassPtr assert are unchanged from the OLD column, but a new branch is added for 'tp->base() == Type::RawPtr && adr->is_Load() && off == 0' — with the mirror now an indirect in the Klass (per the in-code comment), the VM emits LoadKlass(LoadP(LoadP(Klass, mirror_offset), zero_offset)); presumably the JDK-8186777 OopHandle change — TODO confirm. */ 1757 const TypeInstPtr* tinst = tp->is_instptr(); 1758 ciObject* const_oop = tinst->const_oop(); 1759 if (!is_mismatched_access() && off != Type::OffsetBot && const_oop != NULL && const_oop->is_instance()) { 1760 const Type* con_type = Type::make_constant_from_field(const_oop->as_instance(), off, is_unsigned(), memory_type()); 1761 if (con_type != NULL) { 1762 return con_type; 1763 } 1764 } 1765 } else if (tp->base() == Type::KlassPtr) { 1766 assert( off != Type::OffsetBot || 1767 // arrays can be cast to Objects 1768 tp->is_klassptr()->klass()->is_java_lang_Object() || 1769 // also allow array-loading from the primary supertype 1770 // array during subtype checks 1771 Opcode() == Op_LoadKlass, 1772 "Field accesses must be precise" ); 1773 // For klass/static loads, we expect the _type to be precise 1774 } else if (tp->base() == Type::RawPtr && adr->is_Load() && off == 0) { 1775 /* With mirrors being an indirect in the Klass* 1776 * the VM is now using two load. LoadKlass(LoadP(LoadP(Klass, mirror_offset), zero_offset)) 1777 * The LoadP from the Klass has a RawPtr type (see LibraryCallKit::load_mirror_from_klass). 
1778 * 1779 * So check the type and klass of the node before the LoadP 1780 */ 1781 Node* adr2 = adr->in(MemNode::Address); 1782 const TypeKlassPtr* tkls = phase->type(adr2)->isa_klassptr(); 1783 if (tkls != NULL && !StressReflectiveCode) { 1784 ciKlass* klass = tkls->klass(); 1785 if (klass->is_loaded() && tkls->klass_is_exact() && tkls->offset() == in_bytes(Klass::java_mirror_offset())) { 1786 assert(adr->Opcode() == Op_LoadP, "must load an oop from _java_mirror"); 1787 assert(Opcode() == Op_LoadP, "must load an oop from _java_mirror"); 1788 return TypeInstPtr::make(klass->java_mirror()); 1789 } 1790 } 1791 } 1792 1793 const TypeKlassPtr *tkls = tp->isa_klassptr(); 1794 if (tkls != NULL && !StressReflectiveCode) { 1795 ciKlass* klass = tkls->klass(); 1796 if (klass->is_loaded() && tkls->klass_is_exact()) { 1797 // We are loading a field from a Klass metaobject whose identity 1798 // is known at compile time (the type is "exact" or "precise"). 1799 // Check for fields we know are maintained as constants by the VM. 1800 if (tkls->offset() == in_bytes(Klass::super_check_offset_offset())) { 1801 // The field is Klass::_super_check_offset. Return its (constant) value. 1802 // (Folds up type checking code.) 1803 assert(Opcode() == Op_LoadI, "must load an int from _super_check_offset"); 1804 return TypeInt::make(klass->super_check_offset()); 1805 } 1806 // Compute index into primary_supers array 1807 juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*); 1808 // Check for overflowing; use unsigned compare to handle the negative case. 1809 if( depth < ciKlass::primary_super_limit() ) { 1810 // The field is an element of Klass::_primary_supers. Return its (constant) value. 1811 // (Folds up type checking code.) 1812 assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers"); 1813 ciKlass *ss = klass->super_of_depth(depth); 1814 return ss ? 
/* NOTE(review): NEW column continued (orig 1815-1837): the inexact-klass _primary_supers fold is textually unchanged from the OLD column; the dump then jumps to orig 2197 — the NEW version of the klass-identity fragment (AllocateNode klass fetch and start of the mirror-simplification comment), enclosing signature again outside this chunk. */ TypeKlassPtr::make(ss) : TypePtr::NULL_PTR; 1815 } 1816 const Type* aift = load_array_final_field(tkls, klass); 1817 if (aift != NULL) return aift; 1818 } 1819 1820 // We can still check if we are loading from the primary_supers array at a 1821 // shallow enough depth. Even though the klass is not exact, entries less 1822 // than or equal to its super depth are correct. 1823 if (klass->is_loaded() ) { 1824 ciType *inner = klass; 1825 while( inner->is_obj_array_klass() ) 1826 inner = inner->as_obj_array_klass()->base_element_type(); 1827 if( inner->is_instance_klass() && 1828 !inner->as_instance_klass()->flags().is_interface() ) { 1829 // Compute index into primary_supers array 1830 juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*); 1831 // Check for overflowing; use unsigned compare to handle the negative case. 1832 if( depth < ciKlass::primary_super_limit() && 1833 depth <= klass->super_depth() ) { // allow self-depth checks to handle self-check case 1834 // The field is an element of Klass::_primary_supers. Return its (constant) value. 1835 // (Folds up type checking code.) 1836 assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers"); 1837 ciKlass *ss = klass->super_of_depth(depth); 2197 if (base == NULL) return this; 2198 const TypeOopPtr* toop = phase->type(adr)->isa_oopptr(); 2199 if (toop == NULL) return this; 2200 2201 // We can fetch the klass directly through an AllocateNode. 2202 // This works even if the klass is not constant (clone or newArray). 2203 if (offset == oopDesc::klass_offset_in_bytes()) { 2204 Node* allocated_klass = AllocateNode::Ideal_klass(base, phase); 2205 if (allocated_klass != NULL) { 2206 return allocated_klass; 2207 } 2208 } 2209 2210 // Simplify k.java_mirror.as_klass to plain k, where k is a Klass*. 2211 // See inline_native_Class_query for occurrences of these patterns. 
/* NOTE(review): NEW column ends here: the mirror simplification now strips TWO loads — the extra base2->is_Load() level is the oophandle indirection (see the inline comment at orig 2223) — before matching the klassptr at Klass::java_mirror_offset(); the doc comment also changed 'Klass*.java_mirror' to 'Klass.java_mirror'. Then the complete (unchanged) LoadNKlassNode::Value (orig 2244-2251), and the trailing '|' closes this side-by-side diff row. */ 2212 // Java Example: x.getClass().isAssignableFrom(y) 2213 // 2214 // This improves reflective code, often making the Class 2215 // mirror go completely dead. (Current exception: Class 2216 // mirrors may appear in debug info, but we could clean them out by 2217 // introducing a new debug info operator for Klass.java_mirror). 2218 2219 if (toop->isa_instptr() && toop->klass() == phase->C->env()->Class_klass() 2220 && offset == java_lang_Class::klass_offset_in_bytes()) { 2221 if (base->is_Load()) { 2222 Node* base2 = base->in(MemNode::Address); 2223 if (base2->is_Load()) { /* direct load of a load which is the oophandle */ 2224 Node* adr2 = base2->in(MemNode::Address); 2225 const TypeKlassPtr* tkls = phase->type(adr2)->isa_klassptr(); 2226 if (tkls != NULL && !tkls->empty() 2227 && (tkls->klass()->is_instance_klass() || 2228 tkls->klass()->is_array_klass()) 2229 && adr2->is_AddP() 2230 ) { 2231 int mirror_field = in_bytes(Klass::java_mirror_offset()); 2232 if (tkls->offset() == mirror_field) { 2233 return adr2->in(AddPNode::Base); 2234 } 2235 } 2236 } 2237 } 2238 } 2239 2240 return this; 2241 } 2242 2243 2244 //------------------------------Value------------------------------------------ 2245 const Type* LoadNKlassNode::Value(PhaseGVN* phase) const { 2246 const Type *t = klass_value_common(phase); 2247 if (t == Type::TOP) 2248 return t; 2249 2250 return t->make_narrowklass(); 2251 } 2252 2253 //------------------------------Identity--------------------------------------- 2254 // To clean up reflective code, simplify k.java_mirror.as_klass to narrow k. |