// NOTE(review): tail of a helper whose beginning is outside this view.
// It marks 'src' as an arraycopy source, then wires the destination object
// into the arraycopy PointsTo node and flags it as an arraycopy destination.
2047 src->set_arraycopy_src();
2048 // Add edge from destination object to arraycopy node.
// (void) cast deliberately discards add_edge's result; only the side
// effect of creating the edge is needed here.
2049 (void)add_edge(dst, ptadr);
2050 dst->set_arraycopy_dst();
2051 }
2052
// Decide whether the memory location addressed by AddP node 'n' at the given
// 'offset' holds an oop (object pointer) that escape analysis must track.
// Sets *unsafe to true when an oop-sized access through an instance pointer
// has no resolved ciField (i.e. looks like an Unsafe access).
// NOTE(review): the function is truncated in this view (original lines
// 2089-2976 are not shown); the final classification and return are elsewhere.
2053 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
2054 const Type* adr_type = n->as_AddP()->bottom_type();
// Assume a non-oop basic type until one of the cases below proves otherwise.
2055 BasicType bt = T_INT;
2056 if (offset == Type::OffsetBot) {
2057 // Check only oop fields.
// Unknown address types, arrays with unresolved klass, and object arrays
// are treated as potential oop element accesses; typed primitive arrays
// fall through and stay non-oop.
2058 if (!adr_type->isa_aryptr() ||
2059 (adr_type->isa_aryptr()->klass() == NULL) ||
2060 adr_type->isa_aryptr()->klass()->is_obj_array_klass()) {
2061 // OffsetBot is used to reference array's element. Ignore first AddP.
2062 if (find_second_addp(n, n->in(AddPNode::Base)) == NULL) {
2063 bt = T_OBJECT;
2064 }
2065 }
// Loads of the klass word are metadata accesses, never oop fields.
2066 } else if (offset != oopDesc::klass_offset_in_bytes()) {
2067 if (adr_type->isa_instptr()) {
// Instance field at a known offset: ask the alias type for the ciField
// and use its declared layout type when resolved.
2068 ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
2069 if (field != NULL) {
2070 bt = field->layout_type();
2071 } else {
2072 // Check for unsafe oop field access
// A pointer-sized (P/N) load or store user indicates an oop access
// even though no field metadata is available.
2073 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN)) {
2074 bt = T_OBJECT;
2075 (*unsafe) = true;
2076 }
2077 }
2078 } else if (adr_type->isa_aryptr()) {
2079 if (offset == arrayOopDesc::length_offset_in_bytes()) {
2080 // Ignore array length load.
2081 } else if (find_second_addp(n, n->in(AddPNode::Base)) != NULL) {
2082 // Ignore first AddP.
2083 } else {
// Element access at a constant offset: classify by the array's
// element basic type.
2084 const Type* elemtype = adr_type->isa_aryptr()->elem();
2085 bt = elemtype->array_element_basic_type();
2086 }
2087 } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
2088 // Allocation initialization, ThreadLocal field access, unsafe access
2977 }
// NOTE(review): mid-function fragment (preceding context not shown).
// It finalizes per-allocation bookkeeping: records escape state for the
// CheckCastPP result 'n', gives it a unique instance type, and seeds the
// worklist with the allocation's field-address (AddP) users.
2978 if (alloc->is_CallStaticJava()) {
2979 // Set the scalar_replaceable flag for boxing method
2980 // so it could be eliminated.
2981 alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
2982 }
2983 set_escape_state(ptnode_adr(n->_idx), es); // CheckCastPP escape state
2984 // in order for an object to be scalar-replaceable, it must be:
2985 // - a direct allocation (not a call returning an object)
2986 // - non-escaping
2987 // - eligible to be a unique type
2988 // - not determined to be ineligible by escape analysis
// Record the allocation <-> result mapping in both directions.
2989 set_map(alloc, n)
2990 set_map(n, alloc);
// Narrow the result's type to a unique instance id 'ni' and re-insert the
// node in igvn's hash table so the type change is visible to GVN.
2991 const TypeOopPtr* tinst = t->cast_to_instance_id(ni);
2992 igvn->hash_delete(n);
2993 igvn->set_type(n, tinst);
2994 n->raise_bottom_type(tinst);
2995 igvn->hash_insert(n);
2996 record_for_optimizer(n);
2997 if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
2998
2999 // First, put on the worklist all Field edges from Connection Graph
3000 // which is more accurate than putting immediate users from Ideal Graph.
3001 for (EdgeIterator e(ptn); e.has_next(); e.next()) {
3002 PointsToNode* tgt = e.get();
// Arraycopy edges are not Field edges; skip them here.
3003 if (tgt->is_Arraycopy()) {
3004 continue;
3005 }
3006 Node* use = tgt->ideal_node();
3007 assert(tgt->is_Field() && use->is_AddP(),
3008 "only AddP nodes are Field edges in CG");
3009 if (use->outcnt() > 0) { // Don't process dead nodes
// Array element addressing creates a second AddP; queue it too.
3010 Node* addp2 = find_second_addp(use, use->in(AddPNode::Base));
3011 if (addp2 != NULL) {
3012 assert(alloc->is_AllocateArray(),"array allocation was expected");
3013 alloc_worklist.append_if_missing(addp2);
3014 }
3015 alloc_worklist.append_if_missing(use);
3016 }
3017 }
|
// NOTE(review): tail of a helper whose beginning is outside this view
// (second copy of the excerpt above). Marks 'src' as an arraycopy source,
// then wires the destination object into the arraycopy PointsTo node.
2047 src->set_arraycopy_src();
2048 // Add edge from destination object to arraycopy node.
// (void) cast deliberately discards add_edge's result; only the side
// effect of creating the edge is needed here.
2049 (void)add_edge(dst, ptadr);
2050 dst->set_arraycopy_dst();
2051 }
2052
// Decide whether the memory location addressed by AddP node 'n' at the given
// 'offset' holds an oop that escape analysis must track. Sets *unsafe when
// an oop-sized access through an instance/value-type pointer has no resolved
// ciField. This copy additionally recognizes value-type pointers
// (isa_valuetypeptr) — presumably the Valhalla variant; verify against repo.
// NOTE(review): the function is truncated in this view (original lines
// 2089-2976 are not shown); the final classification and return are elsewhere.
2053 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
2054 const Type* adr_type = n->as_AddP()->bottom_type();
// Assume a non-oop basic type until one of the cases below proves otherwise.
2055 BasicType bt = T_INT;
2056 if (offset == Type::OffsetBot) {
2057 // Check only oop fields.
// Unknown address types, arrays with unresolved klass, and object arrays
// are treated as potential oop element accesses.
2058 if (!adr_type->isa_aryptr() ||
2059 (adr_type->isa_aryptr()->klass() == NULL) ||
2060 adr_type->isa_aryptr()->klass()->is_obj_array_klass()) {
2061 // OffsetBot is used to reference array's element. Ignore first AddP.
2062 if (find_second_addp(n, n->in(AddPNode::Base)) == NULL) {
2063 bt = T_OBJECT;
2064 }
2065 }
// Loads of the klass word are metadata accesses, never oop fields.
2066 } else if (offset != oopDesc::klass_offset_in_bytes()) {
// Instance OR value-type field: resolve the ciField via the generic
// is_ptr() alias type (covers both pointer flavors).
2067 if (adr_type->isa_instptr() || adr_type->isa_valuetypeptr()) {
2068 ciField* field = _compile->alias_type(adr_type->is_ptr())->field();
2069 if (field != NULL) {
2070 bt = field->layout_type();
2071 } else {
2072 // Check for unsafe oop field access
// A pointer-sized (P/N) load or store user indicates an oop access
// even though no field metadata is available.
2073 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN)) {
2074 bt = T_OBJECT;
2075 (*unsafe) = true;
2076 }
2077 }
2078 } else if (adr_type->isa_aryptr()) {
2079 if (offset == arrayOopDesc::length_offset_in_bytes()) {
2080 // Ignore array length load.
2081 } else if (find_second_addp(n, n->in(AddPNode::Base)) != NULL) {
2082 // Ignore first AddP.
2083 } else {
// Element access at a constant offset: classify by the array's
// element basic type.
2084 const Type* elemtype = adr_type->isa_aryptr()->elem();
2085 bt = elemtype->array_element_basic_type();
2086 }
2087 } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
2088 // Allocation initialization, ThreadLocal field access, unsafe access
2977 }
// NOTE(review): mid-function fragment (preceding context not shown; second
// copy of the excerpt above). Finalizes per-allocation bookkeeping: records
// escape state for the CheckCastPP result 'n', gives it a unique instance
// type, and seeds the worklist with the allocation's AddP users. This copy
// also admits value-type allocations (isa_valuetypeptr) at line 2997.
2978 if (alloc->is_CallStaticJava()) {
2979 // Set the scalar_replaceable flag for boxing method
2980 // so it could be eliminated.
2981 alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
2982 }
2983 set_escape_state(ptnode_adr(n->_idx), es); // CheckCastPP escape state
2984 // in order for an object to be scalar-replaceable, it must be:
2985 // - a direct allocation (not a call returning an object)
2986 // - non-escaping
2987 // - eligible to be a unique type
2988 // - not determined to be ineligible by escape analysis
// Record the allocation <-> result mapping in both directions.
2989 set_map(alloc, n);
2990 set_map(n, alloc);
// Narrow the result's type to a unique instance id 'ni' and re-insert the
// node in igvn's hash table so the type change is visible to GVN.
2991 const TypeOopPtr* tinst = t->cast_to_instance_id(ni);
2992 igvn->hash_delete(n);
2993 igvn->set_type(n, tinst);
2994 n->raise_bottom_type(tinst);
2995 igvn->hash_insert(n);
2996 record_for_optimizer(n);
2997 if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr() || t->isa_valuetypeptr())) {
2998
2999 // First, put on the worklist all Field edges from Connection Graph
3000 // which is more accurate than putting immediate users from Ideal Graph.
3001 for (EdgeIterator e(ptn); e.has_next(); e.next()) {
3002 PointsToNode* tgt = e.get();
// Arraycopy edges are not Field edges; skip them here.
3003 if (tgt->is_Arraycopy()) {
3004 continue;
3005 }
3006 Node* use = tgt->ideal_node();
3007 assert(tgt->is_Field() && use->is_AddP(),
3008 "only AddP nodes are Field edges in CG");
3009 if (use->outcnt() > 0) { // Don't process dead nodes
// Array element addressing creates a second AddP; queue it too.
3010 Node* addp2 = find_second_addp(use, use->in(AddPNode::Base));
3011 if (addp2 != NULL) {
3012 assert(alloc->is_AllocateArray(),"array allocation was expected");
3013 alloc_worklist.append_if_missing(addp2);
3014 }
3015 alloc_worklist.append_if_missing(use);
3016 }
3017 }
|