867 if (intr == vmIntrinsics::_floatValue || intr == vmIntrinsics::_doubleValue) {
868 // It does not escape if the object is always allocated.
869 es = PointsToNode::NoEscape;
870 } else {
871 // It escapes globally if the object could be loaded from a cache.
872 es = PointsToNode::GlobalEscape;
873 }
874 add_java_object(call, es);
875 } else {
876 BCEscapeAnalyzer* call_analyzer = meth->get_bcea();
877 call_analyzer->copy_dependencies(_compile->dependencies());
878 if (call_analyzer->is_return_allocated()) {
879 // Returns a newly allocated, unescaped object; simply
880 // update the dependency information.
881 // Mark it as NoEscape so that objects referenced by
882 // its fields will be marked as at least NoEscape.
883 add_java_object(call, PointsToNode::NoEscape);
884 ptnode_adr(call_idx)->set_scalar_replaceable(false);
885 } else {
886 // Determine whether any arguments are returned.
887 const TypeTuple* d = call->tf()->domain_sig();
888 bool ret_arg = false;
889 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
890 if (d->field_at(i)->isa_ptr() != NULL &&
891 call_analyzer->is_arg_returned(i - TypeFunc::Parms)) {
892 ret_arg = true;
893 break;
894 }
895 }
896 if (ret_arg) {
897 add_local_var(call, PointsToNode::ArgEscape);
898 } else {
899 // Returns an unknown object.
900 map_ideal_node(call, phantom_obj);
901 }
902 }
903 }
904 } else {
905 // Another type of call; assume the worst case:
906 // the returned value is unknown and globally escapes.
907 assert(call->Opcode() == Op_CallDynamicJava, "add failed case check");
1042 }
1043 }
1044 }
1045 break;
1046 }
1047 case Op_CallStaticJava: {
1048 // For a static call, we know exactly what method is being called.
1049 // Use the bytecode estimator to record the call's escape effects.
1050 #ifdef ASSERT
1051 const char* name = call->as_CallStaticJava()->_name;
1052 assert((name == NULL || strcmp(name, "uncommon_trap") != 0), "normal calls only");
1053 #endif
1054 ciMethod* meth = call->as_CallJava()->method();
1055 if ((meth != NULL) && meth->is_boxing_method()) {
1056 break; // Boxing methods do not modify any oops.
1057 }
1058 BCEscapeAnalyzer* call_analyzer = (meth != NULL) ? meth->get_bcea() : NULL;
1059 // Fall through to the default case if this is not a Java method or there is no analyzer information.
1060 if (call_analyzer != NULL) {
1061 PointsToNode* call_ptn = ptnode_adr(call->_idx);
1062 const TypeTuple* d = call->tf()->domain_sig();
1063 int extra = 0;
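// Note: domain_sig() has one entry per signature argument, but a value type argument
// may occupy several call inputs when passed as its scalarized fields; 'extra' counts
// the additional inputs so that call->in(i + extra) stays aligned with the signature.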
1064 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
1065 const Type* at = d->field_at(i);
1066 if (at->isa_valuetypeptr()) {
1067 extra += at->is_valuetypeptr()->value_type()->value_klass()->field_count() - 1;
1068 continue;
1069 }
1070 int k = i - TypeFunc::Parms;
1071 Node* arg = call->in(i + extra);
1072 PointsToNode* arg_ptn = ptnode_adr(arg->_idx);
1073 if (at->isa_ptr() != NULL &&
1074 call_analyzer->is_arg_returned(k)) {
1075 // The call returns one of its arguments.
1076 if (call_ptn != NULL) { // Is call's result used?
1077 assert(call_ptn->is_LocalVar(), "node should be registered");
1078 assert(arg_ptn != NULL, "node should be registered");
1079 add_edge(call_ptn, arg_ptn);
1080 }
1081 }
1082 if (at->isa_oopptr() != NULL &&
1083 arg_ptn->escape_state() < PointsToNode::GlobalEscape) {
1084 if (!call_analyzer->is_arg_stack(k)) {
1085 // The argument globally escapes.
1086 set_escape_state(arg_ptn, PointsToNode::GlobalEscape);
1087 } else {
1088 set_escape_state(arg_ptn, PointsToNode::ArgEscape);
1089 if (!call_analyzer->is_arg_local(k)) {
1090 // The argument itself doesn't escape, but any fields might
1091 set_fields_escape_state(arg_ptn, PointsToNode::GlobalEscape);
1092 }
1093 }
1094 }
1095 }
1096 if (call_ptn != NULL && call_ptn->is_LocalVar()) {
1097 // The call returns one of its arguments.
1098 assert(call_ptn->edge_count() > 0, "sanity");
1099 if (!call_analyzer->is_return_local()) {
1100 // The call may also return an unknown object.
1101 add_edge(call_ptn, phantom_obj);
1102 }
1103 }
1104 break;
1105 }
1106 }
1107 default: {
1108 // Fall through here if this is not a Java method, there is no analyzer
1109 // information, or this is some other type of call; assume the worst case:
1110 // all arguments globally escape.
1111 const TypeTuple* d = call->tf()->domain_sig();
1112 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
1113 const Type* at = d->field_at(i);
1114 if (at->isa_oopptr() != NULL) {
1115 Node* arg = call->in(i);
1116 if (arg->is_AddP()) {
1117 arg = get_addp_base(arg);
1118 }
1119 assert(ptnode_adr(arg->_idx) != NULL, "should be defined already");
1120 set_escape_state(ptnode_adr(arg->_idx), PointsToNode::GlobalEscape);
1121 }
1122 }
1123 }
1124 }
1125 }
1126
1127
1128 // Finish Graph construction.
1129 bool ConnectionGraph::complete_connection_graph(
1130 GrowableArray<PointsToNode*>& ptnodes_worklist,
1131 GrowableArray<JavaObjectNode*>& non_escaped_worklist,
2037 assert(!src->is_Field() && !dst->is_Field(), "only for JavaObject and LocalVar");
2038 assert((src != null_obj) && (dst != null_obj), "not for ConP NULL");
2039 PointsToNode* ptadr = _nodes.at(n->_idx);
2040 if (ptadr != NULL) {
2041 assert(ptadr->is_Arraycopy() && ptadr->ideal_node() == n, "sanity");
2042 return;
2043 }
2044 Compile* C = _compile;
2045 ptadr = new (C->comp_arena()) ArraycopyNode(this, n, es);
2046 _nodes.at_put(n->_idx, ptadr);
2047 // Add edge from arraycopy node to source object.
2048 (void)add_edge(ptadr, src);
2049 src->set_arraycopy_src();
2050 // Add edge from destination object to arraycopy node.
2051 (void)add_edge(dst, ptadr);
2052 dst->set_arraycopy_dst();
2053 }
2054
2055 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
2056 const Type* adr_type = n->as_AddP()->bottom_type();
2057 BasicType bt = T_INT;
2058 if (offset == Type::OffsetBot) {
2059 // Check only oop fields.
2060 if (!adr_type->isa_aryptr() ||
2061 (adr_type->isa_aryptr()->klass() == NULL) ||
2062 adr_type->isa_aryptr()->klass()->is_obj_array_klass()) {
2063 // OffsetBot is used to reference an array's element. Ignore the first AddP.
2064 if (find_second_addp(n, n->in(AddPNode::Base)) == NULL) {
2065 bt = T_OBJECT;
2066 }
2067 }
2068 } else if (offset != oopDesc::klass_offset_in_bytes()) {
2069 if (adr_type->isa_instptr() || adr_type->isa_valuetypeptr()) {
2070 ciField* field = _compile->alias_type(adr_type->is_ptr())->field();
2071 if (field != NULL) {
2072 bt = field->layout_type();
2073 } else {
2074 // Check for unsafe oop field access
2075 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
2076 n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
2077 n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
2078 bt = T_OBJECT;
2079 (*unsafe) = true;
2080 }
2081 }
2082 } else if (adr_type->isa_aryptr()) {
2083 if (offset == arrayOopDesc::length_offset_in_bytes()) {
2084 // Ignore array length load.
2085 } else if (find_second_addp(n, n->in(AddPNode::Base)) != NULL) {
2086 // Ignore first AddP.
2087 } else {
2088 const Type* elemtype = adr_type->isa_aryptr()->elem();
2089 bt = elemtype->array_element_basic_type();
2090 }
2091 } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
2092 // Allocation initialization, ThreadLocal field access, unsafe access
2093 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
2094 n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
2095 n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
2096 bt = T_OBJECT;
2097 }
2098 }
2099 }
2100 return (bt == T_OBJECT || bt == T_NARROWOOP || bt == T_ARRAY);
2101 }
2102
2103 // Returns the unique pointed-to Java object, or NULL.
2104 JavaObjectNode* ConnectionGraph::unique_java_object(Node *n) {
2105 assert(!_collecting, "should not be called during graph construction");
2106 // If the node was created after the escape computation, we can't answer.
2107 uint idx = n->_idx;
2108 if (idx >= nodes_size()) {
2109 return NULL;
2110 }
2394 //
2395 // It can happen on a subclass's branch (from the type profiling
2396 // inlining) which was not eliminated during parsing because the exactness
2397 // of the allocation type was not propagated to the subclass type check.
2398 //
2399 // Or the type 't' may not be related to 'base_t' at all.
2400 // It can happen when the CHA type differs from the MDO type on a dead path
2401 // (for example, from an instanceof check) which is not collapsed during parsing.
2402 //
2403 // Do nothing for such an AddP node and don't process its users since
2404 // this code branch will go away.
2405 //
2406 if (!t->is_known_instance() &&
2407 !base_t->klass()->is_subtype_of(t->klass())) {
2408 return false; // bail out
2409 }
2410 const TypePtr* tinst = base_t->add_offset(t->offset());
2411 if (tinst->isa_aryptr() && t->isa_aryptr()) {
2412 // In the case of a flattened value type array, each field has its
2413 // own slice so we need to keep track of the field being accessed.
2414 tinst = tinst->is_aryptr()->with_field_offset(t->is_aryptr()->field_offset());
2415 }
2416
2417 // Do NOT remove the next line: it ensures a new alias index is allocated
2418 // for the instance type. Note: the C++ compiler will not remove the call
2419 // since it has a side effect.
2420 int alias_idx = _compile->get_alias_index(tinst);
2421 igvn->set_type(addp, tinst);
2422 // record the allocation in the node map
2423 set_map(addp, get_map(base->_idx));
2424 // Set addp's Base and Address to 'base'.
2425 Node *abase = addp->in(AddPNode::Base);
2426 Node *adr = addp->in(AddPNode::Address);
2427 if (adr->is_Proj() && adr->in(0)->is_Allocate() &&
2428 adr->in(0)->_idx == (uint)inst_id) {
2429 // Skip AddP cases #3 and #5.
2430 } else {
2431 assert(!abase->is_top(), "sanity"); // AddP case #3
2432 if (abase != base) {
2433 igvn->hash_delete(addp);
2434 addp->set_req(AddPNode::Base, base);
|
867 if (intr == vmIntrinsics::_floatValue || intr == vmIntrinsics::_doubleValue) {
868 // It does not escape if the object is always allocated.
869 es = PointsToNode::NoEscape;
870 } else {
871 // It escapes globally if the object could be loaded from a cache.
872 es = PointsToNode::GlobalEscape;
873 }
874 add_java_object(call, es);
875 } else {
876 BCEscapeAnalyzer* call_analyzer = meth->get_bcea();
877 call_analyzer->copy_dependencies(_compile->dependencies());
878 if (call_analyzer->is_return_allocated()) {
879 // Returns a newly allocated, unescaped object; simply
880 // update the dependency information.
881 // Mark it as NoEscape so that objects referenced by
882 // its fields will be marked as at least NoEscape.
883 add_java_object(call, PointsToNode::NoEscape);
884 ptnode_adr(call_idx)->set_scalar_replaceable(false);
885 } else {
886 // Determine whether any arguments are returned.
887 const TypeTuple* d = call->tf()->domain_cc();
888 bool ret_arg = false;
889 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
890 if (d->field_at(i)->isa_ptr() != NULL &&
891 call_analyzer->is_arg_returned(i - TypeFunc::Parms)) {
892 ret_arg = true;
893 break;
894 }
895 }
896 if (ret_arg) {
897 add_local_var(call, PointsToNode::ArgEscape);
898 } else {
899 // Returns an unknown object.
900 map_ideal_node(call, phantom_obj);
901 }
902 }
903 }
904 } else {
905 // Another type of call; assume the worst case:
906 // the returned value is unknown and globally escapes.
907 assert(call->Opcode() == Op_CallDynamicJava, "add failed case check");
1042 }
1043 }
1044 }
1045 break;
1046 }
1047 case Op_CallStaticJava: {
1048 // For a static call, we know exactly what method is being called.
1049 // Use the bytecode estimator to record the call's escape effects.
1050 #ifdef ASSERT
1051 const char* name = call->as_CallStaticJava()->_name;
1052 assert((name == NULL || strcmp(name, "uncommon_trap") != 0), "normal calls only");
1053 #endif
1054 ciMethod* meth = call->as_CallJava()->method();
1055 if ((meth != NULL) && meth->is_boxing_method()) {
1056 break; // Boxing methods do not modify any oops.
1057 }
1058 BCEscapeAnalyzer* call_analyzer = (meth != NULL) ? meth->get_bcea() : NULL;
1059 // Fall through to the default case if this is not a Java method or there is no analyzer information.
1060 if (call_analyzer != NULL) {
1061 PointsToNode* call_ptn = ptnode_adr(call->_idx);
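// Note: domain_cc() describes the calling convention; a value type argument may be
// passed as its scalarized fields, so these entries correspond to the call's inputs.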
1062 const TypeTuple* d = call->tf()->domain_cc();
1063 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
1064 const Type* at = d->field_at(i);
1065 int k = i - TypeFunc::Parms;
1066 Node* arg = call->in(i);
1067 PointsToNode* arg_ptn = ptnode_adr(arg->_idx);
1068 if (at->isa_ptr() != NULL &&
1069 call_analyzer->is_arg_returned(k)) {
1070 // The call returns one of its arguments.
1071 if (call_ptn != NULL) { // Is call's result used?
1072 assert(call_ptn->is_LocalVar(), "node should be registered");
1073 assert(arg_ptn != NULL, "node should be registered");
1074 add_edge(call_ptn, arg_ptn);
1075 }
1076 }
1077 if (at->isa_oopptr() != NULL &&
1078 arg_ptn->escape_state() < PointsToNode::GlobalEscape) {
1079 if (!call_analyzer->is_arg_stack(k)) {
1080 // The argument globally escapes.
1081 set_escape_state(arg_ptn, PointsToNode::GlobalEscape);
1082 } else {
1083 set_escape_state(arg_ptn, PointsToNode::ArgEscape);
1084 if (!call_analyzer->is_arg_local(k)) {
1085 // The argument itself doesn't escape, but any fields might
1086 set_fields_escape_state(arg_ptn, PointsToNode::GlobalEscape);
1087 }
1088 }
1089 }
1090 }
1091 if (call_ptn != NULL && call_ptn->is_LocalVar()) {
1092 // The call returns one of its arguments.
1093 assert(call_ptn->edge_count() > 0, "sanity");
1094 if (!call_analyzer->is_return_local()) {
1095 // The call may also return an unknown object.
1096 add_edge(call_ptn, phantom_obj);
1097 }
1098 }
1099 break;
1100 }
1101 }
1102 default: {
1103 // Fall through here if this is not a Java method, there is no analyzer
1104 // information, or this is some other type of call; assume the worst case:
1105 // all arguments globally escape.
1106 const TypeTuple* d = call->tf()->domain_cc();
1107 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
1108 const Type* at = d->field_at(i);
1109 if (at->isa_oopptr() != NULL) {
1110 Node* arg = call->in(i);
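// Escape state is tracked on the base object, so look through an address
// computation (AddP) to its base if present.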
1111 if (arg->is_AddP()) {
1112 arg = get_addp_base(arg);
1113 }
1114 assert(ptnode_adr(arg->_idx) != NULL, "should be defined already");
1115 set_escape_state(ptnode_adr(arg->_idx), PointsToNode::GlobalEscape);
1116 }
1117 }
1118 }
1119 }
1120 }
1121
1122
1123 // Finish Graph construction.
1124 bool ConnectionGraph::complete_connection_graph(
1125 GrowableArray<PointsToNode*>& ptnodes_worklist,
1126 GrowableArray<JavaObjectNode*>& non_escaped_worklist,
2032 assert(!src->is_Field() && !dst->is_Field(), "only for JavaObject and LocalVar");
2033 assert((src != null_obj) && (dst != null_obj), "not for ConP NULL");
2034 PointsToNode* ptadr = _nodes.at(n->_idx);
2035 if (ptadr != NULL) {
2036 assert(ptadr->is_Arraycopy() && ptadr->ideal_node() == n, "sanity");
2037 return;
2038 }
2039 Compile* C = _compile;
2040 ptadr = new (C->comp_arena()) ArraycopyNode(this, n, es);
2041 _nodes.at_put(n->_idx, ptadr);
2042 // Add edge from arraycopy node to source object.
2043 (void)add_edge(ptadr, src);
2044 src->set_arraycopy_src();
2045 // Add edge from destination object to arraycopy node.
2046 (void)add_edge(dst, ptadr);
2047 dst->set_arraycopy_dst();
2048 }
2049
2050 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
2051 const Type* adr_type = n->as_AddP()->bottom_type();
2052 int field_offset = adr_type->isa_aryptr() ? adr_type->isa_aryptr()->field_offset().get() : Type::OffsetBot;
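// For a flattened value type array, the array pointer type also carries the offset
// of the accessed field within the element. Assume a non-oop field by default and
// set 'bt' to an oop type only when the access is known to involve an oop.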
2053 BasicType bt = T_INT;
2054 if (offset == Type::OffsetBot && field_offset == Type::OffsetBot) {
2055 // Check only oop fields.
2056 if (!adr_type->isa_aryptr() ||
2057 (adr_type->isa_aryptr()->klass() == NULL) ||
2058 adr_type->isa_aryptr()->klass()->is_obj_array_klass()) {
2059 // OffsetBot is used to reference an array's element. Ignore the first AddP.
2060 if (find_second_addp(n, n->in(AddPNode::Base)) == NULL) {
2061 bt = T_OBJECT;
2062 }
2063 }
2064 } else if (offset != oopDesc::klass_offset_in_bytes()) {
2065 if (adr_type->isa_instptr() || adr_type->isa_valuetypeptr()) {
2066 ciField* field = _compile->alias_type(adr_type->is_ptr())->field();
2067 if (field != NULL) {
2068 bt = field->layout_type();
2069 } else {
2070 // Check for unsafe oop field access
2071 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
2072 n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
2073 n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
2074 bt = T_OBJECT;
2075 (*unsafe) = true;
2076 }
2077 }
2078 } else if (adr_type->isa_aryptr()) {
2079 if (offset == arrayOopDesc::length_offset_in_bytes()) {
2080 // Ignore array length load.
2081 } else if (find_second_addp(n, n->in(AddPNode::Base)) != NULL) {
2082 // Ignore first AddP.
2083 } else {
2084 const Type* elemtype = adr_type->isa_aryptr()->elem();
2085 if (elemtype->isa_valuetype()) {
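// The element is a flattened value type: map the field offset into the value klass
// layout to find the accessed field and use its declared layout type.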
2086 assert(field_offset != Type::OffsetBot, "invalid field offset");
2087 ciValueKlass* vk = elemtype->is_valuetype()->value_klass();
2088 field_offset += vk->first_field_offset();
2089 bt = vk->get_field_by_offset(field_offset, false)->layout_type();
2090 } else {
2091 bt = elemtype->array_element_basic_type();
2092 }
2093 }
2094 } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
2095 // Allocation initialization, ThreadLocal field access, unsafe access
2096 if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
2097 n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
2098 n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
2099 bt = T_OBJECT;
2100 }
2101 }
2102 }
2103 return (bt == T_OBJECT || bt == T_NARROWOOP || bt == T_ARRAY);
2104 }
2105
2106 // Returns the unique pointed-to Java object, or NULL.
2107 JavaObjectNode* ConnectionGraph::unique_java_object(Node *n) {
2108 assert(!_collecting, "should not be called during graph construction");
2109 // If the node was created after the escape computation, we can't answer.
2110 uint idx = n->_idx;
2111 if (idx >= nodes_size()) {
2112 return NULL;
2113 }
2397 //
2398 // It can happen on a subclass's branch (from the type profiling
2399 // inlining) which was not eliminated during parsing because the exactness
2400 // of the allocation type was not propagated to the subclass type check.
2401 //
2402 // Or the type 't' may not be related to 'base_t' at all.
2403 // It can happen when the CHA type differs from the MDO type on a dead path
2404 // (for example, from an instanceof check) which is not collapsed during parsing.
2405 //
2406 // Do nothing for such an AddP node and don't process its users since
2407 // this code branch will go away.
2408 //
2409 if (!t->is_known_instance() &&
2410 !base_t->klass()->is_subtype_of(t->klass())) {
2411 return false; // bail out
2412 }
2413 const TypePtr* tinst = base_t->add_offset(t->offset());
2414 if (tinst->isa_aryptr() && t->isa_aryptr()) {
2415 // In the case of a flattened value type array, each field has its
2416 // own slice so we need to keep track of the field being accessed.
2417 tinst = tinst->is_aryptr()->with_field_offset(t->is_aryptr()->field_offset().get());
2418 }
2419
2420 // Do NOT remove the next line: it ensures a new alias index is allocated
2421 // for the instance type. Note: the C++ compiler will not remove the call
2422 // since it has a side effect.
2423 int alias_idx = _compile->get_alias_index(tinst);
2424 igvn->set_type(addp, tinst);
2425 // record the allocation in the node map
2426 set_map(addp, get_map(base->_idx));
2427 // Set addp's Base and Address to 'base'.
2428 Node *abase = addp->in(AddPNode::Base);
2429 Node *adr = addp->in(AddPNode::Address);
2430 if (adr->is_Proj() && adr->in(0)->is_Allocate() &&
2431 adr->in(0)->_idx == (uint)inst_id) {
2432 // Skip AddP cases #3 and #5.
2433 } else {
2434 assert(!abase->is_top(), "sanity"); // AddP case #3
2435 if (abase != base) {
2436 igvn->hash_delete(addp);
2437 addp->set_req(AddPNode::Base, base);
|