865 if (intr == vmIntrinsics::_floatValue || intr == vmIntrinsics::_doubleValue) {
866 // It does not escape if the object is always allocated.
867 es = PointsToNode::NoEscape;
868 } else {
869 // It escapes globally if the object could be loaded from the cache.
870 es = PointsToNode::GlobalEscape;
871 }
872 add_java_object(call, es);
873 } else {
874 BCEscapeAnalyzer* call_analyzer = meth->get_bcea();
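// The bytecode-level escape analysis is valid only under the
// dependencies it recorded; copy them so this compilation is
// invalidated if they no longer hold.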
875 call_analyzer->copy_dependencies(_compile->dependencies());
876 if (call_analyzer->is_return_allocated()) {
877 // Returns a newly allocated, unescaped object; simply
878 // update dependency information.
879 // Mark it as NoEscape so that objects referenced by
880 // its fields will be marked as NoEscape at least.
881 add_java_object(call, PointsToNode::NoEscape);
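// The allocation itself happens in the callee, so the object cannot
// be scalar replaced in this compilation.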
882 ptnode_adr(call_idx)->set_scalar_replaceable(false);
883 } else {
884 // Determine whether any arguments are returned.
885 const TypeTuple* d = call->tf()->domain();
886 bool ret_arg = false;
887 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
888 if (d->field_at(i)->isa_ptr() != NULL &&
889 call_analyzer->is_arg_returned(i - TypeFunc::Parms)) {
890 ret_arg = true;
891 break;
892 }
893 }
894 if (ret_arg) {
895 add_local_var(call, PointsToNode::ArgEscape);
896 } else {
897 // Returns unknown object.
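// phantom_obj is the placeholder for an unknown, globally escaping object.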
898 map_ideal_node(call, phantom_obj);
899 }
900 }
901 }
902 } else {
903 // Another type of call, assume the worst case:
904 // the returned value is unknown and globally escapes.
905 assert(call->Opcode() == Op_CallDynamicJava, "add failed case check");
912 switch (call->Opcode()) {
913 #ifdef ASSERT
914 case Op_Allocate:
915 case Op_AllocateArray:
916 case Op_Lock:
917 case Op_Unlock:
918 assert(false, "should be done already");
919 break;
920 #endif
921 case Op_ArrayCopy:
922 case Op_CallLeafNoFP:
923 // Most array copies are ArrayCopy nodes at this point, but there
924 // are still a few direct calls to the copy subroutines (see
925 // PhaseStringOpts::copy_string()).
926 is_arraycopy = (call->Opcode() == Op_ArrayCopy) ||
927 call->as_CallLeaf()->is_call_to_arraycopystub();
928 // fall through
929 case Op_CallLeaf: {
930 // Stub calls: objects do not escape but they are not scalar replaceable.
931 // Adjust escape state for outgoing arguments.
932 const TypeTuple* d = call->tf()->domain();
933 bool src_has_oops = false;
934 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
935 const Type* at = d->field_at(i);
936 Node* arg = call->in(i);
937 if (arg == NULL) {
938 continue;
939 }
940 const Type* aat = _igvn->type(arg);
941 if (arg->is_top() || !at->isa_ptr() || !aat->isa_ptr())
942 continue;
943 if (arg->is_AddP()) {
944 //
945 // The inline_native_clone() case when the arraycopy stub is called
946 // after the allocation before Initialize and CheckCastPP nodes.
947 // Or normal arraycopy for object arrays case.
948 //
949 // Set AddP's base (Allocate) as not scalar replaceable since
950 // pointer to the base (with offset) is passed as argument.
951 //
952 arg = get_addp_base(arg);
1040 }
1041 }
1042 }
1043 break;
1044 }
1045 case Op_CallStaticJava: {
1046 // For a static call, we know exactly what method is being called.
1047 // Use bytecode estimator to record the call's escape effects.
1048 #ifdef ASSERT
1049 const char* name = call->as_CallStaticJava()->_name;
1050 assert((name == NULL || strcmp(name, "uncommon_trap") != 0), "normal calls only");
1051 #endif
1052 ciMethod* meth = call->as_CallJava()->method();
1053 if ((meth != NULL) && meth->is_boxing_method()) {
1054 break; // Boxing methods do not modify any oops.
1055 }
1056 BCEscapeAnalyzer* call_analyzer = (meth != NULL) ? meth->get_bcea() : NULL;
1057 // fall-through if not a Java method or no analyzer information
1058 if (call_analyzer != NULL) {
1059 PointsToNode* call_ptn = ptnode_adr(call->_idx);
1060 const TypeTuple* d = call->tf()->domain();
1061 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
1062 const Type* at = d->field_at(i);
1063 int k = i - TypeFunc::Parms;
1064 Node* arg = call->in(i);
1065 PointsToNode* arg_ptn = ptnode_adr(arg->_idx);
1066 if (at->isa_ptr() != NULL &&
1067 call_analyzer->is_arg_returned(k)) {
1068 // The call may return this argument.
1069 if (call_ptn != NULL) { // Is call's result used?
1070 assert(call_ptn->is_LocalVar(), "node should be registered");
1071 assert(arg_ptn != NULL, "node should be registered");
1072 add_edge(call_ptn, arg_ptn);
1073 }
1074 }
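// Only oop arguments that do not already escape globally need updating.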
1075 if (at->isa_oopptr() != NULL &&
1076 arg_ptn->escape_state() < PointsToNode::GlobalEscape) {
1077 if (!call_analyzer->is_arg_stack(k)) {
1078 // The argument globally escapes.
1079 set_escape_state(arg_ptn, PointsToNode::GlobalEscape);
1080 } else {
1081 set_escape_state(arg_ptn, PointsToNode::ArgEscape);
1082 if (!call_analyzer->is_arg_local(k)) {
1083 // The argument itself doesn't escape, but any fields might
1084 set_fields_escape_state(arg_ptn, PointsToNode::GlobalEscape);
1085 }
1086 }
1087 }
1088 }
1089 if (call_ptn != NULL && call_ptn->is_LocalVar()) {
1090 // The call returns one or more of its arguments.
1091 assert(call_ptn->edge_count() > 0, "sanity");
1092 if (!call_analyzer->is_return_local()) {
1093 // It may also return an unknown object.
1094 add_edge(call_ptn, phantom_obj);
1095 }
1096 }
1097 break;
1098 }
1099 }
1100 default: {
1101 // Fall through to here if this is not a Java method, there is no
1102 // analyzer information, or it is some other type of call. Assume the
1103 // worst case: all arguments globally escape.
1104 const TypeTuple* d = call->tf()->domain();
1105 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
1106 const Type* at = d->field_at(i);
1107 if (at->isa_oopptr() != NULL) {
1108 Node* arg = call->in(i);
1109 if (arg->is_AddP()) {
1110 arg = get_addp_base(arg);
1111 }
1112 assert(ptnode_adr(arg->_idx) != NULL, "should be defined already");
1113 set_escape_state(ptnode_adr(arg->_idx), PointsToNode::GlobalEscape);
1114 }
1115 }
1116 }
1117 }
1118 }
1119
1120
1121 // Finish Graph construction.
1122 bool ConnectionGraph::complete_connection_graph(
1123 GrowableArray<PointsToNode*>& ptnodes_worklist,
1124 GrowableArray<JavaObjectNode*>& non_escaped_worklist,
|
865 if (intr == vmIntrinsics::_floatValue || intr == vmIntrinsics::_doubleValue) {
866 // It does not escape if the object is always allocated.
867 es = PointsToNode::NoEscape;
868 } else {
869 // It escapes globally if the object could be loaded from the cache.
870 es = PointsToNode::GlobalEscape;
871 }
872 add_java_object(call, es);
873 } else {
874 BCEscapeAnalyzer* call_analyzer = meth->get_bcea();
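// The bytecode-level escape analysis is valid only under the
// dependencies it recorded; copy them so this compilation is
// invalidated if they no longer hold.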
875 call_analyzer->copy_dependencies(_compile->dependencies());
876 if (call_analyzer->is_return_allocated()) {
877 // Returns a newly allocated, unescaped object; simply
878 // update dependency information.
879 // Mark it as NoEscape so that objects referenced by
880 // its fields will be marked as NoEscape at least.
881 add_java_object(call, PointsToNode::NoEscape);
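// The allocation itself happens in the callee, so the object cannot
// be scalar replaced in this compilation.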
882 ptnode_adr(call_idx)->set_scalar_replaceable(false);
883 } else {
884 // Determine whether any arguments are returned.
885 const TypeTuple* d = call->tf()->domain_sig();
886 bool ret_arg = false;
887 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
888 if (d->field_at(i)->isa_ptr() != NULL &&
889 call_analyzer->is_arg_returned(i - TypeFunc::Parms)) {
890 ret_arg = true;
891 break;
892 }
893 }
894 if (ret_arg) {
895 add_local_var(call, PointsToNode::ArgEscape);
896 } else {
897 // Returns unknown object.
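// phantom_obj is the placeholder for an unknown, globally escaping object.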
898 map_ideal_node(call, phantom_obj);
899 }
900 }
901 }
902 } else {
903 // Another type of call, assume the worst case:
904 // the returned value is unknown and globally escapes.
905 assert(call->Opcode() == Op_CallDynamicJava, "add failed case check");
912 switch (call->Opcode()) {
913 #ifdef ASSERT
914 case Op_Allocate:
915 case Op_AllocateArray:
916 case Op_Lock:
917 case Op_Unlock:
918 assert(false, "should be done already");
919 break;
920 #endif
921 case Op_ArrayCopy:
922 case Op_CallLeafNoFP:
923 // Most array copies are ArrayCopy nodes at this point, but there
924 // are still a few direct calls to the copy subroutines (see
925 // PhaseStringOpts::copy_string()).
926 is_arraycopy = (call->Opcode() == Op_ArrayCopy) ||
927 call->as_CallLeaf()->is_call_to_arraycopystub();
928 // fall through
929 case Op_CallLeaf: {
930 // Stub calls: objects do not escape but they are not scalar replaceable.
931 // Adjust escape state for outgoing arguments.
932 const TypeTuple* d = call->tf()->domain_sig();
933 bool src_has_oops = false;
934 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
935 const Type* at = d->field_at(i);
936 Node* arg = call->in(i);
937 if (arg == NULL) {
938 continue;
939 }
940 const Type* aat = _igvn->type(arg);
941 if (arg->is_top() || !at->isa_ptr() || !aat->isa_ptr())
942 continue;
943 if (arg->is_AddP()) {
944 //
945 // The inline_native_clone() case when the arraycopy stub is called
946 // after the allocation before Initialize and CheckCastPP nodes.
947 // Or normal arraycopy for object arrays case.
948 //
949 // Set AddP's base (Allocate) as not scalar replaceable since
950 // pointer to the base (with offset) is passed as argument.
951 //
952 arg = get_addp_base(arg);
1040 }
1041 }
1042 }
1043 break;
1044 }
1045 case Op_CallStaticJava: {
1046 // For a static call, we know exactly what method is being called.
1047 // Use bytecode estimator to record the call's escape effects.
1048 #ifdef ASSERT
1049 const char* name = call->as_CallStaticJava()->_name;
1050 assert((name == NULL || strcmp(name, "uncommon_trap") != 0), "normal calls only");
1051 #endif
1052 ciMethod* meth = call->as_CallJava()->method();
1053 if ((meth != NULL) && meth->is_boxing_method()) {
1054 break; // Boxing methods do not modify any oops.
1055 }
1056 BCEscapeAnalyzer* call_analyzer = (meth != NULL) ? meth->get_bcea() : NULL;
1057 // fall-through if not a Java method or no analyzer information
1058 if (call_analyzer != NULL) {
1059 PointsToNode* call_ptn = ptnode_adr(call->_idx);
1060 const TypeTuple* d = call->tf()->domain_sig();
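// A value type argument is passed scalarized, one call input per field.
// Skip it here and track the surplus inputs in 'extra' so the remaining
// parameters map to the correct call edges.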
1061 int extra = 0;
1062 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
1063 const Type* at = d->field_at(i);
1064 if (at->isa_valuetypeptr()) {
1065 extra += at->is_valuetypeptr()->value_type()->value_klass()->field_count() - 1;
1066 continue;
1067 }
1068 int k = i - TypeFunc::Parms;
1069 Node* arg = call->in(i + extra);
1070 PointsToNode* arg_ptn = ptnode_adr(arg->_idx);
1071 if (at->isa_ptr() != NULL &&
1072 call_analyzer->is_arg_returned(k)) {
1073 // The call may return this argument.
1074 if (call_ptn != NULL) { // Is call's result used?
1075 assert(call_ptn->is_LocalVar(), "node should be registered");
1076 assert(arg_ptn != NULL, "node should be registered");
1077 add_edge(call_ptn, arg_ptn);
1078 }
1079 }
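// Only oop arguments that do not already escape globally need updating.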
1080 if (at->isa_oopptr() != NULL &&
1081 arg_ptn->escape_state() < PointsToNode::GlobalEscape) {
1082 if (!call_analyzer->is_arg_stack(k)) {
1083 // The argument globally escapes.
1084 set_escape_state(arg_ptn, PointsToNode::GlobalEscape);
1085 } else {
1086 set_escape_state(arg_ptn, PointsToNode::ArgEscape);
1087 if (!call_analyzer->is_arg_local(k)) {
1088 // The argument itself doesn't escape, but any fields might
1089 set_fields_escape_state(arg_ptn, PointsToNode::GlobalEscape);
1090 }
1091 }
1092 }
1093 }
1094 if (call_ptn != NULL && call_ptn->is_LocalVar()) {
1095 // The call returns one or more of its arguments.
1096 assert(call_ptn->edge_count() > 0, "sanity");
1097 if (!call_analyzer->is_return_local()) {
1098 // It may also return an unknown object.
1099 add_edge(call_ptn, phantom_obj);
1100 }
1101 }
1102 break;
1103 }
1104 }
1105 default: {
1106 // Fall through to here if this is not a Java method, there is no
1107 // analyzer information, or it is some other type of call. Assume the
1108 // worst case: all arguments globally escape.
1109 const TypeTuple* d = call->tf()->domain_sig();
1110 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
1111 const Type* at = d->field_at(i);
1112 if (at->isa_oopptr() != NULL) {
1113 Node* arg = call->in(i);
1114 if (arg->is_AddP()) {
1115 arg = get_addp_base(arg);
1116 }
1117 assert(ptnode_adr(arg->_idx) != NULL, "should be defined already");
1118 set_escape_state(ptnode_adr(arg->_idx), PointsToNode::GlobalEscape);
1119 }
1120 }
1121 }
1122 }
1123 }
1124
1125
1126 // Finish Graph construction.
1127 bool ConnectionGraph::complete_connection_graph(
1128 GrowableArray<PointsToNode*>& ptnodes_worklist,
1129 GrowableArray<JavaObjectNode*>& non_escaped_worklist,
|