140 }
141 }
142 return false;
143 }
144
145 static Node* skip_string_null_check(Node* value) {
146 // Look for a diamond shaped Null check of toString() result
147 // (could be code from String.valueOf()):
148 // (Proj == NULL) ? "null":"CastPP(Proj)#NotNULL
149 if (value->is_Phi()) {
150 int true_path = value->as_Phi()->is_diamond_phi();
151 if (true_path != 0) {
152 // phi->region->if_proj->ifnode->bool
153 BoolNode* b = value->in(0)->in(1)->in(0)->in(1)->as_Bool();
154 Node* cmp = b->in(1);
155 Node* v1 = cmp->in(1);
156 Node* v2 = cmp->in(2);
157 // Null check of the return of toString which can simply be skipped.
158 if (b->_test._test == BoolTest::ne &&
159 v2->bottom_type() == TypePtr::NULL_PTR &&
160 value->in(true_path)->Opcode() == Op_CastPP &&
161 value->in(true_path)->in(1) == v1 &&
162 v1->is_Proj() && is_SB_toString(v1->in(0))) {
163 return v1;
164 }
165 }
166 }
167 return value;
168 }
169
170 Node* argument(int i) {
171 return _arguments->in(i);
172 }
173 Node* argument_uncast(int i) {
174 Node* arg = argument(i);
175 int amode = mode(i);
176 if (amode == StringConcat::StringMode ||
177 amode == StringConcat::StringNullCheckMode) {
178 arg = skip_string_null_check(arg);
179 }
180 return arg;
326 if (projs.fallthrough_catchproj != NULL) {
327 C->gvn_replace_by(projs.fallthrough_catchproj, call->in(TypeFunc::Control));
328 }
329 if (projs.fallthrough_memproj != NULL) {
330 C->gvn_replace_by(projs.fallthrough_memproj, call->in(TypeFunc::Memory));
331 }
332 if (projs.catchall_memproj != NULL) {
333 C->gvn_replace_by(projs.catchall_memproj, C->top());
334 }
335 if (projs.fallthrough_ioproj != NULL) {
336 C->gvn_replace_by(projs.fallthrough_ioproj, call->in(TypeFunc::I_O));
337 }
338 if (projs.catchall_ioproj != NULL) {
339 C->gvn_replace_by(projs.catchall_ioproj, C->top());
340 }
341 if (projs.catchall_catchproj != NULL) {
342 // EA can't cope with the partially collapsed graph this
343 // creates so put it on the worklist to be collapsed later.
344 for (SimpleDUIterator i(projs.catchall_catchproj); i.has_next(); i.next()) {
345 Node *use = i.get();
346 int opc = use->Opcode();
347 if (opc == Op_CreateEx || opc == Op_Region) {
348 _stringopts->record_dead_node(use);
349 }
350 }
351 C->gvn_replace_by(projs.catchall_catchproj, C->top());
352 }
353 if (projs.resproj != NULL) {
354 C->gvn_replace_by(projs.resproj, C->top());
355 }
356 C->gvn_replace_by(call, C->top());
357 }
358
359 void StringConcat::eliminate_initialize(InitializeNode* init) {
360 Compile* C = _stringopts->C;
361
362 // Eliminate Initialize node.
363 assert(init->outcnt() <= 2, "only a control and memory projection expected");
364 assert(init->req() <= InitializeNode::RawStores, "no pending inits");
365 Node *ctrl_proj = init->proj_out(TypeFunc::Control);
366 if (ctrl_proj != NULL) {
367 C->gvn_replace_by(ctrl_proj, init->in(TypeFunc::Control));
662 }
663
664
665 for (int c = 0; c < concats.length(); c++) {
666 StringConcat* sc = concats.at(c);
667 replace_string_concat(sc);
668 }
669
670 remove_dead_nodes();
671 }
672
// Queue 'dead' for later deletion; remove_dead_nodes() drains this
// worklist after the string concatenations have been collapsed.
void PhaseStringOpts::record_dead_node(Node* dead) {
  dead_worklist.push(dead);
}
676
677 void PhaseStringOpts::remove_dead_nodes() {
678 // Delete any dead nodes to make things clean enough that escape
679 // analysis doesn't get unhappy.
680 while (dead_worklist.size() > 0) {
681 Node* use = dead_worklist.pop();
682 int opc = use->Opcode();
683 switch (opc) {
684 case Op_Region: {
685 uint i = 1;
686 for (i = 1; i < use->req(); i++) {
687 if (use->in(i) != C->top()) {
688 break;
689 }
690 }
691 if (i >= use->req()) {
692 for (SimpleDUIterator i(use); i.has_next(); i.next()) {
693 Node* m = i.get();
694 if (m->is_Phi()) {
695 dead_worklist.push(m);
696 }
697 }
698 C->gvn_replace_by(use, C->top());
699 }
700 break;
701 }
702 case Op_AddP:
703 case Op_CreateEx: {
704 // Recurisvely clean up references to CreateEx so EA doesn't
705 // get unhappy about the partially collapsed graph.
706 for (SimpleDUIterator i(use); i.has_next(); i.next()) {
707 Node* m = i.get();
708 if (m->is_AddP()) {
709 dead_worklist.push(m);
710 }
711 }
712 C->gvn_replace_by(use, C->top());
713 break;
714 }
715 case Op_Phi:
716 if (use->in(0) == C->top()) {
717 C->gvn_replace_by(use, C->top());
718 }
719 break;
720 }
721 }
722 }
723
724
725 bool StringConcat::validate_mem_flow() {
726 Compile* C = _stringopts->C;
727
728 for (uint i = 0; i < _control.size(); i++) {
729 #ifndef PRODUCT
730 Node_List path;
731 #endif
732 Node* curr = _control.at(i);
733 if (curr->is_Call() && curr != _begin) { // For all calls except the first allocation
734 // Now here's the main invariant in our case:
735 // For memory between the constructor, and appends, and toString we should only see bottom memory,
1040 if (cnode != NULL) {
1041 _stringopts->_visited.test_set(cnode->_idx);
1042 }
1043 Node* result = cnode != NULL ? cnode->proj_out(TypeFunc::Parms) : NULL;
1044 if (result != NULL && result != final_result) {
1045 worklist.push(result);
1046 }
1047 }
1048
1049 Node* last_result = NULL;
1050 while (worklist.size() > 0) {
1051 Node* result = worklist.pop();
1052 if (_stringopts->_visited.test_set(result->_idx))
1053 continue;
1054 for (SimpleDUIterator i(result); i.has_next(); i.next()) {
1055 Node *use = i.get();
1056 if (ctrl_path.member(use)) {
1057 // already checked this
1058 continue;
1059 }
1060 int opc = use->Opcode();
1061 if (opc == Op_CmpP || opc == Op_Node) {
1062 ctrl_path.push(use);
1063 continue;
1064 }
1065 if (opc == Op_CastPP || opc == Op_CheckCastPP) {
1066 for (SimpleDUIterator j(use); j.has_next(); j.next()) {
1067 worklist.push(j.get());
1068 }
1069 worklist.push(use->in(1));
1070 ctrl_path.push(use);
1071 continue;
1072 }
1073 #ifndef PRODUCT
1074 if (PrintOptimizeStringConcat) {
1075 if (result != last_result) {
1076 last_result = result;
1077 tty->print_cr("extra uses for result:");
1078 last_result->dump();
1079 }
1080 use->dump();
1081 }
1082 #endif
1083 fail = true;
1084 break;
1085 }
1445
// Copy contents of a Latin1 encoded string from src_array to dst_array.
// When the destination coder is not a compile-time constant, both the
// Latin1 (byte copy) and UTF16 (inflate) paths are emitted under a
// runtime test of dst_coder; otherwise only the statically known path
// is generated.  On the UTF16 path 'count' is doubled (two bytes per
// char).
void PhaseStringOpts::copy_latin1_string(GraphKit& kit, IdealKit& ideal, Node* src_array, IdealVariable& count,
                                         Node* dst_array, Node* dst_coder, Node* start) {
  // Is the destination coder known at compile time?
  bool dcon = dst_coder->is_Con();
  // If so, is the destination Latin1 encoded?
  bool dbyte = dcon ? (dst_coder->get_int() == java_lang_String::CODER_LATIN1) : false;

  if (!dcon) {
    // Coder unknown: open a runtime test for the Latin1 case.
    __ if_then(dst_coder, BoolTest::eq, __ ConI(java_lang_String::CODER_LATIN1));
  }
  if (!dcon || dbyte) {
    // Destination is Latin1. Simply emit a byte arraycopy.
    arraycopy(kit, ideal, src_array, dst_array, T_BYTE, start, __ value(count));
  }
  if (!dcon) {
    __ else_();
  }
  if (!dcon || !dbyte) {
    // Destination is UTF16. Inflate src_array into dst_array.
    kit.sync_kit(ideal);
    if (Matcher::match_rule_supported(Op_StrInflatedCopy)) {
      // Use fast intrinsic
      Node* src = kit.array_element_address(src_array, kit.intcon(0), T_BYTE);
      Node* dst = kit.array_element_address(dst_array, start, T_BYTE);
      kit.inflate_string(src, dst, TypeAryPtr::BYTES, __ value(count));
    } else {
      // No intrinsic available, use slow method
      kit.inflate_string_slow(src_array, dst_array, start, __ value(count));
    }
    ideal.sync_kit(&kit);
    // Multiply count by two since we now need two bytes per char
    __ set(count, __ LShiftI(__ value(count), __ ConI(1)));
  }
  if (!dcon) {
    __ end_if();
  }
}
1482
1483 // Read two bytes from index and index+1 and convert them to a char
1484 static jchar readChar(ciTypeArray* array, int index) {
1485 int shift_high, shift_low;
1946
1947 // If we're not reusing an existing String allocation then allocate one here.
1948 result = sc->string_alloc();
1949 if (result == NULL) {
1950 PreserveReexecuteState preexecs(&kit);
1951 // The original jvms is for an allocation of either a String or
1952 // StringBuffer so no stack adjustment is necessary for proper
1953 // reexecution.
1954 kit.jvms()->set_should_reexecute(true);
1955 result = kit.new_instance(__ makecon(TypeKlassPtr::make(C->env()->String_klass())));
1956 }
1957
1958 // Initialize the string
1959 kit.store_String_value(kit.control(), result, dst_array);
1960 kit.store_String_coder(kit.control(), result, coder);
1961
1962 // The value field is final. Emit a barrier here to ensure that the effect
1963 // of the initialization is committed to memory before any code publishes
1964 // a reference to the newly constructed object (see Parse::do_exits()).
1965 assert(AllocateNode::Ideal_allocation(result, _gvn) != NULL, "should be newly allocated");
1966 kit.insert_mem_bar(Op_MemBarRelease, result);
1967 } else {
1968 result = C->top();
1969 }
1970 // hook up the outgoing control and result
1971 kit.replace_call(sc->end(), result);
1972
1973 // Unhook any hook nodes
1974 string_sizes->disconnect_inputs(NULL, C);
1975 sc->cleanup();
1976 }
|
140 }
141 }
142 return false;
143 }
144
145 static Node* skip_string_null_check(Node* value) {
146 // Look for a diamond shaped Null check of toString() result
147 // (could be code from String.valueOf()):
148 // (Proj == NULL) ? "null":"CastPP(Proj)#NotNULL
149 if (value->is_Phi()) {
150 int true_path = value->as_Phi()->is_diamond_phi();
151 if (true_path != 0) {
152 // phi->region->if_proj->ifnode->bool
153 BoolNode* b = value->in(0)->in(1)->in(0)->in(1)->as_Bool();
154 Node* cmp = b->in(1);
155 Node* v1 = cmp->in(1);
156 Node* v2 = cmp->in(2);
157 // Null check of the return of toString which can simply be skipped.
158 if (b->_test._test == BoolTest::ne &&
159 v2->bottom_type() == TypePtr::NULL_PTR &&
160 value->in(true_path)->Opcode() == Opcodes::Op_CastPP &&
161 value->in(true_path)->in(1) == v1 &&
162 v1->is_Proj() && is_SB_toString(v1->in(0))) {
163 return v1;
164 }
165 }
166 }
167 return value;
168 }
169
170 Node* argument(int i) {
171 return _arguments->in(i);
172 }
173 Node* argument_uncast(int i) {
174 Node* arg = argument(i);
175 int amode = mode(i);
176 if (amode == StringConcat::StringMode ||
177 amode == StringConcat::StringNullCheckMode) {
178 arg = skip_string_null_check(arg);
179 }
180 return arg;
326 if (projs.fallthrough_catchproj != NULL) {
327 C->gvn_replace_by(projs.fallthrough_catchproj, call->in(TypeFunc::Control));
328 }
329 if (projs.fallthrough_memproj != NULL) {
330 C->gvn_replace_by(projs.fallthrough_memproj, call->in(TypeFunc::Memory));
331 }
332 if (projs.catchall_memproj != NULL) {
333 C->gvn_replace_by(projs.catchall_memproj, C->top());
334 }
335 if (projs.fallthrough_ioproj != NULL) {
336 C->gvn_replace_by(projs.fallthrough_ioproj, call->in(TypeFunc::I_O));
337 }
338 if (projs.catchall_ioproj != NULL) {
339 C->gvn_replace_by(projs.catchall_ioproj, C->top());
340 }
341 if (projs.catchall_catchproj != NULL) {
342 // EA can't cope with the partially collapsed graph this
343 // creates so put it on the worklist to be collapsed later.
344 for (SimpleDUIterator i(projs.catchall_catchproj); i.has_next(); i.next()) {
345 Node *use = i.get();
346 Opcodes opc = use->Opcode();
347 if (opc == Opcodes::Op_CreateEx || opc == Opcodes::Op_Region) {
348 _stringopts->record_dead_node(use);
349 }
350 }
351 C->gvn_replace_by(projs.catchall_catchproj, C->top());
352 }
353 if (projs.resproj != NULL) {
354 C->gvn_replace_by(projs.resproj, C->top());
355 }
356 C->gvn_replace_by(call, C->top());
357 }
358
359 void StringConcat::eliminate_initialize(InitializeNode* init) {
360 Compile* C = _stringopts->C;
361
362 // Eliminate Initialize node.
363 assert(init->outcnt() <= 2, "only a control and memory projection expected");
364 assert(init->req() <= InitializeNode::RawStores, "no pending inits");
365 Node *ctrl_proj = init->proj_out(TypeFunc::Control);
366 if (ctrl_proj != NULL) {
367 C->gvn_replace_by(ctrl_proj, init->in(TypeFunc::Control));
662 }
663
664
665 for (int c = 0; c < concats.length(); c++) {
666 StringConcat* sc = concats.at(c);
667 replace_string_concat(sc);
668 }
669
670 remove_dead_nodes();
671 }
672
// Queue 'dead' for later deletion; remove_dead_nodes() drains this
// worklist after the string concatenations have been collapsed.
void PhaseStringOpts::record_dead_node(Node* dead) {
  dead_worklist.push(dead);
}
676
677 void PhaseStringOpts::remove_dead_nodes() {
678 // Delete any dead nodes to make things clean enough that escape
679 // analysis doesn't get unhappy.
680 while (dead_worklist.size() > 0) {
681 Node* use = dead_worklist.pop();
682 Opcodes opc = use->Opcode();
683 switch (opc) {
684 case Opcodes::Op_Region: {
685 uint i = 1;
686 for (i = 1; i < use->req(); i++) {
687 if (use->in(i) != C->top()) {
688 break;
689 }
690 }
691 if (i >= use->req()) {
692 for (SimpleDUIterator i(use); i.has_next(); i.next()) {
693 Node* m = i.get();
694 if (m->is_Phi()) {
695 dead_worklist.push(m);
696 }
697 }
698 C->gvn_replace_by(use, C->top());
699 }
700 break;
701 }
702 case Opcodes::Op_AddP:
703 case Opcodes::Op_CreateEx: {
704 // Recurisvely clean up references to CreateEx so EA doesn't
705 // get unhappy about the partially collapsed graph.
706 for (SimpleDUIterator i(use); i.has_next(); i.next()) {
707 Node* m = i.get();
708 if (m->is_AddP()) {
709 dead_worklist.push(m);
710 }
711 }
712 C->gvn_replace_by(use, C->top());
713 break;
714 }
715 case Opcodes::Op_Phi:
716 if (use->in(0) == C->top()) {
717 C->gvn_replace_by(use, C->top());
718 }
719 break;
720 }
721 }
722 }
723
724
725 bool StringConcat::validate_mem_flow() {
726 Compile* C = _stringopts->C;
727
728 for (uint i = 0; i < _control.size(); i++) {
729 #ifndef PRODUCT
730 Node_List path;
731 #endif
732 Node* curr = _control.at(i);
733 if (curr->is_Call() && curr != _begin) { // For all calls except the first allocation
734 // Now here's the main invariant in our case:
735 // For memory between the constructor, and appends, and toString we should only see bottom memory,
1040 if (cnode != NULL) {
1041 _stringopts->_visited.test_set(cnode->_idx);
1042 }
1043 Node* result = cnode != NULL ? cnode->proj_out(TypeFunc::Parms) : NULL;
1044 if (result != NULL && result != final_result) {
1045 worklist.push(result);
1046 }
1047 }
1048
1049 Node* last_result = NULL;
1050 while (worklist.size() > 0) {
1051 Node* result = worklist.pop();
1052 if (_stringopts->_visited.test_set(result->_idx))
1053 continue;
1054 for (SimpleDUIterator i(result); i.has_next(); i.next()) {
1055 Node *use = i.get();
1056 if (ctrl_path.member(use)) {
1057 // already checked this
1058 continue;
1059 }
1060 Opcodes opc = use->Opcode();
1061 if (opc == Opcodes::Op_CmpP || opc == Opcodes::Op_Node) {
1062 ctrl_path.push(use);
1063 continue;
1064 }
1065 if (opc == Opcodes::Op_CastPP || opc == Opcodes::Op_CheckCastPP) {
1066 for (SimpleDUIterator j(use); j.has_next(); j.next()) {
1067 worklist.push(j.get());
1068 }
1069 worklist.push(use->in(1));
1070 ctrl_path.push(use);
1071 continue;
1072 }
1073 #ifndef PRODUCT
1074 if (PrintOptimizeStringConcat) {
1075 if (result != last_result) {
1076 last_result = result;
1077 tty->print_cr("extra uses for result:");
1078 last_result->dump();
1079 }
1080 use->dump();
1081 }
1082 #endif
1083 fail = true;
1084 break;
1085 }
1445
// Copy contents of a Latin1 encoded string from src_array to dst_array.
// When the destination coder is not a compile-time constant, both the
// Latin1 (byte copy) and UTF16 (inflate) paths are emitted under a
// runtime test of dst_coder; otherwise only the statically known path
// is generated.  On the UTF16 path 'count' is doubled (two bytes per
// char).
void PhaseStringOpts::copy_latin1_string(GraphKit& kit, IdealKit& ideal, Node* src_array, IdealVariable& count,
                                         Node* dst_array, Node* dst_coder, Node* start) {
  // Is the destination coder known at compile time?
  bool dcon = dst_coder->is_Con();
  // If so, is the destination Latin1 encoded?
  bool dbyte = dcon ? (dst_coder->get_int() == java_lang_String::CODER_LATIN1) : false;

  if (!dcon) {
    // Coder unknown: open a runtime test for the Latin1 case.
    __ if_then(dst_coder, BoolTest::eq, __ ConI(java_lang_String::CODER_LATIN1));
  }
  if (!dcon || dbyte) {
    // Destination is Latin1. Simply emit a byte arraycopy.
    arraycopy(kit, ideal, src_array, dst_array, T_BYTE, start, __ value(count));
  }
  if (!dcon) {
    __ else_();
  }
  if (!dcon || !dbyte) {
    // Destination is UTF16. Inflate src_array into dst_array.
    kit.sync_kit(ideal);
    if (Matcher::match_rule_supported(Opcodes::Op_StrInflatedCopy)) {
      // Use fast intrinsic
      Node* src = kit.array_element_address(src_array, kit.intcon(0), T_BYTE);
      Node* dst = kit.array_element_address(dst_array, start, T_BYTE);
      kit.inflate_string(src, dst, TypeAryPtr::BYTES, __ value(count));
    } else {
      // No intrinsic available, use slow method
      kit.inflate_string_slow(src_array, dst_array, start, __ value(count));
    }
    ideal.sync_kit(&kit);
    // Multiply count by two since we now need two bytes per char
    __ set(count, __ LShiftI(__ value(count), __ ConI(1)));
  }
  if (!dcon) {
    __ end_if();
  }
}
1482
1483 // Read two bytes from index and index+1 and convert them to a char
1484 static jchar readChar(ciTypeArray* array, int index) {
1485 int shift_high, shift_low;
1946
1947 // If we're not reusing an existing String allocation then allocate one here.
1948 result = sc->string_alloc();
1949 if (result == NULL) {
1950 PreserveReexecuteState preexecs(&kit);
1951 // The original jvms is for an allocation of either a String or
1952 // StringBuffer so no stack adjustment is necessary for proper
1953 // reexecution.
1954 kit.jvms()->set_should_reexecute(true);
1955 result = kit.new_instance(__ makecon(TypeKlassPtr::make(C->env()->String_klass())));
1956 }
1957
1958 // Initialize the string
1959 kit.store_String_value(kit.control(), result, dst_array);
1960 kit.store_String_coder(kit.control(), result, coder);
1961
1962 // The value field is final. Emit a barrier here to ensure that the effect
1963 // of the initialization is committed to memory before any code publishes
1964 // a reference to the newly constructed object (see Parse::do_exits()).
1965 assert(AllocateNode::Ideal_allocation(result, _gvn) != NULL, "should be newly allocated");
1966 kit.insert_mem_bar(Opcodes::Op_MemBarRelease, result);
1967 } else {
1968 result = C->top();
1969 }
1970 // hook up the outgoing control and result
1971 kit.replace_call(sc->end(), result);
1972
1973 // Unhook any hook nodes
1974 string_sizes->disconnect_inputs(NULL, C);
1975 sc->cleanup();
1976 }
|