< prev index next >
src/share/vm/opto/stringopts.cpp
Print this page
*** 155,165 ****
Node* v1 = cmp->in(1);
Node* v2 = cmp->in(2);
// Null check of the return of toString which can simply be skipped.
if (b->_test._test == BoolTest::ne &&
v2->bottom_type() == TypePtr::NULL_PTR &&
! value->in(true_path)->Opcode() == Op_CastPP &&
value->in(true_path)->in(1) == v1 &&
v1->is_Proj() && is_SB_toString(v1->in(0))) {
return v1;
}
}
--- 155,165 ----
Node* v1 = cmp->in(1);
Node* v2 = cmp->in(2);
// Null check of the return of toString which can simply be skipped.
if (b->_test._test == BoolTest::ne &&
v2->bottom_type() == TypePtr::NULL_PTR &&
! value->in(true_path)->Opcode() == Opcodes::Op_CastPP &&
value->in(true_path)->in(1) == v1 &&
v1->is_Proj() && is_SB_toString(v1->in(0))) {
return v1;
}
}
*** 341,352 ****
if (projs.catchall_catchproj != NULL) {
// EA can't cope with the partially collapsed graph this
// creates so put it on the worklist to be collapsed later.
for (SimpleDUIterator i(projs.catchall_catchproj); i.has_next(); i.next()) {
Node *use = i.get();
! int opc = use->Opcode();
! if (opc == Op_CreateEx || opc == Op_Region) {
_stringopts->record_dead_node(use);
}
}
C->gvn_replace_by(projs.catchall_catchproj, C->top());
}
--- 341,352 ----
if (projs.catchall_catchproj != NULL) {
// EA can't cope with the partially collapsed graph this
// creates so put it on the worklist to be collapsed later.
for (SimpleDUIterator i(projs.catchall_catchproj); i.has_next(); i.next()) {
Node *use = i.get();
! Opcodes opc = use->Opcode();
! if (opc == Opcodes::Op_CreateEx || opc == Opcodes::Op_Region) {
_stringopts->record_dead_node(use);
}
}
C->gvn_replace_by(projs.catchall_catchproj, C->top());
}
*** 677,689 ****
void PhaseStringOpts::remove_dead_nodes() {
// Delete any dead nodes to make things clean enough that escape
// analysis doesn't get unhappy.
while (dead_worklist.size() > 0) {
Node* use = dead_worklist.pop();
! int opc = use->Opcode();
switch (opc) {
! case Op_Region: {
uint i = 1;
for (i = 1; i < use->req(); i++) {
if (use->in(i) != C->top()) {
break;
}
--- 677,689 ----
void PhaseStringOpts::remove_dead_nodes() {
// Delete any dead nodes to make things clean enough that escape
// analysis doesn't get unhappy.
while (dead_worklist.size() > 0) {
Node* use = dead_worklist.pop();
! Opcodes opc = use->Opcode();
switch (opc) {
! case Opcodes::Op_Region: {
uint i = 1;
for (i = 1; i < use->req(); i++) {
if (use->in(i) != C->top()) {
break;
}
*** 697,708 ****
}
C->gvn_replace_by(use, C->top());
}
break;
}
! case Op_AddP:
! case Op_CreateEx: {
// Recursively clean up references to CreateEx so EA doesn't
// get unhappy about the partially collapsed graph.
for (SimpleDUIterator i(use); i.has_next(); i.next()) {
Node* m = i.get();
if (m->is_AddP()) {
--- 697,708 ----
}
C->gvn_replace_by(use, C->top());
}
break;
}
! case Opcodes::Op_AddP:
! case Opcodes::Op_CreateEx: {
// Recursively clean up references to CreateEx so EA doesn't
// get unhappy about the partially collapsed graph.
for (SimpleDUIterator i(use); i.has_next(); i.next()) {
Node* m = i.get();
if (m->is_AddP()) {
*** 710,720 ****
}
}
C->gvn_replace_by(use, C->top());
break;
}
! case Op_Phi:
if (use->in(0) == C->top()) {
C->gvn_replace_by(use, C->top());
}
break;
}
--- 710,720 ----
}
}
C->gvn_replace_by(use, C->top());
break;
}
! case Opcodes::Op_Phi:
if (use->in(0) == C->top()) {
C->gvn_replace_by(use, C->top());
}
break;
}
*** 1055,1070 ****
Node *use = i.get();
if (ctrl_path.member(use)) {
// already checked this
continue;
}
! int opc = use->Opcode();
! if (opc == Op_CmpP || opc == Op_Node) {
ctrl_path.push(use);
continue;
}
! if (opc == Op_CastPP || opc == Op_CheckCastPP) {
for (SimpleDUIterator j(use); j.has_next(); j.next()) {
worklist.push(j.get());
}
worklist.push(use->in(1));
ctrl_path.push(use);
--- 1055,1070 ----
Node *use = i.get();
if (ctrl_path.member(use)) {
// already checked this
continue;
}
! Opcodes opc = use->Opcode();
! if (opc == Opcodes::Op_CmpP || opc == Opcodes::Op_Node) {
ctrl_path.push(use);
continue;
}
! if (opc == Opcodes::Op_CastPP || opc == Opcodes::Op_CheckCastPP) {
for (SimpleDUIterator j(use); j.has_next(); j.next()) {
worklist.push(j.get());
}
worklist.push(use->in(1));
ctrl_path.push(use);
*** 1460,1470 ****
__ else_();
}
if (!dcon || !dbyte) {
// Destination is UTF16. Inflate src_array into dst_array.
kit.sync_kit(ideal);
! if (Matcher::match_rule_supported(Op_StrInflatedCopy)) {
// Use fast intrinsic
Node* src = kit.array_element_address(src_array, kit.intcon(0), T_BYTE);
Node* dst = kit.array_element_address(dst_array, start, T_BYTE);
kit.inflate_string(src, dst, TypeAryPtr::BYTES, __ value(count));
} else {
--- 1460,1470 ----
__ else_();
}
if (!dcon || !dbyte) {
// Destination is UTF16. Inflate src_array into dst_array.
kit.sync_kit(ideal);
! if (Matcher::match_rule_supported(Opcodes::Op_StrInflatedCopy)) {
// Use fast intrinsic
Node* src = kit.array_element_address(src_array, kit.intcon(0), T_BYTE);
Node* dst = kit.array_element_address(dst_array, start, T_BYTE);
kit.inflate_string(src, dst, TypeAryPtr::BYTES, __ value(count));
} else {
*** 1961,1971 ****
// The value field is final. Emit a barrier here to ensure that the effect
// of the initialization is committed to memory before any code publishes
// a reference to the newly constructed object (see Parse::do_exits()).
assert(AllocateNode::Ideal_allocation(result, _gvn) != NULL, "should be newly allocated");
! kit.insert_mem_bar(Op_MemBarRelease, result);
} else {
result = C->top();
}
// hook up the outgoing control and result
kit.replace_call(sc->end(), result);
--- 1961,1971 ----
// The value field is final. Emit a barrier here to ensure that the effect
// of the initialization is committed to memory before any code publishes
// a reference to the newly constructed object (see Parse::do_exits()).
assert(AllocateNode::Ideal_allocation(result, _gvn) != NULL, "should be newly allocated");
! kit.insert_mem_bar(Opcodes::Op_MemBarRelease, result);
} else {
result = C->top();
}
// hook up the outgoing control and result
kit.replace_call(sc->end(), result);
< prev index next >