--- old/src/share/vm/opto/machnode.cpp	2017-06-01 17:27:45.066203609 +0200
+++ new/src/share/vm/opto/machnode.cpp	2017-06-01 17:27:44.992203705 +0200
@@ -668,8 +668,8 @@
 //=============================================================================
 uint MachCallNode::cmp( const Node &n ) const
 { return _tf == ((MachCallNode&)n)._tf; }
-const Type *MachCallNode::bottom_type() const { return tf()->range(); }
-const Type* MachCallNode::Value(PhaseGVN* phase) const { return tf()->range(); }
+const Type *MachCallNode::bottom_type() const { return tf()->range_cc(); }
+const Type* MachCallNode::Value(PhaseGVN* phase) const { return tf()->range_cc(); }
 
 #ifndef PRODUCT
 void MachCallNode::dump_spec(outputStream *st) const {
@@ -681,11 +681,13 @@
 #endif
 
 bool MachCallNode::return_value_is_used() const {
-  if (tf()->range()->cnt() == TypeFunc::Parms) {
+  if (tf()->range_sig()->cnt() == TypeFunc::Parms) {
     // void return
     return false;
   }
 
+  assert(tf()->returns_value_type_as_fields(), "multiple return values not supported");
+
   // find the projection corresponding to the return value
   for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
     Node *use = fast_out(i);
@@ -701,11 +703,15 @@
 // Because this is used in deoptimization, we want the type info, not the data
 // flow info; the interpreter will "use" things that are dead to the optimizer.
 bool MachCallNode::returns_pointer() const {
-  const TypeTuple *r = tf()->range();
+  const TypeTuple *r = tf()->range_sig();
   return (r->cnt() > TypeFunc::Parms &&
           r->field_at(TypeFunc::Parms)->isa_ptr());
 }
 
+bool MachCallNode::returns_vt() const {
+  return tf()->returns_value_type_as_fields();
+}
+
 //------------------------------Registers--------------------------------------
 const RegMask &MachCallNode::in_RegMask(uint idx) const {
   // Values in the domain use the users calling convention, embodied in the
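
Note on the API split used above: the patch replaces the single tf()->range() query with two views, range_sig() and range_cc(), and routes each caller to the view it needs. The sketch below is a minimal, self-contained illustration of that idea, not HotSpot code; the names ToyRange, ToyTypeFunc and kParms are invented for illustration, and the assumed semantics are that range_sig() describes the return tuple as declared in the signature (one oop slot for a value type) while range_cc() describes it as the calling convention actually delivers it (one slot per scalarized field).

// Toy model only -- these types and names are invented for illustration and
// are not part of HotSpot.
#include <cstddef>
#include <cstdio>
#include <vector>

enum class Slot { Oop, Int };                  // stand-ins for TypeTuple fields

const size_t kParms = 5;                       // plays the role of TypeFunc::Parms

struct ToyRange {
  std::vector<Slot> ret;                       // return slots after the Parms part
  size_t cnt() const { return kParms + ret.size(); }
};

struct ToyTypeFunc {
  ToyRange sig;                                // ~ range_sig(): declared-signature view
  ToyRange cc;                                 // ~ range_cc(): calling-convention view
  // Assumed meaning: the cc view has extra slots when a value type is
  // returned as its individual fields.
  bool returns_value_type_as_fields() const { return cc.cnt() > sig.cnt(); }
};

// Type question (what deoptimization cares about): ask the signature view.
bool returns_pointer(const ToyTypeFunc& tf) {
  return tf.sig.cnt() > kParms && tf.sig.ret[0] == Slot::Oop;
}

int main() {
  // A call returning a value type Point { int x; int y; }: one oop slot in the
  // signature view, two scalarized int slots in the calling-convention view.
  ToyTypeFunc tf;
  tf.sig.ret = { Slot::Oop };
  tf.cc.ret  = { Slot::Int, Slot::Int };
  std::printf("returns_pointer (sig view): %d\n", (int)returns_pointer(tf));
  std::printf("returns fields  (cc view):  %d\n", (int)tf.returns_value_type_as_fields());
  return 0;
}

Under that assumption the routing in the patch is consistent: returns_pointer() keeps answering the question deoptimization asks (the declared return type), while bottom_type() and Value() describe what the machine call node actually produces under the calling convention.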