--- old/src/hotspot/cpu/ppc/ppc.ad	2018-01-10 08:46:32.816550148 -0600
+++ new/src/hotspot/cpu/ppc/ppc.ad	2018-01-10 08:46:32.140523615 -0600
@@ -1658,7 +1658,7 @@
 
 // Figure out which register class each belongs in: rc_int, rc_float or
 // rc_stack.
-enum RC { rc_bad, rc_int, rc_float, rc_stack };
+enum RC { rc_bad, rc_int, rc_float, rc_vs, rc_stack };
 
 static enum RC rc_class(OptoReg::Name reg) {
   // Return the register class for the given register. The given register
@@ -1673,6 +1673,9 @@
   // We have 64 floating-point register halves, starting at index 64.
   if (reg < 64+64) return rc_float;
 
+  // We have 64 vector-scalar registers, starting at index 128.
+  if (reg < 64+64+64) return rc_vs;
+
   // Between float regs & stack are the flags regs.
   assert(OptoReg::is_stack(reg) || reg < 64+64+64, "blow up if spilling flags");
 
@@ -1735,6 +1738,50 @@
   if (src_lo == dst_lo && src_hi == dst_hi)
     return size;            // Self copy, no move.
 
+  if (bottom_type()->isa_vect() != NULL) {
+    assert(src_lo_rc == rc_vs || dst_lo_rc == rc_vs, "expected at least src or dst is vector-scalar class");
+    assert(ideal_reg() == Op_VecX, "expected vector-scalar register class");
+    // Memory->Memory Spill.
+    if (src_lo_rc == rc_vs && dst_lo_rc == rc_vs) {
+      int src_offset = ra_->reg2offset(src_lo);
+      int dst_offset = ra_->reg2offset(dst_lo);
+      if (cbuf) {
+        MacroAssembler _masm(cbuf);
+        __ ld(R0, src_offset, R1_SP);
+        __ std(R0, dst_offset, R1_SP);
+        __ ld(R0, src_offset+8, R1_SP);
+        __ std(R0, dst_offset+8, R1_SP);
+      }
+      size += 16;
+    }
+    // VectorSRegister->Memory Spill.
+    else if (src_lo_rc == rc_vs && dst_lo_rc == rc_stack) {
+      VectorSRegister Rsrc = as_VectorSRegister(Matcher::_regEncode[src_lo]);
+      int dst_offset = ra_->reg2offset(dst_lo);
+      if (cbuf) {
+        MacroAssembler _masm(cbuf);
+        __ addi(R0, R1_SP, dst_offset);
+        __ stxvd2x(Rsrc, R0);
+      }
+      size += 8;
+    }
+    // Memory->VectorSRegister Spill.
+    else if (src_lo_rc == rc_stack && dst_lo_rc == rc_vs) {
+      VectorSRegister Rdst = as_VectorSRegister(Matcher::_regEncode[dst_lo]);
+      int src_offset = ra_->reg2offset(src_lo);
+      if (cbuf) {
+        MacroAssembler _masm(cbuf);
+        __ addi(R0, R1_SP, src_offset);
+        __ lxvd2x(Rdst, R0);
+      }
+      size += 8;
+    }
+    else {
+      ShouldNotReachHere(); // No VSR spill.
+    }
+    return size;
+  }
+
   // --------------------------------------
   // Memory->Memory Spill. Use R0 to hold the value.
   if (src_lo_rc == rc_stack && dst_lo_rc == rc_stack) {
@@ -3533,8 +3580,8 @@
 
     loadConLReplicatedNodesTuple loadConLNodes =
       loadConLReplicatedNodesTuple_create(C, ra_, n_toc, op_repl, op_dst, op_zero,
-                                          OptoReg::Name(R20_H_num), OptoReg::Name(R20_num),
-                                          OptoReg::Name(VSR11_num), OptoReg::Name(VSR10_num));
+                                          OptoReg::Name(R19_H_num), OptoReg::Name(R19_num),
+                                          ra_->get_reg_second(this), ra_->get_reg_first(this));
 
     // Push new nodes.
     if (loadConLNodes._large_hi) { nodes->push(loadConLNodes._large_hi); }