// NOTE(review): fragment — the enclosing function's signature and the
// declarations of n, k, val, x, nk_reg, n_regs and blk_adjust lie above this
// view. Visible behavior: scan every machine register for another node ('vv')
// that already holds the same value as 'val' — either the identical node, or
// a Mach node producing the same constant (same bottom_type and match rule) —
// and hand the use n->in(k) to that prior copy via use_prior_register().
// Presumably this lets the now-redundant copy feeding n->in(k) be elided;
// confirm against the full function above this chunk.
291 // Also handle duplicate copies here.
// 't' is non-NULL only for constants; it gates the "same constant in another
// register" comparison below.
292 const Type *t = val->is_Con() ? val->bottom_type() : NULL;
293
294 // Scan all registers to see if this value is around already
295 for( uint reg = 0; reg < (uint)_max_reg; reg++ ) {
296 if (reg == (uint)nk_reg) {
297 // Found ourselves so check if there is only one user of this
298 // copy and keep on searching for a better copy if so.
299 bool ignore_self = true;
300 x = n->in(k);
// Walk the def-use out-edges of x: if every user is the same node ('first'),
// x is single-user and we skip our own register to keep looking for a
// better copy elsewhere.
301 DUIterator_Fast imax, i = x->fast_outs(imax);
302 Node* first = x->fast_out(i); i++;
303 while (i < imax && ignore_self) {
304 Node* use = x->fast_out(i); i++;
305 if (use != first) ignore_self = false;
306 }
307 if (ignore_self) continue;
308 }
309
310 Node *vv = value[reg];
// Multi-register values (doubles): only consider the odd (high) half of an
// aligned pair, and require both halves to map to the same reaching def.
311 if (n_regs > 1) { // Doubles check for aligned-adjacent pair
312 if( (reg&1)==0 ) continue; // Wrong half of a pair
313 if( vv != value[reg-1] ) continue; // Not a complete pair
314 }
315 if( vv == val || // Got a direct hit?
316 (t && vv && vv->bottom_type() == t && vv->is_Mach() &&
317 vv->as_Mach()->rule() == val->as_Mach()->rule()) ) { // Or same constant?
318 assert( !n->is_Phi(), "cannot change registers at a Phi so easily" );
// Only retarget the use when it does not degrade: the current input is a
// stack slot (CISC load), the candidate is a real register, or the old
// spill-load has exactly this one use left.
319 if( OptoReg::is_stack(nk_reg) || // CISC-loading from stack OR
320 OptoReg::is_reg(reg) || // turning into a register use OR
321 regnd[reg]->outcnt()==1 ) { // last use of a spill-load turns into a CISC use
322 blk_adjust += use_prior_register(n,k,regnd[reg],current_block,value,regnd);
323 if( n->in(k) == regnd[reg] ) // Success! Quit trying
324 return blk_adjust;
325 } // End of if not degrading to a stack
326 } // End of if found value in another register
327 } // End of scan all machine registers
328 return blk_adjust;
329 }
330
331
332 //
333 // Check if nreg already contains the constant value val. Normal copy
// NOTE(review): fragment — both the start and the end of the enclosing
// function are outside this view (the surrounding declarations of n, b, j,
// value and regnd are not visible here).
// Visible behavior: for each input edge of n, seed/refresh the per-register
// reaching-def maps ('value' = best known defining value, 'regnd' = node
// currently in the register), then call elide_copy() on every input edge to
// remove redundant copies.
556 uint k;
557 for( k = 1; k < n->req(); k++ ) {
558 Node *def = n->in(k); // n->in(k) is a USE; def is the DEF for this USE
559 guarantee(def != NULL, "no disconnected nodes at this point");
560 uint useidx = n2lidx(def); // useidx is the live range index for this USE
561
// useidx == 0 means no live range for this input — nothing to record.
562 if( useidx ) {
563 OptoReg::Name ureg = lrgs(useidx).reg();
// Only seed the register's reaching-def info once per block pass.
564 if( !value[ureg] ) {
565 int idx; // Skip occasional useless copy
// Chase through copies whose source lives in the same register 'ureg' —
// such copies move nothing and can be looked through.
566 while( (idx=def->is_Copy()) != 0 &&
567 def->in(idx) != NULL && // NULL should not happen
568 ureg == lrgs(n2lidx(def->in(idx))).reg() )
569 def = def->in(idx);
570 Node *valdef = skip_copies(def); // tighten up val through non-useless copies
571 value.map(ureg,valdef); // record improved reaching-def info
572 regnd.map(ureg, def);
573 // Record other half of doubles
574 uint def_ideal_reg = def->ideal_reg();
575 int n_regs = RegMask::num_registers(def_ideal_reg);
// NOTE(review): is_vec is computed but never read anywhere in its visible
// scope (through the closing brace at original line 586) — candidate for
// removal.
576 bool is_vec = RegMask::is_vector(def_ideal_reg);
// Walk downward from ureg and tag the lower slots of a multi-register
// value with the same reaching def, when the live range's mask allows it.
577 for (int l = 1; l < n_regs; l++) {
578 OptoReg::Name ureg_lo = OptoReg::add(ureg,-l);
579 if (!value[ureg_lo] &&
580 (!RegMask::can_represent(ureg_lo) ||
581 lrgs(useidx).mask().Member(ureg_lo))) { // Nearly always adjacent
582 value.map(ureg_lo,valdef); // record improved reaching-def info
583 regnd.map(ureg_lo, def);
584 }
585 }
586 }
587 }
588 }
589
// two_adr: input index that must share a register with the def (two-address
// instructions); 0 when none. Passed to elide_copy so that edge is treated
// specially (two_adr != k).
590 const uint two_adr = n->is_Mach() ? n->as_Mach()->two_adr() : 0;
591
592 // Remove copies along input edges
593 for( k = 1; k < n->req(); k++ )
594 j -= elide_copy( n, k, b, value, regnd, two_adr!=k );
595
596 // Unallocated Nodes define no registers
|
// NOTE(review): fragment — the enclosing function's signature and the
// declarations of n, k, val, x, nk_reg, n_regs and blk_adjust lie above this
// view. Visible behavior: scan every machine register for another node ('vv')
// that already holds the same value as 'val' — either the identical node, or
// a Mach node producing the same constant (same bottom_type and match rule) —
// and hand the use n->in(k) to that prior copy via use_prior_register().
// Presumably this lets the now-redundant copy feeding n->in(k) be elided;
// confirm against the full function above this chunk.
291 // Also handle duplicate copies here.
// 't' is non-NULL only for constants; it gates the "same constant in another
// register" comparison below.
292 const Type *t = val->is_Con() ? val->bottom_type() : NULL;
293
294 // Scan all registers to see if this value is around already
295 for( uint reg = 0; reg < (uint)_max_reg; reg++ ) {
296 if (reg == (uint)nk_reg) {
297 // Found ourselves so check if there is only one user of this
298 // copy and keep on searching for a better copy if so.
299 bool ignore_self = true;
300 x = n->in(k);
// Walk the def-use out-edges of x: if every user is the same node ('first'),
// x is single-user and we skip our own register to keep looking for a
// better copy elsewhere.
301 DUIterator_Fast imax, i = x->fast_outs(imax);
302 Node* first = x->fast_out(i); i++;
303 while (i < imax && ignore_self) {
304 Node* use = x->fast_out(i); i++;
305 if (use != first) ignore_self = false;
306 }
307 if (ignore_self) continue;
308 }
309
310 Node *vv = value[reg];
// Multi-register values (doubles and vectors): only consider the last slot
// of an aligned n_regs-sized set, and require all slots to carry the same
// value. The (reg&last) mask trick assumes n_regs is a power of two —
// TODO confirm that invariant holds for all ideal register kinds.
311 if (n_regs > 1) { // Doubles and vectors check for aligned-adjacent set
312 uint last = (n_regs-1); // Looking for the last part of a set
313 if ((reg&last) != last) continue; // Wrong part of a set
314 if (!register_contains_value(vv, reg, n_regs, value)) continue; // Different value
315 }
316 if( vv == val || // Got a direct hit?
317 (t && vv && vv->bottom_type() == t && vv->is_Mach() &&
318 vv->as_Mach()->rule() == val->as_Mach()->rule()) ) { // Or same constant?
319 assert( !n->is_Phi(), "cannot change registers at a Phi so easily" );
// Only retarget the use when it does not degrade: the current input is a
// stack slot (CISC load), the candidate is a real register, or the old
// spill-load has exactly this one use left.
320 if( OptoReg::is_stack(nk_reg) || // CISC-loading from stack OR
321 OptoReg::is_reg(reg) || // turning into a register use OR
322 regnd[reg]->outcnt()==1 ) { // last use of a spill-load turns into a CISC use
323 blk_adjust += use_prior_register(n,k,regnd[reg],current_block,value,regnd);
324 if( n->in(k) == regnd[reg] ) // Success! Quit trying
325 return blk_adjust;
326 } // End of if not degrading to a stack
327 } // End of if found value in another register
328 } // End of scan all machine registers
329 return blk_adjust;
330 }
331
332
333 //
334 // Check if nreg already contains the constant value val. Normal copy
// NOTE(review): fragment — both the start and the end of the enclosing
// function are outside this view (the surrounding declarations of n, b, j,
// value and regnd are not visible here).
// Visible behavior: for each input edge of n, seed/refresh the per-register
// reaching-def maps ('value' = best known defining value, 'regnd' = node
// currently in the register), then call elide_copy() on every input edge to
// remove redundant copies.
557 uint k;
558 for( k = 1; k < n->req(); k++ ) {
559 Node *def = n->in(k); // n->in(k) is a USE; def is the DEF for this USE
560 guarantee(def != NULL, "no disconnected nodes at this point");
561 uint useidx = n2lidx(def); // useidx is the live range index for this USE
562
// useidx == 0 means no live range for this input — nothing to record.
563 if( useidx ) {
564 OptoReg::Name ureg = lrgs(useidx).reg();
// Only seed the register's reaching-def info once per block pass.
565 if( !value[ureg] ) {
566 int idx; // Skip occasional useless copy
// Chase through copies whose source lives in the same register 'ureg' —
// such copies move nothing and can be looked through.
567 while( (idx=def->is_Copy()) != 0 &&
568 def->in(idx) != NULL && // NULL should not happen
569 ureg == lrgs(n2lidx(def->in(idx))).reg() )
570 def = def->in(idx);
571 Node *valdef = skip_copies(def); // tighten up val through non-useless copies
572 value.map(ureg,valdef); // record improved reaching-def info
573 regnd.map(ureg, def);
574 // Record other half of doubles
575 uint def_ideal_reg = def->ideal_reg();
576 int n_regs = RegMask::num_registers(def_ideal_reg);
// Walk downward from ureg and tag the lower slots of a multi-register
// value with the same reaching def, when the live range's mask allows it.
577 for (int l = 1; l < n_regs; l++) {
578 OptoReg::Name ureg_lo = OptoReg::add(ureg,-l);
579 if (!value[ureg_lo] &&
580 (!RegMask::can_represent(ureg_lo) ||
581 lrgs(useidx).mask().Member(ureg_lo))) { // Nearly always adjacent
582 value.map(ureg_lo,valdef); // record improved reaching-def info
583 regnd.map(ureg_lo, def);
584 }
585 }
586 }
587 }
588 }
589
// two_adr: input index that must share a register with the def (two-address
// instructions); 0 when none. Passed to elide_copy so that edge is treated
// specially (two_adr != k).
590 const uint two_adr = n->is_Mach() ? n->as_Mach()->two_adr() : 0;
591
592 // Remove copies along input edges
593 for( k = 1; k < n->req(); k++ )
594 j -= elide_copy( n, k, b, value, regnd, two_adr!=k );
595
596 // Unallocated Nodes define no registers
|