
src/share/vm/opto/lcm.cpp

rev 12685 : 8176518: [9] C2: Invalid ImplicitNullChecks with non-protected heap base
Reviewed-by:


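For context: these hunks are from lcm.cpp's implicit-null-check pass (PhaseCFG::implicit_null_check), which lets a memory access stand in for an explicit null check by relying on the access faulting at runtime. With compressed oops, a null narrow oop decodes to the heap base itself, so a null-based access only faults if the page at the heap base is access-protected; when it is not (Universe::narrow_oop_use_implicit_null_checks() returns false), folding the null check into the access is unsound. Below is a minimal sketch of that decode behavior, assuming example values for the heap base and shift; it is an illustration, not HotSpot code. Two versions of the affected range follow the sketch: first the code before the fix, then the patched code.

    #include <cstdint>

    // Sketch only, not HotSpot code: heap_base and narrow_oop_shift are
    // assumed example values standing in for Universe::narrow_oop_base()
    // and Universe::narrow_oop_shift().
    static const uintptr_t heap_base = 0x0000000800000000ULL;
    static const int       narrow_oop_shift = 3;

    static uintptr_t decode(uint32_t narrow_oop) {
      // DecodeN: a null narrow oop (0) decodes to heap_base itself.
      return heap_base + ((uintptr_t)narrow_oop << narrow_oop_shift);
    }

    static bool null_access_traps(bool heap_base_page_protected) {
      // A load from decode(0) hits the heap base page; it raises SIGSEGV
      // (and can substitute for an explicit null check) only if that
      // page is protected.
      return heap_base_page_protected;
    }
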
 238     // But a read accessing the base of a heap-based compressed heap will trap.
 239     if (!was_store && needs_explicit_null_check_for_read(val)) {
 240       continue;
 241     }
 242 
 243     // Check that the node's control edge is the not-null block's head or dominates it;
 244     // otherwise we can't hoist it, because there are other control dependencies.
 245     Node* ctrl = mach->in(0);
 246     if (ctrl != NULL && !(ctrl == not_null_block->head() ||
 247         get_block_for_node(ctrl)->dominates(not_null_block))) {
 248       continue;
 249     }
 250 
 251     // Check that the offset is not too large for an implicit exception
 252     {
 253       intptr_t offset = 0;
 254       const TypePtr *adr_type = NULL;  // Do not need this return value here
 255       const Node* base = mach->get_base_and_disp(offset, adr_type);
 256       if (base == NULL || base == NodeSentinel) {
 257         // Narrow oop address doesn't have a base, only an index
 258         if( val->bottom_type()->isa_narrowoop() &&
 259             MacroAssembler::needs_explicit_null_check(offset) )
 260           continue;             // Give up if offset is beyond page size
 261         // cannot reason about it; probably not an implicit null exception
 262       } else {
 263         const TypePtr* tptr;
 264         if (UseCompressedOops && (Universe::narrow_oop_shift() == 0 ||
 265                                   Universe::narrow_klass_shift() == 0)) {
 266           // 32-bit narrow oop can be the base of address expressions
 267           tptr = base->get_ptr_type();
 268         } else {
 269           // only regular oops are expected here
 270           tptr = base->bottom_type()->is_ptr();
 271         }
 272         // Give up if offset is not a compile-time constant
 273         if( offset == Type::OffsetBot || tptr->_offset == Type::OffsetBot )
 274           continue;
 275         offset += tptr->_offset; // adjust if the base itself is offset
 276         if( MacroAssembler::needs_explicit_null_check(offset) )
 277           continue;             // Give up if reference is beyond 4K page size
 278       }
 279     }
 280 
 281     // Check ctrl input to see if the null-check dominates the memory op
 282     Block *cb = get_block_for_node(mach);
 283     cb = cb->_idom;             // Always hoist at least 1 block
 284     if( !was_store ) {          // Stores can be hoisted only one block
 285       while( cb->_dom_depth > (block->_dom_depth + 1))
 286         cb = cb->_idom;         // Hoist loads as far as we want
 287       // The non-null-block should dominate the memory op, too. Live
 288       // range spilling will insert a spill in the non-null-block if it
 289       // needs to spill the memory op for an implicit null check.
 290       if (cb->_dom_depth == (block->_dom_depth + 1)) {
 291         if (cb != not_null_block) continue;
 292         cb = cb->_idom;
 293       }
 294     }
 295     if( cb != block ) continue;
 296 
 297     // Found a memory user; see if it can be hoisted to check-block

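The version above gives up whenever the constant displacement (the node's displacement plus any constant offset folded into the base's pointer type) might land outside the protected page, since the access must be guaranteed to fault for the implicit check to be valid. A simplified model of that predicate follows, assuming a single protected 4K page at address 0; the real MacroAssembler::needs_explicit_null_check is platform-specific.

    #include <cstdint>

    // Simplified model, not the real MacroAssembler implementation:
    // assume exactly one protected 4K page at address 0.
    static const intptr_t page_size = 4096;

    static bool needs_explicit_null_check_model(intptr_t offset) {
      // An access at [null + offset] is guaranteed to fault only while
      // the displacement stays inside the protected page; anything else
      // requires an explicit null test.
      return offset < 0 || offset >= page_size;
    }

The patched version of the same range follows; it adds two bail-outs keyed on Universe::narrow_oop_use_implicit_null_checks().
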
 238     // But a read accessing the base of a heap-based compressed heap will trap.
 239     if (!was_store && needs_explicit_null_check_for_read(val)) {
 240       continue;
 241     }
 242 
 243     // Check that the node's control edge is the not-null block's head or dominates it;
 244     // otherwise we can't hoist it, because there are other control dependencies.
 245     Node* ctrl = mach->in(0);
 246     if (ctrl != NULL && !(ctrl == not_null_block->head() ||
 247         get_block_for_node(ctrl)->dominates(not_null_block))) {
 248       continue;
 249     }
 250 
 251     // Check that the offset is not too large for an implicit exception
 252     {
 253       intptr_t offset = 0;
 254       const TypePtr *adr_type = NULL;  // Do not need this return value here
 255       const Node* base = mach->get_base_and_disp(offset, adr_type);
 256       if (base == NULL || base == NodeSentinel) {
 257         // Narrow oop address doesn't have a base, only an index
 258         if (val->bottom_type()->isa_narrowoop() &&
 259             (MacroAssembler::needs_explicit_null_check(offset) ||
 260              !Universe::narrow_oop_use_implicit_null_checks()))
 261           continue;             // Give up if offset is beyond page size
 262         // cannot reason about it; probably not an implicit null exception
 263       } else {
 264         const TypePtr* tptr;
 265         if (UseCompressedOops && (Universe::narrow_oop_shift() == 0 ||
 266                                   Universe::narrow_klass_shift() == 0)) {
 267           // 32-bit narrow oop can be the base of address expressions
 268           tptr = base->get_ptr_type();
 269         } else {
 270           // only regular oops are expected here
 271           tptr = base->bottom_type()->is_ptr();
 272         }
 273         // Give up if offset is not a compile-time constant
 274         if (offset == Type::OffsetBot || tptr->_offset == Type::OffsetBot)
 275           continue;
 276         offset += tptr->_offset; // adjust if the base itself is offset
 277         if (MacroAssembler::needs_explicit_null_check(offset))
 278           continue;             // Give up if reference is beyond 4K page size
 279         // Access to non-protected heap base
 280         if (base->is_Mach() && base->as_Mach()->ideal_Opcode() == Op_DecodeN &&
 281             !Universe::narrow_oop_use_implicit_null_checks())
 282           continue;
 283       }
 284     }
 285 
 286     // Check ctrl input to see if the null-check dominates the memory op
 287     Block *cb = get_block_for_node(mach);
 288     cb = cb->_idom;             // Always hoist at least 1 block
 289     if( !was_store ) {          // Stores can be hoisted only one block
 290       while( cb->_dom_depth > (block->_dom_depth + 1))
 291         cb = cb->_idom;         // Hoist loads as far as we want
 292       // The non-null-block should dominate the memory op, too. Live
 293       // range spilling will insert a spill in the non-null-block if it
 294       // needs to spill the memory op for an implicit null check.
 295       if (cb->_dom_depth == (block->_dom_depth + 1)) {
 296         if (cb != not_null_block) continue;
 297         cb = cb->_idom;
 298       }
 299     }
 300     if( cb != block ) continue;
 301 
 302     // Found a memory user; see if it can be hoisted to check-block
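
The two additions (lines 259-260 and 279-282 of the patched version) cover the case where the heap base page is not protected: a narrow-oop access with no base register, and an access whose base is a DecodeN of the tested value, both resolve a null narrow oop to the heap base, so neither is guaranteed to fault. A condensed model of the added conditions, ignoring the separate offset test; the helper name is hypothetical:

    // Condensed model of the added guards; the predicate name is made up,
    // but the conditions mirror the listing above.
    static bool must_keep_explicit_check(bool is_narrow_oop_without_base,
                                         bool base_is_decoden,
                                         bool heap_base_protected) {
      if (heap_base_protected)
        return false;  // decode(null) lands on a protected page and traps
      // Without protection, an address derived from decode(null) -- a
      // base-less narrow-oop access or a DecodeN base -- reads the live
      // heap base page instead of trapping.
      return is_narrow_oop_without_base || base_is_decoden;
    }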


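Finally, the hoisting walk at lines 286-300 climbs the immediate-dominator chain: loads may rise until just below the null-check block, while stores move up exactly one block. A toy version of that walk, with a hypothetical Block struct mirroring the _idom and _dom_depth fields; the not_null_block spill handling is omitted.

    struct Block {             // toy stand-in for the C2 Block
      Block* idom;             // immediate dominator
      int    dom_depth;        // depth in the dominator tree
    };

    static Block* hoist_target(Block* mem_block, Block* check_block,
                               bool is_store) {
      Block* cb = mem_block->idom;         // always hoist at least one block
      if (!is_store) {                     // stores may move only one block
        while (cb->dom_depth > check_block->dom_depth + 1)
          cb = cb->idom;                   // loads climb as far as allowed
      }
      return cb;  // usable only if the walk reached the check block itself
    }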