
src/share/vm/opto/lcm.cpp

rev 12694 : 8176518: C2: Invalid ImplicitNullChecks with non-protected heap base
Reviewed-by: zmajo


--- old/src/share/vm/opto/lcm.cpp

 237     // If so, only Store operations will trap.
 238     // But a read accessing the base of a heap-based compressed heap will trap.
 239     if (!was_store && needs_explicit_null_check_for_read(val)) {
 240       continue;
 241     }
 242 
 243     // Check that node's control edge is not-null block's head or dominates it,
 244     // otherwise we can't hoist it because there are other control dependencies.
 245     Node* ctrl = mach->in(0);
 246     if (ctrl != NULL && !(ctrl == not_null_block->head() ||
 247         get_block_for_node(ctrl)->dominates(not_null_block))) {
 248       continue;
 249     }
 250 
 251     // check if the offset is not too high for implicit exception
 252     {
 253       intptr_t offset = 0;
 254       const TypePtr *adr_type = NULL;  // Do not need this return value here
 255       const Node* base = mach->get_base_and_disp(offset, adr_type);
 256       if (base == NULL || base == NodeSentinel) {
 257         // Narrow oop address doesn't have base, only index
 258         if( val->bottom_type()->isa_narrowoop() &&
 259             MacroAssembler::needs_explicit_null_check(offset) )
 260           continue;             // Give up if offset is beyond page size
 261         // cannot reason about it; is probably not implicit null exception
 262       } else {
 263         const TypePtr* tptr;
 264         if (UseCompressedOops && (Universe::narrow_oop_shift() == 0 ||
 265                                   Universe::narrow_klass_shift() == 0)) {
 266           // A 32-bit narrow oop can be the base of address expressions
 267           tptr = base->get_ptr_type();
 268         } else {
 269           // only regular oops are expected here
 270           tptr = base->bottom_type()->is_ptr();
 271         }
 272         // Give up if offset is not a compile-time constant
 273         if( offset == Type::OffsetBot || tptr->_offset == Type::OffsetBot )
 274           continue;
 275         offset += tptr->_offset; // correct if base is offset
 276         if( MacroAssembler::needs_explicit_null_check(offset) )
 277           continue;             // Give up if reference is beyond 4K page size
 278       }
 279     }
 280 
 281     // Check ctrl input to see if the null-check dominates the memory op
 282     Block *cb = get_block_for_node(mach);
 283     cb = cb->_idom;             // Always hoist at least 1 block
 284     if( !was_store ) {          // Stores can be hoisted only one block
 285       while( cb->_dom_depth > (block->_dom_depth + 1))
 286         cb = cb->_idom;         // Hoist loads as far as we want
 287       // The non-null-block should dominate the memory op, too. Live
 288       // range spilling will insert a spill in the non-null-block if it
 289       // needs to spill the memory op for an implicit null check.
 290       if (cb->_dom_depth == (block->_dom_depth + 1)) {
 291         if (cb != not_null_block) continue;
 292         cb = cb->_idom;
 293       }
 294     }
 295     if( cb != block ) continue;
 296 
 297     // Found a memory user; see if it can be hoisted to check-block

+++ new/src/share/vm/opto/lcm.cpp

 237     // If so, only Store operations will trap.
 238     // But a read accessing the base of a heap-based compressed heap will trap.
 239     if (!was_store && needs_explicit_null_check_for_read(val)) {
 240       continue;
 241     }
 242 
 243     // Check that node's control edge is not-null block's head or dominates it,
 244     // otherwise we can't hoist it because there are other control dependencies.
 245     Node* ctrl = mach->in(0);
 246     if (ctrl != NULL && !(ctrl == not_null_block->head() ||
 247         get_block_for_node(ctrl)->dominates(not_null_block))) {
 248       continue;
 249     }
 250 
 251     // check if the offset is not too high for implicit exception
 252     {
 253       intptr_t offset = 0;
 254       const TypePtr *adr_type = NULL;  // Do not need this return value here
 255       const Node* base = mach->get_base_and_disp(offset, adr_type);
 256       if (base == NULL || base == NodeSentinel) {
 257         // Narrow oop address doesn't have base, only index.
 258         // Give up if offset is beyond page size or if heap base is not protected.
 259         if (val->bottom_type()->isa_narrowoop() &&
 260             (MacroAssembler::needs_explicit_null_check(offset) ||
 261              !Universe::narrow_oop_use_implicit_null_checks()))
 262           continue;
 263         // cannot reason about it; is probably not implicit null exception
 264       } else {
 265         const TypePtr* tptr;
 266         if (UseCompressedOops && (Universe::narrow_oop_shift() == 0 ||
 267                                   Universe::narrow_klass_shift() == 0)) {
 268           // A 32-bit narrow oop can be the base of address expressions
 269           tptr = base->get_ptr_type();
 270         } else {
 271           // only regular oops are expected here
 272           tptr = base->bottom_type()->is_ptr();
 273         }
 274         // Give up if offset is not a compile-time constant.
 275         if (offset == Type::OffsetBot || tptr->_offset == Type::OffsetBot)
 276           continue;
 277         offset += tptr->_offset; // correct if base is offseted
 278         // Give up if reference is beyond page size.
 279         if (MacroAssembler::needs_explicit_null_check(offset))
 280           continue;
 281         // Give up if base is a decode node and the heap base is not protected.
 282         if (base->is_Mach() && base->as_Mach()->ideal_Opcode() == Op_DecodeN &&
 283             !Universe::narrow_oop_use_implicit_null_checks())
 284           continue;
 285       }
 286     }
 287 
 288     // Check ctrl input to see if the null-check dominates the memory op
 289     Block *cb = get_block_for_node(mach);
 290     cb = cb->_idom;             // Always hoist at least 1 block
 291     if( !was_store ) {          // Stores can be hoisted only one block
 292       while( cb->_dom_depth > (block->_dom_depth + 1))
 293         cb = cb->_idom;         // Hoist loads as far as we want
 294       // The non-null-block should dominate the memory op, too. Live
 295       // range spilling will insert a spill in the non-null-block if it
 296       // needs to spill the memory op for an implicit null check.
 297       if (cb->_dom_depth == (block->_dom_depth + 1)) {
 298         if (cb != not_null_block) continue;
 299         cb = cb->_idom;
 300       }
 301     }
 302     if( cb != block ) continue;
 303 
 304     // Found a memory user; see if it can be hoisted to check-block
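
The two guards added in the new version (lines 259-262 and 281-284) encode the underlying invariant: a NULL narrow oop decodes to the heap base, so a read through it only faults if the page at the heap base is protected, which is what Universe::narrow_oop_use_implicit_null_checks() reports. Below is a minimal, self-contained sketch of that decision, not HotSpot code; the helper names, the global flag, and the 4K trap-page size are illustrative assumptions.

    #include <stdint.h>
    #include <stdio.h>

    static const intptr_t kPageSize = 4096;   // assumed trap-page size
    static bool heap_base_protected = false;  // models narrow_oop_use_implicit_null_checks()

    // Models MacroAssembler::needs_explicit_null_check(): an access that lands
    // past the protected page at address 0 cannot rely on the hardware fault.
    static bool needs_explicit_check_for_offset(intptr_t offset) {
      return offset < 0 || offset >= kPageSize;
    }

    // True if the memory op must keep its explicit null check and therefore
    // must not be turned into an implicit one.
    static bool must_keep_explicit_check(bool is_narrow_oop_address,
                                         bool base_is_decode_n,
                                         intptr_t offset) {
      if (is_narrow_oop_address)  // no base register: address built from the narrow oop
        return needs_explicit_check_for_offset(offset) || !heap_base_protected;
      if (base_is_decode_n)       // a NULL narrow oop decodes to the heap base
        return !heap_base_protected;
      return needs_explicit_check_for_offset(offset);
    }

    int main(void) {
      // With an unprotected heap base, even a small-offset load through a
      // decoded narrow oop must keep its explicit check; this is the case
      // the old code got wrong.
      printf("%d\n", must_keep_explicit_check(false, true, 8)); // prints 1
      return 0;
    }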
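
The hoisting walk after the offset checks (lines 289-302 above) is untouched by the patch. The toy model below, with a hypothetical Block carrying only the _idom and _dom_depth fields the walk reads, shows the asymmetry: a store may move exactly one block, while a load climbs the dominator tree and must pass through the not-null block just below the check block.

    #include <cstdio>

    // Hypothetical stand-in for C2's Block: just the two fields the walk reads.
    struct Block {
      Block* _idom;       // immediate dominator
      int    _dom_depth;  // depth in the dominator tree
    };

    // Mirrors the walk in lines 289-302 above: can the memory op in 'cb' be
    // hoisted into the null-check block 'check'?
    static bool can_hoist_to(Block* cb, Block* check, Block* not_null,
                             bool was_store) {
      cb = cb->_idom;                    // always hoist at least one block
      if (!was_store) {                  // stores may move only that one block
        while (cb->_dom_depth > check->_dom_depth + 1)
          cb = cb->_idom;                // hoist loads as far as we want
        if (cb->_dom_depth == check->_dom_depth + 1) {
          if (cb != not_null) return false;
          cb = cb->_idom;
        }
      }
      return cb == check;
    }

    int main() {
      Block check    = { &check, 0 };    // block ending in the null check
      Block not_null = { &check, 1 };    // its not-null successor
      Block use      = { &not_null, 2 }; // block holding the memory op
      // A load two dominator levels down hoists fine; a store in the same
      // block does not.
      std::printf("%d %d\n",
                  can_hoist_to(&use, &check, &not_null, false),  // 1
                  can_hoist_to(&use, &check, &not_null, true));  // 0
      return 0;
    }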

