src/hotspot/share/opto/compile.cpp

 453 //------------------------------CompileWrapper---------------------------------
 454 class CompileWrapper : public StackObj {
 455   Compile *const _compile;
 456  public:
 457   CompileWrapper(Compile* compile);
 458 
 459   ~CompileWrapper();
 460 };
 461 
 462 CompileWrapper::CompileWrapper(Compile* compile) : _compile(compile) {
 463   // the Compile* pointer is stored in the current ciEnv:
 464   ciEnv* env = compile->env();
 465   assert(env == ciEnv::current(), "must already be a ciEnv active");
 466   assert(env->compiler_data() == NULL, "compile already active?");
 467   env->set_compiler_data(compile);
 468   assert(compile == Compile::current(), "sanity");
 469 
 470   compile->set_type_dict(NULL);
 471   compile->set_clone_map(new Dict(cmpkey, hashkey, _compile->comp_arena()));
 472   compile->clone_map().set_clone_idx(0);
 473   compile->set_type_hwm(NULL);
 474   compile->set_type_last_size(0);
 475   compile->set_last_tf(NULL, NULL);
 476   compile->set_indexSet_arena(NULL);
 477   compile->set_indexSet_free_block_list(NULL);
 478   compile->init_type_arena();
 479   Type::Initialize(compile);
 480   _compile->set_scratch_buffer_blob(NULL);
 481   _compile->begin_method();
 482   _compile->clone_map().set_debug(_compile->has_method() && _compile->directive()->CloneMapDebugOption);
 483 }
 484 CompileWrapper::~CompileWrapper() {
 485   _compile->end_method();
 486   if (_compile->scratch_buffer_blob() != NULL)
 487     BufferBlob::free(_compile->scratch_buffer_blob());
 488   _compile->env()->set_compiler_data(NULL);
 489 }
 490 
 491 
 492 //----------------------------print_compile_messages---------------------------
 493 void Compile::print_compile_messages() {


1477   // A known instance (scalarizable allocation) aliases only with itself.
1478   bool is_known_inst = tj->isa_oopptr() != NULL &&
1479                        tj->is_oopptr()->is_known_instance();
1480 
1481   // Process weird unsafe references.
1482   if (offset == Type::OffsetBot && (tj->isa_instptr() /*|| tj->isa_klassptr()*/)) {
1483     bool default_value_load = EnableValhalla && tj->is_instptr()->klass() == ciEnv::current()->Class_klass();
1484     assert(InlineUnsafeOps || default_value_load, "indeterminate pointers come only from unsafe ops");
1485     assert(!is_known_inst, "scalarizable allocation should not have unsafe references");
1486     tj = TypeOopPtr::BOTTOM;
1487     ptr = tj->ptr();
1488     offset = tj->offset();
1489   }
1490 
1491   // Array pointers need some flattening
1492   const TypeAryPtr *ta = tj->isa_aryptr();
1493   if (ta && ta->is_stable()) {
1494     // Erase stability property for alias analysis.
1495     tj = ta = ta->cast_to_stable(false);
1496   }









1497   if( ta && is_known_inst ) {
1498     if ( offset != Type::OffsetBot &&
1499          offset > arrayOopDesc::length_offset_in_bytes() ) {
1500       offset = Type::OffsetBot; // Flatten constant access into array body only
1501       tj = ta = TypeAryPtr::make(ptr, ta->ary(), ta->klass(), true, Type::Offset(offset), ta->field_offset(), ta->instance_id());
1502     }
1503   } else if( ta && _AliasLevel >= 2 ) {
1504     // For arrays indexed by constant indices, we flatten the alias
1505     // space to include all of the array body.  Only the header, klass
1506     // and array length can be accessed un-aliased.
1507     // For a flattened value type array, each field has its own slice,
1508     // so we must include the field offset.
1509     if( offset != Type::OffsetBot ) {
1510       if( ta->const_oop() ) { // MethodData* or Method*
1511         offset = Type::OffsetBot;   // Flatten constant access into array body
1512         tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),ta->ary(),ta->klass(),false,Type::Offset(offset), ta->field_offset());
1513       } else if( offset == arrayOopDesc::length_offset_in_bytes() ) {
1514         // range is OK as-is.
1515         tj = ta = TypeAryPtr::RANGE;
1516       } else if( offset == oopDesc::klass_offset_in_bytes() ) {


3402   case Op_LoadUS:
3403   case Op_LoadI:
3404   case Op_LoadKlass:
3405   case Op_LoadNKlass:
3406   case Op_LoadL:
3407   case Op_LoadL_unaligned:
3408   case Op_LoadPLocked:
3409   case Op_LoadP:
3410   case Op_LoadN:
3411   case Op_LoadRange:
3412   case Op_LoadS: {
3413   handle_mem:
3414 #ifdef ASSERT
3415     if( VerifyOptoOopOffsets ) {
3416       MemNode* mem  = n->as_Mem();
3417       // Check to see if address types have grounded out somehow.
3418       const TypeInstPtr *tp = mem->in(MemNode::Address)->bottom_type()->isa_instptr();
3419       assert( !tp || oop_offset_is_sane(tp), "" );
3420     }
3421 #endif
3422     if (nop == Op_LoadKlass || nop == Op_LoadNKlass) {
3423       const TypeKlassPtr* tk = n->bottom_type()->make_ptr()->is_klassptr();
3424       assert(!tk->klass_is_exact(), "should have been folded");
3425       if (tk->klass()->is_obj_array_klass() || tk->klass()->is_java_lang_Object()) {
3426         bool maybe_value_array = tk->klass()->is_java_lang_Object();
3427         if (!maybe_value_array) {
3428           ciArrayKlass* ak = tk->klass()->as_array_klass();
3429           ciKlass* elem = ak->element_klass();
3430           maybe_value_array = elem->is_java_lang_Object() || elem->is_interface() || elem->is_valuetype();
3431         }
3432         if (maybe_value_array) {
3433           // Array load klass needs to filter out property bits (but not
3434           // GetNullFreePropertyNode which needs to extract the null free
3435           // bits)
3436           uint last = unique();
3437           Node* pointer = NULL;
3438           if (nop == Op_LoadKlass) {
3439             Node* cast = new CastP2XNode(NULL, n);
3440             Node* masked = new LShiftXNode(cast, new ConINode(TypeInt::make(oopDesc::storage_props_nof_bits)));
3441             masked = new RShiftXNode(masked, new ConINode(TypeInt::make(oopDesc::storage_props_nof_bits)));
3442             pointer = new CastX2PNode(masked);
3443             pointer = new CheckCastPPNode(NULL, pointer, n->bottom_type());
3444           } else {
3445             Node* cast = new CastN2INode(n);
3446             Node* masked = new AndINode(cast, new ConINode(TypeInt::make(oopDesc::compressed_klass_mask())));
3447             pointer = new CastI2NNode(masked, n->bottom_type());
3448           }
3449           for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
3450             Node* u = n->fast_out(i);
3451             if (u->_idx < last && u->Opcode() != Op_GetNullFreeProperty) {
3452               int nb = u->replace_edge(n, pointer);
3453               --i, imax -= nb;
3454             }
3455           }
3456         }
3457       }
3458     }
3459     break;
3460   }
3461 
3462   case Op_AddP: {               // Assert sane base pointers
3463     Node *addp = n->in(AddPNode::Address);
3464     assert( !addp->is_AddP() ||
3465             addp->in(AddPNode::Base)->is_top() || // Top OK for allocation
3466             addp->in(AddPNode::Base) == n->in(AddPNode::Base),
3467             "Base pointers must match (addp %u)", addp->_idx );
3468 #ifdef _LP64
3469     if ((UseCompressedOops || UseCompressedClassPointers) &&
3470         addp->Opcode() == Op_ConP &&
3471         addp == n->in(AddPNode::Base) &&
3472         n->in(AddPNode::Offset)->is_Con()) {
3473       // Whether the transformation of ConP to ConN+DecodeN is beneficial depends
3474       // on the platform and on the compressed oops mode.




 453 //------------------------------CompileWrapper---------------------------------
 454 class CompileWrapper : public StackObj {
 455   Compile *const _compile;
 456  public:
 457   CompileWrapper(Compile* compile);
 458 
 459   ~CompileWrapper();
 460 };
 461 
 462 CompileWrapper::CompileWrapper(Compile* compile) : _compile(compile) {
 463   // the Compile* pointer is stored in the current ciEnv:
 464   ciEnv* env = compile->env();
 465   assert(env == ciEnv::current(), "must already be a ciEnv active");
 466   assert(env->compiler_data() == NULL, "compile already active?");
 467   env->set_compiler_data(compile);
 468   assert(compile == Compile::current(), "sanity");
 469 
 470   compile->set_type_dict(NULL);
 471   compile->set_clone_map(new Dict(cmpkey, hashkey, _compile->comp_arena()));
 472   compile->clone_map().set_clone_idx(0);

 473   compile->set_type_last_size(0);
 474   compile->set_last_tf(NULL, NULL);
 475   compile->set_indexSet_arena(NULL);
 476   compile->set_indexSet_free_block_list(NULL);
 477   compile->init_type_arena();
 478   Type::Initialize(compile);
 479   _compile->set_scratch_buffer_blob(NULL);
 480   _compile->begin_method();
 481   _compile->clone_map().set_debug(_compile->has_method() && _compile->directive()->CloneMapDebugOption);
 482 }
 483 CompileWrapper::~CompileWrapper() {
 484   _compile->end_method();
 485   if (_compile->scratch_buffer_blob() != NULL)
 486     BufferBlob::free(_compile->scratch_buffer_blob());
 487   _compile->env()->set_compiler_data(NULL);
 488 }
 489 
 490 
 491 //----------------------------print_compile_messages---------------------------
 492 void Compile::print_compile_messages() {
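
The CompileWrapper above is a stack-allocated scope guard: constructing it publishes the Compile* through the active ciEnv (so Compile::current() works for the duration of the compilation), and destroying it frees the scratch buffer and clears the env's compiler data. A minimal standalone sketch of that RAII pattern, using simplified stand-in types (Env, Compilation, and their field names are illustrative, not the real HotSpot classes):

    #include <cassert>

    struct Env {
      void* compiler_data = nullptr;   // plays the role of ciEnv::compiler_data()
    };

    struct Compilation {
      Env*  env;
      void* scratch_blob = nullptr;    // plays the role of the scratch BufferBlob
    };

    // Scope guard: registers the compilation on entry and cleans up on exit,
    // even if the enclosed code returns early.
    class CompilationScope {
      Compilation* const _c;
     public:
      explicit CompilationScope(Compilation* c) : _c(c) {
        assert(c->env->compiler_data == nullptr && "compile already active?");
        c->env->compiler_data = c;     // make this compilation "current"
      }
      ~CompilationScope() {
        _c->scratch_blob = nullptr;    // the real code calls BufferBlob::free() here
        _c->env->compiler_data = nullptr;
      }
    };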


1476   // A known instance (scalarizable allocation) aliases only with itself.
1477   bool is_known_inst = tj->isa_oopptr() != NULL &&
1478                        tj->is_oopptr()->is_known_instance();
1479 
1480   // Process weird unsafe references.
1481   if (offset == Type::OffsetBot && (tj->isa_instptr() /*|| tj->isa_klassptr()*/)) {
1482     bool default_value_load = EnableValhalla && tj->is_instptr()->klass() == ciEnv::current()->Class_klass();
1483     assert(InlineUnsafeOps || default_value_load, "indeterminate pointers come only from unsafe ops");
1484     assert(!is_known_inst, "scalarizable allocation should not have unsafe references");
1485     tj = TypeOopPtr::BOTTOM;
1486     ptr = tj->ptr();
1487     offset = tj->offset();
1488   }
1489 
1490   // Array pointers need some flattening
1491   const TypeAryPtr *ta = tj->isa_aryptr();
1492   if (ta && ta->is_stable()) {
1493     // Erase stability property for alias analysis.
1494     tj = ta = ta->cast_to_stable(false);
1495   }
1496   if (ta && ta->is_not_flat()) {
1497     // Erase not flat property for alias analysis.
1498     tj = ta = ta->cast_to_not_flat(false);
1499   }
1500   if (ta && ta->is_not_null_free()) {
1501     // Erase not null free property for alias analysis.
1502     tj = ta = ta->cast_to_not_null_free(false);
1503   }
1504 
1505   if( ta && is_known_inst ) {
1506     if ( offset != Type::OffsetBot &&
1507          offset > arrayOopDesc::length_offset_in_bytes() ) {
1508       offset = Type::OffsetBot; // Flatten constant access into array body only
1509       tj = ta = TypeAryPtr::make(ptr, ta->ary(), ta->klass(), true, Type::Offset(offset), ta->field_offset(), ta->instance_id());
1510     }
1511   } else if( ta && _AliasLevel >= 2 ) {
1512     // For arrays indexed by constant indices, we flatten the alias
1513     // space to include all of the array body.  Only the header, klass
1514     // and array length can be accessed un-aliased.
1515     // For a flattened value type array, each field has its own slice,
1516     // so we must include the field offset.
1517     if( offset != Type::OffsetBot ) {
1518       if( ta->const_oop() ) { // MethodData* or Method*
1519         offset = Type::OffsetBot;   // Flatten constant access into array body
1520         tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),ta->ary(),ta->klass(),false,Type::Offset(offset), ta->field_offset());
1521       } else if( offset == arrayOopDesc::length_offset_in_bytes() ) {
1522         // range is OK as-is.
1523         tj = ta = TypeAryPtr::RANGE;
1524       } else if( offset == oopDesc::klass_offset_in_bytes() ) {


3410   case Op_LoadUS:
3411   case Op_LoadI:
3412   case Op_LoadKlass:
3413   case Op_LoadNKlass:
3414   case Op_LoadL:
3415   case Op_LoadL_unaligned:
3416   case Op_LoadPLocked:
3417   case Op_LoadP:
3418   case Op_LoadN:
3419   case Op_LoadRange:
3420   case Op_LoadS: {
3421   handle_mem:
3422 #ifdef ASSERT
3423     if( VerifyOptoOopOffsets ) {
3424       MemNode* mem  = n->as_Mem();
3425       // Check to see if address types have grounded out somehow.
3426       const TypeInstPtr *tp = mem->in(MemNode::Address)->bottom_type()->isa_instptr();
3427       assert( !tp || oop_offset_is_sane(tp), "" );
3428     }
3429 #endif
3430     if (EnableValhalla && (nop == Op_LoadKlass || nop == Op_LoadNKlass)) {
3431       const TypeKlassPtr* tk = n->bottom_type()->make_ptr()->is_klassptr();
3432       assert(!tk->klass_is_exact(), "should have been folded");
3433       ciKlass* klass = tk->klass();
3434       bool maybe_value_array = klass->is_java_lang_Object();
3435       if (!maybe_value_array && klass->is_obj_array_klass()) {
3436         klass = klass->as_array_klass()->element_klass();
3437         maybe_value_array = klass->is_java_lang_Object() || klass->is_interface() || klass->is_valuetype();

3438       }
3439       if (maybe_value_array) {
3440         // Array load klass needs to filter out property bits (but not
3441         // GetNullFreePropertyNode which needs to extract the null free bits)

3442         uint last = unique();
3443         Node* pointer = NULL;
3444         if (nop == Op_LoadKlass) {
3445           Node* cast = new CastP2XNode(NULL, n);
3446           Node* masked = new LShiftXNode(cast, new ConINode(TypeInt::make(oopDesc::storage_props_nof_bits)));
3447           masked = new RShiftXNode(masked, new ConINode(TypeInt::make(oopDesc::storage_props_nof_bits)));
3448           pointer = new CastX2PNode(masked);
3449           pointer = new CheckCastPPNode(NULL, pointer, n->bottom_type());
3450         } else {
3451           Node* cast = new CastN2INode(n);
3452           Node* masked = new AndINode(cast, new ConINode(TypeInt::make(oopDesc::compressed_klass_mask())));
3453           pointer = new CastI2NNode(masked, n->bottom_type());
3454         }
3455         for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
3456           Node* u = n->fast_out(i);
3457           if (u->_idx < last && u->Opcode() != Op_GetNullFreeProperty) {
3458             int nb = u->replace_edge(n, pointer);
3459             --i, imax -= nb;

3460           }
3461         }
3462       }
3463     }
3464     break;
3465   }
3466 
3467   case Op_AddP: {               // Assert sane base pointers
3468     Node *addp = n->in(AddPNode::Address);
3469     assert( !addp->is_AddP() ||
3470             addp->in(AddPNode::Base)->is_top() || // Top OK for allocation
3471             addp->in(AddPNode::Base) == n->in(AddPNode::Base),
3472             "Base pointers must match (addp %u)", addp->_idx );
3473 #ifdef _LP64
3474     if ((UseCompressedOops || UseCompressedClassPointers) &&
3475         addp->Opcode() == Op_ConP &&
3476         addp == n->in(AddPNode::Base) &&
3477         n->in(AddPNode::Offset)->is_Con()) {
3478       // Whether the transformation of ConP to ConN+DecodeN is beneficial depends
3479       // on the platform and on the compressed oops mode.
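
For the LoadKlass/LoadNKlass handling earlier in this hunk: the LoadKlass branch strips the storage-property bits by shifting them off the top of the pointer word and shifting back, while the LoadNKlass branch masks the narrow klass value directly. A standalone sketch of the two bit tricks, with assumed widths and a placeholder mask (the real values are oopDesc::storage_props_nof_bits and oopDesc::compressed_klass_mask(), and the real code builds C2 nodes rather than doing the arithmetic inline):

    #include <cstdint>

    const int      STORAGE_PROPS_BITS    = 3;              // assumed number of property bits
    const uint32_t COMPRESSED_KLASS_MASK = (1u << 29) - 1; // assumed low-bit mask

    // LoadKlass case: shift the property bits out the top, then back down,
    // zeroing the uppermost STORAGE_PROPS_BITS of the word (logical shifts
    // here for simplicity).
    uintptr_t strip_property_bits(uintptr_t klass_word) {
      return (klass_word << STORAGE_PROPS_BITS) >> STORAGE_PROPS_BITS;
    }

    // LoadNKlass case: a narrow (compressed) klass simply masks the property
    // bits away.
    uint32_t strip_property_bits_narrow(uint32_t narrow_klass) {
      return narrow_klass & COMPRESSED_KLASS_MASK;
    }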

