src/share/vm/opto/parse1.cpp

 374     // This x will be typed as Integer if notReached is not yet linked.
 375     // It could also happen due to a problem in ciTypeFlow analysis.
 376     uncommon_trap(Deoptimization::Reason_constraint,
 377                   Deoptimization::Action_reinterpret);
 378     set_map(types_are_good);
 379   }
 380 }
 381 
 382 //------------------------------Parse------------------------------------------
 383 // Main parser constructor.
 384 Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses, Parse* parent)
 385   : _exits(caller), _parent(parent)
 386 {
 387   // Init some variables
 388   _caller = caller;
 389   _method = parse_method;
 390   _expected_uses = expected_uses;
 391   _depth = 1 + (caller->has_method() ? caller->depth() : 0);
 392   _wrote_final = false;
 393   _wrote_volatile = false;
 394   _alloc_with_final = NULL;
 395   _entry_bci = InvocationEntryBci;
 396   _tf = NULL;
 397   _block = NULL;
 398   debug_only(_block_count = -1);
 399   debug_only(_blocks = (Block*)-1);
 400 #ifndef PRODUCT
 401   if (PrintCompilation || PrintOpto) {
 402     // Make sure I have an inline tree, so I can print messages about it.
 403     JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
 404     InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
 405   }
 406   _max_switch_depth = 0;
 407   _est_switch_depth = 0;
 408 #endif
 409 
 410   _tf = TypeFunc::make(method());
 411   _iter.reset_to_method(method());
 412   _flow = method()->get_flow_analysis();
 413   if (_flow->failing()) {


 891     caller.map()->set_req(i, ex_map->in(i));
 892   }
 893   // ...and the exception:
 894   Node*          ex_oop        = saved_ex_oop(ex_map);
 895   SafePointNode* caller_ex_map = caller.make_exception_state(ex_oop);
 896   // Finally, collect the new exception state in my exits:
 897   _exits.add_exception_state(caller_ex_map);
 898 }
 899 
 900 //------------------------------do_exits---------------------------------------
 901 void Parse::do_exits() {
 902   set_parse_bci(InvocationEntryBci);
 903 
 904   // Now peephole on the return bits
 905   Node* region = _exits.control();
 906   _exits.set_control(gvn().transform(region));
 907 
 908   Node* iophi = _exits.i_o();
 909   _exits.set_i_o(gvn().transform(iophi));
 910 
 911   // On PPC64, also add MemBarRelease for constructors which write
 912   // volatile fields. As support_IRIW_for_not_multiple_copy_atomic_cpu
 913   // is set on PPC64, no sync instruction is issued after volatile
 914   // stores. We want to guarantee the same behaviour as on platforms
 915   // with total store order, although this is not required by the Java
 916   // memory model. So as with finals, we add a barrier here.
 917   if (wrote_final() PPC64_ONLY(|| (wrote_volatile() && method()->is_initializer()))) {
 918     // This method (which must be a constructor by the rules of Java)
 919     // wrote a final.  The effects of all initializations must be
 920     // committed to memory before any code after the constructor
 921     // publishes the reference to the newly constructed object.
 922     // Rather than wait for the publication, we simply block the
 923     // writes here.  Rather than put a barrier on only those writes
 924     // which are required to complete, we force all writes to complete.
 925     //
 926     // "All bets are off" unless the first publication occurs after a
 927     // normal return from the constructor.  We do not attempt to detect
 928     // such unusual early publications.  But no barrier is needed on
 929     // exceptional returns, since they cannot publish normally.
 930     //
 931     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
 932 #ifndef PRODUCT
 933     if (PrintOpto && (Verbose || WizardMode)) {
 934       method()->print_name();
 935       tty->print_cr(" writes finals and needs a memory barrier");
 936     }
 937 #endif
 938   }
 939 
 940   for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
 941     // transform each slice of the original memphi:
 942     mms.set_memory(_gvn.transform(mms.memory()));
 943   }
 944 
 945   if (tf()->range()->cnt() > TypeFunc::Parms) {
 946     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 947     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 948     assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
 949     _exits.push_node(ret_type->basic_type(), ret_phi);
 950   }
 951 
 952   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
 953 
 954   // Unlock along the exceptional paths.
 955   // This is done late so that we can common up equivalent exceptions




 374     // This x will be typed as Integer if notReached is not yet linked.
 375     // It could also happen due to a problem in ciTypeFlow analysis.
 376     uncommon_trap(Deoptimization::Reason_constraint,
 377                   Deoptimization::Action_reinterpret);
 378     set_map(types_are_good);
 379   }
 380 }
 381 
 382 //------------------------------Parse------------------------------------------
 383 // Main parser constructor.
 384 Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses, Parse* parent)
 385   : _exits(caller), _parent(parent)
 386 {
 387   // Init some variables
 388   _caller = caller;
 389   _method = parse_method;
 390   _expected_uses = expected_uses;
 391   _depth = 1 + (caller->has_method() ? caller->depth() : 0);
 392   _wrote_final = false;
 393   _wrote_volatile = false;
 394   _wrote_stable = false;
 395   _wrote_fields = false;
 396   _alloc_with_final = NULL;
 397   _entry_bci = InvocationEntryBci;
 398   _tf = NULL;
 399   _block = NULL;
 400   debug_only(_block_count = -1);
 401   debug_only(_blocks = (Block*)-1);
 402 #ifndef PRODUCT
 403   if (PrintCompilation || PrintOpto) {
 404     // Make sure I have an inline tree, so I can print messages about it.
 405     JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
 406     InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
 407   }
 408   _max_switch_depth = 0;
 409   _est_switch_depth = 0;
 410 #endif
 411 
 412   _tf = TypeFunc::make(method());
 413   _iter.reset_to_method(method());
 414   _flow = method()->get_flow_analysis();
 415   if (_flow->failing()) {


 893     caller.map()->set_req(i, ex_map->in(i));
 894   }
 895   // ...and the exception:
 896   Node*          ex_oop        = saved_ex_oop(ex_map);
 897   SafePointNode* caller_ex_map = caller.make_exception_state(ex_oop);
 898   // Finally, collect the new exception state in my exits:
 899   _exits.add_exception_state(caller_ex_map);
 900 }
 901 
 902 //------------------------------do_exits---------------------------------------
 903 void Parse::do_exits() {
 904   set_parse_bci(InvocationEntryBci);
 905 
 906   // Now peephole on the return bits
 907   Node* region = _exits.control();
 908   _exits.set_control(gvn().transform(region));
 909 
 910   Node* iophi = _exits.i_o();
 911   _exits.set_i_o(gvn().transform(iophi));
 912 
 913   // Figure out if we need to emit the trailing barrier. The barrier is only
 914   // needed in the constructors, and only in three cases:
 915   //
 916   // 1. The constructor wrote a final. The effects of all initializations
 917   //    must be committed to memory before any code after the constructor
 918   //    publishes the reference to the newly constructed object. Rather
 919   //    than wait for the publication, we simply block the writes here.
 920   //    Rather than put a barrier on only those writes which are required
 921   //    to complete, we force all writes to complete.
 922   //
 923   // 2. On PPC64, also add MemBarRelease for constructors which write
 924   //    volatile fields. As support_IRIW_for_not_multiple_copy_atomic_cpu
 925   //    is set on PPC64, no sync instruction is issued after volatile
 926   //    stores. We want to guarantee the same behavior as on platforms
 927   //    with total store order, although this is not required by the Java
 928   //    memory model. So as with finals, we add a barrier here.
 929   //
 930   // 3. Experimental VM option is used to force the barrier if any field
 931   //    was written out in the constructor.
 932   //
 933   // "All bets are off" unless the first publication occurs after a
 934   // normal return from the constructor.  We do not attempt to detect
 935   // such unusual early publications.  But no barrier is needed on
 936   // exceptional returns, since they cannot publish normally.
 937   //
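       // (Illustrative sketch of the publication pattern that case 1 guards
       //  against; the class and field names below are hypothetical:
       //
       //      class Point { final int x; Point(int v) { x = v; } }
       //      static Point shared;
       //      ...
       //      shared = new Point(42);   // publish after the constructor returns
       //
       //  Without the trailing MemBarRelease, a racy reader of 'shared' on a
       //  weakly ordered CPU could observe the default value of 'x'.)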
 938   if (method()->is_initializer() &&
 939         (wrote_final() ||
 940            PPC64_ONLY(wrote_volatile() ||)
 941            (AlwaysSafeConstructors && wrote_fields()))) {
 942     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
 943 #ifndef PRODUCT
 944     if (PrintOpto && (Verbose || WizardMode)) {
 945       method()->print_name();
 946       tty->print_cr(" writes finals and needs a memory barrier");
 947     }
 948 #endif
 949   }
 950 
 951   // Any method can write a @Stable field; insert memory barriers after
 952   // those also. If there is a predecessor allocation node, bind the
 953   // barrier there.
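       // (Illustrative sketch; the field name is hypothetical:
       //
       //      @Stable int[] table;   // once non-null, reads may be constant-folded
       //
       //  Because the JIT may fold reads of a @Stable field into constants, the
       //  store that initializes it must be complete before the holder can be
       //  published; hence the release barrier below.)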
 954   if (wrote_stable()) {
 955     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
 956 #ifndef PRODUCT
 957     if (PrintOpto && (Verbose || WizardMode)) {
 958       method()->print_name();
 959       tty->print_cr(" writes @Stable and needs a memory barrier");
 960     }
 961 #endif
 962   }
 963 
 964   for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
 965     // transform each slice of the original memphi:
 966     mms.set_memory(_gvn.transform(mms.memory()));
 967   }
 968 
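       // Push the merged, GVN-transformed return value back onto the exit
       // map's stack. For a void method, range() has only the standard
       // TypeFunc::Parms fields and nothing is pushed.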
 969   if (tf()->range()->cnt() > TypeFunc::Parms) {
 970     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 971     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 972     assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
 973     _exits.push_node(ret_type->basic_type(), ret_phi);
 974   }
 975 
 976   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
 977 
 978   // Unlock along the exceptional paths.
 979   // This is done late so that we can common up equivalent exceptions