src/share/vm/opto/parse1.cpp

 891     caller.map()->set_req(i, ex_map->in(i));
 892   }
 893   // ...and the exception:
 894   Node*          ex_oop        = saved_ex_oop(ex_map);
 895   SafePointNode* caller_ex_map = caller.make_exception_state(ex_oop);
 896   // Finally, collect the new exception state in my exits:
 897   _exits.add_exception_state(caller_ex_map);
 898 }
 899 
 900 //------------------------------do_exits---------------------------------------
 901 void Parse::do_exits() {
 902   set_parse_bci(InvocationEntryBci);
 903 
 904   // Now peephole on the return bits
 905   Node* region = _exits.control();
 906   _exits.set_control(gvn().transform(region));
 907 
 908   Node* iophi = _exits.i_o();
 909   _exits.set_i_o(gvn().transform(iophi));
 910 
 911   // On PPC64, also add MemBarRelease for constructors which write
 912   // volatile fields. As support_IRIW_for_not_multiple_copy_atomic_cpu
 913   // is set on PPC64, no sync instruction is issued after volatile
 914   // stores. We want to quarantee the same behaviour as on platforms
 915   // with total store order, although this is not required by the Java
 916   // memory model. So as with finals, we add a barrier here.
 917   if (wrote_final() PPC64_ONLY(|| (wrote_volatile() && method()->is_initializer()))) {
 918     // This method (which must be a constructor by the rules of Java)
 919     // wrote a final.  The effects of all initializations must be
 920     // committed to memory before any code after the constructor
 921     // publishes the reference to the newly constructor object.
 922     // Rather than wait for the publication, we simply block the
 923     // writes here.  Rather than put a barrier on only those writes
 924     // which are required to complete, we force all writes to complete.
 925     //
 926     // "All bets are off" unless the first publication occurs after a
 927     // normal return from the constructor.  We do not attempt to detect
 928     // such unusual early publications.  But no barrier is needed on
 929     // exceptional returns, since they cannot publish normally.
 930     //
 931     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
 932 #ifndef PRODUCT
 933     if (PrintOpto && (Verbose || WizardMode)) {
 934       method()->print_name();
 935       tty->print_cr(" writes finals and needs a memory barrier");
 936     }
 937 #endif
 938   }
 939 
 940   for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
 941     // transform each slice of the original memphi:
 942     mms.set_memory(_gvn.transform(mms.memory()));
 943   }
 944 
 945   if (tf()->range()->cnt() > TypeFunc::Parms) {
 946     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 947     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 948     assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
 949     _exits.push_node(ret_type->basic_type(), ret_phi);
 950   }
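The guard at line 917 above relies on HotSpot's PPC64_ONLY conditional-compilation macro. As a minimal sketch, assuming the usual definition pattern from utilities/macros.hpp, this is how a single guard line expands to different conditions per platform:

#ifdef PPC64
#define PPC64_ONLY(code) code      // PPC64 build: the argument is compiled in
#else
#define PPC64_ONLY(code)           // other platforms: the argument disappears
#endif

// Old guard from line 917 after preprocessing:
//   PPC64:     if (wrote_final() || (wrote_volatile() && method()->is_initializer()))
//   elsewhere: if (wrote_final())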

 891     caller.map()->set_req(i, ex_map->in(i));
 892   }
 893   // ...and the exception:
 894   Node*          ex_oop        = saved_ex_oop(ex_map);
 895   SafePointNode* caller_ex_map = caller.make_exception_state(ex_oop);
 896   // Finally, collect the new exception state in my exits:
 897   _exits.add_exception_state(caller_ex_map);
 898 }
 899 
 900 //------------------------------do_exits---------------------------------------
 901 void Parse::do_exits() {
 902   set_parse_bci(InvocationEntryBci);
 903 
 904   // Now peephole on the return bits
 905   Node* region = _exits.control();
 906   _exits.set_control(gvn().transform(region));
 907 
 908   Node* iophi = _exits.i_o();
 909   _exits.set_i_o(gvn().transform(iophi));
 910 
 911   // Figure out if we need to emit the trailing barrier. The barrier is only
 912   // needed in the constructors, and only in three cases:
 913   //
 914   // 1. The constructor wrote a final. The effects of all initializations
 915   //    must be committed to memory before any code after the constructor
 916   //    publishes the reference to the newly constructed object. Rather
 917   //    than wait for the publication, we simply block the writes here.
 918   //    Rather than put a barrier on only those writes which are required
 919   //    to complete, we force all writes to complete.
 920   //
 921   // 2. On PPC64, also add MemBarRelease for constructors which write
 922   //    volatile fields. As support_IRIW_for_not_multiple_copy_atomic_cpu
 923   //    is set on PPC64, no sync instruction is issued after volatile
 924   //    stores. We want to guarantee the same behavior as on platforms
 925   //    with total store order, although this is not required by the Java
 926   //    memory model. So as with finals, we add a barrier here.
 927   //
 928   // 3. Experimental VM option is used to force the barrier if any field
 929   //    was written out in the constructor.
 930   //
 931   // "All bets are off" unless the first publication occurs after a
 932   // normal return from the constructor.  We do not attempt to detect
 933   // such unusual early publications.  But no barrier is needed on
 934   // exceptional returns, since they cannot publish normally.
 935   //
 936   if (method()->is_initializer() &&
 937         (wrote_final() ||
 938            PPC64_ONLY(wrote_volatile() ||)
 939            (AlwaysSafeConstructors && wrote_fields()))) {
 940     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
 941 #ifndef PRODUCT
 942     if (PrintOpto && (Verbose || WizardMode)) {
 943       method()->print_name();
 944       tty->print_cr(" writes finals and needs a memory barrier");
 945     }
 946 #endif
 947   }
 948 
 949   for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
 950     // transform each slice of the original memphi:
 951     mms.set_memory(_gvn.transform(mms.memory()));
 952   }
 953 
 954   if (tf()->range()->cnt() > TypeFunc::Parms) {
 955     const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
 956     Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
 957     assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
 958     _exits.push_node(ret_type->basic_type(), ret_phi);
 959   }
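For context on what the trailing MemBarRelease provides: every store performed inside the constructor must be visible to other threads before the reference to the newly constructed object can be observed. The stand-alone C++11 sketch below (hypothetical names, not HotSpot code) expresses the same release-before-publish ordering with std::atomic_thread_fence; the fence plays the role of the barrier inserted by _exits.insert_mem_bar(Op_MemBarRelease, ...).

#include <atomic>
#include <thread>

struct Box {
  int value;                          // stands in for a final or volatile field
  explicit Box(int v) : value(v) {}
};

static std::atomic<Box*> g_shared{nullptr};

static void publisher() {
  Box* b = new Box(42);               // all constructor writes happen here
  // Analogue of the trailing MemBarRelease: order the writes above before
  // the publication of the reference below.
  std::atomic_thread_fence(std::memory_order_release);
  g_shared.store(b, std::memory_order_relaxed);
}

static void consumer() {
  Box* b = g_shared.load(std::memory_order_acquire);
  if (b != nullptr) {
    // The release fence paired with the acquire load guarantees value == 42 here.
    int v = b->value;
    (void)v;
  }
}

int main() {
  std::thread t1(publisher), t2(consumer);
  t1.join();
  t2.join();
  return 0;
}

The compiled Java case differs only in where the ordering is enforced: the JIT emits the barrier once at constructor exit, and the new guard (lines 936-939) skips it entirely unless a final was written, a volatile was written on PPC64, or AlwaysSafeConstructors forces it for any field write.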