src/share/vm/opto/parse1.cpp
Sdiff for change 6934604

Old:

    // E.g., Object x = coldAtFirst() && notReached()? "str": new Integer(123).
    // This x will be typed as Integer if notReached is not yet linked.
    // It could also happen due to a problem in ciTypeFlow analysis.
    uncommon_trap(Deoptimization::Reason_constraint,
                  Deoptimization::Action_reinterpret);
    set_map(types_are_good);
  }
}

//------------------------------Parse------------------------------------------
// Main parser constructor.
Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses)
  : _exits(caller)
{
  // Init some variables
  _caller = caller;
  _method = parse_method;
  _expected_uses = expected_uses;
  _depth = 1 + (caller->has_method() ? caller->depth() : 0);
  _wrote_final = false;
  _entry_bci = InvocationEntryBci;
  _tf = NULL;
  _block = NULL;
  debug_only(_block_count = -1);
  debug_only(_blocks = (Block*)-1);
#ifndef PRODUCT
  if (PrintCompilation || PrintOpto) {
    // Make sure I have an inline tree, so I can print messages about it.
    JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
    InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
  }
  _max_switch_depth = 0;
  _est_switch_depth = 0;
#endif

  _tf = TypeFunc::make(method());
  _iter.reset_to_method(method());
  _flow = method()->get_flow_analysis();
  if (_flow->failing()) {
    C->record_method_not_compilable_all_tiers(_flow->failure_reason());


  }
#endif
}

//-------------------------------build_exits----------------------------------
// Build normal and exceptional exit merge points.
void Parse::build_exits() {
  // make a clone of caller to prevent sharing of side-effects
  _exits.set_map(_exits.clone_map());
  _exits.clean_stack(_exits.sp());
  _exits.sync_jvms();

  RegionNode* region = new (C) RegionNode(1);
  record_for_igvn(region);
  gvn().set_type_bottom(region);
  _exits.set_control(region);

  // Note:  iophi and memphi are not transformed until do_exits.
  Node* iophi  = new (C) PhiNode(region, Type::ABIO);
  Node* memphi = new (C) PhiNode(region, Type::MEMORY, TypePtr::BOTTOM);
  _exits.set_i_o(iophi);
  _exits.set_all_memory(memphi);

  // Add a return value to the exit state.  (Do not push it yet.)
  if (tf()->range()->cnt() > TypeFunc::Parms) {
    const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
    // Don't "bind" an unloaded return klass to the ret_phi. If the klass
    // becomes loaded during the subsequent parsing, the loaded and unloaded
    // types will not join when we transform and push in do_exits().
    const TypeOopPtr* ret_oop_type = ret_type->isa_oopptr();
    if (ret_oop_type && !ret_oop_type->klass()->is_loaded()) {
      ret_type = TypeOopPtr::BOTTOM;
    }
    int         ret_size = type2size[ret_type->basic_type()];
    Node*       ret_phi  = new (C) PhiNode(region, ret_type);
    _exits.ensure_stack(ret_size);
    assert((int)(tf()->range()->cnt() - TypeFunc::Parms) == ret_size, "good tf range");
    assert(method()->return_type()->size() == ret_size, "tf agrees w/ method");
    _exits.set_argument(0, ret_phi);  // here is where the parser finds it
    // Note:  ret_phi is not yet pushed, until do_exits.
  }
}


//----------------------------build_start_state-------------------------------
// Construct a state which contains only the incoming arguments from an
// unknown caller.  The method & bci will be NULL & InvocationEntryBci.
JVMState* Compile::build_start_state(StartNode* start, const TypeFunc* tf) {
  int        arg_size = tf->domain()->cnt();
  int        max_size = MAX2(arg_size, (int)tf->range()->cnt());
  JVMState*  jvms     = new (this) JVMState(max_size - TypeFunc::Parms);
  SafePointNode* map  = new (this) SafePointNode(max_size, NULL);
  record_for_igvn(map);
  assert(arg_size == TypeFunc::Parms + (is_osr_compilation() ? 1 : method()->arg_size()), "correct arg_size");
  Node_Notes* old_nn = default_node_notes();


  Node* region = _exits.control();
  _exits.set_control(gvn().transform(region));

  Node* iophi = _exits.i_o();
  _exits.set_i_o(gvn().transform(iophi));

  if (wrote_final()) {
    // This method (which must be a constructor by the rules of Java)
    // wrote a final.  The effects of all initializations must be
    // committed to memory before any code after the constructor
    // publishes the reference to the newly constructed object.
    // Rather than wait for the publication, we simply block the
    // writes here.  Rather than put a barrier on only those writes
    // which are required to complete, we force all writes to complete.
    //
    // "All bets are off" unless the first publication occurs after a
    // normal return from the constructor.  We do not attempt to detect
    // such unusual early publications.  But no barrier is needed on
    // exceptional returns, since they cannot publish normally.
    //
    _exits.insert_mem_bar(Op_MemBarRelease);
#ifndef PRODUCT
    if (PrintOpto && (Verbose || WizardMode)) {
      method()->print_name();
      tty->print_cr(" writes finals and needs a memory barrier");
    }
#endif
  }

  for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
    // transform each slice of the original memphi:
    mms.set_memory(_gvn.transform(mms.memory()));
  }

  if (tf()->range()->cnt() > TypeFunc::Parms) {
    const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
    Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
    assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
    _exits.push_node(ret_type->basic_type(), ret_phi);
  }




New:

    // E.g., Object x = coldAtFirst() && notReached()? "str": new Integer(123).
    // This x will be typed as Integer if notReached is not yet linked.
    // It could also happen due to a problem in ciTypeFlow analysis.
    uncommon_trap(Deoptimization::Reason_constraint,
                  Deoptimization::Action_reinterpret);
    set_map(types_are_good);
  }
}

//------------------------------Parse------------------------------------------
// Main parser constructor.
Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses)
  : _exits(caller)
{
  // Init some variables
  _caller = caller;
  _method = parse_method;
  _expected_uses = expected_uses;
  _depth = 1 + (caller->has_method() ? caller->depth() : 0);
  _wrote_final = false;
  _alloc_with_final = NULL;
  _entry_bci = InvocationEntryBci;
  _tf = NULL;
  _block = NULL;
  debug_only(_block_count = -1);
  debug_only(_blocks = (Block*)-1);
#ifndef PRODUCT
  if (PrintCompilation || PrintOpto) {
    // Make sure I have an inline tree, so I can print messages about it.
    JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
    InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
  }
  _max_switch_depth = 0;
  _est_switch_depth = 0;
#endif

  _tf = TypeFunc::make(method());
  _iter.reset_to_method(method());
  _flow = method()->get_flow_analysis();
  if (_flow->failing()) {
    C->record_method_not_compilable_all_tiers(_flow->failure_reason());
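
The comment at the top of this hunk compresses a subtle scenario. Here is a minimal Java sketch of it; the wrapper class and main() are hypothetical, and only the coldAtFirst()/notReached() names come from the comment itself:

public class TernaryTypeDemo {
    static boolean coldAtFirst() { return false; } // false on early executions
    static boolean notReached()  { return true;  } // never evaluated while coldAtFirst() is false

    static Object choose() {
        // While coldAtFirst() returns false, the "str" arm is never taken,
        // so type analysis may see only the Integer arm for x.  If the
        // parser's view of x later disagrees with ciTypeFlow's, the parser
        // bails out through the Reason_constraint uncommon trap above and
        // lets the interpreter reinterpret the state.
        Object x = coldAtFirst() && notReached() ? "str" : new Integer(123);
        return x;
    }

    public static void main(String[] args) {
        System.out.println(choose()); // prints 123
    }
}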


  }
#endif
}

//-------------------------------build_exits----------------------------------
// Build normal and exceptional exit merge points.
void Parse::build_exits() {
  // make a clone of caller to prevent sharing of side-effects
  _exits.set_map(_exits.clone_map());
  _exits.clean_stack(_exits.sp());
  _exits.sync_jvms();

  RegionNode* region = new (C) RegionNode(1);
  record_for_igvn(region);
  gvn().set_type_bottom(region);
  _exits.set_control(region);

  // Note:  iophi and memphi are not transformed until do_exits.
  Node* iophi  = new (C) PhiNode(region, Type::ABIO);
  Node* memphi = new (C) PhiNode(region, Type::MEMORY, TypePtr::BOTTOM);
  gvn().set_type_bottom(iophi);
  gvn().set_type_bottom(memphi);
  _exits.set_i_o(iophi);
  _exits.set_all_memory(memphi);

  // Add a return value to the exit state.  (Do not push it yet.)
  if (tf()->range()->cnt() > TypeFunc::Parms) {
    const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
    // Don't "bind" an unloaded return klass to the ret_phi. If the klass
    // becomes loaded during the subsequent parsing, the loaded and unloaded
    // types will not join when we transform and push in do_exits().
    const TypeOopPtr* ret_oop_type = ret_type->isa_oopptr();
    if (ret_oop_type && !ret_oop_type->klass()->is_loaded()) {
      ret_type = TypeOopPtr::BOTTOM;
    }
    int         ret_size = type2size[ret_type->basic_type()];
    Node*       ret_phi  = new (C) PhiNode(region, ret_type);
    gvn().set_type_bottom(ret_phi);
    _exits.ensure_stack(ret_size);
    assert((int)(tf()->range()->cnt() - TypeFunc::Parms) == ret_size, "good tf range");
    assert(method()->return_type()->size() == ret_size, "tf agrees w/ method");
    _exits.set_argument(0, ret_phi);  // here is where the parser finds it
    // Note:  ret_phi is not yet pushed, until do_exits.
  }
}


//----------------------------build_start_state-------------------------------
// Construct a state which contains only the incoming arguments from an
// unknown caller.  The method & bci will be NULL & InvocationEntryBci.
JVMState* Compile::build_start_state(StartNode* start, const TypeFunc* tf) {
  int        arg_size = tf->domain()->cnt();
  int        max_size = MAX2(arg_size, (int)tf->range()->cnt());
  JVMState*  jvms     = new (this) JVMState(max_size - TypeFunc::Parms);
  SafePointNode* map  = new (this) SafePointNode(max_size, NULL);
  record_for_igvn(map);
  assert(arg_size == TypeFunc::Parms + (is_osr_compilation() ? 1 : method()->arg_size()), "correct arg_size");
  Node_Notes* old_nn = default_node_notes();
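
The "unloaded return klass" case that build_exits() guards against is easiest to see from the Java side. A hedged sketch, with entirely hypothetical class names (nothing here is from the HotSpot sources):

public class UnloadedReturnDemo {
    static RareResult produce(boolean rare) {
        if (rare) {
            // The first execution of this path loads RareResult.  If that
            // happens while produce() is still being parsed, a ret_phi
            // bound to the still-unloaded type would not join with the
            // loaded one, hence the widening to TypeOopPtr::BOTTOM above.
            return new RareResult();
        }
        return null; // hot path: RareResult may never have been loaded
    }

    public static void main(String[] args) {
        System.out.println(produce(args.length > 0));
    }
}

class RareResult { }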


  Node* region = _exits.control();
  _exits.set_control(gvn().transform(region));

  Node* iophi = _exits.i_o();
  _exits.set_i_o(gvn().transform(iophi));

  if (wrote_final()) {
    // This method (which must be a constructor by the rules of Java)
    // wrote a final.  The effects of all initializations must be
    // committed to memory before any code after the constructor
    // publishes the reference to the newly constructed object.
    // Rather than wait for the publication, we simply block the
    // writes here.  Rather than put a barrier on only those writes
    // which are required to complete, we force all writes to complete.
    //
    // "All bets are off" unless the first publication occurs after a
    // normal return from the constructor.  We do not attempt to detect
    // such unusual early publications.  But no barrier is needed on
    // exceptional returns, since they cannot publish normally.
    //
    _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
#ifndef PRODUCT
    if (PrintOpto && (Verbose || WizardMode)) {
      method()->print_name();
      tty->print_cr(" writes finals and needs a memory barrier");
    }
#endif
  }

  for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
    // transform each slice of the original memphi:
    mms.set_memory(_gvn.transform(mms.memory()));
  }

  if (tf()->range()->cnt() > TypeFunc::Parms) {
    const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
    Node*       ret_phi  = _gvn.transform( _exits.argument(0) );
    assert(_exits.control()->is_top() || !_gvn.type(ret_phi)->empty(), "return value must be well defined");
    _exits.push_node(ret_type->basic_type(), ret_phi);
  }
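
The barrier logic above maps directly onto Java's final-field publication rules. A minimal sketch, with hypothetical class names, of the pattern the comment is defending against:

public class FinalPublishDemo {
    static final class Point {
        final int x;
        Point(int x) {
            this.x = x;   // final-field write; sets wrote_final during parsing
            // Early publication such as "shared = this;" here would void
            // the guarantee ("all bets are off" in the comment above).
        }                 // do_exits() emits the Op_MemBarRelease here
    }

    static Point shared;  // racy publication, no synchronization

    static void publisher() {
        // Safe only because the release barrier at constructor exit orders
        // the write of x before the store of the reference to 'shared'.
        shared = new Point(42);
    }

    static void reader() {
        Point p = shared;
        if (p != null) {
            System.out.println(p.x); // must print 42, never 0
        }
    }

    public static void main(String[] args) throws InterruptedException {
        Thread t = new Thread() { public void run() { reader(); } };
        t.start();
        publisher();
        t.join();
    }
}

Note the second argument this change adds: insert_mem_bar(Op_MemBarRelease, alloc_with_final()) hands the barrier the allocation (tracked in the new _alloc_with_final field) whose finals were written, presumably so that later phases can associate the barrier with that specific object and, for example, drop it when the allocation provably never escapes; the machinery that would consume this edge lies outside the hunks shown here.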

