                  Deoptimization::Action_reinterpret);
    set_map(types_are_good);
  }
}

//------------------------------Parse------------------------------------------
// Main parser constructor.
Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses)
  : _exits(caller)
{
  // Init some variables
  _caller = caller;
  _method = parse_method;
  _expected_uses = expected_uses;
  _depth = 1 + (caller->has_method() ? caller->depth() : 0);
  _wrote_final = false;
  _wrote_volatile = false;
  _wrote_stable = false;
  _wrote_fields = false;
  _alloc_with_final = NULL;
  _alloc_with_stable = NodeSentinel;
  _entry_bci = InvocationEntryBci;
  _tf = NULL;
  _block = NULL;
  _first_return = true;
  _replaced_nodes_for_exceptions = false;
  _new_idx = C->unique();
  debug_only(_block_count = -1);
  debug_only(_blocks = (Block*)-1);
#ifndef PRODUCT
  if (PrintCompilation || PrintOpto) {
    // Make sure I have an inline tree, so I can print messages about it.
    JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
    InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
  }
  _max_switch_depth = 0;
  _est_switch_depth = 0;
#endif

  _tf = TypeFunc::make(method());
  _iter.reset_to_method(method());
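
  // Note (added context, not verbatim source): the _wrote_* flags and the
  // _alloc_with_final / _alloc_with_stable slots initialized above are filled
  // in while the parser walks the bytecodes and sees stores to final,
  // volatile, or @Stable fields; Parse::do_exits(), excerpted below, reads
  // them to decide whether the method's normal exits need a trailing
  // MemBarRelease bound to the originating allocation.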

// (below: excerpt from Parse::do_exits(), later in this file)

  // "All bets are off" unless the first publication occurs after a
  // normal return from the constructor.  We do not attempt to detect
  // such unusual early publications.  But no barrier is needed on
  // exceptional returns, since they cannot publish normally.
  //
  if (method()->is_initializer() &&
      (wrote_final() ||
       PPC64_ONLY(wrote_volatile() ||)
       (AlwaysSafeConstructors && wrote_fields()))) {
    _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
#ifndef PRODUCT
    if (PrintOpto && (Verbose || WizardMode)) {
      method()->print_name();
      tty->print_cr(" writes finals and needs a memory barrier");
    }
#endif
  }
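
  // Illustration (added; example Java, not from this file): for
  //
  //   class Point {
  //     final int x;
  //     Point(int v) { x = v; }        // marks wrote_final()
  //   }
  //   // caller:  p = new Point(42);  shared = p;   // publication
  //
  // the MemBarRelease emitted above keeps the store to p.x ordered before
  // the publication of p, so a racing reader of 'shared' never observes
  // a Point whose x still holds its default value.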

  // Any method can write a @Stable field; insert memory barriers after
  // those also. If there is a predecessor allocation node, bind the
  // barrier there.
  if (wrote_stable()) {
    _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_stable());
#ifndef PRODUCT
    if (PrintOpto && (Verbose || WizardMode)) {
      method()->print_name();
      tty->print_cr(" writes @Stable and needs a memory barrier");
    }
#endif
  }
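
  // Illustration (added; hedged): @Stable (a JDK-internal annotation) tells
  // C2 it may constant-fold a field once it holds a non-default value, e.g.
  //
  //   @Stable int cached;
  //   int get() { if (cached == 0) cached = compute(); return cached; }
  //
  // Since such a write can happen in any method, not just a constructor,
  // the same release barrier is emitted here on the method's normal exits.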

  for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
    // transform each slice of the original memphi:
    mms.set_memory(_gvn.transform(mms.memory()));
  }
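
  // (Added context: C2 models memory as a MergeMem with one slice per alias
  // category; the loop above runs each slice's exit phi through GVN
  // individually rather than transforming the MergeMem as a whole.)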

  if (tf()->range()->cnt() > TypeFunc::Parms) {
    const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
    Node* ret_phi = _gvn.transform( _exits.argument(0) );
    if (!_exits.control()->is_top() && _gvn.type(ret_phi)->empty()) {
      // In case of concurrent class loading, the type we set for the
      // ret_phi in build_exits() may have been too optimistic and the
      // ret_phi may be top now.