                  Deoptimization::Action_reinterpret);
    set_map(types_are_good);
  }
}

//------------------------------Parse------------------------------------------
// Main parser constructor.
Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses)
  : _exits(caller)
{
  // Init some variables
  _caller = caller;
  _method = parse_method;
  _expected_uses = expected_uses;
  _depth = 1 + (caller->has_method() ? caller->depth() : 0);
  _wrote_final = false;
  _wrote_volatile = false;
  _wrote_stable = false;
  _wrote_fields = false;
  _alloc_with_final = NULL;
  _alloc_with_stable = NodeSentinel;
  _entry_bci = InvocationEntryBci;
  _tf = NULL;
  _block = NULL;
  _first_return = true;
  _replaced_nodes_for_exceptions = false;
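  // Node index watermark: nodes created during this parse will have
  // _idx >= _new_idx; the replaced-nodes bookkeeping consults it when
  // propagating replacements to exception states.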
  _new_idx = C->unique();
  debug_only(_block_count = -1);
  debug_only(_blocks = (Block*)-1);
#ifndef PRODUCT
  if (PrintCompilation || PrintOpto) {
    // Make sure I have an inline tree, so I can print messages about it.
    JVMState* ilt_caller = is_osr_parse() ? caller->caller() : caller;
    InlineTree::find_subtree_from_root(C->ilt(), ilt_caller, parse_method);
  }
  _max_switch_depth = 0;
  _est_switch_depth = 0;
#endif

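  // Build the C2 function type for the method's signature and rewind the
  // bytecode stream to the start of the method.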
  _tf = TypeFunc::make(method());
  _iter.reset_to_method(method());
// ...

  //
  // 2. On PPC64, also add MemBarRelease for constructors which write
  //    volatile fields. As support_IRIW_for_not_multiple_copy_atomic_cpu
  //    is set on PPC64, no sync instruction is issued after volatile
  //    stores. We want to guarantee the same behavior as on platforms
  //    with total store order, although this is not required by the Java
  //    memory model. So as with finals, we add a barrier here.
  //
  // 3. An experimental VM option (AlwaysSafeConstructors) is used to force
  //    the barrier if any field was written out in the constructor.
  //
  // "All bets are off" unless the first publication occurs after a
  // normal return from the constructor. We do not attempt to detect
  // such unusual early publications. But no barrier is needed on
  // exceptional returns, since they cannot publish normally.
  //
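  // For example (illustrative Java, not part of this file):
  //
  //   class Point {
  //     final int x;
  //     Point(int x0) { x = x0; }   // MemBarRelease emitted at the exit
  //   }
  //
  //   // Publisher thread:  shared = new Point(42);
  //   // Reader thread:     Point p = shared; if (p != null) use(p.x);
  //
  // The barrier at the constructor exit guarantees the reader observes
  // x == 42 once it observes the published reference. Publishing "this"
  // from inside the constructor would void that guarantee, per the
  // "all bets are off" caveat above.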
  if (method()->is_initializer() &&
      (wrote_final() ||
       PPC64_ONLY(wrote_volatile() ||)
       (AlwaysSafeConstructors && wrote_fields()))) {
    // On PPC64, the volatile case needs extra care: if the allocation node
    // is passed as the Precedent input of the MemBarRelease and the
    // allocation does not escape, the MemBarRelease will be optimized away.
    // Unlike a final, a volatile field written in the constructor is not
    // necessarily a field of the allocated instance itself (it may belong
    // to another object), so the barrier cannot be safely removed even
    // when the allocation does not escape. Pass no Precedent in that case.
    Node* allocate = alloc_with_final();
    if (PPC64_ONLY(wrote_volatile()) NOT_PPC64(false)) {
      allocate = NULL;
    }
    _exits.insert_mem_bar(Op_MemBarRelease, allocate);
#ifndef PRODUCT
    if (PrintOpto && (Verbose || WizardMode)) {
      method()->print_name();
      tty->print_cr(" writes finals and needs a memory barrier");
    }
#endif
  }

  // Any method can write a @Stable field; insert memory barriers after
  // those also. If there is a predecessor allocation node, bind the
  // barrier there.
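  // (Background, paraphrased rather than taken from this file: a @Stable
  // field promises that once it holds a non-default value it will not
  // change again, which lets the JIT constant-fold loads of it. The
  // release barrier makes the stored value visible to any thread that
  // subsequently sees the owning object, mirroring the final-field case.)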
  if (wrote_stable()) {
    _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_stable());
#ifndef PRODUCT
    if (PrintOpto && (Verbose || WizardMode)) {
      method()->print_name();
      tty->print_cr(" writes @Stable and needs a memory barrier");
    }
#endif
  }

  for (MergeMemStream mms(_exits.merged_memory()); mms.next_non_empty(); ) {
    // transform each slice of the original memphi:
    mms.set_memory(_gvn.transform(mms.memory()));
  }

  if (tf()->range()->cnt() > TypeFunc::Parms) {
    const Type* ret_type = tf()->range()->field_at(TypeFunc::Parms);
    Node* ret_phi = _gvn.transform( _exits.argument(0) );
    if (!_exits.control()->is_top() && _gvn.type(ret_phi)->empty()) {
      // In case of concurrent class loading, the type we set for the
      // ret_phi in build_exits() may have been too optimistic and the
      // ret_phi may be top now.