src/share/vm/c1/c1_GraphBuilder.cpp
rev 8995 : 8046155: JEP165: Compiler Control
Summary:
Reviewed-by:
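
This patch routes C1's inlining and intrinsic decisions through a per-compilation directive set (dirset()) rather than the global PrintInlining flag and per-method CompileCommand state. For orientation only, a hypothetical sketch of how such directives are supplied under JEP 165 follows; the flag and file syntax shown are assumptions about the eventual Compiler Control design and may not match this intermediate revision:

    java -XX:+UnlockDiagnosticVMOptions -XX:CompilerDirectivesFile=directives.json ...

where directives.json might contain, for example, a directive that enables inlining diagnostics for C1 and forces or forbids inlining of individual String methods (match pattern and option names are illustrative):

    [
      {
        match: "java/lang/String.*",
        c1: {
          PrintInlining: true,
        },
        inline: [ "+java/lang/String.charAt", "-java/lang/String.indexOf" ],
      },
    ]

The old version of the affected sections is shown first, followed by the new version.

Old version: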


3348 
3349   // Entire compilation could fail during try_inline_full call.
3350   // In that case printing inlining decision info is useless.
3351   if (!bailed_out())
3352     print_inlining(callee, _inline_bailout_msg, /*success*/ false);
3353 
3354   return false;
3355 }
3356 
3357 
3358 const char* GraphBuilder::check_can_parse(ciMethod* callee) const {
3359   // Certain methods cannot be parsed at all:
3360   if ( callee->is_native())            return "native method";
3361   if ( callee->is_abstract())          return "abstract method";
3362   if (!callee->can_be_compiled())      return "not compilable (disabled)";
3363   return NULL;
3364 }
3365 
3366 // negative filter: should callee NOT be inlined?  returns NULL (ok to inline) or a rejection msg
3367 const char* GraphBuilder::should_not_inline(ciMethod* callee) const {
3368   if ( callee->should_not_inline())    return "disallowed by CompileCommand";
3369   if ( callee->dont_inline())          return "don't inline by annotation";
3370   return NULL;
3371 }
3372 
3373 void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee) {
3374   vmIntrinsics::ID id = callee->intrinsic_id();
3375   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
3376 
3377   // Some intrinsics need special IR nodes.
3378   switch(id) {
3379   case vmIntrinsics::_getObject          : append_unsafe_get_obj(callee, T_OBJECT,  false); return;
3380   case vmIntrinsics::_getBoolean         : append_unsafe_get_obj(callee, T_BOOLEAN, false); return;
3381   case vmIntrinsics::_getByte            : append_unsafe_get_obj(callee, T_BYTE,    false); return;
3382   case vmIntrinsics::_getShort           : append_unsafe_get_obj(callee, T_SHORT,   false); return;
3383   case vmIntrinsics::_getChar            : append_unsafe_get_obj(callee, T_CHAR,    false); return;
3384   case vmIntrinsics::_getInt             : append_unsafe_get_obj(callee, T_INT,     false); return;
3385   case vmIntrinsics::_getLong            : append_unsafe_get_obj(callee, T_LONG,    false); return;
3386   case vmIntrinsics::_getFloat           : append_unsafe_get_obj(callee, T_FLOAT,   false); return;
3387   case vmIntrinsics::_getDouble          : append_unsafe_get_obj(callee, T_DOUBLE,  false); return;
3388   case vmIntrinsics::_putObject          : append_unsafe_put_obj(callee, T_OBJECT,  false); return;


3477                                     args, has_receiver, state_before,
3478                                     vmIntrinsics::preserves_state(id),
3479                                     vmIntrinsics::can_trap(id));
3480   // append instruction & push result
3481   Value value = append_split(result);
3482   if (result_type != voidType) push(result_type, value);
3483 
3484   if (callee != method() && profile_return() && result_type->is_object_kind()) {
3485     profile_return_type(result, callee);
3486   }
3487 }
3488 
3489 bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
3490   // For calling is_intrinsic_available we need to transition to
3491   // the '_thread_in_vm' state because is_intrinsic_available()
3492   // accesses critical VM-internal data.
3493   bool is_available = false;
3494   {
3495     VM_ENTRY_MARK;
3496     methodHandle mh(THREAD, callee->get_Method());
3497     methodHandle ct(THREAD, method()->get_Method());
3498     is_available = _compilation->compiler()->is_intrinsic_available(mh, ct);
3499   }
3500 
3501   if (!is_available) {
3502     if (!InlineNatives) {
3503       // Return false and also set a message indicating that inlining
3504       // of intrinsics has been disabled in general.
3505       INLINE_BAILOUT("intrinsic method inlining disabled");
3506     } else {
3507       return false;
3508     }
3509   }
3510   build_graph_for_intrinsic(callee);
3511   return true;
3512 }
3513 
3514 
3515 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3516   // Introduce a new callee continuation point - all Ret instructions
3517   // will be replaced with Gotos to this point.
3518   BlockBegin* cont = block_at(next_bci());


3673   if (callee->is_synchronized() &&
3674       !InlineSynchronizedMethods         ) INLINE_BAILOUT("callee is synchronized");
3675   if (!callee->holder()->is_initialized()) INLINE_BAILOUT("callee's klass not initialized yet");
3676   if (!callee->has_balanced_monitors())    INLINE_BAILOUT("callee's monitors do not match");
3677 
3678   // Proper inlining of methods with jsrs requires a little more work.
3679   if (callee->has_jsrs()                 ) INLINE_BAILOUT("jsrs not handled properly by inliner yet");
3680 
3681   // When SSE2 is used on intel, no special handling is needed for
3682   // strictfp because the enum-constant is fixed at compile time;
3683   // the check for UseSSE < 2 is needed here.
3684   if (strict_fp_requires_explicit_rounding && UseSSE < 2 && method()->is_strict() != callee->is_strict()) {
3685     INLINE_BAILOUT("caller and callee have different strict fp requirements");
3686   }
3687 
3688   if (is_profiling() && !callee->ensure_method_data()) {
3689     INLINE_BAILOUT("mdo allocation failed");
3690   }
3691 
3692   // now perform tests that are based on flag settings
3693   if (callee->force_inline() || callee->should_inline()) {
3694     if (inline_level() > MaxForceInlineLevel                    ) INLINE_BAILOUT("MaxForceInlineLevel");
3695     if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("recursive inlining too deep");
3696 
3697     const char* msg = "";
3698     if (callee->force_inline())  msg = "force inline by annotation";
3699     if (callee->should_inline()) msg = "force inline by CompileCommand";
3700     print_inlining(callee, msg);
3701   } else {
3702     // use heuristic controls on inlining
3703     if (inline_level() > MaxInlineLevel                         ) INLINE_BAILOUT("inlining too deep");
3704     if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("recursive inlining too deep");
3705     if (callee->code_size_for_inlining() > max_inline_size()    ) INLINE_BAILOUT("callee is too large");
3706 
3707     // don't inline throwable methods unless the inlining tree is rooted in a throwable class
3708     if (callee->name() == ciSymbol::object_initializer_name() &&
3709         callee->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
3710       // Throwable constructor call
3711       IRScope* top = scope();
3712       while (top->caller() != NULL) {
3713         top = top->caller();
3714       }
3715       if (!top->method()->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
3716         INLINE_BAILOUT("don't inline Throwable constructors");
3717       }
3718     }
3719 


4190         log->inline_success("receiver is statically known");
4191     } else {
4192       if (msg != NULL)
4193         log->inline_fail(msg);
4194       else
4195         log->inline_fail("reason unknown");
4196     }
4197   }
4198 #if INCLUDE_TRACE
4199   EventCompilerInlining event;
4200   if (event.should_commit()) {
4201     event.set_compileID(compilation()->env()->task()->compile_id());
4202     event.set_message(msg);
4203     event.set_succeeded(success);
4204     event.set_bci(bci());
4205     event.set_caller(method()->get_Method());
4206     event.set_callee(callee->to_trace_struct());
4207     event.commit();
4208   }
4209 #endif // INCLUDE_TRACE
4210   if (!PrintInlining && !compilation()->method()->has_option("PrintInlining")) {
4211     return;
4212   }
4213   CompileTask::print_inlining_tty(callee, scope()->level(), bci(), msg);
4214   if (success && CIPrintMethodCodes) {
4215     callee->print_codes();
4216   }
4217 }
4218 
4219 void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
4220   Values* args = state()->pop_arguments(callee->arg_size());
4221   BasicType t = callee->return_type()->basic_type();
4222   null_check(args->at(0));
4223   Instruction* offset = args->at(2);
4224 #ifndef _LP64
4225   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4226 #endif
4227   Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4228   compilation()->set_has_unsafe_access(true);
4229   kill_all();
4230   push(op->type(), op);

New version:

3348 
3349   // Entire compilation could fail during try_inline_full call.
3350   // In that case printing inlining decision info is useless.
3351   if (!bailed_out())
3352     print_inlining(callee, _inline_bailout_msg, /*success*/ false);
3353 
3354   return false;
3355 }
3356 
3357 
3358 const char* GraphBuilder::check_can_parse(ciMethod* callee) const {
3359   // Certain methods cannot be parsed at all:
3360   if ( callee->is_native())            return "native method";
3361   if ( callee->is_abstract())          return "abstract method";
3362   if (!callee->can_be_compiled())      return "not compilable (disabled)";
3363   return NULL;
3364 }
3365 
3366 // negative filter: should callee NOT be inlined?  returns NULL (ok to inline) or a rejection msg
3367 const char* GraphBuilder::should_not_inline(ciMethod* callee) const {
3368   if ( compilation()->env()->dirset()->dont_inline_commanded(callee)) return "disallowed by CompileCommand";
3369   if ( callee->dont_inline())          return "don't inline by annotation";
3370   return NULL;
3371 }
3372 
3373 void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee) {
3374   vmIntrinsics::ID id = callee->intrinsic_id();
3375   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
3376 
3377   // Some intrinsics need special IR nodes.
3378   switch(id) {
3379   case vmIntrinsics::_getObject          : append_unsafe_get_obj(callee, T_OBJECT,  false); return;
3380   case vmIntrinsics::_getBoolean         : append_unsafe_get_obj(callee, T_BOOLEAN, false); return;
3381   case vmIntrinsics::_getByte            : append_unsafe_get_obj(callee, T_BYTE,    false); return;
3382   case vmIntrinsics::_getShort           : append_unsafe_get_obj(callee, T_SHORT,   false); return;
3383   case vmIntrinsics::_getChar            : append_unsafe_get_obj(callee, T_CHAR,    false); return;
3384   case vmIntrinsics::_getInt             : append_unsafe_get_obj(callee, T_INT,     false); return;
3385   case vmIntrinsics::_getLong            : append_unsafe_get_obj(callee, T_LONG,    false); return;
3386   case vmIntrinsics::_getFloat           : append_unsafe_get_obj(callee, T_FLOAT,   false); return;
3387   case vmIntrinsics::_getDouble          : append_unsafe_get_obj(callee, T_DOUBLE,  false); return;
3388   case vmIntrinsics::_putObject          : append_unsafe_put_obj(callee, T_OBJECT,  false); return;


3477                                     args, has_receiver, state_before,
3478                                     vmIntrinsics::preserves_state(id),
3479                                     vmIntrinsics::can_trap(id));
3480   // append instruction & push result
3481   Value value = append_split(result);
3482   if (result_type != voidType) push(result_type, value);
3483 
3484   if (callee != method() && profile_return() && result_type->is_object_kind()) {
3485     profile_return_type(result, callee);
3486   }
3487 }
3488 
3489 bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
3490   // For calling is_intrinsic_available we need to transition to
3491   // the '_thread_in_vm' state because is_intrinsic_available()
3492   // accesses critical VM-internal data.
3493   bool is_available = false;
3494   {
3495     VM_ENTRY_MARK;
3496     methodHandle mh(THREAD, callee->get_Method());
3497     is_available = _compilation->compiler()->is_intrinsic_available(mh, _compilation->env()->dirset());
3498   }
3499 
3500   if (!is_available) {
3501     if (!InlineNatives) {
3502       // Return false and also set a message indicating that inlining
3503       // of intrinsics has been disabled in general.
3504       INLINE_BAILOUT("intrinsic method inlining disabled");
3505     } else {
3506       return false;
3507     }
3508   }
3509   build_graph_for_intrinsic(callee);
3510   return true;
3511 }
3512 
3513 
3514 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3515   // Introduce a new callee continuation point - all Ret instructions
3516   // will be replaced with Gotos to this point.
3517   BlockBegin* cont = block_at(next_bci());


3672   if (callee->is_synchronized() &&
3673       !InlineSynchronizedMethods         ) INLINE_BAILOUT("callee is synchronized");
3674   if (!callee->holder()->is_initialized()) INLINE_BAILOUT("callee's klass not initialized yet");
3675   if (!callee->has_balanced_monitors())    INLINE_BAILOUT("callee's monitors do not match");
3676 
3677   // Proper inlining of methods with jsrs requires a little more work.
3678   if (callee->has_jsrs()                 ) INLINE_BAILOUT("jsrs not handled properly by inliner yet");
3679 
3680   // When SSE2 is used on intel, no special handling is needed for
3681   // strictfp because the enum-constant is fixed at compile time;
3682   // the check for UseSSE < 2 is needed here.
3683   if (strict_fp_requires_explicit_rounding && UseSSE < 2 && method()->is_strict() != callee->is_strict()) {
3684     INLINE_BAILOUT("caller and callee have different strict fp requirements");
3685   }
3686 
3687   if (is_profiling() && !callee->ensure_method_data()) {
3688     INLINE_BAILOUT("mdo allocation failed");
3689   }
3690 
3691   // now perform tests that are based on flag settings
3692   bool inlinee_by_directive = compilation()->env()->dirset()->inline_commanded(callee);
3693   if (callee->force_inline() || inlinee_by_directive) {
3694     if (inline_level() > MaxForceInlineLevel                    ) INLINE_BAILOUT("MaxForceInlineLevel");
3695     if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("recursive inlining too deep");
3696 
3697     const char* msg = "";
3698     if (callee->force_inline())  msg = "force inline by annotation";
3699     if (inlinee_by_directive)    msg = "force inline by CompileCommand";
3700     print_inlining(callee, msg);
3701   } else {
3702     // use heuristic controls on inlining
3703     if (inline_level() > MaxInlineLevel                         ) INLINE_BAILOUT("inlining too deep");
3704     if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("recursive inlining too deep");
3705     if (callee->code_size_for_inlining() > max_inline_size()    ) INLINE_BAILOUT("callee is too large");
3706 
3707     // don't inline throwable methods unless the inlining tree is rooted in a throwable class
3708     if (callee->name() == ciSymbol::object_initializer_name() &&
3709         callee->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
3710       // Throwable constructor call
3711       IRScope* top = scope();
3712       while (top->caller() != NULL) {
3713         top = top->caller();
3714       }
3715       if (!top->method()->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
3716         INLINE_BAILOUT("don't inline Throwable constructors");
3717       }
3718     }
3719 


4190         log->inline_success("receiver is statically known");
4191     } else {
4192       if (msg != NULL)
4193         log->inline_fail(msg);
4194       else
4195         log->inline_fail("reason unknown");
4196     }
4197   }
4198 #if INCLUDE_TRACE
4199   EventCompilerInlining event;
4200   if (event.should_commit()) {
4201     event.set_compileID(compilation()->env()->task()->compile_id());
4202     event.set_message(msg);
4203     event.set_succeeded(success);
4204     event.set_bci(bci());
4205     event.set_caller(method()->get_Method());
4206     event.set_callee(callee->to_trace_struct());
4207     event.commit();
4208   }
4209 #endif // INCLUDE_TRACE
4210 
4211   if (!compilation()->env()->dirset()->PrintInliningOption) {
4212     return;
4213   }
4214   CompileTask::print_inlining_tty(callee, scope()->level(), bci(), msg);
4215   if (success && CIPrintMethodCodes) {
4216     callee->print_codes();
4217   }
4218 }
4219 
4220 void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
4221   Values* args = state()->pop_arguments(callee->arg_size());
4222   BasicType t = callee->return_type()->basic_type();
4223   null_check(args->at(0));
4224   Instruction* offset = args->at(2);
4225 #ifndef _LP64
4226   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4227 #endif
4228   Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4229   compilation()->set_has_unsafe_access(true);
4230   kill_all();
4231   push(op->type(), op);

