src/share/vm/c1/c1_GraphBuilder.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File 8076112 Sdiff src/share/vm/c1

src/share/vm/c1/c1_GraphBuilder.cpp

Print this page




3140   // complete graph
3141   _vmap        = new ValueMap();
3142   switch (scope->method()->intrinsic_id()) {
3143   case vmIntrinsics::_dabs          : // fall through
3144   case vmIntrinsics::_dsqrt         : // fall through
3145   case vmIntrinsics::_dsin          : // fall through
3146   case vmIntrinsics::_dcos          : // fall through
3147   case vmIntrinsics::_dtan          : // fall through
3148   case vmIntrinsics::_dlog          : // fall through
3149   case vmIntrinsics::_dlog10        : // fall through
3150   case vmIntrinsics::_dexp          : // fall through
3151   case vmIntrinsics::_dpow          : // fall through
3152     {
3153       // Compiles where the root method is an intrinsic need a special
3154       // compilation environment because the bytecodes for the method
3155       // shouldn't be parsed during the compilation, only the special
3156       // Intrinsic node should be emitted.  If this isn't done, the
3157       // code for the inlined version will be different from the root
3158       // compiled version which could lead to monotonicity problems on
3159       // intel.



3160 
3161       // Set up a stream so that appending instructions works properly.
3162       ciBytecodeStream s(scope->method());
3163       s.reset_to_bci(0);
3164       scope_data()->set_stream(&s);
3165       s.next();
3166 
3167       // setup the initial block state
3168       _block = start_block;
3169       _state = start_block->state()->copy_for_parsing();
3170       _last  = start_block;
3171       load_local(doubleType, 0);
3172       if (scope->method()->intrinsic_id() == vmIntrinsics::_dpow) {
3173         load_local(doubleType, 2);
3174       }
3175 
3176       // Emit the intrinsic node.
3177       bool result = try_inline_intrinsics(scope->method());
3178       if (!result) BAILOUT("failed to inline intrinsic");
3179       method_return(dpop());
3180 
3181       // connect the begin and end blocks and we're all done.
3182       BlockEnd* end = last()->as_BlockEnd();
3183       block()->set_end(end);
3184       break;
3185     }
3186 
3187   case vmIntrinsics::_Reference_get:
3188     {
3189       {
3190         // With java.lang.ref.reference.get() we must go through the
3191         // intrinsic - when G1 is enabled - even when get() is the root
3192         // method of the compile so that, if necessary, the value in
3193         // the referent field of the reference object gets recorded by
3194         // the pre-barrier code.
3195         // Specifically, if G1 is enabled, the value in the referent
3196         // field is recorded by the G1 SATB pre barrier. This will
3197         // result in the referent being marked live and the reference
3198         // object removed from the list of discovered references during
3199         // reference processing.



3200 
3201         // Also we need intrinsic to prevent commoning reads from this field
3202         // across safepoint since GC can change its value.
3203 
3204         // Set up a stream so that appending instructions works properly.
3205         ciBytecodeStream s(scope->method());
3206         s.reset_to_bci(0);
3207         scope_data()->set_stream(&s);
3208         s.next();
3209 
3210         // setup the initial block state
3211         _block = start_block;
3212         _state = start_block->state()->copy_for_parsing();
3213         _last  = start_block;
3214         load_local(objectType, 0);
3215 
3216         // Emit the intrinsic node.
3217         bool result = try_inline_intrinsics(scope->method());
3218         if (!result) BAILOUT("failed to inline intrinsic");
3219         method_return(apop());


3300 
3301 bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known, Bytecodes::Code bc, Value receiver) {
3302   const char* msg = NULL;
3303 
3304   // clear out any existing inline bailout condition
3305   clear_inline_bailout();
3306 
3307   // exclude methods we don't want to inline
3308   msg = should_not_inline(callee);
3309   if (msg != NULL) {
3310     print_inlining(callee, msg, /*success*/ false);
3311     return false;
3312   }
3313 
3314   // method handle invokes
3315   if (callee->is_method_handle_intrinsic()) {
3316     return try_method_handle_inline(callee);
3317   }
3318 
3319   // handle intrinsics
3320   if (callee->intrinsic_id() != vmIntrinsics::_none) {

3321     if (try_inline_intrinsics(callee)) {
3322       print_inlining(callee, "intrinsic");
3323       return true;
3324     }
3325     // try normal inlining
3326   }
3327 
3328   // certain methods cannot be parsed at all
3329   msg = check_can_parse(callee);
3330   if (msg != NULL) {
3331     print_inlining(callee, msg, /*success*/ false);
3332     return false;
3333   }
3334 
3335   // If bytecode not set use the current one.
3336   if (bc == Bytecodes::_illegal) {
3337     bc = code();
3338   }
3339   if (try_inline_full(callee, holder_known, bc, receiver))
3340     return true;


4261 }
4262 
4263 
// Intrinsify an Unsafe raw-address put: pop the call's arguments and replace
// the invocation with an UnsafePutRaw node of basic type t built from
// args->at(1) and args->at(2) (address and value — confirm order against the
// UnsafePutRaw constructor).  Does nothing unless -XX:+InlineUnsafeOps.
// Returns whether the call was intrinsified; when false the arguments remain
// on the expression stack and the caller falls back to a normal invoke.
4264 bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
4265   if (InlineUnsafeOps) {
4266     Values* args = state()->pop_arguments(callee->arg_size());
4267     null_check(args->at(0));  // args->at(0) is the receiver — presumably the Unsafe instance
4268     Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
4269     compilation()->set_has_unsafe_access(true);  // record that this compile contains unsafe accesses
4270   }
4271   return InlineUnsafeOps;
4272 }
4273 
4274 
4275 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
4276   ValueStack* state_before = copy_state_for_exception();
4277   ValueType* result_type = as_ValueType(callee->return_type());
4278   assert(result_type->is_int(), "int result");
4279   Values* args = state()->pop_arguments(callee->arg_size());
4280 
4281   // Pop off some args to speically handle, then push back
4282   Value newval = args->pop();
4283   Value cmpval = args->pop();
4284   Value offset = args->pop();
4285   Value src = args->pop();
4286   Value unsafe_obj = args->pop();
4287 
4288   // Separately handle the unsafe arg. It is not needed for code
4289   // generation, but must be null checked
4290   null_check(unsafe_obj);
4291 
4292 #ifndef _LP64
4293   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4294 #endif
4295 
4296   args->push(src);
4297   args->push(offset);
4298   args->push(cmpval);
4299   args->push(newval);
4300 
4301   // An unsafe CAS can alias with other field accesses, but we don't




3140   // complete graph
3141   _vmap        = new ValueMap();
3142   switch (scope->method()->intrinsic_id()) {
3143   case vmIntrinsics::_dabs          : // fall through
3144   case vmIntrinsics::_dsqrt         : // fall through
3145   case vmIntrinsics::_dsin          : // fall through
3146   case vmIntrinsics::_dcos          : // fall through
3147   case vmIntrinsics::_dtan          : // fall through
3148   case vmIntrinsics::_dlog          : // fall through
3149   case vmIntrinsics::_dlog10        : // fall through
3150   case vmIntrinsics::_dexp          : // fall through
3151   case vmIntrinsics::_dpow          : // fall through
3152     {
3153       // Compiles where the root method is an intrinsic need a special
3154       // compilation environment because the bytecodes for the method
3155       // shouldn't be parsed during the compilation, only the special
3156       // Intrinsic node should be emitted.  If this isn't done, the
3157       // code for the inlined version will be different from the root
3158       // compiled version which could lead to monotonicity problems on
3159       // intel.
3160       if (CheckIntrinsics && !scope->method()->intrinsic_candidate()) {
3161         BAILOUT("failed to inline intrinsic, method not annotated");
3162       }
3163 
3164       // Set up a stream so that appending instructions works properly.
3165       ciBytecodeStream s(scope->method());
3166       s.reset_to_bci(0);
3167       scope_data()->set_stream(&s);
3168       s.next();
3169 
3170       // setup the initial block state
3171       _block = start_block;
3172       _state = start_block->state()->copy_for_parsing();
3173       _last  = start_block;
3174       load_local(doubleType, 0);
3175       if (scope->method()->intrinsic_id() == vmIntrinsics::_dpow) {
3176         load_local(doubleType, 2);
3177       }
3178 
3179       // Emit the intrinsic node.
3180       bool result = try_inline_intrinsics(scope->method());
3181       if (!result) BAILOUT("failed to inline intrinsic");
3182       method_return(dpop());
3183 
3184       // connect the begin and end blocks and we're all done.
3185       BlockEnd* end = last()->as_BlockEnd();
3186       block()->set_end(end);
3187       break;
3188     }
3189 
3190   case vmIntrinsics::_Reference_get:
3191     {
3192       {
3193         // With java.lang.ref.reference.get() we must go through the
3194         // intrinsic - when G1 is enabled - even when get() is the root
3195         // method of the compile so that, if necessary, the value in
3196         // the referent field of the reference object gets recorded by
3197         // the pre-barrier code.
3198         // Specifically, if G1 is enabled, the value in the referent
3199         // field is recorded by the G1 SATB pre barrier. This will
3200         // result in the referent being marked live and the reference
3201         // object removed from the list of discovered references during
3202         // reference processing.
3203         if (CheckIntrinsics && !scope->method()->intrinsic_candidate()) {
3204           BAILOUT("failed to inline intrinsic, method not annotated");
3205         }
3206 
3207         // Also we need intrinsic to prevent commoning reads from this field
3208         // across safepoint since GC can change its value.
3209 
3210         // Set up a stream so that appending instructions works properly.
3211         ciBytecodeStream s(scope->method());
3212         s.reset_to_bci(0);
3213         scope_data()->set_stream(&s);
3214         s.next();
3215 
3216         // setup the initial block state
3217         _block = start_block;
3218         _state = start_block->state()->copy_for_parsing();
3219         _last  = start_block;
3220         load_local(objectType, 0);
3221 
3222         // Emit the intrinsic node.
3223         bool result = try_inline_intrinsics(scope->method());
3224         if (!result) BAILOUT("failed to inline intrinsic");
3225         method_return(apop());


3306 
3307 bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known, Bytecodes::Code bc, Value receiver) {
3308   const char* msg = NULL;
3309 
3310   // clear out any existing inline bailout condition
3311   clear_inline_bailout();
3312 
3313   // exclude methods we don't want to inline
3314   msg = should_not_inline(callee);
3315   if (msg != NULL) {
3316     print_inlining(callee, msg, /*success*/ false);
3317     return false;
3318   }
3319 
3320   // method handle invokes
3321   if (callee->is_method_handle_intrinsic()) {
3322     return try_method_handle_inline(callee);
3323   }
3324 
3325   // handle intrinsics
3326   if (callee->intrinsic_id() != vmIntrinsics::_none &&
3327       (CheckIntrinsics ? callee->intrinsic_candidate() : true)) {
3328     if (try_inline_intrinsics(callee)) {
3329       print_inlining(callee, "intrinsic");
3330       return true;
3331     }
3332     // try normal inlining
3333   }
3334 
3335   // certain methods cannot be parsed at all
3336   msg = check_can_parse(callee);
3337   if (msg != NULL) {
3338     print_inlining(callee, msg, /*success*/ false);
3339     return false;
3340   }
3341 
3342   // If bytecode not set use the current one.
3343   if (bc == Bytecodes::_illegal) {
3344     bc = code();
3345   }
3346   if (try_inline_full(callee, holder_known, bc, receiver))
3347     return true;


4268 }
4269 
4270 
// Intrinsify an Unsafe raw-address put: pop the call's arguments and replace
// the invocation with an UnsafePutRaw node of basic type t built from
// args->at(1) and args->at(2) (address and value — confirm order against the
// UnsafePutRaw constructor).  Does nothing unless -XX:+InlineUnsafeOps.
// Returns whether the call was intrinsified; when false the arguments remain
// on the expression stack and the caller falls back to a normal invoke.
4271 bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
4272   if (InlineUnsafeOps) {
4273     Values* args = state()->pop_arguments(callee->arg_size());
4274     null_check(args->at(0));  // args->at(0) is the receiver — presumably the Unsafe instance
4275     Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
4276     compilation()->set_has_unsafe_access(true);  // record that this compile contains unsafe accesses
4277   }
4278   return InlineUnsafeOps;
4279 }
4280 
4281 
4282 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
4283   ValueStack* state_before = copy_state_for_exception();
4284   ValueType* result_type = as_ValueType(callee->return_type());
4285   assert(result_type->is_int(), "int result");
4286   Values* args = state()->pop_arguments(callee->arg_size());
4287 
4288   // Pop off some args to specially handle, then push back
4289   Value newval = args->pop();
4290   Value cmpval = args->pop();
4291   Value offset = args->pop();
4292   Value src = args->pop();
4293   Value unsafe_obj = args->pop();
4294 
4295   // Separately handle the unsafe arg. It is not needed for code
4296   // generation, but must be null checked
4297   null_check(unsafe_obj);
4298 
4299 #ifndef _LP64
4300   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4301 #endif
4302 
4303   args->push(src);
4304   args->push(offset);
4305   args->push(cmpval);
4306   args->push(newval);
4307 
4308   // An unsafe CAS can alias with other field accesses, but we don't


src/share/vm/c1/c1_GraphBuilder.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File