src/share/vm/c1/c1_GraphBuilder.cpp
rev 8688 : 1234567: Invalid bug number
Summary: This is a local commit to reduce jprt submit time.
Reviewed-by: kvn


3355 }
3356 
3357 
3358 const char* GraphBuilder::check_can_parse(ciMethod* callee) const {
3359   // Certain methods cannot be parsed at all:
3360   if ( callee->is_native())            return "native method";
3361   if ( callee->is_abstract())          return "abstract method";
3362   if (!callee->can_be_compiled())      return "not compilable (disabled)";
3363   return NULL;
3364 }
3365 
3366 
3367 // negative filter: should callee NOT be inlined? returns NULL (ok to inline) or a rejection msg
3368 const char* GraphBuilder::should_not_inline(ciMethod* callee) const {
3369   if ( callee->should_exclude())       return "excluded by CompilerOracle";
3370   if ( callee->should_not_inline())    return "disallowed by CompilerOracle";
3371   if ( callee->dont_inline())          return "don't inline by annotation";
3372   return NULL;
3373 }
3374 
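A note in passing: the two negative filters above do not act on their own; the inlining path asks them and records the returned message. A minimal sketch of such a caller, assuming a reporting helper along the lines of the print_inlining() function that appears later in this file (its exact signature and this call site are assumptions, not part of the webrev):

    // Illustrative sketch only -- combining the two negative filters.
    const char* msg = check_can_parse(callee);
    if (msg == NULL) {
      msg = should_not_inline(callee);
    }
    if (msg != NULL) {
      print_inlining(callee, msg, /*success*/ false);  // assumed reporting helper
      return false;                                    // reject this inline attempt
    }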
3375 
3376 bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
3377   if (callee->is_synchronized()) {
3378     // We don't currently support any synchronized intrinsics
3379     return false;
3380   }
3381 
3382   // callee seems like a good candidate
3383   // determine id
3384   vmIntrinsics::ID id = callee->intrinsic_id();
3385   if (!InlineNatives && id != vmIntrinsics::_Reference_get) {
3386     // InlineNatives does not control Reference.get
3387     INLINE_BAILOUT("intrinsic method inlining disabled");
3388   }
3389   bool preserves_state = false;
3390   bool cantrap = true;
3391   switch (id) {
3392     case vmIntrinsics::_arraycopy:
3393       if (!InlineArrayCopy) return false;
3394       break;
3395 
3396 #ifdef TRACE_HAVE_INTRINSICS
3397     case vmIntrinsics::_classID:
3398     case vmIntrinsics::_threadID:
3399       preserves_state = true;
3400       cantrap = true;
3401       break;
3402 
3403     case vmIntrinsics::_counterTime:
3404       preserves_state = true;
3405       cantrap = false;
3406       break;
3407 #endif
3408 
3409     case vmIntrinsics::_currentTimeMillis:
3410     case vmIntrinsics::_nanoTime:
3411       preserves_state = true;
3412       cantrap = false;
3413       break;
3414 
3415     case vmIntrinsics::_floatToRawIntBits   :
3416     case vmIntrinsics::_intBitsToFloat      :
3417     case vmIntrinsics::_doubleToRawLongBits :
3418     case vmIntrinsics::_longBitsToDouble    :
3419       if (!InlineMathNatives) return false;
3420       preserves_state = true;
3421       cantrap = false;
3422       break;
3423 
3424     case vmIntrinsics::_getClass      :
3425     case vmIntrinsics::_isInstance    :
3426       if (!InlineClassNatives) return false;
3427       preserves_state = true;
3428       break;
3429 
3430     case vmIntrinsics::_currentThread :
3431       if (!InlineThreadNatives) return false;
3432       preserves_state = true;
3433       cantrap = false;
3434       break;
3435 
3436     case vmIntrinsics::_dabs          : // fall through
3437     case vmIntrinsics::_dsqrt         : // fall through
3438     case vmIntrinsics::_dsin          : // fall through
3439     case vmIntrinsics::_dcos          : // fall through
3440     case vmIntrinsics::_dtan          : // fall through
3441     case vmIntrinsics::_dlog          : // fall through
3442     case vmIntrinsics::_dlog10        : // fall through
3443     case vmIntrinsics::_dexp          : // fall through
3444     case vmIntrinsics::_dpow          : // fall through
3445       if (!InlineMathNatives) return false;
3446       cantrap = false;
3447       preserves_state = true;
3448       break;
3449 
3450     // Use special nodes for Unsafe instructions so we can more easily
3451     // perform an address-mode optimization on the raw variants
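     // (Illustrative note:) the "raw" variants take a bare address, e.g.
     // Unsafe.getByte(long address), and map to UnsafeGetRaw / UnsafePutRaw
     // further down; the object variants, e.g. Unsafe.getByte(Object o, long offset),
     // map to UnsafeGetObject / UnsafePutObject.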
3452     case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT,  false);
3453     case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);
3454     case vmIntrinsics::_getByte   : return append_unsafe_get_obj(callee, T_BYTE,    false);
3455     case vmIntrinsics::_getShort  : return append_unsafe_get_obj(callee, T_SHORT,   false);
3456     case vmIntrinsics::_getChar   : return append_unsafe_get_obj(callee, T_CHAR,    false);
3457     case vmIntrinsics::_getInt    : return append_unsafe_get_obj(callee, T_INT,     false);
3458     case vmIntrinsics::_getLong   : return append_unsafe_get_obj(callee, T_LONG,    false);
3459     case vmIntrinsics::_getFloat  : return append_unsafe_get_obj(callee, T_FLOAT,   false);
3460     case vmIntrinsics::_getDouble : return append_unsafe_get_obj(callee, T_DOUBLE,  false);
3461 
3462     case vmIntrinsics::_putObject : return append_unsafe_put_obj(callee, T_OBJECT,  false);
3463     case vmIntrinsics::_putBoolean: return append_unsafe_put_obj(callee, T_BOOLEAN, false);
3464     case vmIntrinsics::_putByte   : return append_unsafe_put_obj(callee, T_BYTE,    false);
3465     case vmIntrinsics::_putShort  : return append_unsafe_put_obj(callee, T_SHORT,   false);
3466     case vmIntrinsics::_putChar   : return append_unsafe_put_obj(callee, T_CHAR,    false);
3467     case vmIntrinsics::_putInt    : return append_unsafe_put_obj(callee, T_INT,     false);
3468     case vmIntrinsics::_putLong   : return append_unsafe_put_obj(callee, T_LONG,    false);
3469     case vmIntrinsics::_putFloat  : return append_unsafe_put_obj(callee, T_FLOAT,   false);
3470     case vmIntrinsics::_putDouble : return append_unsafe_put_obj(callee, T_DOUBLE,  false);
3471 
3472     case vmIntrinsics::_getShortUnaligned  :
3473       return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_SHORT,   false) : false;
3474     case vmIntrinsics::_getCharUnaligned   :
3475       return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_CHAR,    false) : false;
3476     case vmIntrinsics::_getIntUnaligned    :
3477       return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_INT,     false) : false;
3478     case vmIntrinsics::_getLongUnaligned   :
3479       return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_LONG,    false) : false;
3480 
3481     case vmIntrinsics::_putShortUnaligned  :
3482       return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_SHORT,   false) : false;
3483     case vmIntrinsics::_putCharUnaligned   :
3484       return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_CHAR,    false) : false;
3485     case vmIntrinsics::_putIntUnaligned    :
3486       return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_INT,     false) : false;
3487     case vmIntrinsics::_putLongUnaligned   :
3488       return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_LONG,    false) : false;
3489 
3490     case vmIntrinsics::_getObjectVolatile : return append_unsafe_get_obj(callee, T_OBJECT,  true);
3491     case vmIntrinsics::_getBooleanVolatile: return append_unsafe_get_obj(callee, T_BOOLEAN, true);
3492     case vmIntrinsics::_getByteVolatile   : return append_unsafe_get_obj(callee, T_BYTE,    true);
3493     case vmIntrinsics::_getShortVolatile  : return append_unsafe_get_obj(callee, T_SHORT,   true);
3494     case vmIntrinsics::_getCharVolatile   : return append_unsafe_get_obj(callee, T_CHAR,    true);
3495     case vmIntrinsics::_getIntVolatile    : return append_unsafe_get_obj(callee, T_INT,     true);
3496     case vmIntrinsics::_getLongVolatile   : return append_unsafe_get_obj(callee, T_LONG,    true);
3497     case vmIntrinsics::_getFloatVolatile  : return append_unsafe_get_obj(callee, T_FLOAT,   true);
3498     case vmIntrinsics::_getDoubleVolatile : return append_unsafe_get_obj(callee, T_DOUBLE,  true);
3499 
3500     case vmIntrinsics::_putObjectVolatile : return append_unsafe_put_obj(callee, T_OBJECT,  true);
3501     case vmIntrinsics::_putBooleanVolatile: return append_unsafe_put_obj(callee, T_BOOLEAN, true);
3502     case vmIntrinsics::_putByteVolatile   : return append_unsafe_put_obj(callee, T_BYTE,    true);
3503     case vmIntrinsics::_putShortVolatile  : return append_unsafe_put_obj(callee, T_SHORT,   true);
3504     case vmIntrinsics::_putCharVolatile   : return append_unsafe_put_obj(callee, T_CHAR,    true);
3505     case vmIntrinsics::_putIntVolatile    : return append_unsafe_put_obj(callee, T_INT,     true);
3506     case vmIntrinsics::_putLongVolatile   : return append_unsafe_put_obj(callee, T_LONG,    true);
3507     case vmIntrinsics::_putFloatVolatile  : return append_unsafe_put_obj(callee, T_FLOAT,   true);
3508     case vmIntrinsics::_putDoubleVolatile : return append_unsafe_put_obj(callee, T_DOUBLE,  true);
3509 
3510     case vmIntrinsics::_getByte_raw   : return append_unsafe_get_raw(callee, T_BYTE);
3511     case vmIntrinsics::_getShort_raw  : return append_unsafe_get_raw(callee, T_SHORT);
3512     case vmIntrinsics::_getChar_raw   : return append_unsafe_get_raw(callee, T_CHAR);
3513     case vmIntrinsics::_getInt_raw    : return append_unsafe_get_raw(callee, T_INT);
3514     case vmIntrinsics::_getLong_raw   : return append_unsafe_get_raw(callee, T_LONG);
3515     case vmIntrinsics::_getFloat_raw  : return append_unsafe_get_raw(callee, T_FLOAT);
3516     case vmIntrinsics::_getDouble_raw : return append_unsafe_get_raw(callee, T_DOUBLE);
3517 
3518     case vmIntrinsics::_putByte_raw   : return append_unsafe_put_raw(callee, T_BYTE);
3519     case vmIntrinsics::_putShort_raw  : return append_unsafe_put_raw(callee, T_SHORT);
3520     case vmIntrinsics::_putChar_raw   : return append_unsafe_put_raw(callee, T_CHAR);
3521     case vmIntrinsics::_putInt_raw    : return append_unsafe_put_raw(callee, T_INT);
3522     case vmIntrinsics::_putLong_raw   : return append_unsafe_put_raw(callee, T_LONG);
3523     case vmIntrinsics::_putFloat_raw  : return append_unsafe_put_raw(callee, T_FLOAT);
3524     case vmIntrinsics::_putDouble_raw : return append_unsafe_put_raw(callee, T_DOUBLE);
3525 
3526     case vmIntrinsics::_checkIndex    :
3527       if (!InlineNIOCheckIndex) return false;
3528       preserves_state = true;
3529       break;
3530     case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT,  true);
3531     case vmIntrinsics::_putOrderedInt    : return append_unsafe_put_obj(callee, T_INT,     true);
3532     case vmIntrinsics::_putOrderedLong   : return append_unsafe_put_obj(callee, T_LONG,    true);
3533 
3534     case vmIntrinsics::_compareAndSwapLong:
3535       if (!VM_Version::supports_cx8()) return false;
3536       // fall through
3537     case vmIntrinsics::_compareAndSwapInt:
3538     case vmIntrinsics::_compareAndSwapObject:
3539       append_unsafe_CAS(callee);
3540       return true;
3541 
3542     case vmIntrinsics::_getAndAddInt:
3543       if (!VM_Version::supports_atomic_getadd4()) {
3544         return false;
3545       }
3546       return append_unsafe_get_and_set_obj(callee, true);
3547     case vmIntrinsics::_getAndAddLong:
3548       if (!VM_Version::supports_atomic_getadd8()) {
3549         return false;
3550       }
3551       return append_unsafe_get_and_set_obj(callee, true);
3552     case vmIntrinsics::_getAndSetInt:
3553       if (!VM_Version::supports_atomic_getset4()) {
3554         return false;
3555       }
3556       return append_unsafe_get_and_set_obj(callee, false);
3557     case vmIntrinsics::_getAndSetLong:
3558       if (!VM_Version::supports_atomic_getset8()) {
3559         return false;
3560       }
3561       return append_unsafe_get_and_set_obj(callee, false);
3562     case vmIntrinsics::_getAndSetObject:
3563 #ifdef _LP64
3564       if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) {
3565         return false;
3566       }
3567       if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) {
3568         return false;
3569       }
3570 #else
3571       if (!VM_Version::supports_atomic_getset4()) {
3572         return false;
3573       }
3574 #endif
3575       return append_unsafe_get_and_set_obj(callee, false);
3576 
3577     case vmIntrinsics::_Reference_get:
3578       // Use the intrinsic version of Reference.get() so that the value in
3579       // the referent field can be registered by the G1 pre-barrier code.
3580       // Also, to prevent commoning reads from this field across safepoints,
3581       // since a GC can change its value.
3582       preserves_state = true;
3583       break;
3584 
3585     case vmIntrinsics::_updateCRC32:
3586     case vmIntrinsics::_updateBytesCRC32:
3587     case vmIntrinsics::_updateByteBufferCRC32:
3588       if (!UseCRC32Intrinsics) return false;
3589       cantrap = false;
3590       preserves_state = true;
3591       break;
3592 
3593     case vmIntrinsics::_loadFence :
3594     case vmIntrinsics::_storeFence:
3595     case vmIntrinsics::_fullFence :

3596       break;
3597 
3598     default                       : return false; // do not inline
3599   }
3600   // create intrinsic node
3601   const bool has_receiver = !callee->is_static();
3602   ValueType* result_type = as_ValueType(callee->return_type());
3603   ValueStack* state_before = copy_state_for_exception();
3604 
3605   Values* args = state()->pop_arguments(callee->arg_size());
3606 
3607   if (is_profiling()) {
3608     // Don't profile in the special case where the root method
3609     // is the intrinsic
3610     if (callee != method()) {
3611       // Note that we'd collect profile data in this method if we wanted it.
3612       compilation()->set_would_profile(true);
3613       if (profile_calls()) {
3614         Value recv = NULL;
3615         if (has_receiver) {
3616           recv = args->at(0);
3617           null_check(recv);
3618         }
3619         profile_call(callee, recv, NULL, collect_args_for_profiling(args, callee, true), true);
3620       }
3621     }
3622   }
3623 
3624   Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
3625                                     preserves_state, cantrap);


3626   // append instruction & push result
3627   Value value = append_split(result);
3628   if (result_type != voidType) push(result_type, value);
3629 
3630   if (callee != method() && profile_return() && result_type->is_object_kind()) {
3631     profile_return_type(result, callee);
3632   }

3633 
3634   // done
3635   return true;
3636 }
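To make the flag handling above concrete: for an intrinsic like _dsqrt the switch leaves preserves_state = true and cantrap = false, and the tail of the method then boils down to roughly the following (an illustrative condensation of the code above, not a verbatim excerpt; the profiling block is elided):

    Values* args = state()->pop_arguments(callee->arg_size());   // the double argument
    Intrinsic* result = new Intrinsic(doubleType, vmIntrinsics::_dsqrt, args,
                                      false /*has_receiver*/, copy_state_for_exception(),
                                      true /*preserves_state*/, false /*cantrap*/);
    push(doubleType, append_split(result));                      // sqrt returns a double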
3637 
3638 
3639 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3640   // Introduce a new callee continuation point - all Ret instructions
3641   // will be replaced with Gotos to this point.
3642   BlockBegin* cont = block_at(next_bci());
3643   assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr)");
3644 
3645   // Note: can not assign state to continuation yet, as we have to
3646   // pick up the state from the Ret instructions.
3647 
3648   // Push callee scope
3649   push_scope_for_jsr(cont, jsr_dest_bci);
3650 
3651   // Temporarily set up bytecode stream so we can append instructions
3652   // (only using the bci of this stream)
3653   scope_data()->set_stream(scope_data()->parent()->stream());
3654 


4207   data->set_scope(scope());
4208   data->setup_jsr_xhandlers();
4209   data->set_continuation(continuation());
4210   data->set_jsr_continuation(jsr_continuation);
4211   _scope_data = data;
4212 }
4213 
4214 
4215 void GraphBuilder::pop_scope() {
4216   int number_of_locks = scope()->number_of_locks();
4217   _scope_data = scope_data()->parent();
4218   // accumulate minimum number of monitor slots to be reserved
4219   scope()->set_min_number_of_locks(number_of_locks);
4220 }
4221 
4222 
4223 void GraphBuilder::pop_scope_for_jsr() {
4224   _scope_data = scope_data()->parent();
4225 }
4226 
4227 bool GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
4228   if (InlineUnsafeOps) {
4229     Values* args = state()->pop_arguments(callee->arg_size());
4230     null_check(args->at(0));
4231     Instruction* offset = args->at(2);
4232 #ifndef _LP64
4233     offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4234 #endif
4235     Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
4236     push(op->type(), op);
4237     compilation()->set_has_unsafe_access(true);
4238   }
4239   return InlineUnsafeOps;
4240 }
4241 
4242 
4243 bool GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
4244   if (InlineUnsafeOps) {
4245     Values* args = state()->pop_arguments(callee->arg_size());
4246     null_check(args->at(0));
4247     Instruction* offset = args->at(2);
4248 #ifndef _LP64
4249     offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4250 #endif
4251     Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, args->at(3), is_volatile));
4252     compilation()->set_has_unsafe_access(true);
4253     kill_all();
4254   }
4255   return InlineUnsafeOps;
4256 }
4257 
4258 
4259 bool GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
4260   if (InlineUnsafeOps) {
4261     Values* args = state()->pop_arguments(callee->arg_size());
4262     null_check(args->at(0));
4263     Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
4264     push(op->type(), op);
4265     compilation()->set_has_unsafe_access(true);
4266   }
4267   return InlineUnsafeOps;
4268 }
4269 
4270 
4271 bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
4272   if (InlineUnsafeOps) {
4273     Values* args = state()->pop_arguments(callee->arg_size());
4274     null_check(args->at(0));
4275     Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
4276     compilation()->set_has_unsafe_access(true);
4277   }
4278   return InlineUnsafeOps;
4279 }
4280 
4281 
4282 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
4283   ValueStack* state_before = copy_state_for_exception();
4284   ValueType* result_type = as_ValueType(callee->return_type());
4285   assert(result_type->is_int(), "int result");
4286   Values* args = state()->pop_arguments(callee->arg_size());
4287 
4288   // Pop off some args to specially handle, then push back
4289   Value newval = args->pop();
4290   Value cmpval = args->pop();
4291   Value offset = args->pop();
4292   Value src = args->pop();
4293   Value unsafe_obj = args->pop();
4294 
4295   // Separately handle the unsafe arg. It is not needed for code
4296   // generation, but must be null checked
4297   null_check(unsafe_obj);
4298 


4335   EventCompilerInlining event;
4336   if (event.should_commit()) {
4337     event.set_compileID(compilation()->env()->task()->compile_id());
4338     event.set_message(msg);
4339     event.set_succeeded(success);
4340     event.set_bci(bci());
4341     event.set_caller(method()->get_Method());
4342     event.set_callee(callee->to_trace_struct());
4343     event.commit();
4344   }
4345 #endif // INCLUDE_TRACE
4346   if (!PrintInlining && !compilation()->method()->has_option("PrintInlining")) {
4347     return;
4348   }
4349   CompileTask::print_inlining(callee, scope()->level(), bci(), msg);
4350   if (success && CIPrintMethodCodes) {
4351     callee->print_codes();
4352   }
4353 }
4354 
4355 bool GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
4356   if (InlineUnsafeOps) {
4357     Values* args = state()->pop_arguments(callee->arg_size());
4358     BasicType t = callee->return_type()->basic_type();
4359     null_check(args->at(0));
4360     Instruction* offset = args->at(2);
4361 #ifndef _LP64
4362     offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4363 #endif
4364     Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4365     compilation()->set_has_unsafe_access(true);
4366     kill_all();
4367     push(op->type(), op);
4368   }
4369   return InlineUnsafeOps;
4370 }
4371 
4372 #ifndef PRODUCT
4373 void GraphBuilder::print_stats() {
4374   vmap()->print();
4375 }
4376 #endif // PRODUCT
4377 
4378 void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {
4379   assert(known_holder == NULL || (known_holder->is_instance_klass() &&
4380                                   (!known_holder->is_interface() ||
4381                                    ((ciInstanceKlass*)known_holder)->has_default_methods())), "should be default method");
4382   if (known_holder != NULL) {
4383     if (known_holder->exact_klass() == NULL) {
4384       known_holder = compilation()->cha_exact_type(known_holder);
4385     }
4386   }
4387 
4388   append(new ProfileCall(method(), bci(), callee, recv, known_holder, obj_args, inlined));
4389 }


3355 }
3356 
3357 
3358 const char* GraphBuilder::check_can_parse(ciMethod* callee) const {
3359   // Certain methods cannot be parsed at all:
3360   if ( callee->is_native())            return "native method";
3361   if ( callee->is_abstract())          return "abstract method";
3362   if (!callee->can_be_compiled())      return "not compilable (disabled)";
3363   return NULL;
3364 }
3365 
3366 
3367 // negative filter: should callee NOT be inlined? returns NULL (ok to inline) or a rejection msg
3368 const char* GraphBuilder::should_not_inline(ciMethod* callee) const {
3369   if ( callee->should_exclude())       return "excluded by CompilerOracle";
3370   if ( callee->should_not_inline())    return "disallowed by CompilerOracle";
3371   if ( callee->dont_inline())          return "don't inline by annotation";
3372   return NULL;
3373 }
3374 
3375 bool GraphBuilder::is_intrinsic_available_for(Method* method) {
3376   vmIntrinsics::ID id = method->intrinsic_id();
3377   if (id < vmIntrinsics::FIRST_ID || id >= vmIntrinsics::ID_LIMIT) {
3378     return false;
3379   }
3380 
3381   if (method->is_synchronized()) {
3382     // C1 does not support intrinsification of synchronized methods.

3383     return false;
3384   }
3385 
3386   // -XX:-InlineNatives disables all intrinsics except
3387   // java.lang.ref.Reference::get.
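   // (Reference.get() stays exempt so that its intrinsic form, which lets the
   // G1 pre-barrier see the loaded referent, is still used; see the
   // _Reference_get case below.)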

3388   if (!InlineNatives && id != vmIntrinsics::_Reference_get) {
3389     return false;

3390   }
3391 

3392   switch (id) {
3393   case vmIntrinsics::_arraycopy:
3394     if (!InlineArrayCopy) return false;
3395     break;

3396 #ifdef TRACE_HAVE_INTRINSICS
3397   case vmIntrinsics::_classID:
3398   case vmIntrinsics::_threadID:




3399   case vmIntrinsics::_counterTime:



3400 #endif

3401   case vmIntrinsics::_currentTimeMillis:
3402   case vmIntrinsics::_nanoTime:
3403   case vmIntrinsics::_Reference_get:
3404     // Use the intrinsic version of Reference.get() so that the value in
3405     // the referent field can be registered by the G1 pre-barrier code.
3406     // Also, to prevent commoning reads from this field across safepoints,
3407     // since a GC can change its value.
3408   case vmIntrinsics::_loadFence:
3409   case vmIntrinsics::_storeFence:
3410   case vmIntrinsics::_fullFence:
3411     break;
3412   case vmIntrinsics::_floatToRawIntBits:
3413   case vmIntrinsics::_intBitsToFloat:
3414   case vmIntrinsics::_doubleToRawLongBits:
3415   case vmIntrinsics::_longBitsToDouble:

3416     if (!InlineMathNatives) return false;


3417     break;
3418   case vmIntrinsics::_getClass:
3419   case vmIntrinsics::_isInstance:

3420     if (!InlineClassNatives) return false;

3421     break;
3422   case vmIntrinsics::_currentThread:

3423     if (!InlineThreadNatives) return false;


3424     break;
3425   case vmIntrinsics::_dabs:
3426   case vmIntrinsics::_dsqrt:
3427   case vmIntrinsics::_dsin:
3428   case vmIntrinsics::_dcos:
3429   case vmIntrinsics::_dtan:
3430   case vmIntrinsics::_dlog:
3431   case vmIntrinsics::_dlog10:
3432   case vmIntrinsics::_dexp:
3433   case vmIntrinsics::_dpow:

3434     if (!InlineMathNatives) return false;


3435     break;
3436   case vmIntrinsics::_getObject:
3437   case vmIntrinsics::_getBoolean:
3438   case vmIntrinsics::_getByte:
3439   case vmIntrinsics::_getShort:
3440   case vmIntrinsics::_getChar:
3441   case vmIntrinsics::_getInt:
3442   case vmIntrinsics::_getLong:
3443   case vmIntrinsics::_getFloat:
3444   case vmIntrinsics::_getDouble:
3445   case vmIntrinsics::_putObject:
3446   case vmIntrinsics::_putBoolean:
3447   case vmIntrinsics::_putByte:
3448   case vmIntrinsics::_putShort:
3449   case vmIntrinsics::_putChar:
3450   case vmIntrinsics::_putInt:
3451   case vmIntrinsics::_putLong:
3452   case vmIntrinsics::_putFloat:
3453   case vmIntrinsics::_putDouble:
3454   case vmIntrinsics::_getObjectVolatile:
3455   case vmIntrinsics::_getBooleanVolatile:
3456   case vmIntrinsics::_getByteVolatile:
3457   case vmIntrinsics::_getShortVolatile:
3458   case vmIntrinsics::_getCharVolatile:
3459   case vmIntrinsics::_getIntVolatile:
3460   case vmIntrinsics::_getLongVolatile:
3461   case vmIntrinsics::_getFloatVolatile:
3462   case vmIntrinsics::_getDoubleVolatile:
3463   case vmIntrinsics::_putObjectVolatile:
3464   case vmIntrinsics::_putBooleanVolatile:
3465   case vmIntrinsics::_putByteVolatile:
3466   case vmIntrinsics::_putShortVolatile:
3467   case vmIntrinsics::_putCharVolatile:
3468   case vmIntrinsics::_putIntVolatile:
3469   case vmIntrinsics::_putLongVolatile:
3470   case vmIntrinsics::_putFloatVolatile:
3471   case vmIntrinsics::_putDoubleVolatile:
3472   case vmIntrinsics::_getByte_raw:
3473   case vmIntrinsics::_getShort_raw:
3474   case vmIntrinsics::_getChar_raw:
3475   case vmIntrinsics::_getInt_raw:
3476   case vmIntrinsics::_getLong_raw:
3477   case vmIntrinsics::_getFloat_raw:
3478   case vmIntrinsics::_getDouble_raw:
3479   case vmIntrinsics::_putByte_raw:
3480   case vmIntrinsics::_putShort_raw:
3481   case vmIntrinsics::_putChar_raw:
3482   case vmIntrinsics::_putInt_raw:
3483   case vmIntrinsics::_putLong_raw:
3484   case vmIntrinsics::_putFloat_raw:
3485   case vmIntrinsics::_putDouble_raw:
3486   case vmIntrinsics::_putOrderedObject:
3487   case vmIntrinsics::_putOrderedInt:
3488   case vmIntrinsics::_putOrderedLong:
3489     if (!InlineUnsafeOps) return false;
3490     break;
3491   case vmIntrinsics::_getAndAddInt:
3492     if (!InlineUnsafeOps || !VM_Version::supports_atomic_getadd4()) return false;
3493     break;


3494   case vmIntrinsics::_getAndAddLong:
3495     if (!InlineUnsafeOps || !VM_Version::supports_atomic_getadd8()) return false;
3496     break;


3497   case vmIntrinsics::_getAndSetInt:
3498     if (!InlineUnsafeOps || !VM_Version::supports_atomic_getset4()) return false;
3499     break;


3500   case vmIntrinsics::_getAndSetLong:
3501     if (!InlineUnsafeOps || !VM_Version::supports_atomic_getset8()) return false;
3502     break;


3503   case vmIntrinsics::_getAndSetObject:
3504 #ifdef _LP64
3505     if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) return false;
3506     if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) return false;




3507 #else
3508     if (!VM_Version::supports_atomic_getset4()) return false;


3509 #endif
3510     if (!InlineUnsafeOps) return false;
3511     break;
3512   case vmIntrinsics::_getShortUnaligned:
3513   case vmIntrinsics::_getCharUnaligned:
3514   case vmIntrinsics::_getIntUnaligned:
3515   case vmIntrinsics::_getLongUnaligned:
3516   case vmIntrinsics::_putShortUnaligned:
3517   case vmIntrinsics::_putCharUnaligned:
3518   case vmIntrinsics::_putIntUnaligned:
3519   case vmIntrinsics::_putLongUnaligned:
3520     if (!UseUnalignedAccesses || !InlineUnsafeOps) return false;
3521     break;
3522   case vmIntrinsics::_checkIndex:
3523     if (!InlineNIOCheckIndex) return false;
3524     break;

3525   case vmIntrinsics::_updateCRC32:
3526   case vmIntrinsics::_updateBytesCRC32:
3527   case vmIntrinsics::_updateByteBufferCRC32:
3528     if (!UseCRC32Intrinsics) return false;


3529     break;
3530   case vmIntrinsics::_compareAndSwapLong:
3531     if (!VM_Version::supports_cx8()) return false;
3532     // fall through
3533   case vmIntrinsics::_compareAndSwapInt:
3534   case vmIntrinsics::_compareAndSwapObject:
3535     break;
3536   default:
3537     return false; // Intrinsics not listed above are not available.
3538   }
3539 
3540   return true;
3541 }
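The practical gain from hoisting the availability check into its own predicate is that it can be asked without building any IR or popping arguments. A hedged usage sketch (the call site below is hypothetical; only the query itself is part of this change):

    // Inside GraphBuilder, somewhere that only needs a yes/no answer:
    Method* m = callee->get_Method();
    if (is_intrinsic_available_for(m)) {
      // C1 can replace calls to this method with intrinsic IR; nothing on the
      // value stack had to be touched to find that out.
    }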
3542 
3543 bool GraphBuilder::intrinsic_preserves_state(vmIntrinsics::ID id) {
3544   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
3545   switch(id) {
3546 #ifdef TRACE_HAVE_INTRINSICS
3547   case vmIntrinsics::_classID:
3548   case vmIntrinsics::_threadID:
3549   case vmIntrinsics::_counterTime:
3550 #endif
3551   case vmIntrinsics::_currentTimeMillis:
3552   case vmIntrinsics::_nanoTime:
3553   case vmIntrinsics::_floatToRawIntBits:
3554   case vmIntrinsics::_intBitsToFloat:
3555   case vmIntrinsics::_doubleToRawLongBits:
3556   case vmIntrinsics::_longBitsToDouble:
3557   case vmIntrinsics::_getClass:
3558   case vmIntrinsics::_isInstance:
3559   case vmIntrinsics::_currentThread:
3560   case vmIntrinsics::_dabs:
3561   case vmIntrinsics::_dsqrt:
3562   case vmIntrinsics::_dsin:
3563   case vmIntrinsics::_dcos:
3564   case vmIntrinsics::_dtan:
3565   case vmIntrinsics::_dlog:
3566   case vmIntrinsics::_dlog10:
3567   case vmIntrinsics::_dexp:
3568   case vmIntrinsics::_dpow:
3569   case vmIntrinsics::_checkIndex:
3570   case vmIntrinsics::_Reference_get:
3571   case vmIntrinsics::_updateCRC32:
3572   case vmIntrinsics::_updateBytesCRC32:
3573   case vmIntrinsics::_updateByteBufferCRC32:
3574     return true;
3575   default:
3576     return false;
3577   }
3578 }
3579 
3580 bool GraphBuilder::intrinsic_can_trap(vmIntrinsics::ID id) {
3581   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
3582   switch(id) {
3583 #ifdef TRACE_HAVE_INTRINSICS
3584   case vmIntrinsics::_counterTime:
3585 #endif
3586   case vmIntrinsics::_currentTimeMillis:
3587   case vmIntrinsics::_nanoTime:
3588   case vmIntrinsics::_floatToRawIntBits:
3589   case vmIntrinsics::_intBitsToFloat:
3590   case vmIntrinsics::_doubleToRawLongBits:
3591   case vmIntrinsics::_longBitsToDouble:
3592   case vmIntrinsics::_currentThread:
3593   case vmIntrinsics::_dabs:
3594   case vmIntrinsics::_dsqrt:
3595   case vmIntrinsics::_dsin:
3596   case vmIntrinsics::_dcos:
3597   case vmIntrinsics::_dtan:
3598   case vmIntrinsics::_dlog:
3599   case vmIntrinsics::_dlog10:
3600   case vmIntrinsics::_dexp:
3601   case vmIntrinsics::_dpow:
3602   case vmIntrinsics::_updateCRC32:
3603   case vmIntrinsics::_updateBytesCRC32:
3604   case vmIntrinsics::_updateByteBufferCRC32:
3605     return false;
3606   default:
3607     return true;
3608   }
3609 }
3610 
3611 void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee) {
3612   vmIntrinsics::ID id = callee->intrinsic_id();
3613   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
3614 
3615   // Some intrinsics need special IR nodes.
3616   switch(id) {
3617   case vmIntrinsics::_getObject          : append_unsafe_get_obj(callee, T_OBJECT,  false); return;
3618   case vmIntrinsics::_getBoolean         : append_unsafe_get_obj(callee, T_BOOLEAN, false); return;
3619   case vmIntrinsics::_getByte            : append_unsafe_get_obj(callee, T_BYTE,    false); return;
3620   case vmIntrinsics::_getShort           : append_unsafe_get_obj(callee, T_SHORT,   false); return;
3621   case vmIntrinsics::_getChar            : append_unsafe_get_obj(callee, T_CHAR,    false); return;
3622   case vmIntrinsics::_getInt             : append_unsafe_get_obj(callee, T_INT,     false); return;
3623   case vmIntrinsics::_getLong            : append_unsafe_get_obj(callee, T_LONG,    false); return;
3624   case vmIntrinsics::_getFloat           : append_unsafe_get_obj(callee, T_FLOAT,   false); return;
3625   case vmIntrinsics::_getDouble          : append_unsafe_get_obj(callee, T_DOUBLE,  false); return;
3626   case vmIntrinsics::_putObject          : append_unsafe_put_obj(callee, T_OBJECT,  false); return;
3627   case vmIntrinsics::_putBoolean         : append_unsafe_put_obj(callee, T_BOOLEAN, false); return;
3628   case vmIntrinsics::_putByte            : append_unsafe_put_obj(callee, T_BYTE,    false); return;
3629   case vmIntrinsics::_putShort           : append_unsafe_put_obj(callee, T_SHORT,   false); return;
3630   case vmIntrinsics::_putChar            : append_unsafe_put_obj(callee, T_CHAR,    false); return;
3631   case vmIntrinsics::_putInt             : append_unsafe_put_obj(callee, T_INT,     false); return;
3632   case vmIntrinsics::_putLong            : append_unsafe_put_obj(callee, T_LONG,    false); return;
3633   case vmIntrinsics::_putFloat           : append_unsafe_put_obj(callee, T_FLOAT,   false); return;
3634   case vmIntrinsics::_putDouble          : append_unsafe_put_obj(callee, T_DOUBLE,  false); return;
3635   case vmIntrinsics::_getShortUnaligned  : append_unsafe_get_obj(callee, T_SHORT,   false); return;
3636   case vmIntrinsics::_getCharUnaligned   : append_unsafe_get_obj(callee, T_CHAR,    false); return;
3637   case vmIntrinsics::_getIntUnaligned    : append_unsafe_get_obj(callee, T_INT,     false); return;
3638   case vmIntrinsics::_getLongUnaligned   : append_unsafe_get_obj(callee, T_LONG,    false); return;
3639   case vmIntrinsics::_putShortUnaligned  : append_unsafe_put_obj(callee, T_SHORT,   false); return;
3640   case vmIntrinsics::_putCharUnaligned   : append_unsafe_put_obj(callee, T_CHAR,    false); return;
3641   case vmIntrinsics::_putIntUnaligned    : append_unsafe_put_obj(callee, T_INT,     false); return;
3642   case vmIntrinsics::_putLongUnaligned   : append_unsafe_put_obj(callee, T_LONG,    false); return;
3643   case vmIntrinsics::_getObjectVolatile  : append_unsafe_get_obj(callee, T_OBJECT,  true); return;
3644   case vmIntrinsics::_getBooleanVolatile : append_unsafe_get_obj(callee, T_BOOLEAN, true); return;
3645   case vmIntrinsics::_getByteVolatile    : append_unsafe_get_obj(callee, T_BYTE,    true); return;
3646   case vmIntrinsics::_getShortVolatile   : append_unsafe_get_obj(callee, T_SHORT,   true); return;
3647   case vmIntrinsics::_getCharVolatile    : append_unsafe_get_obj(callee, T_CHAR,    true); return;
3648   case vmIntrinsics::_getIntVolatile     : append_unsafe_get_obj(callee, T_INT,     true); return;
3649   case vmIntrinsics::_getLongVolatile    : append_unsafe_get_obj(callee, T_LONG,    true); return;
3650   case vmIntrinsics::_getFloatVolatile   : append_unsafe_get_obj(callee, T_FLOAT,   true); return;
3651   case vmIntrinsics::_getDoubleVolatile  : append_unsafe_get_obj(callee, T_DOUBLE,  true); return;
3652   case vmIntrinsics::_putObjectVolatile  : append_unsafe_put_obj(callee, T_OBJECT,  true); return;
3653   case vmIntrinsics::_putBooleanVolatile : append_unsafe_put_obj(callee, T_BOOLEAN, true); return;
3654   case vmIntrinsics::_putByteVolatile    : append_unsafe_put_obj(callee, T_BYTE,    true); return;
3655   case vmIntrinsics::_putShortVolatile   : append_unsafe_put_obj(callee, T_SHORT,   true); return;
3656   case vmIntrinsics::_putCharVolatile    : append_unsafe_put_obj(callee, T_CHAR,    true); return;
3657   case vmIntrinsics::_putIntVolatile     : append_unsafe_put_obj(callee, T_INT,     true); return;
3658   case vmIntrinsics::_putLongVolatile    : append_unsafe_put_obj(callee, T_LONG,    true); return;
3659   case vmIntrinsics::_putFloatVolatile   : append_unsafe_put_obj(callee, T_FLOAT,   true); return;
3660   case vmIntrinsics::_putDoubleVolatile  : append_unsafe_put_obj(callee, T_DOUBLE,  true); return;
3661   case vmIntrinsics::_getByte_raw        : append_unsafe_get_raw(callee, T_BYTE  ); return;
3662   case vmIntrinsics::_getShort_raw       : append_unsafe_get_raw(callee, T_SHORT ); return;
3663   case vmIntrinsics::_getChar_raw        : append_unsafe_get_raw(callee, T_CHAR  ); return;
3664   case vmIntrinsics::_getInt_raw         : append_unsafe_get_raw(callee, T_INT   ); return;
3665   case vmIntrinsics::_getLong_raw        : append_unsafe_get_raw(callee, T_LONG  ); return;
3666   case vmIntrinsics::_getFloat_raw       : append_unsafe_get_raw(callee, T_FLOAT ); return;
3667   case vmIntrinsics::_getDouble_raw      : append_unsafe_get_raw(callee, T_DOUBLE); return;
3668   case vmIntrinsics::_putByte_raw        : append_unsafe_put_raw(callee, T_BYTE  ); return;
3669   case vmIntrinsics::_putShort_raw       : append_unsafe_put_raw(callee, T_SHORT ); return;
3670   case vmIntrinsics::_putChar_raw        : append_unsafe_put_raw(callee, T_CHAR  ); return;
3671   case vmIntrinsics::_putInt_raw         : append_unsafe_put_raw(callee, T_INT   ); return;
3672   case vmIntrinsics::_putLong_raw        : append_unsafe_put_raw(callee, T_LONG  ); return;
3673   case vmIntrinsics::_putFloat_raw       : append_unsafe_put_raw(callee, T_FLOAT ); return;
3674   case vmIntrinsics::_putDouble_raw      : append_unsafe_put_raw(callee, T_DOUBLE);  return;
3675   case vmIntrinsics::_putOrderedObject   : append_unsafe_put_obj(callee, T_OBJECT,  true); return;
3676   case vmIntrinsics::_putOrderedInt      : append_unsafe_put_obj(callee, T_INT,     true); return;
3677   case vmIntrinsics::_putOrderedLong     : append_unsafe_put_obj(callee, T_LONG,    true); return;
3678   case vmIntrinsics::_compareAndSwapLong:
3679   case vmIntrinsics::_compareAndSwapInt:
3680   case vmIntrinsics::_compareAndSwapObject: append_unsafe_CAS(callee); return;
3681   case vmIntrinsics::_getAndAddInt:
3682   case vmIntrinsics::_getAndAddLong      : append_unsafe_get_and_set_obj(callee, true); return;
3683   case vmIntrinsics::_getAndSetInt       :
3684   case vmIntrinsics::_getAndSetLong      :
3685   case vmIntrinsics::_getAndSetObject    : append_unsafe_get_and_set_obj(callee, false); return;
3686   default:
3687     break;
3688   }
3689 
3690   // create intrinsic node
3691   const bool has_receiver = !callee->is_static();
3692   ValueType* result_type = as_ValueType(callee->return_type());
3693   ValueStack* state_before = copy_state_for_exception();
3694 
3695   Values* args = state()->pop_arguments(callee->arg_size());
3696 
3697   if (is_profiling()) {
3698     // Don't profile in the special case where the root method
3699     // is the intrinsic
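    // (That is, callee == method(): the intrinsic method itself is the root of
    // this compilation, e.g. when something like java.lang.Math.sqrt is the
    // method being compiled.)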
3700     if (callee != method()) {
3701       // Note that we'd collect profile data in this method if we wanted it.
3702       compilation()->set_would_profile(true);
3703       if (profile_calls()) {
3704         Value recv = NULL;
3705         if (has_receiver) {
3706           recv = args->at(0);
3707           null_check(recv);
3708         }
3709         profile_call(callee, recv, NULL, collect_args_for_profiling(args, callee, true), true);
3710       }
3711     }
3712   }
3713 
3714   Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(),
3715                                     args, has_receiver, state_before,
3716                                     intrinsic_preserves_state(id),
3717                                     intrinsic_can_trap(id));
3718   // append instruction & push result
3719   Value value = append_split(result);
3720   if (result_type != voidType) push(result_type, value);
3721 
3722   if (callee != method() && profile_return() && result_type->is_object_kind()) {
3723     profile_return_type(result, callee);
3724   }
3725 }
3726 
3727 bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
3728   Method* m = callee->get_Method();
3729   if (!is_intrinsic_available_for(m)) {
3730     if (!InlineNatives) {
3731       // Intrinsic inlining is disabled altogether (-XX:-InlineNatives), so
3732       // bail out with a message instead of just returning false.
3733       INLINE_BAILOUT("intrinsic method inlining disabled");
3734     } else {
3735       return false;
3736     }
3737   }
3738   build_graph_for_intrinsic(callee);
3739   return true;
3740 }
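A quick behavioral summary of the rewritten entry point, as implied by the code above (reviewer commentary, not part of the change itself):

    // -XX:-InlineNatives   : every intrinsic except Reference.get is reported
    //                        unavailable, and the bailout message
    //                        "intrinsic method inlining disabled" is recorded.
    // -XX:-InlineArrayCopy : _arraycopy is unavailable, but InlineNatives is still
    //                        true, so try_inline_intrinsics() just returns false and
    //                        the call is compiled as an ordinary invoke.
    // available intrinsic  : build_graph_for_intrinsic() emits the IR and the
    //                        method returns true.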
3741 
3742 
3743 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3744   // Introduce a new callee continuation point - all Ret instructions
3745   // will be replaced with Gotos to this point.
3746   BlockBegin* cont = block_at(next_bci());
3747   assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr)");
3748 
3749   // Note: can not assign state to continuation yet, as we have to
3750   // pick up the state from the Ret instructions.
3751 
3752   // Push callee scope
3753   push_scope_for_jsr(cont, jsr_dest_bci);
3754 
3755   // Temporarily set up bytecode stream so we can append instructions
3756   // (only using the bci of this stream)
3757   scope_data()->set_stream(scope_data()->parent()->stream());
3758 


4311   data->set_scope(scope());
4312   data->setup_jsr_xhandlers();
4313   data->set_continuation(continuation());
4314   data->set_jsr_continuation(jsr_continuation);
4315   _scope_data = data;
4316 }
4317 
4318 
4319 void GraphBuilder::pop_scope() {
4320   int number_of_locks = scope()->number_of_locks();
4321   _scope_data = scope_data()->parent();
4322   // accumulate minimum number of monitor slots to be reserved
4323   scope()->set_min_number_of_locks(number_of_locks);
4324 }
4325 
4326 
4327 void GraphBuilder::pop_scope_for_jsr() {
4328   _scope_data = scope_data()->parent();
4329 }
4330 
4331 void GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {

4332   Values* args = state()->pop_arguments(callee->arg_size());
4333   null_check(args->at(0));
4334   Instruction* offset = args->at(2);
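  // (Illustrative note) For e.g. Unsafe.getInt(Object o, long offset):
  //   args->at(0) is the Unsafe receiver (only null-checked above),
  //   args->at(1) is o, and args->at(2) is the long offset, which is narrowed
  //   to an int on 32-bit platforms just below.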
4335 #ifndef _LP64
4336   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4337 #endif
4338   Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
4339   push(op->type(), op);
4340   compilation()->set_has_unsafe_access(true);


4341 }
4342 
4343 
4344 void GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {

4345   Values* args = state()->pop_arguments(callee->arg_size());
4346   null_check(args->at(0));
4347   Instruction* offset = args->at(2);
4348 #ifndef _LP64
4349   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4350 #endif
4351   Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, args->at(3), is_volatile));
4352   compilation()->set_has_unsafe_access(true);
4353   kill_all();


4354 }
4355 
4356 
4357 void GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {

4358   Values* args = state()->pop_arguments(callee->arg_size());
4359   null_check(args->at(0));
4360   Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
4361   push(op->type(), op);
4362   compilation()->set_has_unsafe_access(true);


4363 }
4364 
4365 
4366 void GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {

4367   Values* args = state()->pop_arguments(callee->arg_size());
4368   null_check(args->at(0));
4369   Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
4370   compilation()->set_has_unsafe_access(true);


4371 }
4372 
4373 
4374 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
4375   ValueStack* state_before = copy_state_for_exception();
4376   ValueType* result_type = as_ValueType(callee->return_type());
4377   assert(result_type->is_int(), "int result");
4378   Values* args = state()->pop_arguments(callee->arg_size());
4379 
4380   // Pop off some args to specially handle, then push back
4381   Value newval = args->pop();
4382   Value cmpval = args->pop();
4383   Value offset = args->pop();
4384   Value src = args->pop();
4385   Value unsafe_obj = args->pop();
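  // (Illustrative note) For e.g. Unsafe.compareAndSwapInt(Object o, long offset,
  // int expected, int x) the operands were pushed as
  // [Unsafe receiver, o, offset, expected, x], so the pops above recover them in
  // reverse order: newval = x, cmpval = expected, then offset, src = o, and the
  // Unsafe receiver last.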
4386 
4387   // Separately handle the unsafe arg. It is not needed for code
4388   // generation, but must be null checked
4389   null_check(unsafe_obj);
4390 


4427   EventCompilerInlining event;
4428   if (event.should_commit()) {
4429     event.set_compileID(compilation()->env()->task()->compile_id());
4430     event.set_message(msg);
4431     event.set_succeeded(success);
4432     event.set_bci(bci());
4433     event.set_caller(method()->get_Method());
4434     event.set_callee(callee->to_trace_struct());
4435     event.commit();
4436   }
4437 #endif // INCLUDE_TRACE
4438   if (!PrintInlining && !compilation()->method()->has_option("PrintInlining")) {
4439     return;
4440   }
4441   CompileTask::print_inlining(callee, scope()->level(), bci(), msg);
4442   if (success && CIPrintMethodCodes) {
4443     callee->print_codes();
4444   }
4445 }
4446 
4447 void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {

4448   Values* args = state()->pop_arguments(callee->arg_size());
4449   BasicType t = callee->return_type()->basic_type();
4450   null_check(args->at(0));
4451   Instruction* offset = args->at(2);
4452 #ifndef _LP64
4453   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4454 #endif
4455   Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4456   compilation()->set_has_unsafe_access(true);
4457   kill_all();
4458   push(op->type(), op);


4459 }
4460 
4461 #ifndef PRODUCT
4462 void GraphBuilder::print_stats() {
4463   vmap()->print();
4464 }
4465 #endif // PRODUCT
4466 
4467 void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {
4468   assert(known_holder == NULL || (known_holder->is_instance_klass() &&
4469                                   (!known_holder->is_interface() ||
4470                                    ((ciInstanceKlass*)known_holder)->has_default_methods())), "should be default method");
4471   if (known_holder != NULL) {
4472     if (known_holder->exact_klass() == NULL) {
4473       known_holder = compilation()->cha_exact_type(known_holder);
4474     }
4475   }
4476 
4477   append(new ProfileCall(method(), bci(), callee, recv, known_holder, obj_args, inlined));
4478 }