3355 }
3356
3357
3358 const char* GraphBuilder::check_can_parse(ciMethod* callee) const {
3359 // Certain methods cannot be parsed at all:
3360 if ( callee->is_native()) return "native method";
3361 if ( callee->is_abstract()) return "abstract method";
3362 if (!callee->can_be_compiled()) return "not compilable (disabled)";
3363 return NULL;
3364 }
3365
3366
3367 // negative filter: should callee NOT be inlined? returns NULL, ok to inline, or rejection msg
3368 const char* GraphBuilder::should_not_inline(ciMethod* callee) const {
3369 if ( callee->should_exclude()) return "excluded by CompilerOracle";
3370 if ( callee->should_not_inline()) return "disallowed by CompilerOracle";
3371 if ( callee->dont_inline()) return "don't inline by annotation";
3372 return NULL;
3373 }
3374
3375
// Try to inline 'callee' as a VM intrinsic, replacing the call with a C1
// Intrinsic node (or special-purpose Unsafe IR nodes).  Returns true when
// the intrinsic was inlined, false when the callee must be handled as an
// ordinary call; bails out the compilation when intrinsic inlining is
// disabled via -XX:-InlineNatives.
bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
  if (callee->is_synchronized()) {
    // We don't currently support any synchronized intrinsics
    return false;
  }

  // callee seems like a good candidate
  // determine id
  vmIntrinsics::ID id = callee->intrinsic_id();
  if (!InlineNatives && id != vmIntrinsics::_Reference_get) {
    // InlineNatives does not control Reference.get
    INLINE_BAILOUT("intrinsic method inlining disabled");
  }
  // Flags passed to the Intrinsic node created at the bottom; per-intrinsic
  // cases below override these defaults.  NOTE(review): exact semantics are
  // defined by the Intrinsic IR node -- preserves_state appears to mean the
  // intrinsic leaves the JVM state untouched and cantrap that it may trap;
  // confirm against c1_Instruction.hpp.
  bool preserves_state = false;
  bool cantrap = true;
  switch (id) {
  case vmIntrinsics::_arraycopy:
    if (!InlineArrayCopy) return false;
    break;

#ifdef TRACE_HAVE_INTRINSICS
  case vmIntrinsics::_classID:
  case vmIntrinsics::_threadID:
    preserves_state = true;
    cantrap = true;
    break;

  case vmIntrinsics::_counterTime:
    preserves_state = true;
    cantrap = false;
    break;
#endif

  case vmIntrinsics::_currentTimeMillis:
  case vmIntrinsics::_nanoTime:
    preserves_state = true;
    cantrap = false;
    break;

  case vmIntrinsics::_floatToRawIntBits   :
  case vmIntrinsics::_intBitsToFloat      :
  case vmIntrinsics::_doubleToRawLongBits :
  case vmIntrinsics::_longBitsToDouble    :
    if (!InlineMathNatives) return false;
    preserves_state = true;
    cantrap = false;
    break;

  case vmIntrinsics::_getClass      :
  case vmIntrinsics::_isInstance    :
    if (!InlineClassNatives) return false;
    preserves_state = true;
    break;

  case vmIntrinsics::_currentThread :
    if (!InlineThreadNatives) return false;
    preserves_state = true;
    cantrap = false;
    break;

  case vmIntrinsics::_dabs          : // fall through
  case vmIntrinsics::_dsqrt         : // fall through
  case vmIntrinsics::_dsin          : // fall through
  case vmIntrinsics::_dcos          : // fall through
  case vmIntrinsics::_dtan          : // fall through
  case vmIntrinsics::_dlog          : // fall through
  case vmIntrinsics::_dlog10        : // fall through
  case vmIntrinsics::_dexp          : // fall through
  case vmIntrinsics::_dpow          : // fall through
    if (!InlineMathNatives) return false;
    cantrap = false;
    preserves_state = true;
    break;

  // Use special nodes for Unsafe instructions so we can more easily
  // perform an address-mode optimization on the raw variants
  case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT,  false);
  case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);
  case vmIntrinsics::_getByte   : return append_unsafe_get_obj(callee, T_BYTE,    false);
  case vmIntrinsics::_getShort  : return append_unsafe_get_obj(callee, T_SHORT,   false);
  case vmIntrinsics::_getChar   : return append_unsafe_get_obj(callee, T_CHAR,    false);
  case vmIntrinsics::_getInt    : return append_unsafe_get_obj(callee, T_INT,     false);
  case vmIntrinsics::_getLong   : return append_unsafe_get_obj(callee, T_LONG,    false);
  case vmIntrinsics::_getFloat  : return append_unsafe_get_obj(callee, T_FLOAT,   false);
  case vmIntrinsics::_getDouble : return append_unsafe_get_obj(callee, T_DOUBLE,  false);

  case vmIntrinsics::_putObject : return append_unsafe_put_obj(callee, T_OBJECT,  false);
  case vmIntrinsics::_putBoolean: return append_unsafe_put_obj(callee, T_BOOLEAN, false);
  case vmIntrinsics::_putByte   : return append_unsafe_put_obj(callee, T_BYTE,    false);
  case vmIntrinsics::_putShort  : return append_unsafe_put_obj(callee, T_SHORT,   false);
  case vmIntrinsics::_putChar   : return append_unsafe_put_obj(callee, T_CHAR,    false);
  case vmIntrinsics::_putInt    : return append_unsafe_put_obj(callee, T_INT,     false);
  case vmIntrinsics::_putLong   : return append_unsafe_put_obj(callee, T_LONG,    false);
  case vmIntrinsics::_putFloat  : return append_unsafe_put_obj(callee, T_FLOAT,   false);
  case vmIntrinsics::_putDouble : return append_unsafe_put_obj(callee, T_DOUBLE,  false);

  // Unaligned variants are only inlined when the platform supports
  // unaligned accesses.
  case vmIntrinsics::_getShortUnaligned :
    return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_SHORT, false) : false;
  case vmIntrinsics::_getCharUnaligned :
    return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_CHAR, false) : false;
  case vmIntrinsics::_getIntUnaligned :
    return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_INT, false) : false;
  case vmIntrinsics::_getLongUnaligned :
    return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_LONG, false) : false;

  case vmIntrinsics::_putShortUnaligned :
    return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_SHORT, false) : false;
  case vmIntrinsics::_putCharUnaligned :
    return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_CHAR, false) : false;
  case vmIntrinsics::_putIntUnaligned :
    return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_INT, false) : false;
  case vmIntrinsics::_putLongUnaligned :
    return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_LONG, false) : false;

  case vmIntrinsics::_getObjectVolatile : return append_unsafe_get_obj(callee, T_OBJECT,  true);
  case vmIntrinsics::_getBooleanVolatile: return append_unsafe_get_obj(callee, T_BOOLEAN, true);
  case vmIntrinsics::_getByteVolatile   : return append_unsafe_get_obj(callee, T_BYTE,    true);
  case vmIntrinsics::_getShortVolatile  : return append_unsafe_get_obj(callee, T_SHORT,   true);
  case vmIntrinsics::_getCharVolatile   : return append_unsafe_get_obj(callee, T_CHAR,    true);
  case vmIntrinsics::_getIntVolatile    : return append_unsafe_get_obj(callee, T_INT,     true);
  case vmIntrinsics::_getLongVolatile   : return append_unsafe_get_obj(callee, T_LONG,    true);
  case vmIntrinsics::_getFloatVolatile  : return append_unsafe_get_obj(callee, T_FLOAT,   true);
  case vmIntrinsics::_getDoubleVolatile : return append_unsafe_get_obj(callee, T_DOUBLE,  true);

  case vmIntrinsics::_putObjectVolatile : return append_unsafe_put_obj(callee, T_OBJECT,  true);
  case vmIntrinsics::_putBooleanVolatile: return append_unsafe_put_obj(callee, T_BOOLEAN, true);
  case vmIntrinsics::_putByteVolatile   : return append_unsafe_put_obj(callee, T_BYTE,    true);
  case vmIntrinsics::_putShortVolatile  : return append_unsafe_put_obj(callee, T_SHORT,   true);
  case vmIntrinsics::_putCharVolatile   : return append_unsafe_put_obj(callee, T_CHAR,    true);
  case vmIntrinsics::_putIntVolatile    : return append_unsafe_put_obj(callee, T_INT,     true);
  case vmIntrinsics::_putLongVolatile   : return append_unsafe_put_obj(callee, T_LONG,    true);
  case vmIntrinsics::_putFloatVolatile  : return append_unsafe_put_obj(callee, T_FLOAT,   true);
  case vmIntrinsics::_putDoubleVolatile : return append_unsafe_put_obj(callee, T_DOUBLE,  true);

  case vmIntrinsics::_getByte_raw   : return append_unsafe_get_raw(callee, T_BYTE);
  case vmIntrinsics::_getShort_raw  : return append_unsafe_get_raw(callee, T_SHORT);
  case vmIntrinsics::_getChar_raw   : return append_unsafe_get_raw(callee, T_CHAR);
  case vmIntrinsics::_getInt_raw    : return append_unsafe_get_raw(callee, T_INT);
  case vmIntrinsics::_getLong_raw   : return append_unsafe_get_raw(callee, T_LONG);
  case vmIntrinsics::_getFloat_raw  : return append_unsafe_get_raw(callee, T_FLOAT);
  case vmIntrinsics::_getDouble_raw : return append_unsafe_get_raw(callee, T_DOUBLE);

  case vmIntrinsics::_putByte_raw   : return append_unsafe_put_raw(callee, T_BYTE);
  case vmIntrinsics::_putShort_raw  : return append_unsafe_put_raw(callee, T_SHORT);
  case vmIntrinsics::_putChar_raw   : return append_unsafe_put_raw(callee, T_CHAR);
  case vmIntrinsics::_putInt_raw    : return append_unsafe_put_raw(callee, T_INT);
  case vmIntrinsics::_putLong_raw   : return append_unsafe_put_raw(callee, T_LONG);
  case vmIntrinsics::_putFloat_raw  : return append_unsafe_put_raw(callee, T_FLOAT);
  case vmIntrinsics::_putDouble_raw : return append_unsafe_put_raw(callee, T_DOUBLE);

  case vmIntrinsics::_checkIndex    :
    if (!InlineNIOCheckIndex) return false;
    preserves_state = true;
    break;
  // Ordered puts are emitted as volatile puts here.
  case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT, true);
  case vmIntrinsics::_putOrderedInt    : return append_unsafe_put_obj(callee, T_INT,    true);
  case vmIntrinsics::_putOrderedLong   : return append_unsafe_put_obj(callee, T_LONG,   true);

  case vmIntrinsics::_compareAndSwapLong:
    // 64-bit CAS needs hardware cx8 support.
    if (!VM_Version::supports_cx8()) return false;
    // fall through
  case vmIntrinsics::_compareAndSwapInt:
  case vmIntrinsics::_compareAndSwapObject:
    append_unsafe_CAS(callee);
    return true;

  // Atomic read-modify-write intrinsics require the matching hardware
  // primitive; otherwise fall back to the ordinary call.
  case vmIntrinsics::_getAndAddInt:
    if (!VM_Version::supports_atomic_getadd4()) {
      return false;
    }
    return append_unsafe_get_and_set_obj(callee, true);
  case vmIntrinsics::_getAndAddLong:
    if (!VM_Version::supports_atomic_getadd8()) {
      return false;
    }
    return append_unsafe_get_and_set_obj(callee, true);
  case vmIntrinsics::_getAndSetInt:
    if (!VM_Version::supports_atomic_getset4()) {
      return false;
    }
    return append_unsafe_get_and_set_obj(callee, false);
  case vmIntrinsics::_getAndSetLong:
    if (!VM_Version::supports_atomic_getset8()) {
      return false;
    }
    return append_unsafe_get_and_set_obj(callee, false);
  case vmIntrinsics::_getAndSetObject:
#ifdef _LP64
    // An oop exchange is 8 bytes with uncompressed oops, 4 with compressed.
    if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) {
      return false;
    }
    if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) {
      return false;
    }
#else
    if (!VM_Version::supports_atomic_getset4()) {
      return false;
    }
#endif
    return append_unsafe_get_and_set_obj(callee, false);

  case vmIntrinsics::_Reference_get:
    // Use the intrinsic version of Reference.get() so that the value in
    // the referent field can be registered by the G1 pre-barrier code.
    // Also to prevent commoning reads from this field across safepoint
    // since GC can change its value.
    preserves_state = true;
    break;

  case vmIntrinsics::_updateCRC32:
  case vmIntrinsics::_updateBytesCRC32:
  case vmIntrinsics::_updateByteBufferCRC32:
    if (!UseCRC32Intrinsics) return false;
    cantrap = false;
    preserves_state = true;
    break;

  case vmIntrinsics::_loadFence :
  case vmIntrinsics::_storeFence:
  case vmIntrinsics::_fullFence :
    break;

  default                       : return false; // do not inline
  }
  // create intrinsic node
  const bool has_receiver = !callee->is_static();
  ValueType* result_type = as_ValueType(callee->return_type());
  ValueStack* state_before = copy_state_for_exception();

  Values* args = state()->pop_arguments(callee->arg_size());

  if (is_profiling()) {
    // Don't profile in the special case where the root method
    // is the intrinsic
    if (callee != method()) {
      // Note that we'd collect profile data in this method if we wanted it.
      compilation()->set_would_profile(true);
      if (profile_calls()) {
        Value recv = NULL;
        if (has_receiver) {
          recv = args->at(0);
          null_check(recv);
        }
        profile_call(callee, recv, NULL, collect_args_for_profiling(args, callee, true), true);
      }
    }
  }

  Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
                                    preserves_state, cantrap);
  // append instruction & push result
  Value value = append_split(result);
  if (result_type != voidType) push(result_type, value);

  if (callee != method() && profile_return() && result_type->is_object_kind()) {
    profile_return_type(result, callee);
  }

  // done
  return true;
}
3637
3638
3639 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3640 // Introduce a new callee continuation point - all Ret instructions
3641 // will be replaced with Gotos to this point.
3642 BlockBegin* cont = block_at(next_bci());
3643 assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr");
3644
3645 // Note: can not assign state to continuation yet, as we have to
3646 // pick up the state from the Ret instructions.
3647
3648 // Push callee scope
3649 push_scope_for_jsr(cont, jsr_dest_bci);
3650
3651 // Temporarily set up bytecode stream so we can append instructions
3652 // (only using the bci of this stream)
3653 scope_data()->set_stream(scope_data()->parent()->stream());
3654
4207 data->set_scope(scope());
4208 data->setup_jsr_xhandlers();
4209 data->set_continuation(continuation());
4210 data->set_jsr_continuation(jsr_continuation);
4211 _scope_data = data;
4212 }
4213
4214
4215 void GraphBuilder::pop_scope() {
4216 int number_of_locks = scope()->number_of_locks();
4217 _scope_data = scope_data()->parent();
4218 // accumulate minimum number of monitor slots to be reserved
4219 scope()->set_min_number_of_locks(number_of_locks);
4220 }
4221
4222
// Leave the scope that was pushed for parsing a jsr subroutine.
// Note: unlike pop_scope(), no lock-count propagation happens here.
void GraphBuilder::pop_scope_for_jsr() {
  _scope_data = scope_data()->parent();
}
4226
4227 bool GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
4228 if (InlineUnsafeOps) {
4229 Values* args = state()->pop_arguments(callee->arg_size());
4230 null_check(args->at(0));
4231 Instruction* offset = args->at(2);
4232 #ifndef _LP64
4233 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4234 #endif
4235 Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
4236 push(op->type(), op);
4237 compilation()->set_has_unsafe_access(true);
4238 }
4239 return InlineUnsafeOps;
4240 }
4241
4242
4243 bool GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
4244 if (InlineUnsafeOps) {
4245 Values* args = state()->pop_arguments(callee->arg_size());
4246 null_check(args->at(0));
4247 Instruction* offset = args->at(2);
4248 #ifndef _LP64
4249 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4250 #endif
4251 Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, args->at(3), is_volatile));
4252 compilation()->set_has_unsafe_access(true);
4253 kill_all();
4254 }
4255 return InlineUnsafeOps;
4256 }
4257
4258
4259 bool GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
4260 if (InlineUnsafeOps) {
4261 Values* args = state()->pop_arguments(callee->arg_size());
4262 null_check(args->at(0));
4263 Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
4264 push(op->type(), op);
4265 compilation()->set_has_unsafe_access(true);
4266 }
4267 return InlineUnsafeOps;
4268 }
4269
4270
4271 bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
4272 if (InlineUnsafeOps) {
4273 Values* args = state()->pop_arguments(callee->arg_size());
4274 null_check(args->at(0));
4275 Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
4276 compilation()->set_has_unsafe_access(true);
4277 }
4278 return InlineUnsafeOps;
4279 }
4280
4281
4282 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
4283 ValueStack* state_before = copy_state_for_exception();
4284 ValueType* result_type = as_ValueType(callee->return_type());
4285 assert(result_type->is_int(), "int result");
4286 Values* args = state()->pop_arguments(callee->arg_size());
4287
4288 // Pop off some args to specially handle, then push back
4289 Value newval = args->pop();
4290 Value cmpval = args->pop();
4291 Value offset = args->pop();
4292 Value src = args->pop();
4293 Value unsafe_obj = args->pop();
4294
4295 // Separately handle the unsafe arg. It is not needed for code
4296 // generation, but must be null checked
4297 null_check(unsafe_obj);
4298
4335 EventCompilerInlining event;
4336 if (event.should_commit()) {
4337 event.set_compileID(compilation()->env()->task()->compile_id());
4338 event.set_message(msg);
4339 event.set_succeeded(success);
4340 event.set_bci(bci());
4341 event.set_caller(method()->get_Method());
4342 event.set_callee(callee->to_trace_struct());
4343 event.commit();
4344 }
4345 #endif // INCLUDE_TRACE
4346 if (!PrintInlining && !compilation()->method()->has_option("PrintInlining")) {
4347 return;
4348 }
4349 CompileTask::print_inlining(callee, scope()->level(), bci(), msg);
4350 if (success && CIPrintMethodCodes) {
4351 callee->print_codes();
4352 }
4353 }
4354
4355 bool GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
4356 if (InlineUnsafeOps) {
4357 Values* args = state()->pop_arguments(callee->arg_size());
4358 BasicType t = callee->return_type()->basic_type();
4359 null_check(args->at(0));
4360 Instruction* offset = args->at(2);
4361 #ifndef _LP64
4362 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4363 #endif
4364 Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4365 compilation()->set_has_unsafe_access(true);
4366 kill_all();
4367 push(op->type(), op);
4368 }
4369 return InlineUnsafeOps;
4370 }
4371
#ifndef PRODUCT
// Debug-only: dump the value-numbering map's statistics.
void GraphBuilder::print_stats() {
  vmap()->print();
}
#endif // PRODUCT
4377
// Record call-site type profile by appending a ProfileCall node for 'callee'
// at the current bci.  known_holder, when non-NULL, must be an instance
// klass (an interface only when it has default methods -- see the assert);
// it is sharpened via CHA when no exact klass is known yet.  obj_args holds
// the object-typed arguments to profile; 'inlined' marks inlined call sites.
void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {
  assert(known_holder == NULL || (known_holder->is_instance_klass() &&
         (!known_holder->is_interface() ||
          ((ciInstanceKlass*)known_holder)->has_default_methods())), "should be default method");
  if (known_holder != NULL) {
    if (known_holder->exact_klass() == NULL) {
      // No exact type yet: ask CHA for a unique concrete subtype, if any.
      known_holder = compilation()->cha_exact_type(known_holder);
    }
  }

  append(new ProfileCall(method(), bci(), callee, recv, known_holder, obj_args, inlined));
}
|
3355 }
3356
3357
3358 const char* GraphBuilder::check_can_parse(ciMethod* callee) const {
3359 // Certain methods cannot be parsed at all:
3360 if ( callee->is_native()) return "native method";
3361 if ( callee->is_abstract()) return "abstract method";
3362 if (!callee->can_be_compiled()) return "not compilable (disabled)";
3363 return NULL;
3364 }
3365
3366
3367 // negative filter: should callee NOT be inlined? returns NULL, ok to inline, or rejection msg
3368 const char* GraphBuilder::should_not_inline(ciMethod* callee) const {
3369 if ( callee->should_exclude()) return "excluded by CompilerOracle";
3370 if ( callee->should_not_inline()) return "disallowed by CompilerOracle";
3371 if ( callee->dont_inline()) return "don't inline by annotation";
3372 return NULL;
3373 }
3374
// Build the C1 IR for the intrinsic 'callee'.  The caller
// (try_inline_intrinsics) has already established that the intrinsic is
// available, so no per-intrinsic availability checks happen here; Unsafe
// intrinsics are lowered to special IR nodes, everything else becomes a
// generic Intrinsic node whose state/trap properties are queried from
// vmIntrinsics.
void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee) {
  vmIntrinsics::ID id = callee->intrinsic_id();
  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");

  // Some intrinsics need special IR nodes.
  switch(id) {
  case vmIntrinsics::_getObject          : append_unsafe_get_obj(callee, T_OBJECT,  false); return;
  case vmIntrinsics::_getBoolean         : append_unsafe_get_obj(callee, T_BOOLEAN, false); return;
  case vmIntrinsics::_getByte            : append_unsafe_get_obj(callee, T_BYTE,    false); return;
  case vmIntrinsics::_getShort           : append_unsafe_get_obj(callee, T_SHORT,   false); return;
  case vmIntrinsics::_getChar            : append_unsafe_get_obj(callee, T_CHAR,    false); return;
  case vmIntrinsics::_getInt             : append_unsafe_get_obj(callee, T_INT,     false); return;
  case vmIntrinsics::_getLong            : append_unsafe_get_obj(callee, T_LONG,    false); return;
  case vmIntrinsics::_getFloat           : append_unsafe_get_obj(callee, T_FLOAT,   false); return;
  case vmIntrinsics::_getDouble          : append_unsafe_get_obj(callee, T_DOUBLE,  false); return;
  case vmIntrinsics::_putObject          : append_unsafe_put_obj(callee, T_OBJECT,  false); return;
  case vmIntrinsics::_putBoolean         : append_unsafe_put_obj(callee, T_BOOLEAN, false); return;
  case vmIntrinsics::_putByte            : append_unsafe_put_obj(callee, T_BYTE,    false); return;
  case vmIntrinsics::_putShort           : append_unsafe_put_obj(callee, T_SHORT,   false); return;
  case vmIntrinsics::_putChar            : append_unsafe_put_obj(callee, T_CHAR,    false); return;
  case vmIntrinsics::_putInt             : append_unsafe_put_obj(callee, T_INT,     false); return;
  case vmIntrinsics::_putLong            : append_unsafe_put_obj(callee, T_LONG,    false); return;
  case vmIntrinsics::_putFloat           : append_unsafe_put_obj(callee, T_FLOAT,   false); return;
  case vmIntrinsics::_putDouble          : append_unsafe_put_obj(callee, T_DOUBLE,  false); return;
  // Unaligned variants share the aligned IR nodes.
  case vmIntrinsics::_getShortUnaligned  : append_unsafe_get_obj(callee, T_SHORT,   false); return;
  case vmIntrinsics::_getCharUnaligned   : append_unsafe_get_obj(callee, T_CHAR,    false); return;
  case vmIntrinsics::_getIntUnaligned    : append_unsafe_get_obj(callee, T_INT,     false); return;
  case vmIntrinsics::_getLongUnaligned   : append_unsafe_get_obj(callee, T_LONG,    false); return;
  case vmIntrinsics::_putShortUnaligned  : append_unsafe_put_obj(callee, T_SHORT,   false); return;
  case vmIntrinsics::_putCharUnaligned   : append_unsafe_put_obj(callee, T_CHAR,    false); return;
  case vmIntrinsics::_putIntUnaligned    : append_unsafe_put_obj(callee, T_INT,     false); return;
  case vmIntrinsics::_putLongUnaligned   : append_unsafe_put_obj(callee, T_LONG,    false); return;
  case vmIntrinsics::_getObjectVolatile  : append_unsafe_get_obj(callee, T_OBJECT,  true); return;
  case vmIntrinsics::_getBooleanVolatile : append_unsafe_get_obj(callee, T_BOOLEAN, true); return;
  case vmIntrinsics::_getByteVolatile    : append_unsafe_get_obj(callee, T_BYTE,    true); return;
  case vmIntrinsics::_getShortVolatile   : append_unsafe_get_obj(callee, T_SHORT,   true); return;
  case vmIntrinsics::_getCharVolatile    : append_unsafe_get_obj(callee, T_CHAR,    true); return;
  case vmIntrinsics::_getIntVolatile     : append_unsafe_get_obj(callee, T_INT,     true); return;
  case vmIntrinsics::_getLongVolatile    : append_unsafe_get_obj(callee, T_LONG,    true); return;
  case vmIntrinsics::_getFloatVolatile   : append_unsafe_get_obj(callee, T_FLOAT,   true); return;
  case vmIntrinsics::_getDoubleVolatile  : append_unsafe_get_obj(callee, T_DOUBLE,  true); return;
  case vmIntrinsics::_putObjectVolatile  : append_unsafe_put_obj(callee, T_OBJECT,  true); return;
  case vmIntrinsics::_putBooleanVolatile : append_unsafe_put_obj(callee, T_BOOLEAN, true); return;
  case vmIntrinsics::_putByteVolatile    : append_unsafe_put_obj(callee, T_BYTE,    true); return;
  case vmIntrinsics::_putShortVolatile   : append_unsafe_put_obj(callee, T_SHORT,   true); return;
  case vmIntrinsics::_putCharVolatile    : append_unsafe_put_obj(callee, T_CHAR,    true); return;
  case vmIntrinsics::_putIntVolatile     : append_unsafe_put_obj(callee, T_INT,     true); return;
  case vmIntrinsics::_putLongVolatile    : append_unsafe_put_obj(callee, T_LONG,    true); return;
  case vmIntrinsics::_putFloatVolatile   : append_unsafe_put_obj(callee, T_FLOAT,   true); return;
  case vmIntrinsics::_putDoubleVolatile  : append_unsafe_put_obj(callee, T_DOUBLE,  true); return;
  case vmIntrinsics::_getByte_raw        : append_unsafe_get_raw(callee, T_BYTE  ); return;
  case vmIntrinsics::_getShort_raw       : append_unsafe_get_raw(callee, T_SHORT ); return;
  case vmIntrinsics::_getChar_raw        : append_unsafe_get_raw(callee, T_CHAR  ); return;
  case vmIntrinsics::_getInt_raw         : append_unsafe_get_raw(callee, T_INT   ); return;
  case vmIntrinsics::_getLong_raw        : append_unsafe_get_raw(callee, T_LONG  ); return;
  case vmIntrinsics::_getFloat_raw       : append_unsafe_get_raw(callee, T_FLOAT ); return;
  case vmIntrinsics::_getDouble_raw      : append_unsafe_get_raw(callee, T_DOUBLE); return;
  case vmIntrinsics::_putByte_raw        : append_unsafe_put_raw(callee, T_BYTE  ); return;
  case vmIntrinsics::_putShort_raw       : append_unsafe_put_raw(callee, T_SHORT ); return;
  case vmIntrinsics::_putChar_raw        : append_unsafe_put_raw(callee, T_CHAR  ); return;
  case vmIntrinsics::_putInt_raw         : append_unsafe_put_raw(callee, T_INT   ); return;
  case vmIntrinsics::_putLong_raw        : append_unsafe_put_raw(callee, T_LONG  ); return;
  case vmIntrinsics::_putFloat_raw       : append_unsafe_put_raw(callee, T_FLOAT ); return;
  case vmIntrinsics::_putDouble_raw      : append_unsafe_put_raw(callee, T_DOUBLE); return;
  // Ordered puts are emitted as volatile puts here.
  case vmIntrinsics::_putOrderedObject   : append_unsafe_put_obj(callee, T_OBJECT,  true); return;
  case vmIntrinsics::_putOrderedInt      : append_unsafe_put_obj(callee, T_INT,     true); return;
  case vmIntrinsics::_putOrderedLong     : append_unsafe_put_obj(callee, T_LONG,    true); return;
  case vmIntrinsics::_compareAndSwapLong:
  case vmIntrinsics::_compareAndSwapInt:
  case vmIntrinsics::_compareAndSwapObject: append_unsafe_CAS(callee); return;
  case vmIntrinsics::_getAndAddInt:
  case vmIntrinsics::_getAndAddLong      : append_unsafe_get_and_set_obj(callee, true); return;
  case vmIntrinsics::_getAndSetInt       :
  case vmIntrinsics::_getAndSetLong      :
  case vmIntrinsics::_getAndSetObject    : append_unsafe_get_and_set_obj(callee, false); return;
  default:
    break;
  }

  // create intrinsic node
  const bool has_receiver = !callee->is_static();
  ValueType* result_type = as_ValueType(callee->return_type());
  ValueStack* state_before = copy_state_for_exception();

  Values* args = state()->pop_arguments(callee->arg_size());

  if (is_profiling()) {
    // Don't profile in the special case where the root method
    // is the intrinsic
    if (callee != method()) {
      // Note that we'd collect profile data in this method if we wanted it.
      compilation()->set_would_profile(true);
      if (profile_calls()) {
        Value recv = NULL;
        if (has_receiver) {
          recv = args->at(0);
          null_check(recv);
        }
        profile_call(callee, recv, NULL, collect_args_for_profiling(args, callee, true), true);
      }
    }
  }

  // The per-intrinsic preserves_state / can_trap properties are looked up
  // from vmIntrinsics instead of being hard-coded per case.
  Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(),
                                    args, has_receiver, state_before,
                                    vmIntrinsics::preserves_state(id),
                                    vmIntrinsics::can_trap(id));
  // append instruction & push result
  Value value = append_split(result);
  if (result_type != voidType) push(result_type, value);

  if (callee != method() && profile_return() && result_type->is_object_kind()) {
    profile_return_type(result, callee);
  }
}
3490
// Try to inline 'callee' as a VM intrinsic.  Returns true when an intrinsic
// graph was built, false when the callee must be handled as an ordinary
// call; bails out when intrinsic inlining is disabled (-XX:-InlineNatives).
bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
  // NOTE(review): the previous comment here contradicted itself ("we need
  // to transition to the '_thread_in_vm' state because is_intrinsic_available()
  // does not accesses critical VM-internal data").  The code performs no
  // such transition; presumably none is needed precisely because
  // is_intrinsic_available() does not access critical VM-internal data --
  // confirm against the compiler interface.
  if (!_compilation->compiler()->is_intrinsic_available(callee->get_Method())) {
    if (!InlineNatives) {
      // Return false and also set message that the inlining of
      // intrinsics has been disabled in general.
      INLINE_BAILOUT("intrinsic method inlining disabled");
    } else {
      return false;
    }
  }
  build_graph_for_intrinsic(callee);
  return true;
}
3507
3508
3509 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3510 // Introduce a new callee continuation point - all Ret instructions
3511 // will be replaced with Gotos to this point.
3512 BlockBegin* cont = block_at(next_bci());
3513 assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr");
3514
3515 // Note: can not assign state to continuation yet, as we have to
3516 // pick up the state from the Ret instructions.
3517
3518 // Push callee scope
3519 push_scope_for_jsr(cont, jsr_dest_bci);
3520
3521 // Temporarily set up bytecode stream so we can append instructions
3522 // (only using the bci of this stream)
3523 scope_data()->set_stream(scope_data()->parent()->stream());
3524
4077 data->set_scope(scope());
4078 data->setup_jsr_xhandlers();
4079 data->set_continuation(continuation());
4080 data->set_jsr_continuation(jsr_continuation);
4081 _scope_data = data;
4082 }
4083
4084
4085 void GraphBuilder::pop_scope() {
4086 int number_of_locks = scope()->number_of_locks();
4087 _scope_data = scope_data()->parent();
4088 // accumulate minimum number of monitor slots to be reserved
4089 scope()->set_min_number_of_locks(number_of_locks);
4090 }
4091
4092
// Leave the scope that was pushed for parsing a jsr subroutine.
// Note: unlike pop_scope(), no lock-count propagation happens here.
void GraphBuilder::pop_scope_for_jsr() {
  _scope_data = scope_data()->parent();
}
4096
4097 void GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
4098 Values* args = state()->pop_arguments(callee->arg_size());
4099 null_check(args->at(0));
4100 Instruction* offset = args->at(2);
4101 #ifndef _LP64
4102 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4103 #endif
4104 Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
4105 push(op->type(), op);
4106 compilation()->set_has_unsafe_access(true);
4107 }
4108
4109
4110 void GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
4111 Values* args = state()->pop_arguments(callee->arg_size());
4112 null_check(args->at(0));
4113 Instruction* offset = args->at(2);
4114 #ifndef _LP64
4115 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4116 #endif
4117 Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, args->at(3), is_volatile));
4118 compilation()->set_has_unsafe_access(true);
4119 kill_all();
4120 }
4121
4122
4123 void GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
4124 Values* args = state()->pop_arguments(callee->arg_size());
4125 null_check(args->at(0));
4126 Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
4127 push(op->type(), op);
4128 compilation()->set_has_unsafe_access(true);
4129 }
4130
4131
4132 void GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
4133 Values* args = state()->pop_arguments(callee->arg_size());
4134 null_check(args->at(0));
4135 Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
4136 compilation()->set_has_unsafe_access(true);
4137 }
4138
4139
4140 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
4141 ValueStack* state_before = copy_state_for_exception();
4142 ValueType* result_type = as_ValueType(callee->return_type());
4143 assert(result_type->is_int(), "int result");
4144 Values* args = state()->pop_arguments(callee->arg_size());
4145
4146 // Pop off some args to specially handle, then push back
4147 Value newval = args->pop();
4148 Value cmpval = args->pop();
4149 Value offset = args->pop();
4150 Value src = args->pop();
4151 Value unsafe_obj = args->pop();
4152
4153 // Separately handle the unsafe arg. It is not needed for code
4154 // generation, but must be null checked
4155 null_check(unsafe_obj);
4156
4193 EventCompilerInlining event;
4194 if (event.should_commit()) {
4195 event.set_compileID(compilation()->env()->task()->compile_id());
4196 event.set_message(msg);
4197 event.set_succeeded(success);
4198 event.set_bci(bci());
4199 event.set_caller(method()->get_Method());
4200 event.set_callee(callee->to_trace_struct());
4201 event.commit();
4202 }
4203 #endif // INCLUDE_TRACE
4204 if (!PrintInlining && !compilation()->method()->has_option("PrintInlining")) {
4205 return;
4206 }
4207 CompileTask::print_inlining(callee, scope()->level(), bci(), msg);
4208 if (success && CIPrintMethodCodes) {
4209 callee->print_codes();
4210 }
4211 }
4212
4213 void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
4214 Values* args = state()->pop_arguments(callee->arg_size());
4215 BasicType t = callee->return_type()->basic_type();
4216 null_check(args->at(0));
4217 Instruction* offset = args->at(2);
4218 #ifndef _LP64
4219 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4220 #endif
4221 Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4222 compilation()->set_has_unsafe_access(true);
4223 kill_all();
4224 push(op->type(), op);
4225 }
4226
#ifndef PRODUCT
// Debug-only: dump the value-numbering map's statistics.
void GraphBuilder::print_stats() {
  vmap()->print();
}
#endif // PRODUCT
4232
// Record call-site type profile by appending a ProfileCall node for 'callee'
// at the current bci.  known_holder, when non-NULL, must be an instance
// klass (an interface only when it has default methods -- see the assert);
// it is sharpened via CHA when no exact klass is known yet.  obj_args holds
// the object-typed arguments to profile; 'inlined' marks inlined call sites.
void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {
  assert(known_holder == NULL || (known_holder->is_instance_klass() &&
         (!known_holder->is_interface() ||
          ((ciInstanceKlass*)known_holder)->has_default_methods())), "should be default method");
  if (known_holder != NULL) {
    if (known_holder->exact_klass() == NULL) {
      // No exact type yet: ask CHA for a unique concrete subtype, if any.
      known_holder = compilation()->cha_exact_type(known_holder);
    }
  }

  append(new ProfileCall(method(), bci(), callee, recv, known_holder, obj_args, inlined));
}
|