14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "compiler/compileBroker.hpp"
30 #include "compiler/compileLog.hpp"
31 #include "oops/objArrayKlass.hpp"
32 #include "opto/addnode.hpp"
33 #include "opto/arraycopynode.hpp"
34 #include "opto/callGenerator.hpp"
35 #include "opto/castnode.hpp"
36 #include "opto/cfgnode.hpp"
37 #include "opto/convertnode.hpp"
38 #include "opto/countbitsnode.hpp"
39 #include "opto/intrinsicnode.hpp"
40 #include "opto/idealKit.hpp"
41 #include "opto/mathexactnode.hpp"
42 #include "opto/movenode.hpp"
43 #include "opto/mulnode.hpp"
44 #include "opto/narrowptrnode.hpp"
45 #include "opto/opaquenode.hpp"
46 #include "opto/parse.hpp"
47 #include "opto/runtime.hpp"
48 #include "opto/subnode.hpp"
49 #include "prims/nativeLookup.hpp"
50 #include "runtime/sharedRuntime.hpp"
51 #include "trace/traceMacros.hpp"
52
53 class LibraryIntrinsic : public InlineCallGenerator {
287 Node* get_state_from_sha_object(Node *sha_object);
288 Node* get_state_from_sha5_object(Node *sha_object);
289 Node* inline_digestBase_implCompressMB_predicate(int predicate);
290 bool inline_encodeISOArray();
291 bool inline_updateCRC32();
292 bool inline_updateBytesCRC32();
293 bool inline_updateByteBufferCRC32();
294 Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
295 bool inline_updateBytesCRC32C();
296 bool inline_updateDirectByteBufferCRC32C();
297 bool inline_multiplyToLen();
298 bool inline_squareToLen();
299 bool inline_mulAdd();
300 bool inline_montgomeryMultiply();
301 bool inline_montgomerySquare();
302
303 bool inline_profileBoolean();
304 bool inline_isCompileConstant();
305 };
306
307
308 //---------------------------make_vm_intrinsic----------------------------
// Decide whether the recognized VM intrinsic backing method 'm' may be
// inlined, and build a LibraryIntrinsic call generator for it.
// Returns NULL when the intrinsic is unavailable: disabled via
// -XX:DisableIntrinsic (globally or per-method), the method is unloaded,
// a controlling product flag is off, or the platform Matcher lacks the
// required rule.  'is_virtual' indicates the call site is a virtual
// dispatch; only a handful of intrinsics support that.
CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
  vmIntrinsics::ID id = m->intrinsic_id();
  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");

  ccstr disable_intr = NULL;

  // NOTE(review): strstr does substring matching, so disabling e.g. "_getInt"
  // would also match any longer intrinsic name containing it -- confirm this
  // imprecision is acceptable for the DisableIntrinsic option.
  if ((DisableIntrinsic[0] != '\0'
       && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
      (method_has_option_value("DisableIntrinsic", disable_intr)
       && strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)) {
    // disabled by a user request on the command line:
    // example: -XX:DisableIntrinsic=_hashCode,_getClass
    return NULL;
  }

  if (!m->is_loaded()) {
    // do not attempt to inline unloaded methods
    return NULL;
  }

  // Only a few intrinsics implement a virtual dispatch.
  // They are expensive calls which are also frequently overridden.
  if (is_virtual) {
    switch (id) {
    case vmIntrinsics::_hashCode:
    case vmIntrinsics::_clone:
      // OK, Object.hashCode and Object.clone intrinsics come in both flavors
      break;
    default:
      return NULL;
    }
  }

  // -XX:-InlineNatives disables nearly all intrinsics:
  if (!InlineNatives) {
    switch (id) {
    case vmIntrinsics::_indexOf:
    case vmIntrinsics::_compareTo:
    case vmIntrinsics::_equals:
    case vmIntrinsics::_equalsC:
    case vmIntrinsics::_getAndAddInt:
    case vmIntrinsics::_getAndAddLong:
    case vmIntrinsics::_getAndSetInt:
    case vmIntrinsics::_getAndSetLong:
    case vmIntrinsics::_getAndSetObject:
    case vmIntrinsics::_loadFence:
    case vmIntrinsics::_storeFence:
    case vmIntrinsics::_fullFence:
      break;  // InlineNatives does not control String.compareTo
    case vmIntrinsics::_Reference_get:
      break;  // InlineNatives does not control Reference.get
    default:
      return NULL;
    }
  }

  // Number of generated-code predicates the intrinsic needs (see the
  // LibraryIntrinsic constructor call at the bottom), and whether it is
  // one of the virtual-dispatch-capable intrinsics.
  int predicates = 0;
  bool does_virtual_dispatch = false;

  // Per-intrinsic availability checks: product flags and Matcher rules.
  switch (id) {
  case vmIntrinsics::_compareTo:
    if (!SpecialStringCompareTo) return NULL;
    if (!Matcher::match_rule_supported(Op_StrComp)) return NULL;
    break;
  case vmIntrinsics::_indexOf:
    if (!SpecialStringIndexOf) return NULL;
    break;
  case vmIntrinsics::_equals:
    if (!SpecialStringEquals) return NULL;
    if (!Matcher::match_rule_supported(Op_StrEquals)) return NULL;
    break;
  case vmIntrinsics::_equalsC:
    if (!SpecialArraysEquals) return NULL;
    if (!Matcher::match_rule_supported(Op_AryEq)) return NULL;
    break;
  case vmIntrinsics::_arraycopy:
    if (!InlineArrayCopy) return NULL;
    break;
  case vmIntrinsics::_copyMemory:
    if (StubRoutines::unsafe_arraycopy() == NULL) return NULL;
    if (!InlineArrayCopy) return NULL;
    break;
  case vmIntrinsics::_hashCode:
    if (!InlineObjectHash) return NULL;
    does_virtual_dispatch = true;
    break;
  case vmIntrinsics::_clone:
    does_virtual_dispatch = true;
    // intentional fall-through: _clone shares the object/array-copy checks
  case vmIntrinsics::_copyOf:
  case vmIntrinsics::_copyOfRange:
    if (!InlineObjectCopy) return NULL;
    // These also use the arraycopy intrinsic mechanism:
    if (!InlineArrayCopy) return NULL;
    break;
  case vmIntrinsics::_encodeISOArray:
    if (!SpecialEncodeISOArray) return NULL;
    if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return NULL;
    break;
  case vmIntrinsics::_checkIndex:
    // We do not intrinsify this.  The optimizer does fine with it.
    return NULL;

  case vmIntrinsics::_getCallerClass:
    if (!InlineReflectionGetCallerClass) return NULL;
    if (SystemDictionary::reflect_CallerSensitive_klass() == NULL) return NULL;
    break;

  case vmIntrinsics::_bitCount_i:
    if (!Matcher::match_rule_supported(Op_PopCountI)) return NULL;
    break;

  case vmIntrinsics::_bitCount_l:
    if (!Matcher::match_rule_supported(Op_PopCountL)) return NULL;
    break;

  case vmIntrinsics::_numberOfLeadingZeros_i:
    if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return NULL;
    break;

  case vmIntrinsics::_numberOfLeadingZeros_l:
    if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return NULL;
    break;

  case vmIntrinsics::_numberOfTrailingZeros_i:
    if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return NULL;
    break;

  case vmIntrinsics::_numberOfTrailingZeros_l:
    if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return NULL;
    break;

  case vmIntrinsics::_reverseBytes_c:
    if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return NULL;
    break;
  case vmIntrinsics::_reverseBytes_s:
    if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return NULL;
    break;
  case vmIntrinsics::_reverseBytes_i:
    if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return NULL;
    break;
  case vmIntrinsics::_reverseBytes_l:
    if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return NULL;
    break;

  case vmIntrinsics::_Reference_get:
    // Use the intrinsic version of Reference.get() so that the value in
    // the referent field can be registered by the G1 pre-barrier code.
    // Also add memory barrier to prevent commoning reads from this field
    // across safepoint since GC can change its value.
    break;

  case vmIntrinsics::_compareAndSwapObject:
#ifdef _LP64
    // With compressed oops the CAS operates on narrow oops, so Op_CompareAndSwapP
    // is only required when they are off.
    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return NULL;
#endif
    break;

  case vmIntrinsics::_compareAndSwapLong:
    if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return NULL;
    break;

  case vmIntrinsics::_getAndAddInt:
    if (!Matcher::match_rule_supported(Op_GetAndAddI)) return NULL;
    break;

  case vmIntrinsics::_getAndAddLong:
    if (!Matcher::match_rule_supported(Op_GetAndAddL)) return NULL;
    break;

  case vmIntrinsics::_getAndSetInt:
    if (!Matcher::match_rule_supported(Op_GetAndSetI)) return NULL;
    break;

  case vmIntrinsics::_getAndSetLong:
    if (!Matcher::match_rule_supported(Op_GetAndSetL)) return NULL;
    break;

  case vmIntrinsics::_getAndSetObject:
#ifdef _LP64
    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
    if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return NULL;
    break;
#else
    if (!Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
    break;
#endif

  case vmIntrinsics::_aescrypt_encryptBlock:
  case vmIntrinsics::_aescrypt_decryptBlock:
    if (!UseAESIntrinsics) return NULL;
    break;

  case vmIntrinsics::_multiplyToLen:
    if (!UseMultiplyToLenIntrinsic) return NULL;
    break;

  case vmIntrinsics::_squareToLen:
    if (!UseSquareToLenIntrinsic) return NULL;
    break;

  case vmIntrinsics::_mulAdd:
    if (!UseMulAddIntrinsic) return NULL;
    break;

  case vmIntrinsics::_montgomeryMultiply:
    if (!UseMontgomeryMultiplyIntrinsic) return NULL;
    break;
  case vmIntrinsics::_montgomerySquare:
    if (!UseMontgomerySquareIntrinsic) return NULL;
    break;

  case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
  case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
    if (!UseAESIntrinsics) return NULL;
    // these two require the predicated logic
    predicates = 1;
    break;

  case vmIntrinsics::_sha_implCompress:
    if (!UseSHA1Intrinsics) return NULL;
    break;

  case vmIntrinsics::_sha2_implCompress:
    if (!UseSHA256Intrinsics) return NULL;
    break;

  case vmIntrinsics::_sha5_implCompress:
    if (!UseSHA512Intrinsics) return NULL;
    break;

  case vmIntrinsics::_digestBase_implCompressMB:
    if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return NULL;
    // presumably one predicate per SHA flavor tested above -- TODO confirm
    predicates = 3;
    break;

  case vmIntrinsics::_ghash_processBlocks:
    if (!UseGHASHIntrinsics) return NULL;
    break;

  case vmIntrinsics::_updateCRC32:
  case vmIntrinsics::_updateBytesCRC32:
  case vmIntrinsics::_updateByteBufferCRC32:
    if (!UseCRC32Intrinsics) return NULL;
    break;

  case vmIntrinsics::_updateBytesCRC32C:
  case vmIntrinsics::_updateDirectByteBufferCRC32C:
    if (!UseCRC32CIntrinsics) return NULL;
    break;

  // Math.*Exact intrinsics need both the global flag and the matching
  // overflow-detecting Matcher rule.  Note negateExact uses the Sub rule
  // (negation is implemented as a subtraction from zero).
  case vmIntrinsics::_incrementExactI:
  case vmIntrinsics::_addExactI:
    if (!Matcher::match_rule_supported(Op_OverflowAddI) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_incrementExactL:
  case vmIntrinsics::_addExactL:
    if (!Matcher::match_rule_supported(Op_OverflowAddL) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_decrementExactI:
  case vmIntrinsics::_subtractExactI:
    if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_decrementExactL:
  case vmIntrinsics::_subtractExactL:
    if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_negateExactI:
    if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_negateExactL:
    if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_multiplyExactI:
    if (!Matcher::match_rule_supported(Op_OverflowMulI) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_multiplyExactL:
    if (!Matcher::match_rule_supported(Op_OverflowMulL) || !UseMathExactIntrinsics) return NULL;
    break;

  case vmIntrinsics::_getShortUnaligned:
  case vmIntrinsics::_getCharUnaligned:
  case vmIntrinsics::_getIntUnaligned:
  case vmIntrinsics::_getLongUnaligned:
  case vmIntrinsics::_putShortUnaligned:
  case vmIntrinsics::_putCharUnaligned:
  case vmIntrinsics::_putIntUnaligned:
  case vmIntrinsics::_putLongUnaligned:
    if (!UseUnalignedAccesses) return NULL;
    break;

  default:
    // Any other id must already be a compiler intrinsic with no extra
    // availability conditions.
    assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
    assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
    break;
  }

  // -XX:-InlineClassNatives disables natives from the Class class.
  // The flag applies to all reflective calls, notably Array.newArray
  // (visible to Java programmers as Array.newInstance).
  if (m->holder()->name() == ciSymbol::java_lang_Class() ||
      m->holder()->name() == ciSymbol::java_lang_reflect_Array()) {
    if (!InlineClassNatives) return NULL;
  }

  // -XX:-InlineThreadNatives disables natives from the Thread class.
  if (m->holder()->name() == ciSymbol::java_lang_Thread()) {
    if (!InlineThreadNatives) return NULL;
  }

  // -XX:-InlineMathNatives disables natives from the Math,Float and Double classes.
  if (m->holder()->name() == ciSymbol::java_lang_Math() ||
      m->holder()->name() == ciSymbol::java_lang_Float() ||
      m->holder()->name() == ciSymbol::java_lang_Double()) {
    if (!InlineMathNatives) return NULL;
  }

  // -XX:-InlineUnsafeOps disables natives from the Unsafe class.
  if (m->holder()->name() == ciSymbol::sun_misc_Unsafe()) {
    if (!InlineUnsafeOps) return NULL;
  }

  // All checks passed: hand back a generator carrying the predicate count
  // and virtual-dispatch capability computed above.
  return new LibraryIntrinsic(m, is_virtual, predicates, does_virtual_dispatch, (vmIntrinsics::ID) id);
}
632
633 //----------------------register_library_intrinsics-----------------------
634 // Initialize this file's data structures, for each Compile instance.
void Compile::register_library_intrinsics() {
  // Nothing to do here.  This file keeps no per-Compile data structures
  // that would need initialization; intrinsic generators are created
  // on demand by make_vm_intrinsic above.
}
638
639 JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
640 LibraryCallKit kit(jvms, this);
641 Compile* C = kit.C;
642 int nodes = C->unique();
643 #ifndef PRODUCT
644 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
645 char buf[1000];
646 const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
647 tty->print_cr("Intrinsic %s", str);
648 }
649 #endif
650 ciMethod* callee = kit.callee();
787 case vmIntrinsics::_negateExactI: return inline_math_negateExactI();
788 case vmIntrinsics::_negateExactL: return inline_math_negateExactL();
789 case vmIntrinsics::_subtractExactI: return inline_math_subtractExactI(false /* subtract */);
790 case vmIntrinsics::_subtractExactL: return inline_math_subtractExactL(false /* subtract */);
791
792 case vmIntrinsics::_arraycopy: return inline_arraycopy();
793
794 case vmIntrinsics::_compareTo: return inline_string_compareTo();
795 case vmIntrinsics::_indexOf: return inline_string_indexOf();
796 case vmIntrinsics::_equals: return inline_string_equals();
797
798 case vmIntrinsics::_getObject: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, !is_volatile);
799 case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile);
800 case vmIntrinsics::_getByte: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, !is_volatile);
801 case vmIntrinsics::_getShort: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, !is_volatile);
802 case vmIntrinsics::_getChar: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, !is_volatile);
803 case vmIntrinsics::_getInt: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, !is_volatile);
804 case vmIntrinsics::_getLong: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, !is_volatile);
805 case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, !is_volatile);
806 case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, !is_volatile);
807
808 case vmIntrinsics::_putObject: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, !is_volatile);
809 case vmIntrinsics::_putBoolean: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, !is_volatile);
810 case vmIntrinsics::_putByte: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, !is_volatile);
811 case vmIntrinsics::_putShort: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, !is_volatile);
812 case vmIntrinsics::_putChar: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, !is_volatile);
813 case vmIntrinsics::_putInt: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, !is_volatile);
814 case vmIntrinsics::_putLong: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, !is_volatile);
815 case vmIntrinsics::_putFloat: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, !is_volatile);
816 case vmIntrinsics::_putDouble: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, !is_volatile);
817
818 case vmIntrinsics::_getByte_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE, !is_volatile);
819 case vmIntrinsics::_getShort_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT, !is_volatile);
820 case vmIntrinsics::_getChar_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR, !is_volatile);
821 case vmIntrinsics::_getInt_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_INT, !is_volatile);
822 case vmIntrinsics::_getLong_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_LONG, !is_volatile);
823 case vmIntrinsics::_getFloat_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT, !is_volatile);
824 case vmIntrinsics::_getDouble_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE, !is_volatile);
825 case vmIntrinsics::_getAddress_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile);
826
827 case vmIntrinsics::_putByte_raw: return inline_unsafe_access( is_native_ptr, is_store, T_BYTE, !is_volatile);
|
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "compiler/compileBroker.hpp"
30 #include "compiler/compileLog.hpp"
31 #include "oops/objArrayKlass.hpp"
32 #include "opto/addnode.hpp"
33 #include "opto/arraycopynode.hpp"
34 #include "opto/c2compiler.hpp"
35 #include "opto/callGenerator.hpp"
36 #include "opto/castnode.hpp"
37 #include "opto/cfgnode.hpp"
38 #include "opto/convertnode.hpp"
39 #include "opto/countbitsnode.hpp"
40 #include "opto/intrinsicnode.hpp"
41 #include "opto/idealKit.hpp"
42 #include "opto/mathexactnode.hpp"
43 #include "opto/movenode.hpp"
44 #include "opto/mulnode.hpp"
45 #include "opto/narrowptrnode.hpp"
46 #include "opto/opaquenode.hpp"
47 #include "opto/parse.hpp"
48 #include "opto/runtime.hpp"
49 #include "opto/subnode.hpp"
50 #include "prims/nativeLookup.hpp"
51 #include "runtime/sharedRuntime.hpp"
52 #include "trace/traceMacros.hpp"
53
54 class LibraryIntrinsic : public InlineCallGenerator {
288 Node* get_state_from_sha_object(Node *sha_object);
289 Node* get_state_from_sha5_object(Node *sha_object);
290 Node* inline_digestBase_implCompressMB_predicate(int predicate);
291 bool inline_encodeISOArray();
292 bool inline_updateCRC32();
293 bool inline_updateBytesCRC32();
294 bool inline_updateByteBufferCRC32();
295 Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
296 bool inline_updateBytesCRC32C();
297 bool inline_updateDirectByteBufferCRC32C();
298 bool inline_multiplyToLen();
299 bool inline_squareToLen();
300 bool inline_mulAdd();
301 bool inline_montgomeryMultiply();
302 bool inline_montgomerySquare();
303
304 bool inline_profileBoolean();
305 bool inline_isCompileConstant();
306 };
307
308 //---------------------------make_vm_intrinsic----------------------------
309 CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
310 vmIntrinsics::ID id = m->intrinsic_id();
311 assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
312
313 if (!m->is_loaded()) {
314 // Do not attempt to inline unloaded methods.
315 return NULL;
316 }
317
318 C2Compiler* compiler = (C2Compiler*)CompileBroker::compiler(CompLevel_full_optimization);
319 bool is_available = !vmIntrinsics::is_disabled_by_flags(m->intrinsic_id());
320 if (!is_available) {
321 return NULL;
322 }
323
324 {
325 // For calling is_intrinsic_supported and is_intrinsic_disabled_by_flag
326 // we need to transition to the '_thread_in_vm' state because both
327 // methods access VM-internal data.
328 VM_ENTRY_MARK;
329 methodHandle mh(THREAD, m->get_Method());
330 methodHandle ct(THREAD, method()->get_Method());
331 is_available = is_available &&
332 compiler->is_intrinsic_supported(mh, is_virtual) &&
333 !compiler->is_intrinsic_disabled_by_flag(mh, ct);
334 }
335
336 if (is_available) {
337 assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
338 assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
339 return new LibraryIntrinsic(m, is_virtual,
340 vmIntrinsics::predicates_needed(id),
341 vmIntrinsics::does_virtual_dispatch(id),
342 (vmIntrinsics::ID) id);
343 } else {
344 return NULL;
345 }
346 }
347
348 //----------------------register_library_intrinsics-----------------------
349 // Initialize this file's data structures, for each Compile instance.
void Compile::register_library_intrinsics() {
  // Nothing to do here.  This file keeps no per-Compile data structures
  // that would need initialization; intrinsic generators are created
  // on demand by make_vm_intrinsic above.
}
353
354 JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
355 LibraryCallKit kit(jvms, this);
356 Compile* C = kit.C;
357 int nodes = C->unique();
358 #ifndef PRODUCT
359 if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
360 char buf[1000];
361 const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
362 tty->print_cr("Intrinsic %s", str);
363 }
364 #endif
365 ciMethod* callee = kit.callee();
502 case vmIntrinsics::_negateExactI: return inline_math_negateExactI();
503 case vmIntrinsics::_negateExactL: return inline_math_negateExactL();
504 case vmIntrinsics::_subtractExactI: return inline_math_subtractExactI(false /* subtract */);
505 case vmIntrinsics::_subtractExactL: return inline_math_subtractExactL(false /* subtract */);
506
507 case vmIntrinsics::_arraycopy: return inline_arraycopy();
508
509 case vmIntrinsics::_compareTo: return inline_string_compareTo();
510 case vmIntrinsics::_indexOf: return inline_string_indexOf();
511 case vmIntrinsics::_equals: return inline_string_equals();
512
513 case vmIntrinsics::_getObject: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, !is_volatile);
514 case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile);
515 case vmIntrinsics::_getByte: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, !is_volatile);
516 case vmIntrinsics::_getShort: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, !is_volatile);
517 case vmIntrinsics::_getChar: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, !is_volatile);
518 case vmIntrinsics::_getInt: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, !is_volatile);
519 case vmIntrinsics::_getLong: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, !is_volatile);
520 case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, !is_volatile);
521 case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, !is_volatile);
522 case vmIntrinsics::_putObject: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, !is_volatile);
523 case vmIntrinsics::_putBoolean: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, !is_volatile);
524 case vmIntrinsics::_putByte: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, !is_volatile);
525 case vmIntrinsics::_putShort: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, !is_volatile);
526 case vmIntrinsics::_putChar: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, !is_volatile);
527 case vmIntrinsics::_putInt: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, !is_volatile);
528 case vmIntrinsics::_putLong: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, !is_volatile);
529 case vmIntrinsics::_putFloat: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, !is_volatile);
530 case vmIntrinsics::_putDouble: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, !is_volatile);
531
532 case vmIntrinsics::_getByte_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE, !is_volatile);
533 case vmIntrinsics::_getShort_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT, !is_volatile);
534 case vmIntrinsics::_getChar_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR, !is_volatile);
535 case vmIntrinsics::_getInt_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_INT, !is_volatile);
536 case vmIntrinsics::_getLong_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_LONG, !is_volatile);
537 case vmIntrinsics::_getFloat_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT, !is_volatile);
538 case vmIntrinsics::_getDouble_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE, !is_volatile);
539 case vmIntrinsics::_getAddress_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile);
540
541 case vmIntrinsics::_putByte_raw: return inline_unsafe_access( is_native_ptr, is_store, T_BYTE, !is_volatile);
|