76 {
77 }
  // This generator is an intrinsic (compiler-expanded), never a parsed call.
  virtual bool is_intrinsic() const { return true; }
  // True when this intrinsic was registered for a virtual call site.
  virtual bool is_virtual() const { return _is_virtual; }
  // A predicated intrinsic emits one or more runtime guards before its body.
  virtual bool is_predicated() const { return _predicates_count > 0; }
  virtual int predicates_count() const { return _predicates_count; }
  // True when expanding this intrinsic performs virtual dispatch itself.
  virtual bool does_virtual_dispatch() const { return _does_virtual_dispatch; }
  // Expand the intrinsic at the call site described by 'jvms'.
  virtual JVMState* generate(JVMState* jvms);
  // Emit the guard test for predicate number 'predicate' (predicated intrinsics only).
  virtual Node* generate_predicate(JVMState* jvms, int predicate);
  vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
86 };
87
88
// Local helper class for LibraryIntrinsic:
// A GraphKit specialized for expanding library intrinsics.  It carries the
// intrinsic being expanded, the result node produced so far, and the stack
// pointer needed to re-execute the invoke bytecode on deoptimization.
class LibraryCallKit : public GraphKit {
 private:
  LibraryIntrinsic* _intrinsic; // the library intrinsic being called
  Node* _result; // the result node, if any
  int _reexecute_sp; // the stack pointer when bytecode needs to be reexecuted

  const TypeOopPtr* sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type, bool is_native_ptr = false);

 public:
  // Capture the JVM state of the call site and precompute _reexecute_sp,
  // the sp() value with the call's arguments conceptually still on the stack.
  LibraryCallKit(JVMState* jvms, LibraryIntrinsic* intrinsic)
    : GraphKit(jvms),
      _intrinsic(intrinsic),
      _result(NULL)
  {
    // Check if this is a root compile. In that case we don't have a caller.
    if (!jvms->has_method()) {
      _reexecute_sp = sp();
    } else {
      // Find out how many arguments the interpreter needs when deoptimizing
      // and save the stack pointer value so it can be used by uncommon_trap.
      // We find the argument count by looking at the declared signature.
      bool ignored_will_link;
      ciSignature* declared_signature = NULL;
      ciMethod* ignored_callee = caller()->get_method_at_bci(bci(), ignored_will_link, &declared_signature);
      const int nargs = declared_signature->arg_size_for_bc(caller()->java_code_at_bci(bci()));
      _reexecute_sp = sp() + nargs; // "push" arguments back on stack
    }
  // Math.*Exact family: arithmetic that traps to the interpreter on overflow.
  bool inline_math_addExactI(bool is_increment);
  bool inline_math_addExactL(bool is_increment);
  bool inline_math_multiplyExactI();
  bool inline_math_multiplyExactL();
  bool inline_math_negateExactI();
  bool inline_math_negateExactL();
  bool inline_math_subtractExactI(bool is_decrement);
  bool inline_math_subtractExactL(bool is_decrement);
  bool inline_min_max(vmIntrinsics::ID id);
  bool inline_notify(vmIntrinsics::ID id);
  Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
  // This returns Type::AnyPtr, RawPtr, or OopPtr.
  int classify_unsafe_addr(Node* &base, Node* &offset);
  Node* make_unsafe_address(Node* base, Node* offset);
  // Helper for inline_unsafe_access.
  // Generates the guards that check whether the result of
  // Unsafe.getObject should be recorded in an SATB log buffer.
  void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);

  // Memory-ordering flavors used by the Unsafe access intrinsics below.
  typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
  bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
  static bool klass_needs_init_guard(Node* kls);
  bool inline_unsafe_allocate();
  bool inline_unsafe_newArray(bool uninitialized);
  bool inline_unsafe_copyMemory();
  bool inline_native_currentThread();

  // Intrinsics over JDK native methods (time, interrupts, reflection, arrays).
  bool inline_native_time_funcs(address method, const char* funcName);
  bool inline_native_isInterrupted();
  bool inline_native_Class_query(vmIntrinsics::ID id);
  bool inline_native_subtype_check();
  bool inline_native_getLength();
  bool inline_array_copyOf(bool is_copyOfRange);
  bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
  bool inline_objects_checkIndex();
  void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
  bool inline_native_clone(bool is_virtual);
  bool inline_native_Reflection_getCallerClass();
  // Helper function for inlining native object hash method
  bool inline_native_hashcode(bool is_virtual, bool is_static);
  bool inline_native_getClass();
458 if (C->print_intrinsics() || C->print_inlining()) {
459 if (jvms->has_method()) {
460 // Not a root compile.
461 const char* msg = "failed to generate predicate for intrinsic";
462 C->print_inlining(kit.callee(), jvms->depth() - 1, bci, msg);
463 } else {
464 // Root compile
465 C->print_inlining_stream()->print("Did not generate predicate for intrinsic %s%s at bci:%d in",
466 vmIntrinsics::name_at(intrinsic_id()),
467 (is_virtual() ? " (virtual)" : ""), bci);
468 }
469 }
470 C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
471 return NULL;
472 }
473
// Dispatch on intrinsic_id() to the matching inline_* expander and report
// whatever that expander reports.  'predicate' selects among the variants
// of a predicated intrinsic (see generate_predicate).
bool LibraryCallKit::try_to_inline(int predicate) {
  // Handle symbolic names for otherwise undistinguished boolean switches:
  const bool is_store = true;
  const bool is_compress = true;
  const bool is_native_ptr = true;
  const bool is_static = true;
  const bool is_volatile = true;

  if (!jvms()->has_method()) {
    // Root JVMState has a null method.
    assert(map()->memory()->Opcode() == Op_Parm, "");
    // Insert the memory aliasing node
    set_all_memory(reset_memory());
  }
  // Memory must have been merged before expanding any intrinsic.
  assert(merged_memory(), "");
489
490
491 switch (intrinsic_id()) {
492 case vmIntrinsics::_hashCode: return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
493 case vmIntrinsics::_identityHashCode: return inline_native_hashcode(/*!virtual*/ false, is_static);
494 case vmIntrinsics::_getClass: return inline_native_getClass();
495
496 case vmIntrinsics::_dsin:
497 case vmIntrinsics::_dcos:
498 case vmIntrinsics::_dtan:
538 case vmIntrinsics::_indexOfU: return inline_string_indexOf(StrIntrinsicNode::UU);
539 case vmIntrinsics::_indexOfUL: return inline_string_indexOf(StrIntrinsicNode::UL);
540 case vmIntrinsics::_indexOfIL: return inline_string_indexOfI(StrIntrinsicNode::LL);
541 case vmIntrinsics::_indexOfIU: return inline_string_indexOfI(StrIntrinsicNode::UU);
542 case vmIntrinsics::_indexOfIUL: return inline_string_indexOfI(StrIntrinsicNode::UL);
543 case vmIntrinsics::_indexOfU_char: return inline_string_indexOfChar();
544
545 case vmIntrinsics::_equalsL: return inline_string_equals(StrIntrinsicNode::LL);
546 case vmIntrinsics::_equalsU: return inline_string_equals(StrIntrinsicNode::UU);
547
548 case vmIntrinsics::_toBytesStringU: return inline_string_toBytesU();
549 case vmIntrinsics::_getCharsStringU: return inline_string_getCharsU();
550 case vmIntrinsics::_getCharStringU: return inline_string_char_access(!is_store);
551 case vmIntrinsics::_putCharStringU: return inline_string_char_access( is_store);
552
553 case vmIntrinsics::_compressStringC:
554 case vmIntrinsics::_compressStringB: return inline_string_copy( is_compress);
555 case vmIntrinsics::_inflateStringC:
556 case vmIntrinsics::_inflateStringB: return inline_string_copy(!is_compress);
557
558 case vmIntrinsics::_getObject: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, Relaxed, false);
559 case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, Relaxed, false);
560 case vmIntrinsics::_getByte: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, Relaxed, false);
561 case vmIntrinsics::_getShort: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Relaxed, false);
562 case vmIntrinsics::_getChar: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Relaxed, false);
563 case vmIntrinsics::_getInt: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Relaxed, false);
564 case vmIntrinsics::_getLong: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Relaxed, false);
565 case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, Relaxed, false);
566 case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, Relaxed, false);
567
568 case vmIntrinsics::_putObject: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, Relaxed, false);
569 case vmIntrinsics::_putBoolean: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, Relaxed, false);
570 case vmIntrinsics::_putByte: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, Relaxed, false);
571 case vmIntrinsics::_putShort: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Relaxed, false);
572 case vmIntrinsics::_putChar: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Relaxed, false);
573 case vmIntrinsics::_putInt: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Relaxed, false);
574 case vmIntrinsics::_putLong: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Relaxed, false);
575 case vmIntrinsics::_putFloat: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, Relaxed, false);
576 case vmIntrinsics::_putDouble: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, Relaxed, false);
577
578 case vmIntrinsics::_getByte_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE, Relaxed, false);
579 case vmIntrinsics::_getShort_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT, Relaxed, false);
580 case vmIntrinsics::_getChar_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR, Relaxed, false);
581 case vmIntrinsics::_getInt_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_INT, Relaxed, false);
582 case vmIntrinsics::_getLong_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_LONG, Relaxed, false);
583 case vmIntrinsics::_getFloat_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT, Relaxed, false);
584 case vmIntrinsics::_getDouble_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE, Relaxed, false);
585 case vmIntrinsics::_getAddress_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, Relaxed, false);
586
587 case vmIntrinsics::_putByte_raw: return inline_unsafe_access( is_native_ptr, is_store, T_BYTE, Relaxed, false);
588 case vmIntrinsics::_putShort_raw: return inline_unsafe_access( is_native_ptr, is_store, T_SHORT, Relaxed, false);
589 case vmIntrinsics::_putChar_raw: return inline_unsafe_access( is_native_ptr, is_store, T_CHAR, Relaxed, false);
590 case vmIntrinsics::_putInt_raw: return inline_unsafe_access( is_native_ptr, is_store, T_INT, Relaxed, false);
591 case vmIntrinsics::_putLong_raw: return inline_unsafe_access( is_native_ptr, is_store, T_LONG, Relaxed, false);
592 case vmIntrinsics::_putFloat_raw: return inline_unsafe_access( is_native_ptr, is_store, T_FLOAT, Relaxed, false);
593 case vmIntrinsics::_putDouble_raw: return inline_unsafe_access( is_native_ptr, is_store, T_DOUBLE, Relaxed, false);
594 case vmIntrinsics::_putAddress_raw: return inline_unsafe_access( is_native_ptr, is_store, T_ADDRESS, Relaxed, false);
595
596 case vmIntrinsics::_getObjectVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, Volatile, false);
597 case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, Volatile, false);
598 case vmIntrinsics::_getByteVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, Volatile, false);
599 case vmIntrinsics::_getShortVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Volatile, false);
600 case vmIntrinsics::_getCharVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Volatile, false);
601 case vmIntrinsics::_getIntVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Volatile, false);
602 case vmIntrinsics::_getLongVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Volatile, false);
603 case vmIntrinsics::_getFloatVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, Volatile, false);
604 case vmIntrinsics::_getDoubleVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, Volatile, false);
605
606 case vmIntrinsics::_putObjectVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, Volatile, false);
607 case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, Volatile, false);
608 case vmIntrinsics::_putByteVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, Volatile, false);
609 case vmIntrinsics::_putShortVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Volatile, false);
610 case vmIntrinsics::_putCharVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Volatile, false);
611 case vmIntrinsics::_putIntVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Volatile, false);
612 case vmIntrinsics::_putLongVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Volatile, false);
613 case vmIntrinsics::_putFloatVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, Volatile, false);
614 case vmIntrinsics::_putDoubleVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, Volatile, false);
615
616 case vmIntrinsics::_getShortUnaligned: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Relaxed, true);
617 case vmIntrinsics::_getCharUnaligned: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Relaxed, true);
618 case vmIntrinsics::_getIntUnaligned: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Relaxed, true);
619 case vmIntrinsics::_getLongUnaligned: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Relaxed, true);
620
621 case vmIntrinsics::_putShortUnaligned: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Relaxed, true);
622 case vmIntrinsics::_putCharUnaligned: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Relaxed, true);
623 case vmIntrinsics::_putIntUnaligned: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Relaxed, true);
624 case vmIntrinsics::_putLongUnaligned: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Relaxed, true);
625
626 case vmIntrinsics::_getObjectAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, Acquire, false);
627 case vmIntrinsics::_getBooleanAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, Acquire, false);
628 case vmIntrinsics::_getByteAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, Acquire, false);
629 case vmIntrinsics::_getShortAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Acquire, false);
630 case vmIntrinsics::_getCharAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Acquire, false);
631 case vmIntrinsics::_getIntAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Acquire, false);
632 case vmIntrinsics::_getLongAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Acquire, false);
633 case vmIntrinsics::_getFloatAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, Acquire, false);
634 case vmIntrinsics::_getDoubleAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, Acquire, false);
635
636 case vmIntrinsics::_putObjectRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, Release, false);
637 case vmIntrinsics::_putBooleanRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, Release, false);
638 case vmIntrinsics::_putByteRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, Release, false);
639 case vmIntrinsics::_putShortRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Release, false);
640 case vmIntrinsics::_putCharRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Release, false);
641 case vmIntrinsics::_putIntRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Release, false);
642 case vmIntrinsics::_putLongRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Release, false);
643 case vmIntrinsics::_putFloatRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, Release, false);
644 case vmIntrinsics::_putDoubleRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, Release, false);
645
646 case vmIntrinsics::_getObjectOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, Opaque, false);
647 case vmIntrinsics::_getBooleanOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, Opaque, false);
648 case vmIntrinsics::_getByteOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, Opaque, false);
649 case vmIntrinsics::_getShortOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Opaque, false);
650 case vmIntrinsics::_getCharOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Opaque, false);
651 case vmIntrinsics::_getIntOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Opaque, false);
652 case vmIntrinsics::_getLongOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Opaque, false);
653 case vmIntrinsics::_getFloatOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, Opaque, false);
654 case vmIntrinsics::_getDoubleOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, Opaque, false);
655
656 case vmIntrinsics::_putObjectOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, Opaque, false);
657 case vmIntrinsics::_putBooleanOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, Opaque, false);
658 case vmIntrinsics::_putByteOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, Opaque, false);
659 case vmIntrinsics::_putShortOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Opaque, false);
660 case vmIntrinsics::_putCharOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Opaque, false);
661 case vmIntrinsics::_putIntOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Opaque, false);
662 case vmIntrinsics::_putLongOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Opaque, false);
663 case vmIntrinsics::_putFloatOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, Opaque, false);
664 case vmIntrinsics::_putDoubleOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, Opaque, false);
665
666 case vmIntrinsics::_compareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap, Volatile);
667 case vmIntrinsics::_compareAndSwapInt: return inline_unsafe_load_store(T_INT, LS_cmp_swap, Volatile);
668 case vmIntrinsics::_compareAndSwapLong: return inline_unsafe_load_store(T_LONG, LS_cmp_swap, Volatile);
669
670 case vmIntrinsics::_weakCompareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Relaxed);
671 case vmIntrinsics::_weakCompareAndSwapObjectAcquire: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Acquire);
672 case vmIntrinsics::_weakCompareAndSwapObjectRelease: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Release);
673 case vmIntrinsics::_weakCompareAndSwapInt: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Relaxed);
674 case vmIntrinsics::_weakCompareAndSwapIntAcquire: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Acquire);
675 case vmIntrinsics::_weakCompareAndSwapIntRelease: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Release);
676 case vmIntrinsics::_weakCompareAndSwapLong: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Relaxed);
677 case vmIntrinsics::_weakCompareAndSwapLongAcquire: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Acquire);
678 case vmIntrinsics::_weakCompareAndSwapLongRelease: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Release);
679
680 case vmIntrinsics::_compareAndExchangeObjectVolatile: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Volatile);
681 case vmIntrinsics::_compareAndExchangeObjectAcquire: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Acquire);
682 case vmIntrinsics::_compareAndExchangeObjectRelease: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Release);
683 case vmIntrinsics::_compareAndExchangeIntVolatile: return inline_unsafe_load_store(T_INT, LS_cmp_exchange, Volatile);
684 case vmIntrinsics::_compareAndExchangeIntAcquire: return inline_unsafe_load_store(T_INT, LS_cmp_exchange, Acquire);
  // Map the bit-manipulation intrinsic id to its ideal graph node.
  Node* n = NULL;
  switch (id) {
  case vmIntrinsics::_numberOfLeadingZeros_i: n = new CountLeadingZerosINode( arg); break;
  case vmIntrinsics::_numberOfLeadingZeros_l: n = new CountLeadingZerosLNode( arg); break;
  case vmIntrinsics::_numberOfTrailingZeros_i: n = new CountTrailingZerosINode(arg); break;
  case vmIntrinsics::_numberOfTrailingZeros_l: n = new CountTrailingZerosLNode(arg); break;
  case vmIntrinsics::_bitCount_i: n = new PopCountINode( arg); break;
  case vmIntrinsics::_bitCount_l: n = new PopCountLNode( arg); break;
  // Character.reverseBytes uses the unsigned-short (US) node; short uses the signed one.
  case vmIntrinsics::_reverseBytes_c: n = new ReverseBytesUSNode(0, arg); break;
  case vmIntrinsics::_reverseBytes_s: n = new ReverseBytesSNode( 0, arg); break;
  case vmIntrinsics::_reverseBytes_i: n = new ReverseBytesINode( 0, arg); break;
  case vmIntrinsics::_reverseBytes_l: n = new ReverseBytesLNode( 0, arg); break;
  default: fatal_unexpected_iid(id); break;
  }
  // GVN-transform the new node and record it as the intrinsic's result.
  set_result(_gvn.transform(n));
  return true;
}
2196
//----------------------------inline_unsafe_access----------------------------

// At the Java level, Unsafe.getAddress/putAddress traffic in T_LONG values;
// the signature checks below compare against this holder type before
// treating the value as a native T_ADDRESS (a C void*).
const static BasicType T_ADDRESS_HOLDER = T_LONG;
2200
2201 // Helper that guards and inserts a pre-barrier.
// Helper that guards and inserts a pre-barrier.
void LibraryCallKit::insert_pre_barrier(Node* base_oop, Node* offset,
                                        Node* pre_val, bool need_mem_bar) {
  // We could be accessing the referent field of a reference object. If so, when G1
  // is enabled, we need to log the value in the referent field in an SATB buffer.
  // This routine performs some compile time filters and generates suitable
  // runtime filters that guard the pre-barrier code.
  // Also add memory barrier for non volatile load from the referent field
  // to prevent commoning of loads across safepoint.
  if (!UseG1GC && !need_mem_bar)
    return; // no SATB logging and no barrier requested -- nothing to emit

  // Some compile time checks.

  // If offset is a constant, is it java_lang_ref_Reference::_reference_offset?
  const TypeX* otype = offset->find_intptr_t_type();
  if (otype != NULL && otype->is_con() &&
      otype->get_con() != java_lang_ref_Reference::referent_offset) {
    // Constant offset but not the reference_offset so just return
    return;
2281 NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
2282 pre_val /* pre_val */,
2283 T_OBJECT);
2284 if (need_mem_bar) {
2285 // Add memory barrier to prevent commoning reads from this field
2286 // across safepoint since GC can change its value.
2287 insert_mem_bar(Op_MemBarCPUOrder);
2288 }
2289 // Update IdealKit from graphKit.
2290 __ sync_kit(this);
2291
2292 } __ end_if(); // _ref_type != ref_none
2293 } __ end_if(); // offset == referent_offset
2294
2295 // Final sync IdealKit and GraphKit.
2296 final_sync(ideal);
2297 #undef __
2298 }
2299
2300
2301 const TypeOopPtr* LibraryCallKit::sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type, bool is_native_ptr) {
2302 // Attempt to infer a sharper value type from the offset and base type.
2303 ciKlass* sharpened_klass = NULL;
2304
2305 // See if it is an instance field, with an object type.
2306 if (alias_type->field() != NULL) {
2307 assert(!is_native_ptr, "native pointer op cannot use a java address");
2308 if (alias_type->field()->type()->is_klass()) {
2309 sharpened_klass = alias_type->field()->type()->as_klass();
2310 }
2311 }
2312
2313 // See if it is a narrow oop array.
2314 if (adr_type->isa_aryptr()) {
2315 if (adr_type->offset() >= objArrayOopDesc::base_offset_in_bytes()) {
2316 const TypeOopPtr *elem_type = adr_type->is_aryptr()->elem()->isa_oopptr();
2317 if (elem_type != NULL) {
2318 sharpened_klass = elem_type->klass();
2319 }
2320 }
2321 }
2322
2323 // The sharpened class might be unloaded if there is no class loader
2324 // contraint in place.
2325 if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2326 const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2327
2328 #ifndef PRODUCT
2329 if (C->print_intrinsics() || C->print_inlining()) {
2330 tty->print(" from base type: "); adr_type->dump();
2331 tty->print(" sharpened value: "); tjp->dump();
2332 }
2333 #endif
2334 // Sharpen the value type.
2335 return tjp;
2336 }
2337 return NULL;
2338 }
2339
// Expand one Unsafe getX/putX access (plain, Volatile, Acquire/Release,
// Opaque, or unaligned variant).  'is_native_ptr' selects the raw-address
// (long-based) forms, 'is_store' distinguishes put from get, 'type' is the
// Java value type, 'kind' the memory-ordering flavor, and 'unaligned' the
// *Unaligned forms.  Returns false when the intrinsic cannot be expanded.
bool LibraryCallKit::inline_unsafe_access(const bool is_native_ptr, bool is_store, const BasicType type, const AccessKind kind, const bool unaligned) {
  if (callee()->is_static()) return false; // caller must have the capability!
  // Acquire is only meaningful on loads, Release only on stores.
  guarantee(!is_store || kind != Acquire, "Acquire accesses can be produced only for loads");
  guarantee( is_store || kind != Release, "Release accesses can be produced only for stores");
  assert(type != T_OBJECT || !unaligned, "unaligned access not supported with object type");
2345
2346 #ifndef PRODUCT
2347 {
2348 ResourceMark rm;
2349 // Check the signatures.
2350 ciSignature* sig = callee()->signature();
2351 #ifdef ASSERT
2352 if (!is_store) {
2353 // Object getObject(Object base, int/long offset), etc.
2354 BasicType rtype = sig->return_type()->basic_type();
2355 if (rtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::getAddress_name())
2356 rtype = T_ADDRESS; // it is really a C void*
2357 assert(rtype == type, "getter must return the expected value");
2358 if (!is_native_ptr) {
2359 assert(sig->count() == 2, "oop getter has 2 arguments");
2360 assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2361 assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2362 } else {
2363 assert(sig->count() == 1, "native getter has 1 argument");
2364 assert(sig->type_at(0)->basic_type() == T_LONG, "getter base is long");
2365 }
2366 } else {
2367 // void putObject(Object base, int/long offset, Object x), etc.
2368 assert(sig->return_type()->basic_type() == T_VOID, "putter must not return a value");
2369 if (!is_native_ptr) {
2370 assert(sig->count() == 3, "oop putter has 3 arguments");
2371 assert(sig->type_at(0)->basic_type() == T_OBJECT, "putter base is object");
2372 assert(sig->type_at(1)->basic_type() == T_LONG, "putter offset is correct");
2373 } else {
2374 assert(sig->count() == 2, "native putter has 2 arguments");
2375 assert(sig->type_at(0)->basic_type() == T_LONG, "putter base is long");
2376 }
2377 BasicType vtype = sig->type_at(sig->count()-1)->basic_type();
2378 if (vtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::putAddress_name())
2379 vtype = T_ADDRESS; // it is really a C void*
2380 assert(vtype == type, "putter must accept the expected value");
2381 }
2382 #endif // ASSERT
2383 }
2384 #endif //PRODUCT
2385
2386 C->set_has_unsafe_access(true); // Mark eventual nmethod as "unsafe".
2387
2388 Node* receiver = argument(0); // type: oop
2389
2390 // Build address expression.
2391 Node* adr;
2392 Node* heap_base_oop = top();
2393 Node* offset = top();
2394 Node* val;
2395
2396 if (!is_native_ptr) {
2397 // The base is either a Java object or a value produced by Unsafe.staticFieldBase
2398 Node* base = argument(1); // type: oop
2399 // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
2400 offset = argument(2); // type: long
2401 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2402 // to be plain byte offsets, which are also the same as those accepted
2403 // by oopDesc::field_base.
2404 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2405 "fieldOffset must be byte-scaled");
2406 // 32-bit machines ignore the high half!
2407 offset = ConvL2X(offset);
2408 adr = make_unsafe_address(base, offset);
2409 heap_base_oop = base;
2410 val = is_store ? argument(4) : NULL;
2411 } else {
2412 Node* ptr = argument(1); // type: long
2413 ptr = ConvL2X(ptr); // adjust Java long to machine word
2414 adr = make_unsafe_address(NULL, ptr);
2415 val = is_store ? argument(3) : NULL;
2416 }
2417
2418 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2419
2420 // Try to categorize the address. If it comes up as TypeJavaPtr::BOTTOM,
2421 // there was not enough information to nail it down.
2422 Compile::AliasType* alias_type = C->alias_type(adr_type);
2423 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2424
2425 assert(alias_type->adr_type() == TypeRawPtr::BOTTOM || alias_type->adr_type() == TypeOopPtr::BOTTOM ||
2426 alias_type->basic_type() != T_ILLEGAL, "field, array element or unknown");
2427 bool mismatched = false;
2428 BasicType bt = alias_type->basic_type();
2429 if (bt != T_ILLEGAL) {
2430 if (bt == T_BYTE && adr_type->isa_aryptr()) {
2431 // Alias type doesn't differentiate between byte[] and boolean[]).
2432 // Use address type to get the element type.
2433 bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
2434 }
2435 if (bt == T_ARRAY || bt == T_NARROWOOP) {
2436 // accessing an array field with getObject is not a mismatch
2477 break;
2478 case Acquire:
2479 case Release:
2480 case Volatile:
2481 requires_atomic_access = true;
2482 break;
2483 default:
2484 ShouldNotReachHere();
2485 }
2486
2487 // Figure out the memory ordering.
2488 // Acquire/Release/Volatile accesses require marking the loads/stores with MemOrd
2489 MemNode::MemOrd mo = access_kind_to_memord_LS(kind, is_store);
2490
2491 // If we are reading the value of the referent field of a Reference
2492 // object (either by using Unsafe directly or through reflection)
2493 // then, if G1 is enabled, we need to record the referent in an
2494 // SATB log buffer using the pre-barrier mechanism.
2495 // Also we need to add memory barrier to prevent commoning reads
2496 // from this field across safepoint since GC can change its value.
2497 bool need_read_barrier = !is_native_ptr && !is_store &&
2498 offset != top() && heap_base_oop != top();
2499
2500 if (!is_store && type == T_OBJECT) {
2501 const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type, is_native_ptr);
2502 if (tjp != NULL) {
2503 value_type = tjp;
2504 }
2505 }
2506
2507 receiver = null_check(receiver);
2508 if (stopped()) {
2509 return true;
2510 }
2511 // Heap pointers get a null-check from the interpreter,
2512 // as a courtesy. However, this is not guaranteed by Unsafe,
2513 // and it is not possible to fully distinguish unintended nulls
2514 // from intended ones in this API.
2515
2516 // We need to emit leading and trailing CPU membars (see below) in
2517 // addition to memory membars for special access modes. This is a little
2518 // too strong, but avoids the need to insert per-alias-type
2519 // volatile membars (for stores; compare Parse::do_put_xxx), which
2520 // we cannot do effectively here because we probably only have a
2521 // rough approximation of type.
|
76 {
77 }
  // This generator is an intrinsic (compiler-expanded), never a parsed call.
  virtual bool is_intrinsic() const { return true; }
  // True when this intrinsic was registered for a virtual call site.
  virtual bool is_virtual() const { return _is_virtual; }
  // A predicated intrinsic emits one or more runtime guards before its body.
  virtual bool is_predicated() const { return _predicates_count > 0; }
  virtual int predicates_count() const { return _predicates_count; }
  // True when expanding this intrinsic performs virtual dispatch itself.
  virtual bool does_virtual_dispatch() const { return _does_virtual_dispatch; }
  // Expand the intrinsic at the call site described by 'jvms'.
  virtual JVMState* generate(JVMState* jvms);
  // Emit the guard test for predicate number 'predicate' (predicated intrinsics only).
  virtual Node* generate_predicate(JVMState* jvms, int predicate);
  vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
86 };
87
88
// Local helper class for LibraryIntrinsic:
// A GraphKit specialized for expanding library intrinsics.  It carries the
// intrinsic being expanded, the result node produced so far, and the stack
// pointer needed to re-execute the invoke bytecode on deoptimization.
class LibraryCallKit : public GraphKit {
 private:
  LibraryIntrinsic* _intrinsic; // the library intrinsic being called
  Node* _result; // the result node, if any
  int _reexecute_sp; // the stack pointer when bytecode needs to be reexecuted

  const TypeOopPtr* sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type);

 public:
  // Capture the JVM state of the call site and precompute _reexecute_sp,
  // the sp() value with the call's arguments conceptually still on the stack.
  LibraryCallKit(JVMState* jvms, LibraryIntrinsic* intrinsic)
    : GraphKit(jvms),
      _intrinsic(intrinsic),
      _result(NULL)
  {
    // Check if this is a root compile. In that case we don't have a caller.
    if (!jvms->has_method()) {
      _reexecute_sp = sp();
    } else {
      // Find out how many arguments the interpreter needs when deoptimizing
      // and save the stack pointer value so it can be used by uncommon_trap.
      // We find the argument count by looking at the declared signature.
      bool ignored_will_link;
      ciSignature* declared_signature = NULL;
      ciMethod* ignored_callee = caller()->get_method_at_bci(bci(), ignored_will_link, &declared_signature);
      const int nargs = declared_signature->arg_size_for_bc(caller()->java_code_at_bci(bci()));
      _reexecute_sp = sp() + nargs; // "push" arguments back on stack
    }
230 bool inline_math_addExactI(bool is_increment);
231 bool inline_math_addExactL(bool is_increment);
232 bool inline_math_multiplyExactI();
233 bool inline_math_multiplyExactL();
234 bool inline_math_negateExactI();
235 bool inline_math_negateExactL();
236 bool inline_math_subtractExactI(bool is_decrement);
237 bool inline_math_subtractExactL(bool is_decrement);
238 bool inline_min_max(vmIntrinsics::ID id);
239 bool inline_notify(vmIntrinsics::ID id);
240 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
241 // This returns Type::AnyPtr, RawPtr, or OopPtr.
242 int classify_unsafe_addr(Node* &base, Node* &offset);
243 Node* make_unsafe_address(Node* base, Node* offset);
244 // Helper for inline_unsafe_access.
245 // Generates the guards that check whether the result of
246 // Unsafe.getObject should be recorded in an SATB log buffer.
247 void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
248
249 typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
250 bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
251 static bool klass_needs_init_guard(Node* kls);
252 bool inline_unsafe_allocate();
253 bool inline_unsafe_newArray(bool uninitialized);
254 bool inline_unsafe_copyMemory();
255 bool inline_native_currentThread();
256
257 bool inline_native_time_funcs(address method, const char* funcName);
258 bool inline_native_isInterrupted();
259 bool inline_native_Class_query(vmIntrinsics::ID id);
260 bool inline_native_subtype_check();
261 bool inline_native_getLength();
262 bool inline_array_copyOf(bool is_copyOfRange);
263 bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
264 bool inline_objects_checkIndex();
265 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
266 bool inline_native_clone(bool is_virtual);
267 bool inline_native_Reflection_getCallerClass();
268 // Helper function for inlining native object hash method
269 bool inline_native_hashcode(bool is_virtual, bool is_static);
270 bool inline_native_getClass();
458 if (C->print_intrinsics() || C->print_inlining()) {
459 if (jvms->has_method()) {
460 // Not a root compile.
461 const char* msg = "failed to generate predicate for intrinsic";
462 C->print_inlining(kit.callee(), jvms->depth() - 1, bci, msg);
463 } else {
464 // Root compile
465 C->print_inlining_stream()->print("Did not generate predicate for intrinsic %s%s at bci:%d in",
466 vmIntrinsics::name_at(intrinsic_id()),
467 (is_virtual() ? " (virtual)" : ""), bci);
468 }
469 }
470 C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
471 return NULL;
472 }
473
474 bool LibraryCallKit::try_to_inline(int predicate) {
475 // Handle symbolic names for otherwise undistinguished boolean switches:
476 const bool is_store = true;
477 const bool is_compress = true;
478 const bool is_static = true;
479 const bool is_volatile = true;
480
481 if (!jvms()->has_method()) {
482 // Root JVMState has a null method.
483 assert(map()->memory()->Opcode() == Op_Parm, "");
484 // Insert the memory aliasing node
485 set_all_memory(reset_memory());
486 }
487 assert(merged_memory(), "");
488
489
490 switch (intrinsic_id()) {
491 case vmIntrinsics::_hashCode: return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
492 case vmIntrinsics::_identityHashCode: return inline_native_hashcode(/*!virtual*/ false, is_static);
493 case vmIntrinsics::_getClass: return inline_native_getClass();
494
495 case vmIntrinsics::_dsin:
496 case vmIntrinsics::_dcos:
497 case vmIntrinsics::_dtan:
537 case vmIntrinsics::_indexOfU: return inline_string_indexOf(StrIntrinsicNode::UU);
538 case vmIntrinsics::_indexOfUL: return inline_string_indexOf(StrIntrinsicNode::UL);
539 case vmIntrinsics::_indexOfIL: return inline_string_indexOfI(StrIntrinsicNode::LL);
540 case vmIntrinsics::_indexOfIU: return inline_string_indexOfI(StrIntrinsicNode::UU);
541 case vmIntrinsics::_indexOfIUL: return inline_string_indexOfI(StrIntrinsicNode::UL);
542 case vmIntrinsics::_indexOfU_char: return inline_string_indexOfChar();
543
544 case vmIntrinsics::_equalsL: return inline_string_equals(StrIntrinsicNode::LL);
545 case vmIntrinsics::_equalsU: return inline_string_equals(StrIntrinsicNode::UU);
546
547 case vmIntrinsics::_toBytesStringU: return inline_string_toBytesU();
548 case vmIntrinsics::_getCharsStringU: return inline_string_getCharsU();
549 case vmIntrinsics::_getCharStringU: return inline_string_char_access(!is_store);
550 case vmIntrinsics::_putCharStringU: return inline_string_char_access( is_store);
551
552 case vmIntrinsics::_compressStringC:
553 case vmIntrinsics::_compressStringB: return inline_string_copy( is_compress);
554 case vmIntrinsics::_inflateStringC:
555 case vmIntrinsics::_inflateStringB: return inline_string_copy(!is_compress);
556
557 case vmIntrinsics::_getObject: return inline_unsafe_access(!is_store, T_OBJECT, Relaxed, false);
558 case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_store, T_BOOLEAN, Relaxed, false);
559 case vmIntrinsics::_getByte: return inline_unsafe_access(!is_store, T_BYTE, Relaxed, false);
560 case vmIntrinsics::_getShort: return inline_unsafe_access(!is_store, T_SHORT, Relaxed, false);
561 case vmIntrinsics::_getChar: return inline_unsafe_access(!is_store, T_CHAR, Relaxed, false);
562 case vmIntrinsics::_getInt: return inline_unsafe_access(!is_store, T_INT, Relaxed, false);
563 case vmIntrinsics::_getLong: return inline_unsafe_access(!is_store, T_LONG, Relaxed, false);
564 case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_store, T_FLOAT, Relaxed, false);
565 case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_store, T_DOUBLE, Relaxed, false);
566
567 case vmIntrinsics::_putObject: return inline_unsafe_access( is_store, T_OBJECT, Relaxed, false);
568 case vmIntrinsics::_putBoolean: return inline_unsafe_access( is_store, T_BOOLEAN, Relaxed, false);
569 case vmIntrinsics::_putByte: return inline_unsafe_access( is_store, T_BYTE, Relaxed, false);
570 case vmIntrinsics::_putShort: return inline_unsafe_access( is_store, T_SHORT, Relaxed, false);
571 case vmIntrinsics::_putChar: return inline_unsafe_access( is_store, T_CHAR, Relaxed, false);
572 case vmIntrinsics::_putInt: return inline_unsafe_access( is_store, T_INT, Relaxed, false);
573 case vmIntrinsics::_putLong: return inline_unsafe_access( is_store, T_LONG, Relaxed, false);
574 case vmIntrinsics::_putFloat: return inline_unsafe_access( is_store, T_FLOAT, Relaxed, false);
575 case vmIntrinsics::_putDouble: return inline_unsafe_access( is_store, T_DOUBLE, Relaxed, false);
576
577 case vmIntrinsics::_getObjectVolatile: return inline_unsafe_access(!is_store, T_OBJECT, Volatile, false);
578 case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_store, T_BOOLEAN, Volatile, false);
579 case vmIntrinsics::_getByteVolatile: return inline_unsafe_access(!is_store, T_BYTE, Volatile, false);
580 case vmIntrinsics::_getShortVolatile: return inline_unsafe_access(!is_store, T_SHORT, Volatile, false);
581 case vmIntrinsics::_getCharVolatile: return inline_unsafe_access(!is_store, T_CHAR, Volatile, false);
582 case vmIntrinsics::_getIntVolatile: return inline_unsafe_access(!is_store, T_INT, Volatile, false);
583 case vmIntrinsics::_getLongVolatile: return inline_unsafe_access(!is_store, T_LONG, Volatile, false);
584 case vmIntrinsics::_getFloatVolatile: return inline_unsafe_access(!is_store, T_FLOAT, Volatile, false);
585 case vmIntrinsics::_getDoubleVolatile: return inline_unsafe_access(!is_store, T_DOUBLE, Volatile, false);
586
587 case vmIntrinsics::_putObjectVolatile: return inline_unsafe_access( is_store, T_OBJECT, Volatile, false);
588 case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access( is_store, T_BOOLEAN, Volatile, false);
589 case vmIntrinsics::_putByteVolatile: return inline_unsafe_access( is_store, T_BYTE, Volatile, false);
590 case vmIntrinsics::_putShortVolatile: return inline_unsafe_access( is_store, T_SHORT, Volatile, false);
591 case vmIntrinsics::_putCharVolatile: return inline_unsafe_access( is_store, T_CHAR, Volatile, false);
592 case vmIntrinsics::_putIntVolatile: return inline_unsafe_access( is_store, T_INT, Volatile, false);
593 case vmIntrinsics::_putLongVolatile: return inline_unsafe_access( is_store, T_LONG, Volatile, false);
594 case vmIntrinsics::_putFloatVolatile: return inline_unsafe_access( is_store, T_FLOAT, Volatile, false);
595 case vmIntrinsics::_putDoubleVolatile: return inline_unsafe_access( is_store, T_DOUBLE, Volatile, false);
596
597 case vmIntrinsics::_getShortUnaligned: return inline_unsafe_access(!is_store, T_SHORT, Relaxed, true);
598 case vmIntrinsics::_getCharUnaligned: return inline_unsafe_access(!is_store, T_CHAR, Relaxed, true);
599 case vmIntrinsics::_getIntUnaligned: return inline_unsafe_access(!is_store, T_INT, Relaxed, true);
600 case vmIntrinsics::_getLongUnaligned: return inline_unsafe_access(!is_store, T_LONG, Relaxed, true);
601
602 case vmIntrinsics::_putShortUnaligned: return inline_unsafe_access( is_store, T_SHORT, Relaxed, true);
603 case vmIntrinsics::_putCharUnaligned: return inline_unsafe_access( is_store, T_CHAR, Relaxed, true);
604 case vmIntrinsics::_putIntUnaligned: return inline_unsafe_access( is_store, T_INT, Relaxed, true);
605 case vmIntrinsics::_putLongUnaligned: return inline_unsafe_access( is_store, T_LONG, Relaxed, true);
606
607 case vmIntrinsics::_getObjectAcquire: return inline_unsafe_access(!is_store, T_OBJECT, Acquire, false);
608 case vmIntrinsics::_getBooleanAcquire: return inline_unsafe_access(!is_store, T_BOOLEAN, Acquire, false);
609 case vmIntrinsics::_getByteAcquire: return inline_unsafe_access(!is_store, T_BYTE, Acquire, false);
610 case vmIntrinsics::_getShortAcquire: return inline_unsafe_access(!is_store, T_SHORT, Acquire, false);
611 case vmIntrinsics::_getCharAcquire: return inline_unsafe_access(!is_store, T_CHAR, Acquire, false);
612 case vmIntrinsics::_getIntAcquire: return inline_unsafe_access(!is_store, T_INT, Acquire, false);
613 case vmIntrinsics::_getLongAcquire: return inline_unsafe_access(!is_store, T_LONG, Acquire, false);
614 case vmIntrinsics::_getFloatAcquire: return inline_unsafe_access(!is_store, T_FLOAT, Acquire, false);
615 case vmIntrinsics::_getDoubleAcquire: return inline_unsafe_access(!is_store, T_DOUBLE, Acquire, false);
616
617 case vmIntrinsics::_putObjectRelease: return inline_unsafe_access( is_store, T_OBJECT, Release, false);
618 case vmIntrinsics::_putBooleanRelease: return inline_unsafe_access( is_store, T_BOOLEAN, Release, false);
619 case vmIntrinsics::_putByteRelease: return inline_unsafe_access( is_store, T_BYTE, Release, false);
620 case vmIntrinsics::_putShortRelease: return inline_unsafe_access( is_store, T_SHORT, Release, false);
621 case vmIntrinsics::_putCharRelease: return inline_unsafe_access( is_store, T_CHAR, Release, false);
622 case vmIntrinsics::_putIntRelease: return inline_unsafe_access( is_store, T_INT, Release, false);
623 case vmIntrinsics::_putLongRelease: return inline_unsafe_access( is_store, T_LONG, Release, false);
624 case vmIntrinsics::_putFloatRelease: return inline_unsafe_access( is_store, T_FLOAT, Release, false);
625 case vmIntrinsics::_putDoubleRelease: return inline_unsafe_access( is_store, T_DOUBLE, Release, false);
626
627 case vmIntrinsics::_getObjectOpaque: return inline_unsafe_access(!is_store, T_OBJECT, Opaque, false);
628 case vmIntrinsics::_getBooleanOpaque: return inline_unsafe_access(!is_store, T_BOOLEAN, Opaque, false);
629 case vmIntrinsics::_getByteOpaque: return inline_unsafe_access(!is_store, T_BYTE, Opaque, false);
630 case vmIntrinsics::_getShortOpaque: return inline_unsafe_access(!is_store, T_SHORT, Opaque, false);
631 case vmIntrinsics::_getCharOpaque: return inline_unsafe_access(!is_store, T_CHAR, Opaque, false);
632 case vmIntrinsics::_getIntOpaque: return inline_unsafe_access(!is_store, T_INT, Opaque, false);
633 case vmIntrinsics::_getLongOpaque: return inline_unsafe_access(!is_store, T_LONG, Opaque, false);
634 case vmIntrinsics::_getFloatOpaque: return inline_unsafe_access(!is_store, T_FLOAT, Opaque, false);
635 case vmIntrinsics::_getDoubleOpaque: return inline_unsafe_access(!is_store, T_DOUBLE, Opaque, false);
636
637 case vmIntrinsics::_putObjectOpaque: return inline_unsafe_access( is_store, T_OBJECT, Opaque, false);
638 case vmIntrinsics::_putBooleanOpaque: return inline_unsafe_access( is_store, T_BOOLEAN, Opaque, false);
639 case vmIntrinsics::_putByteOpaque: return inline_unsafe_access( is_store, T_BYTE, Opaque, false);
640 case vmIntrinsics::_putShortOpaque: return inline_unsafe_access( is_store, T_SHORT, Opaque, false);
641 case vmIntrinsics::_putCharOpaque: return inline_unsafe_access( is_store, T_CHAR, Opaque, false);
642 case vmIntrinsics::_putIntOpaque: return inline_unsafe_access( is_store, T_INT, Opaque, false);
643 case vmIntrinsics::_putLongOpaque: return inline_unsafe_access( is_store, T_LONG, Opaque, false);
644 case vmIntrinsics::_putFloatOpaque: return inline_unsafe_access( is_store, T_FLOAT, Opaque, false);
645 case vmIntrinsics::_putDoubleOpaque: return inline_unsafe_access( is_store, T_DOUBLE, Opaque, false);
646
647 case vmIntrinsics::_compareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap, Volatile);
648 case vmIntrinsics::_compareAndSwapInt: return inline_unsafe_load_store(T_INT, LS_cmp_swap, Volatile);
649 case vmIntrinsics::_compareAndSwapLong: return inline_unsafe_load_store(T_LONG, LS_cmp_swap, Volatile);
650
651 case vmIntrinsics::_weakCompareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Relaxed);
652 case vmIntrinsics::_weakCompareAndSwapObjectAcquire: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Acquire);
653 case vmIntrinsics::_weakCompareAndSwapObjectRelease: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Release);
654 case vmIntrinsics::_weakCompareAndSwapInt: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Relaxed);
655 case vmIntrinsics::_weakCompareAndSwapIntAcquire: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Acquire);
656 case vmIntrinsics::_weakCompareAndSwapIntRelease: return inline_unsafe_load_store(T_INT, LS_cmp_swap_weak, Release);
657 case vmIntrinsics::_weakCompareAndSwapLong: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Relaxed);
658 case vmIntrinsics::_weakCompareAndSwapLongAcquire: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Acquire);
659 case vmIntrinsics::_weakCompareAndSwapLongRelease: return inline_unsafe_load_store(T_LONG, LS_cmp_swap_weak, Release);
660
661 case vmIntrinsics::_compareAndExchangeObjectVolatile: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Volatile);
662 case vmIntrinsics::_compareAndExchangeObjectAcquire: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Acquire);
663 case vmIntrinsics::_compareAndExchangeObjectRelease: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange, Release);
664 case vmIntrinsics::_compareAndExchangeIntVolatile: return inline_unsafe_load_store(T_INT, LS_cmp_exchange, Volatile);
665 case vmIntrinsics::_compareAndExchangeIntAcquire: return inline_unsafe_load_store(T_INT, LS_cmp_exchange, Acquire);
2160 Node* n = NULL;
2161 switch (id) {
2162 case vmIntrinsics::_numberOfLeadingZeros_i: n = new CountLeadingZerosINode( arg); break;
2163 case vmIntrinsics::_numberOfLeadingZeros_l: n = new CountLeadingZerosLNode( arg); break;
2164 case vmIntrinsics::_numberOfTrailingZeros_i: n = new CountTrailingZerosINode(arg); break;
2165 case vmIntrinsics::_numberOfTrailingZeros_l: n = new CountTrailingZerosLNode(arg); break;
2166 case vmIntrinsics::_bitCount_i: n = new PopCountINode( arg); break;
2167 case vmIntrinsics::_bitCount_l: n = new PopCountLNode( arg); break;
2168 case vmIntrinsics::_reverseBytes_c: n = new ReverseBytesUSNode(0, arg); break;
2169 case vmIntrinsics::_reverseBytes_s: n = new ReverseBytesSNode( 0, arg); break;
2170 case vmIntrinsics::_reverseBytes_i: n = new ReverseBytesINode( 0, arg); break;
2171 case vmIntrinsics::_reverseBytes_l: n = new ReverseBytesLNode( 0, arg); break;
2172 default: fatal_unexpected_iid(id); break;
2173 }
2174 set_result(_gvn.transform(n));
2175 return true;
2176 }
2177
2178 //----------------------------inline_unsafe_access----------------------------
2179
2180 // Helper that guards and inserts a pre-barrier.
2181 void LibraryCallKit::insert_pre_barrier(Node* base_oop, Node* offset,
2182 Node* pre_val, bool need_mem_bar) {
2183 // We could be accessing the referent field of a reference object. If so, when G1
2184 // is enabled, we need to log the value in the referent field in an SATB buffer.
2185 // This routine performs some compile time filters and generates suitable
2186 // runtime filters that guard the pre-barrier code.
2187 // Also add memory barrier for non volatile load from the referent field
2188 // to prevent commoning of loads across safepoint.
2189 if (!UseG1GC && !need_mem_bar)
2190 return;
2191
2192 // Some compile time checks.
2193
2194 // If offset is a constant, is it java_lang_ref_Reference::_reference_offset?
2195 const TypeX* otype = offset->find_intptr_t_type();
2196 if (otype != NULL && otype->is_con() &&
2197 otype->get_con() != java_lang_ref_Reference::referent_offset) {
2198 // Constant offset but not the reference_offset so just return
2199 return;
2260 NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
2261 pre_val /* pre_val */,
2262 T_OBJECT);
2263 if (need_mem_bar) {
2264 // Add memory barrier to prevent commoning reads from this field
2265 // across safepoint since GC can change its value.
2266 insert_mem_bar(Op_MemBarCPUOrder);
2267 }
2268 // Update IdealKit from graphKit.
2269 __ sync_kit(this);
2270
2271 } __ end_if(); // _ref_type != ref_none
2272 } __ end_if(); // offset == referent_offset
2273
2274 // Final sync IdealKit and GraphKit.
2275 final_sync(ideal);
2276 #undef __
2277 }
2278
2279
2280 const TypeOopPtr* LibraryCallKit::sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type) {
2281 // Attempt to infer a sharper value type from the offset and base type.
2282 ciKlass* sharpened_klass = NULL;
2283
2284 // See if it is an instance field, with an object type.
2285 if (alias_type->field() != NULL) {
2286 if (alias_type->field()->type()->is_klass()) {
2287 sharpened_klass = alias_type->field()->type()->as_klass();
2288 }
2289 }
2290
2291 // See if it is a narrow oop array.
2292 if (adr_type->isa_aryptr()) {
2293 if (adr_type->offset() >= objArrayOopDesc::base_offset_in_bytes()) {
2294 const TypeOopPtr *elem_type = adr_type->is_aryptr()->elem()->isa_oopptr();
2295 if (elem_type != NULL) {
2296 sharpened_klass = elem_type->klass();
2297 }
2298 }
2299 }
2300
2301 // The sharpened class might be unloaded if there is no class loader
2302 // contraint in place.
2303 if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2304 const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2305
2306 #ifndef PRODUCT
2307 if (C->print_intrinsics() || C->print_inlining()) {
2308 tty->print(" from base type: "); adr_type->dump();
2309 tty->print(" sharpened value: "); tjp->dump();
2310 }
2311 #endif
2312 // Sharpen the value type.
2313 return tjp;
2314 }
2315 return NULL;
2316 }
2317
2318 bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, const AccessKind kind, const bool unaligned) {
2319 if (callee()->is_static()) return false; // caller must have the capability!
2320 guarantee(!is_store || kind != Acquire, "Acquire accesses can be produced only for loads");
2321 guarantee( is_store || kind != Release, "Release accesses can be produced only for stores");
2322 assert(type != T_OBJECT || !unaligned, "unaligned access not supported with object type");
2323
2324 #ifndef PRODUCT
2325 {
2326 ResourceMark rm;
2327 // Check the signatures.
2328 ciSignature* sig = callee()->signature();
2329 #ifdef ASSERT
2330 if (!is_store) {
2331 // Object getObject(Object base, int/long offset), etc.
2332 BasicType rtype = sig->return_type()->basic_type();
2333 assert(rtype == type, "getter must return the expected value");
2334 assert(sig->count() == 2, "oop getter has 2 arguments");
2335 assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2336 assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2337 } else {
2338 // void putObject(Object base, int/long offset, Object x), etc.
2339 assert(sig->return_type()->basic_type() == T_VOID, "putter must not return a value");
2340 assert(sig->count() == 3, "oop putter has 3 arguments");
2341 assert(sig->type_at(0)->basic_type() == T_OBJECT, "putter base is object");
2342 assert(sig->type_at(1)->basic_type() == T_LONG, "putter offset is correct");
2343 BasicType vtype = sig->type_at(sig->count()-1)->basic_type();
2344 assert(vtype == type, "putter must accept the expected value");
2345 }
2346 #endif // ASSERT
2347 }
2348 #endif //PRODUCT
2349
2350 C->set_has_unsafe_access(true); // Mark eventual nmethod as "unsafe".
2351
2352 Node* receiver = argument(0); // type: oop
2353
2354 // Build address expression.
2355 Node* adr;
2356 Node* heap_base_oop = top();
2357 Node* offset = top();
2358 Node* val;
2359
2360 // The base is either a Java object or a value produced by Unsafe.staticFieldBase
2361 Node* base = argument(1); // type: oop
2362 // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
2363 offset = argument(2); // type: long
2364 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2365 // to be plain byte offsets, which are also the same as those accepted
2366 // by oopDesc::field_base.
2367 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2368 "fieldOffset must be byte-scaled");
2369 // 32-bit machines ignore the high half!
2370 offset = ConvL2X(offset);
2371 adr = make_unsafe_address(base, offset);
2372 if (_gvn.type(base)->isa_ptr() != TypePtr::NULL_PTR) {
2373 heap_base_oop = base;
2374 }
2375 val = is_store ? argument(4) : NULL;
2376
2377 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2378
2379 // Try to categorize the address. If it comes up as TypeJavaPtr::BOTTOM,
2380 // there was not enough information to nail it down.
2381 Compile::AliasType* alias_type = C->alias_type(adr_type);
2382 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2383
2384 assert(alias_type->adr_type() == TypeRawPtr::BOTTOM || alias_type->adr_type() == TypeOopPtr::BOTTOM ||
2385 alias_type->basic_type() != T_ILLEGAL, "field, array element or unknown");
2386 bool mismatched = false;
2387 BasicType bt = alias_type->basic_type();
2388 if (bt != T_ILLEGAL) {
2389 if (bt == T_BYTE && adr_type->isa_aryptr()) {
2390 // Alias type doesn't differentiate between byte[] and boolean[]).
2391 // Use address type to get the element type.
2392 bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
2393 }
2394 if (bt == T_ARRAY || bt == T_NARROWOOP) {
2395 // accessing an array field with getObject is not a mismatch
2436 break;
2437 case Acquire:
2438 case Release:
2439 case Volatile:
2440 requires_atomic_access = true;
2441 break;
2442 default:
2443 ShouldNotReachHere();
2444 }
2445
2446 // Figure out the memory ordering.
2447 // Acquire/Release/Volatile accesses require marking the loads/stores with MemOrd
2448 MemNode::MemOrd mo = access_kind_to_memord_LS(kind, is_store);
2449
2450 // If we are reading the value of the referent field of a Reference
2451 // object (either by using Unsafe directly or through reflection)
2452 // then, if G1 is enabled, we need to record the referent in an
2453 // SATB log buffer using the pre-barrier mechanism.
2454 // Also we need to add memory barrier to prevent commoning reads
2455 // from this field across safepoint since GC can change its value.
2456 bool need_read_barrier = !is_store &&
2457 offset != top() && heap_base_oop != top();
2458
2459 if (!is_store && type == T_OBJECT) {
2460 const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
2461 if (tjp != NULL) {
2462 value_type = tjp;
2463 }
2464 }
2465
2466 receiver = null_check(receiver);
2467 if (stopped()) {
2468 return true;
2469 }
2470 // Heap pointers get a null-check from the interpreter,
2471 // as a courtesy. However, this is not guaranteed by Unsafe,
2472 // and it is not possible to fully distinguish unintended nulls
2473 // from intended ones in this API.
2474
2475 // We need to emit leading and trailing CPU membars (see below) in
2476 // addition to memory membars for special access modes. This is a little
2477 // too strong, but avoids the need to insert per-alias-type
2478 // volatile membars (for stores; compare Parse::do_put_xxx), which
2479 // we cannot do effectively here because we probably only have a
2480 // rough approximation of type.
|