src/share/vm/opto/library_call.cpp

 213   bool inline_math_addExactI(bool is_increment);
 214   bool inline_math_addExactL(bool is_increment);
 215   bool inline_math_multiplyExactI();
 216   bool inline_math_multiplyExactL();
 217   bool inline_math_negateExactI();
 218   bool inline_math_negateExactL();
 219   bool inline_math_subtractExactI(bool is_decrement);
 220   bool inline_math_subtractExactL(bool is_decrement);
 221   bool inline_exp();
 222   bool inline_pow();
 223   Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 224   bool inline_min_max(vmIntrinsics::ID id);
 225   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 226   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 227   int classify_unsafe_addr(Node* &base, Node* &offset);
 228   Node* make_unsafe_address(Node* base, Node* offset);
 229   // Helper for inline_unsafe_access.
 230   // Generates the guards that check whether the result of
 231   // Unsafe.getObject should be recorded in an SATB log buffer.
 232   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 233   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
 234   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 235   static bool klass_needs_init_guard(Node* kls);
 236   bool inline_unsafe_allocate();
 237   bool inline_unsafe_copyMemory();
 238   bool inline_native_currentThread();
 239 #ifdef TRACE_HAVE_INTRINSICS
 240   bool inline_native_classID();
 241   bool inline_native_threadID();
 242 #endif
 243   bool inline_native_time_funcs(address method, const char* funcName);
 244   bool inline_native_isInterrupted();
 245   bool inline_native_Class_query(vmIntrinsics::ID id);
 246   bool inline_native_subtype_check();
 247 
 248   bool inline_native_newArray();
 249   bool inline_native_getLength();
 250   bool inline_array_copyOf(bool is_copyOfRange);
 251   bool inline_array_equals();
 252   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 253   bool inline_native_clone(bool is_virtual);
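
For context, insert_pre_barrier (declared above) emits the guards for a G1-style SATB pre-barrier: a reference loaded via Unsafe.getObject may need to be logged so that concurrent marking still sees the snapshot-at-the-beginning object graph. A minimal, hypothetical C++ sketch of the decision those guards implement (all names are illustrative, not HotSpot's):

#include <vector>

struct Object;                                    // stand-in for a Java object
static bool concurrent_marking_active = false;    // the condition the guards test
static std::vector<Object*> satb_log_buffer;      // stand-in for the per-thread SATB queue

inline void satb_pre_barrier(Object* pre_val) {
  // Log the loaded reference only while marking is active and the value is
  // non-null; otherwise the access proceeds with no barrier overhead.
  if (concurrent_marking_active && pre_val != nullptr)
    satb_log_buffer.push_back(pre_val);
}

int main() {
  satb_pre_barrier(nullptr);                      // no-op: marking inactive, value null
  return 0;
}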


 778 
 779   case vmIntrinsics::_addExactI:                return inline_math_addExactI(false /* add */);
 780   case vmIntrinsics::_addExactL:                return inline_math_addExactL(false /* add */);
 781   case vmIntrinsics::_decrementExactI:          return inline_math_subtractExactI(true /* decrement */);
 782   case vmIntrinsics::_decrementExactL:          return inline_math_subtractExactL(true /* decrement */);
 783   case vmIntrinsics::_incrementExactI:          return inline_math_addExactI(true /* increment */);
 784   case vmIntrinsics::_incrementExactL:          return inline_math_addExactL(true /* increment */);
 785   case vmIntrinsics::_multiplyExactI:           return inline_math_multiplyExactI();
 786   case vmIntrinsics::_multiplyExactL:           return inline_math_multiplyExactL();
 787   case vmIntrinsics::_negateExactI:             return inline_math_negateExactI();
 788   case vmIntrinsics::_negateExactL:             return inline_math_negateExactL();
 789   case vmIntrinsics::_subtractExactI:           return inline_math_subtractExactI(false /* subtract */);
 790   case vmIntrinsics::_subtractExactL:           return inline_math_subtractExactL(false /* subtract */);
 791 
 792   case vmIntrinsics::_arraycopy:                return inline_arraycopy();
 793 
 794   case vmIntrinsics::_compareTo:                return inline_string_compareTo();
 795   case vmIntrinsics::_indexOf:                  return inline_string_indexOf();
 796   case vmIntrinsics::_equals:                   return inline_string_equals();
 797 
 798   case vmIntrinsics::_getObject:                return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,  !is_volatile);
 799   case vmIntrinsics::_getBoolean:               return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile);
 800   case vmIntrinsics::_getByte:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,    !is_volatile);
 801   case vmIntrinsics::_getShort:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile);
 802   case vmIntrinsics::_getChar:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile);
 803   case vmIntrinsics::_getInt:                   return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile);
 804   case vmIntrinsics::_getLong:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile);
 805   case vmIntrinsics::_getFloat:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,   !is_volatile);
 806   case vmIntrinsics::_getDouble:                return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,  !is_volatile);
 807 
 808   case vmIntrinsics::_putObject:                return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,  !is_volatile);
 809   case vmIntrinsics::_putBoolean:               return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN, !is_volatile);
 810   case vmIntrinsics::_putByte:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,    !is_volatile);
 811   case vmIntrinsics::_putShort:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile);
 812   case vmIntrinsics::_putChar:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile);
 813   case vmIntrinsics::_putInt:                   return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile);
 814   case vmIntrinsics::_putLong:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile);
 815   case vmIntrinsics::_putFloat:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,   !is_volatile);
 816   case vmIntrinsics::_putDouble:                return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,  !is_volatile);
 817 
 818   case vmIntrinsics::_getByte_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE,    !is_volatile);
 819   case vmIntrinsics::_getShort_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT,   !is_volatile);
 820   case vmIntrinsics::_getChar_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR,    !is_volatile);
 821   case vmIntrinsics::_getInt_raw:               return inline_unsafe_access( is_native_ptr, !is_store, T_INT,     !is_volatile);
 822   case vmIntrinsics::_getLong_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_LONG,    !is_volatile);
 823   case vmIntrinsics::_getFloat_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT,   !is_volatile);
 824   case vmIntrinsics::_getDouble_raw:            return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE,  !is_volatile);
 825   case vmIntrinsics::_getAddress_raw:           return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile);
 826 
 827   case vmIntrinsics::_putByte_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_BYTE,    !is_volatile);
 828   case vmIntrinsics::_putShort_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_SHORT,   !is_volatile);
 829   case vmIntrinsics::_putChar_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_CHAR,    !is_volatile);
 830   case vmIntrinsics::_putInt_raw:               return inline_unsafe_access( is_native_ptr,  is_store, T_INT,     !is_volatile);
 831   case vmIntrinsics::_putLong_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_LONG,    !is_volatile);
 832   case vmIntrinsics::_putFloat_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_FLOAT,   !is_volatile);
 833   case vmIntrinsics::_putDouble_raw:            return inline_unsafe_access( is_native_ptr,  is_store, T_DOUBLE,  !is_volatile);
 834   case vmIntrinsics::_putAddress_raw:           return inline_unsafe_access( is_native_ptr,  is_store, T_ADDRESS, !is_volatile);
 835 
 836   case vmIntrinsics::_getObjectVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   is_volatile);
 837   case vmIntrinsics::_getBooleanVolatile:       return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  is_volatile);
 838   case vmIntrinsics::_getByteVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     is_volatile);
 839   case vmIntrinsics::_getShortVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    is_volatile);
 840   case vmIntrinsics::_getCharVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     is_volatile);
 841   case vmIntrinsics::_getIntVolatile:           return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      is_volatile);
 842   case vmIntrinsics::_getLongVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     is_volatile);
 843   case vmIntrinsics::_getFloatVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    is_volatile);
 844   case vmIntrinsics::_getDoubleVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   is_volatile);
 845 
 846   case vmIntrinsics::_putObjectVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   is_volatile);
 847   case vmIntrinsics::_putBooleanVolatile:       return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  is_volatile);
 848   case vmIntrinsics::_putByteVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     is_volatile);
 849   case vmIntrinsics::_putShortVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    is_volatile);
 850   case vmIntrinsics::_putCharVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     is_volatile);
 851   case vmIntrinsics::_putIntVolatile:           return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      is_volatile);
 852   case vmIntrinsics::_putLongVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     is_volatile);
 853   case vmIntrinsics::_putFloatVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    is_volatile);
 854   case vmIntrinsics::_putDoubleVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   is_volatile);
 855 
 856   case vmIntrinsics::_prefetchRead:             return inline_unsafe_prefetch(!is_native_ptr, !is_store, !is_static);
 857   case vmIntrinsics::_prefetchWrite:            return inline_unsafe_prefetch(!is_native_ptr,  is_store, !is_static);
 858   case vmIntrinsics::_prefetchReadStatic:       return inline_unsafe_prefetch(!is_native_ptr, !is_store,  is_static);
 859   case vmIntrinsics::_prefetchWriteStatic:      return inline_unsafe_prefetch(!is_native_ptr,  is_store,  is_static);
 860 
 861   case vmIntrinsics::_compareAndSwapObject:     return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
 862   case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
 863   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 864 
 865   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 866   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 867   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 868 
 869   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 870   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 871   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 872   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 873   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 874 
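
The *Exact cases above inline Math.addExact and friends down to the plain arithmetic plus an overflow check, with the throwing path taken out of line. A rough stand-alone sketch using GCC/Clang's __builtin_add_overflow (an assumption for illustration; C2 emits the equivalent flags check directly in machine code):

#include <cstdio>
#include <stdexcept>

int add_exact(int a, int b) {
  int r;
  if (__builtin_add_overflow(a, b, &r))            // overflow check fused with the add
    throw std::overflow_error("integer overflow"); // Math.addExact throws ArithmeticException
  return r;
}

int main() {
  std::printf("%d\n", add_exact(1, 2));            // prints 3
  try { add_exact(0x7fffffff, 1); }                // INT_MAX + 1 overflows
  catch (const std::overflow_error&) { std::puts("overflow detected"); }
  return 0;
}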


2537     }
2538   }
2539 
2540   // The sharpened class might be unloaded if there is no class loader
 2541   // constraint in place.
2542   if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2543     const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2544 
2545 #ifndef PRODUCT
2546     if (C->print_intrinsics() || C->print_inlining()) {
2547       tty->print("  from base type: ");  adr_type->dump();
2548       tty->print("  sharpened value: ");  tjp->dump();
2549     }
2550 #endif
2551     // Sharpen the value type.
2552     return tjp;
2553   }
2554   return NULL;
2555 }
2556 
2557 bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
2558   if (callee()->is_static())  return false;  // caller must have the capability!
2559 
2560 #ifndef PRODUCT
2561   {
2562     ResourceMark rm;
2563     // Check the signatures.
2564     ciSignature* sig = callee()->signature();
2565 #ifdef ASSERT
2566     if (!is_store) {
2567       // Object getObject(Object base, int/long offset), etc.
2568       BasicType rtype = sig->return_type()->basic_type();
2569       if (rtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::getAddress_name())
2570           rtype = T_ADDRESS;  // it is really a C void*
2571       assert(rtype == type, "getter must return the expected value");
2572       if (!is_native_ptr) {
2573         assert(sig->count() == 2, "oop getter has 2 arguments");
2574         assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2575         assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2576       } else {
2577         assert(sig->count() == 1, "native getter has 1 argument");


2680     // rough approximation of type.
2681     need_mem_bar = true;
2682     // For Stores, place a memory ordering barrier now.
2683     if (is_store) {
2684       insert_mem_bar(Op_MemBarRelease);
2685     } else {
2686       if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
2687         insert_mem_bar(Op_MemBarVolatile);
2688       }
2689     }
2690   }
2691 
2692   // Memory barrier to prevent normal and 'unsafe' accesses from
2693   // bypassing each other.  Happens after null checks, so the
2694   // exception paths do not take memory state from the memory barrier,
 2695   // so there is no problem making a strong assert about mixing users
2696   // of safe & unsafe memory.  Otherwise fails in a CTW of rt.jar
2697   // around 5701, class sun/reflect/UnsafeBooleanFieldAccessorImpl.
2698   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2699 

















2700   if (!is_store) {
2701     MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
 2702     // To be valid, unsafe loads may depend on conditions other than
 2703     // the one that guards them: pin the Load node
2704     Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile);
2705     // load value
2706     switch (type) {
2707     case T_BOOLEAN:
2708     case T_CHAR:
2709     case T_BYTE:
2710     case T_SHORT:
2711     case T_INT:
2712     case T_LONG:
2713     case T_FLOAT:
2714     case T_DOUBLE:
2715       break;
2716     case T_OBJECT:
2717       if (need_read_barrier) {
2718         insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2719       }
2720       break;
2721     case T_ADDRESS:
2722       // Cast to an int type.
2723       p = _gvn.transform(new (C) CastP2XNode(NULL, p));
2724       p = ConvX2UL(p);


2730     // The load node has the control of the preceding MemBarCPUOrder.  All
2731     // following nodes will have the control of the MemBarCPUOrder inserted at
2732     // the end of this method.  So, pushing the load onto the stack at a later
2733     // point is fine.
2734     set_result(p);
2735   } else {
2736     // place effect of store into memory
2737     switch (type) {
2738     case T_DOUBLE:
2739       val = dstore_rounding(val);
2740       break;
2741     case T_ADDRESS:
2742       // Repackage the long as a pointer.
2743       val = ConvL2X(val);
2744       val = _gvn.transform(new (C) CastX2PNode(val));
2745       break;
2746     }
2747 
2748     MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
 2749     if (type != T_OBJECT) {
2750       (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile);
2751     } else {
2752       // Possibly an oop being stored to Java heap or native memory
2753       if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop))) {
2754         // oop to Java heap.
2755         (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
2756       } else {
2757         // We can't tell at compile time if we are storing in the Java heap or outside
2758         // of it. So we need to emit code to conditionally do the proper type of
2759         // store.
2760 
2761         IdealKit ideal(this);
2762 #define __ ideal.
2763         // QQQ who knows what probability is here??
2764         __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
2765           // Sync IdealKit and graphKit.
2766           sync_kit(ideal);
2767           Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
2768           // Update IdealKit memory.
2769           __ sync_kit(this);
2770         } __ else_(); {
2771           __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile);
2772         } __ end_if();
2773         // Final sync IdealKit and GraphKit.
2774         final_sync(ideal);
2775 #undef __
2776       }
2777     }
2778   }
2779 
2780   if (is_volatile) {
2781     if (!is_store) {
2782       insert_mem_bar(Op_MemBarAcquire);
2783     } else {
2784       if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
2785         insert_mem_bar(Op_MemBarVolatile);
2786       }
2787     }
2788   }
2789 
2790   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2791 
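
Taken together, the leading barriers (lines 2683-2689) and the trailing ones (lines 2780-2788) bracket every volatile Unsafe access. A compact sketch of that bracketing in C++11 fences, assuming a naive MemBar-to-fence mapping for illustration rather than HotSpot's actual backend lowering:

#include <atomic>

std::atomic<int> cell{0};

void volatile_store(int v) {
  std::atomic_thread_fence(std::memory_order_release);  // Op_MemBarRelease before the store
  // Op_MemBarCPUOrder (ordering safe vs. unsafe memory state) has no direct
  // C++ equivalent and is elided here.
  cell.store(v, std::memory_order_relaxed);             // the store itself
  std::atomic_thread_fence(std::memory_order_seq_cst);  // trailing Op_MemBarVolatile
}                                                       // (omitted on IRIW-safe CPUs)

int volatile_load() {
  // On CPUs needing IRIW support, a leading Op_MemBarVolatile goes here instead.
  int v = cell.load(std::memory_order_relaxed);         // the load itself
  std::atomic_thread_fence(std::memory_order_acquire);  // Op_MemBarAcquire after the load
  return v;
}

int main() { volatile_store(42); return volatile_load() == 42 ? 0 : 1; }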




 213   bool inline_math_addExactI(bool is_increment);
 214   bool inline_math_addExactL(bool is_increment);
 215   bool inline_math_multiplyExactI();
 216   bool inline_math_multiplyExactL();
 217   bool inline_math_negateExactI();
 218   bool inline_math_negateExactL();
 219   bool inline_math_subtractExactI(bool is_decrement);
 220   bool inline_math_subtractExactL(bool is_decrement);
 221   bool inline_exp();
 222   bool inline_pow();
 223   Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 224   bool inline_min_max(vmIntrinsics::ID id);
 225   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 226   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 227   int classify_unsafe_addr(Node* &base, Node* &offset);
 228   Node* make_unsafe_address(Node* base, Node* offset);
 229   // Helper for inline_unsafe_access.
 230   // Generates the guards that check whether the result of
 231   // Unsafe.getObject should be recorded in an SATB log buffer.
 232   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 233   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
 234   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 235   static bool klass_needs_init_guard(Node* kls);
 236   bool inline_unsafe_allocate();
 237   bool inline_unsafe_copyMemory();
 238   bool inline_native_currentThread();
 239 #ifdef TRACE_HAVE_INTRINSICS
 240   bool inline_native_classID();
 241   bool inline_native_threadID();
 242 #endif
 243   bool inline_native_time_funcs(address method, const char* funcName);
 244   bool inline_native_isInterrupted();
 245   bool inline_native_Class_query(vmIntrinsics::ID id);
 246   bool inline_native_subtype_check();
 247 
 248   bool inline_native_newArray();
 249   bool inline_native_getLength();
 250   bool inline_array_copyOf(bool is_copyOfRange);
 251   bool inline_array_equals();
 252   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 253   bool inline_native_clone(bool is_virtual);


 778 
 779   case vmIntrinsics::_addExactI:                return inline_math_addExactI(false /* add */);
 780   case vmIntrinsics::_addExactL:                return inline_math_addExactL(false /* add */);
 781   case vmIntrinsics::_decrementExactI:          return inline_math_subtractExactI(true /* decrement */);
 782   case vmIntrinsics::_decrementExactL:          return inline_math_subtractExactL(true /* decrement */);
 783   case vmIntrinsics::_incrementExactI:          return inline_math_addExactI(true /* increment */);
 784   case vmIntrinsics::_incrementExactL:          return inline_math_addExactL(true /* increment */);
 785   case vmIntrinsics::_multiplyExactI:           return inline_math_multiplyExactI();
 786   case vmIntrinsics::_multiplyExactL:           return inline_math_multiplyExactL();
 787   case vmIntrinsics::_negateExactI:             return inline_math_negateExactI();
 788   case vmIntrinsics::_negateExactL:             return inline_math_negateExactL();
 789   case vmIntrinsics::_subtractExactI:           return inline_math_subtractExactI(false /* subtract */);
 790   case vmIntrinsics::_subtractExactL:           return inline_math_subtractExactL(false /* subtract */);
 791 
 792   case vmIntrinsics::_arraycopy:                return inline_arraycopy();
 793 
 794   case vmIntrinsics::_compareTo:                return inline_string_compareTo();
 795   case vmIntrinsics::_indexOf:                  return inline_string_indexOf();
 796   case vmIntrinsics::_equals:                   return inline_string_equals();
 797 
 798   case vmIntrinsics::_getObject:                return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,  !is_volatile, false);
 799   case vmIntrinsics::_getBoolean:               return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile, false);
 800   case vmIntrinsics::_getByte:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,    !is_volatile, false);
 801   case vmIntrinsics::_getShort:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile, false);
 802   case vmIntrinsics::_getChar:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile, false);
 803   case vmIntrinsics::_getInt:                   return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile, false);
 804   case vmIntrinsics::_getLong:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile, false);
 805   case vmIntrinsics::_getFloat:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,   !is_volatile, false);
 806   case vmIntrinsics::_getDouble:                return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,  !is_volatile, false);
 807 
 808   case vmIntrinsics::_putObject:                return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,  !is_volatile, false);
 809   case vmIntrinsics::_putBoolean:               return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN, !is_volatile, false);
 810   case vmIntrinsics::_putByte:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,    !is_volatile, false);
 811   case vmIntrinsics::_putShort:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile, false);
 812   case vmIntrinsics::_putChar:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile, false);
 813   case vmIntrinsics::_putInt:                   return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile, false);
 814   case vmIntrinsics::_putLong:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile, false);
 815   case vmIntrinsics::_putFloat:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,   !is_volatile, false);
 816   case vmIntrinsics::_putDouble:                return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,  !is_volatile, false);
 817 
 818   case vmIntrinsics::_getByte_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE,    !is_volatile, false);
 819   case vmIntrinsics::_getShort_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT,   !is_volatile, false);
 820   case vmIntrinsics::_getChar_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR,    !is_volatile, false);
 821   case vmIntrinsics::_getInt_raw:               return inline_unsafe_access( is_native_ptr, !is_store, T_INT,     !is_volatile, false);
 822   case vmIntrinsics::_getLong_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_LONG,    !is_volatile, false);
 823   case vmIntrinsics::_getFloat_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT,   !is_volatile, false);
 824   case vmIntrinsics::_getDouble_raw:            return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE,  !is_volatile, false);
 825   case vmIntrinsics::_getAddress_raw:           return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile, false);
 826 
 827   case vmIntrinsics::_putByte_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_BYTE,    !is_volatile, false);
 828   case vmIntrinsics::_putShort_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_SHORT,   !is_volatile, false);
 829   case vmIntrinsics::_putChar_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_CHAR,    !is_volatile, false);
 830   case vmIntrinsics::_putInt_raw:               return inline_unsafe_access( is_native_ptr,  is_store, T_INT,     !is_volatile, false);
 831   case vmIntrinsics::_putLong_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_LONG,    !is_volatile, false);
 832   case vmIntrinsics::_putFloat_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_FLOAT,   !is_volatile, false);
 833   case vmIntrinsics::_putDouble_raw:            return inline_unsafe_access( is_native_ptr,  is_store, T_DOUBLE,  !is_volatile, false);
 834   case vmIntrinsics::_putAddress_raw:           return inline_unsafe_access( is_native_ptr,  is_store, T_ADDRESS, !is_volatile, false);
 835 
 836   case vmIntrinsics::_getObjectVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   is_volatile, false);
 837   case vmIntrinsics::_getBooleanVolatile:       return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  is_volatile, false);
 838   case vmIntrinsics::_getByteVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     is_volatile, false);
 839   case vmIntrinsics::_getShortVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    is_volatile, false);
 840   case vmIntrinsics::_getCharVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     is_volatile, false);
 841   case vmIntrinsics::_getIntVolatile:           return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      is_volatile, false);
 842   case vmIntrinsics::_getLongVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     is_volatile, false);
 843   case vmIntrinsics::_getFloatVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    is_volatile, false);
 844   case vmIntrinsics::_getDoubleVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   is_volatile, false);
 845 
 846   case vmIntrinsics::_putObjectVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   is_volatile, false);
 847   case vmIntrinsics::_putBooleanVolatile:       return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  is_volatile, false);
 848   case vmIntrinsics::_putByteVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     is_volatile, false);
 849   case vmIntrinsics::_putShortVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    is_volatile, false);
 850   case vmIntrinsics::_putCharVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     is_volatile, false);
 851   case vmIntrinsics::_putIntVolatile:           return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      is_volatile, false);
 852   case vmIntrinsics::_putLongVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     is_volatile, false);
 853   case vmIntrinsics::_putFloatVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    is_volatile, false);
 854   case vmIntrinsics::_putDoubleVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   is_volatile, false);
 855 
 856   case vmIntrinsics::_prefetchRead:             return inline_unsafe_prefetch(!is_native_ptr, !is_store, !is_static);
 857   case vmIntrinsics::_prefetchWrite:            return inline_unsafe_prefetch(!is_native_ptr,  is_store, !is_static);
 858   case vmIntrinsics::_prefetchReadStatic:       return inline_unsafe_prefetch(!is_native_ptr, !is_store,  is_static);
 859   case vmIntrinsics::_prefetchWriteStatic:      return inline_unsafe_prefetch(!is_native_ptr,  is_store,  is_static);
 860 
 861   case vmIntrinsics::_compareAndSwapObject:     return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
 862   case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
 863   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 864 
 865   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 866   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 867   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 868 
 869   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 870   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 871   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 872   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 873   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 874 


2537     }
2538   }
2539 
2540   // The sharpened class might be unloaded if there is no class loader
 2541   // constraint in place.
2542   if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2543     const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2544 
2545 #ifndef PRODUCT
2546     if (C->print_intrinsics() || C->print_inlining()) {
2547       tty->print("  from base type: ");  adr_type->dump();
2548       tty->print("  sharpened value: ");  tjp->dump();
2549     }
2550 #endif
2551     // Sharpen the value type.
2552     return tjp;
2553   }
2554   return NULL;
2555 }
2556 
2557 bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool unaligned) {
2558   if (callee()->is_static())  return false;  // caller must have the capability!
2559 
2560 #ifndef PRODUCT
2561   {
2562     ResourceMark rm;
2563     // Check the signatures.
2564     ciSignature* sig = callee()->signature();
2565 #ifdef ASSERT
2566     if (!is_store) {
2567       // Object getObject(Object base, int/long offset), etc.
2568       BasicType rtype = sig->return_type()->basic_type();
2569       if (rtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::getAddress_name())
2570           rtype = T_ADDRESS;  // it is really a C void*
2571       assert(rtype == type, "getter must return the expected value");
2572       if (!is_native_ptr) {
2573         assert(sig->count() == 2, "oop getter has 2 arguments");
2574         assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2575         assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2576       } else {
2577         assert(sig->count() == 1, "native getter has 1 argument");


2680     // rough approximation of type.
2681     need_mem_bar = true;
2682     // For Stores, place a memory ordering barrier now.
2683     if (is_store) {
2684       insert_mem_bar(Op_MemBarRelease);
2685     } else {
2686       if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
2687         insert_mem_bar(Op_MemBarVolatile);
2688       }
2689     }
2690   }
2691 
2692   // Memory barrier to prevent normal and 'unsafe' accesses from
2693   // bypassing each other.  Happens after null checks, so the
2694   // exception paths do not take memory state from the memory barrier,
 2695   // so there is no problem making a strong assert about mixing users
2696   // of safe & unsafe memory.  Otherwise fails in a CTW of rt.jar
2697   // around 5701, class sun/reflect/UnsafeBooleanFieldAccessorImpl.
2698   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2699 
2700   assert(is_native_ptr || alias_type->adr_type() == TypeOopPtr::BOTTOM ||
2701          alias_type->field() != NULL || alias_type->element() != NULL, "field, array element or unknown");
2702   bool mismatched = false;
2703   if (alias_type->element() != NULL || alias_type->field() != NULL) {
2704     BasicType bt;
2705     if (alias_type->element() != NULL) {
2706       const Type* element = alias_type->element();
2707       bt = element->isa_narrowoop() ? T_OBJECT : element->array_element_basic_type();
2708     } else {
2709       bt = alias_type->field()->type()->basic_type();
2710     }
2711     if (bt != type) {
2712       mismatched = true;
2713     }
2714   }
2715   assert(type != T_OBJECT || !unaligned, "unaligned access not supported with object type");
2716 
2717   if (!is_store) {
2718     MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
 2719     // To be valid, unsafe loads may depend on conditions other than
 2720     // the one that guards them: pin the Load node
2721     Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile, unaligned, mismatched);
2722     // load value
2723     switch (type) {
2724     case T_BOOLEAN:
2725     case T_CHAR:
2726     case T_BYTE:
2727     case T_SHORT:
2728     case T_INT:
2729     case T_LONG:
2730     case T_FLOAT:
2731     case T_DOUBLE:
2732       break;
2733     case T_OBJECT:
2734       if (need_read_barrier) {
2735         insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2736       }
2737       break;
2738     case T_ADDRESS:
2739       // Cast to an int type.
2740       p = _gvn.transform(new (C) CastP2XNode(NULL, p));
2741       p = ConvX2UL(p);
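
The new unaligned and mismatched flags describe accesses such as Unsafe.getLong(byte[], offset): a load wider than the array's declared element type, at an offset with no alignment guarantee. The portable C++ spelling of the same access is memcpy, and the flags tell C2's alias analysis and matcher roughly what the memcpy tells a C++ compiler. A small illustrative sketch:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>

uint64_t load_u64(const uint8_t* buf, std::size_t off) {
  uint64_t v;
  std::memcpy(&v, buf + off, sizeof v);  // wider-than-element, possibly unaligned load
  return v;
}

int main() {
  uint8_t buf[16] = {0};
  buf[3] = 1;                            // one non-zero byte inside the loaded window
  std::printf("%llu\n", (unsigned long long)load_u64(buf, 1));  // off 1: unaligned
  return 0;
}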


2747     // The load node has the control of the preceding MemBarCPUOrder.  All
2748     // following nodes will have the control of the MemBarCPUOrder inserted at
2749     // the end of this method.  So, pushing the load onto the stack at a later
2750     // point is fine.
2751     set_result(p);
2752   } else {
2753     // place effect of store into memory
2754     switch (type) {
2755     case T_DOUBLE:
2756       val = dstore_rounding(val);
2757       break;
2758     case T_ADDRESS:
2759       // Repackage the long as a pointer.
2760       val = ConvL2X(val);
2761       val = _gvn.transform(new (C) CastX2PNode(val));
2762       break;
2763     }
2764 
2765     MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
 2766     if (type != T_OBJECT) {
2767       (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile, unaligned, mismatched);
2768     } else {
2769       // Possibly an oop being stored to Java heap or native memory
2770       if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop))) {
2771         // oop to Java heap.
2772         (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
2773       } else {
2774         // We can't tell at compile time if we are storing in the Java heap or outside
2775         // of it. So we need to emit code to conditionally do the proper type of
2776         // store.
2777 
2778         IdealKit ideal(this);
2779 #define __ ideal.
2780         // QQQ who knows what probability is here??
2781         __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
2782           // Sync IdealKit and graphKit.
2783           sync_kit(ideal);
2784           Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
2785           // Update IdealKit memory.
2786           __ sync_kit(this);
2787         } __ else_(); {
2788           __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile, mismatched);
2789         } __ end_if();
2790         // Final sync IdealKit and GraphKit.
2791         final_sync(ideal);
2792 #undef __
2793       }
2794     }
2795   }
2796 
2797   if (is_volatile) {
2798     if (!is_store) {
2799       insert_mem_bar(Op_MemBarAcquire);
2800     } else {
2801       if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
2802         insert_mem_bar(Op_MemBarVolatile);
2803       }
2804     }
2805   }
2806 
2807   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2808 
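
For reference, the runtime shape of the store path when the compiler cannot prove whether the base oop is null (the IdealKit block at lines 2778-2792): a null check on the base selects between the barriered heap store and a raw native store. A hypothetical stand-alone C++ sketch of that shape, not HotSpot's API:

#include <cstddef>
#include <cstdint>
#include <cstdio>

struct Object { Object* field; };        // stand-in for a Java object with one reference field

static void store_with_gc_barriers(Object** slot, Object* val) {
  *slot = val;                           // real code wraps this in SATB/card-mark barriers
  std::puts("barriered heap store");
}

static void unsafe_put_object(void* base, std::uintptr_t offset, Object* val) {
  if (base != nullptr) {                 // the PROB_UNLIKELY null check from the IdealKit block
    Object** slot = reinterpret_cast<Object**>(static_cast<char*>(base) + offset);
    store_with_gc_barriers(slot, val);   // base may be a heap oop: keep the barriers
  } else {
    Object** slot = reinterpret_cast<Object**>(offset);  // offset is an absolute address
    *slot = val;                         // native memory: a plain store suffices
  }
}

int main() {
  Object obj{nullptr}, val{nullptr};
  unsafe_put_object(&obj, offsetof(Object, field), &val);                            // heap path
  Object* native_slot = nullptr;
  unsafe_put_object(nullptr, reinterpret_cast<std::uintptr_t>(&native_slot), &val);  // native path
  return (obj.field == &val && native_slot == &val) ? 0 : 1;
}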

