src/share/vm/opto/library_call.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/opto

src/share/vm/opto/library_call.cpp

Print this page
rev 9085 : review


 218   bool inline_math_addExactL(bool is_increment);
 219   bool inline_math_multiplyExactI();
 220   bool inline_math_multiplyExactL();
 221   bool inline_math_negateExactI();
 222   bool inline_math_negateExactL();
 223   bool inline_math_subtractExactI(bool is_decrement);
 224   bool inline_math_subtractExactL(bool is_decrement);
 225   bool inline_exp();
 226   bool inline_pow();
 227   Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 228   bool inline_min_max(vmIntrinsics::ID id);
 229   bool inline_notify(vmIntrinsics::ID id);
 230   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 231   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 232   int classify_unsafe_addr(Node* &base, Node* &offset);
 233   Node* make_unsafe_address(Node* base, Node* offset);
 234   // Helper for inline_unsafe_access.
 235   // Generates the guards that check whether the result of
 236   // Unsafe.getObject should be recorded in an SATB log buffer.
 237   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 238   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
 239   static bool klass_needs_init_guard(Node* kls);
 240   bool inline_unsafe_allocate();
 241   bool inline_unsafe_copyMemory();
 242   bool inline_native_currentThread();
 243 #ifdef TRACE_HAVE_INTRINSICS
 244   bool inline_native_classID();
 245   bool inline_native_threadID();
 246 #endif
 247   bool inline_native_time_funcs(address method, const char* funcName);
 248   bool inline_native_isInterrupted();
 249   bool inline_native_Class_query(vmIntrinsics::ID id);
 250   bool inline_native_subtype_check();
 251 
 252   bool inline_native_newArray();
 253   bool inline_native_getLength();
 254   bool inline_array_copyOf(bool is_copyOfRange);
 255   bool inline_array_equals();
 256   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 257   bool inline_native_clone(bool is_virtual);
 258   bool inline_native_Reflection_getCallerClass();


 499 
 500   case vmIntrinsics::_addExactI:                return inline_math_addExactI(false /* add */);
 501   case vmIntrinsics::_addExactL:                return inline_math_addExactL(false /* add */);
 502   case vmIntrinsics::_decrementExactI:          return inline_math_subtractExactI(true /* decrement */);
 503   case vmIntrinsics::_decrementExactL:          return inline_math_subtractExactL(true /* decrement */);
 504   case vmIntrinsics::_incrementExactI:          return inline_math_addExactI(true /* increment */);
 505   case vmIntrinsics::_incrementExactL:          return inline_math_addExactL(true /* increment */);
 506   case vmIntrinsics::_multiplyExactI:           return inline_math_multiplyExactI();
 507   case vmIntrinsics::_multiplyExactL:           return inline_math_multiplyExactL();
 508   case vmIntrinsics::_negateExactI:             return inline_math_negateExactI();
 509   case vmIntrinsics::_negateExactL:             return inline_math_negateExactL();
 510   case vmIntrinsics::_subtractExactI:           return inline_math_subtractExactI(false /* subtract */);
 511   case vmIntrinsics::_subtractExactL:           return inline_math_subtractExactL(false /* subtract */);
 512 
 513   case vmIntrinsics::_arraycopy:                return inline_arraycopy();
 514 
 515   case vmIntrinsics::_compareTo:                return inline_string_compareTo();
 516   case vmIntrinsics::_indexOf:                  return inline_string_indexOf();
 517   case vmIntrinsics::_equals:                   return inline_string_equals();
 518 
 519   case vmIntrinsics::_getObject:                return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,  !is_volatile);
 520   case vmIntrinsics::_getBoolean:               return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile);
 521   case vmIntrinsics::_getByte:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,    !is_volatile);
 522   case vmIntrinsics::_getShort:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile);
 523   case vmIntrinsics::_getChar:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile);
 524   case vmIntrinsics::_getInt:                   return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile);
 525   case vmIntrinsics::_getLong:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile);
 526   case vmIntrinsics::_getFloat:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,   !is_volatile);
 527   case vmIntrinsics::_getDouble:                return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,  !is_volatile);
 528   case vmIntrinsics::_putObject:                return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,  !is_volatile);
 529   case vmIntrinsics::_putBoolean:               return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN, !is_volatile);
 530   case vmIntrinsics::_putByte:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,    !is_volatile);
 531   case vmIntrinsics::_putShort:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile);
 532   case vmIntrinsics::_putChar:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile);
 533   case vmIntrinsics::_putInt:                   return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile);
 534   case vmIntrinsics::_putLong:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile);
 535   case vmIntrinsics::_putFloat:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,   !is_volatile);
 536   case vmIntrinsics::_putDouble:                return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,  !is_volatile);
 537 
 538   case vmIntrinsics::_getByte_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE,    !is_volatile);
 539   case vmIntrinsics::_getShort_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT,   !is_volatile);
 540   case vmIntrinsics::_getChar_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR,    !is_volatile);
 541   case vmIntrinsics::_getInt_raw:               return inline_unsafe_access( is_native_ptr, !is_store, T_INT,     !is_volatile);
 542   case vmIntrinsics::_getLong_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_LONG,    !is_volatile);
 543   case vmIntrinsics::_getFloat_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT,   !is_volatile);
 544   case vmIntrinsics::_getDouble_raw:            return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE,  !is_volatile);
 545   case vmIntrinsics::_getAddress_raw:           return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile);
 546 
 547   case vmIntrinsics::_putByte_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_BYTE,    !is_volatile);
 548   case vmIntrinsics::_putShort_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_SHORT,   !is_volatile);
 549   case vmIntrinsics::_putChar_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_CHAR,    !is_volatile);
 550   case vmIntrinsics::_putInt_raw:               return inline_unsafe_access( is_native_ptr,  is_store, T_INT,     !is_volatile);
 551   case vmIntrinsics::_putLong_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_LONG,    !is_volatile);
 552   case vmIntrinsics::_putFloat_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_FLOAT,   !is_volatile);
 553   case vmIntrinsics::_putDouble_raw:            return inline_unsafe_access( is_native_ptr,  is_store, T_DOUBLE,  !is_volatile);
 554   case vmIntrinsics::_putAddress_raw:           return inline_unsafe_access( is_native_ptr,  is_store, T_ADDRESS, !is_volatile);
 555 
 556   case vmIntrinsics::_getObjectVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   is_volatile);
 557   case vmIntrinsics::_getBooleanVolatile:       return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  is_volatile);
 558   case vmIntrinsics::_getByteVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     is_volatile);
 559   case vmIntrinsics::_getShortVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    is_volatile);
 560   case vmIntrinsics::_getCharVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     is_volatile);
 561   case vmIntrinsics::_getIntVolatile:           return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      is_volatile);
 562   case vmIntrinsics::_getLongVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     is_volatile);
 563   case vmIntrinsics::_getFloatVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    is_volatile);
 564   case vmIntrinsics::_getDoubleVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   is_volatile);
 565 
 566   case vmIntrinsics::_putObjectVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   is_volatile);
 567   case vmIntrinsics::_putBooleanVolatile:       return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  is_volatile);
 568   case vmIntrinsics::_putByteVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     is_volatile);
 569   case vmIntrinsics::_putShortVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    is_volatile);
 570   case vmIntrinsics::_putCharVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     is_volatile);
 571   case vmIntrinsics::_putIntVolatile:           return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      is_volatile);
 572   case vmIntrinsics::_putLongVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     is_volatile);
 573   case vmIntrinsics::_putFloatVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    is_volatile);
 574   case vmIntrinsics::_putDoubleVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   is_volatile);
 575 
 576   case vmIntrinsics::_getShortUnaligned:        return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile);
 577   case vmIntrinsics::_getCharUnaligned:         return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile);
 578   case vmIntrinsics::_getIntUnaligned:          return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile);
 579   case vmIntrinsics::_getLongUnaligned:         return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile);
 580 
 581   case vmIntrinsics::_putShortUnaligned:        return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile);
 582   case vmIntrinsics::_putCharUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile);
 583   case vmIntrinsics::_putIntUnaligned:          return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile);
 584   case vmIntrinsics::_putLongUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile);
 585 
 586   case vmIntrinsics::_compareAndSwapObject:     return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
 587   case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
 588   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 589 
 590   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 591   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 592   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 593 
 594   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 595   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 596   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 597   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 598   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 599 
 600   case vmIntrinsics::_loadFence:
 601   case vmIntrinsics::_storeFence:
 602   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 603 
 604   case vmIntrinsics::_currentThread:            return inline_native_currentThread();


2268     }
2269   }
2270 
2271   // The sharpened class might be unloaded if there is no class loader
 2272   // constraint in place.
2273   if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2274     const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2275 
2276 #ifndef PRODUCT
2277     if (C->print_intrinsics() || C->print_inlining()) {
2278       tty->print("  from base type: ");  adr_type->dump();
2279       tty->print("  sharpened value: ");  tjp->dump();
2280     }
2281 #endif
2282     // Sharpen the value type.
2283     return tjp;
2284   }
2285   return NULL;
2286 }
2287 
2288 bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
2289   if (callee()->is_static())  return false;  // caller must have the capability!
2290 
2291 #ifndef PRODUCT
2292   {
2293     ResourceMark rm;
2294     // Check the signatures.
2295     ciSignature* sig = callee()->signature();
2296 #ifdef ASSERT
2297     if (!is_store) {
2298       // Object getObject(Object base, int/long offset), etc.
2299       BasicType rtype = sig->return_type()->basic_type();
2300       if (rtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::getAddress_name())
2301           rtype = T_ADDRESS;  // it is really a C void*
2302       assert(rtype == type, "getter must return the expected value");
2303       if (!is_native_ptr) {
2304         assert(sig->count() == 2, "oop getter has 2 arguments");
2305         assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2306         assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2307       } else {
2308         assert(sig->count() == 1, "native getter has 1 argument");


2426   // so there's no problems making a strong assert about mixing users
2427   // of safe & unsafe memory.
2428   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2429 
2430    if (!is_store) {
2431     Node* p = NULL;
2432     // Try to constant fold a load from a constant field
2433     ciField* field = alias_type->field();
2434     if (heap_base_oop != top() &&
2435         field != NULL && field->is_constant() && field->layout_type() == type) {
2436       // final or stable field
2437       const Type* con_type = Type::make_constant(alias_type->field(), heap_base_oop);
2438       if (con_type != NULL) {
2439         p = makecon(con_type);
2440       }
2441     }
2442     if (p == NULL) {
2443       MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
2444       // To be valid, unsafe loads may depend on other conditions than
2445       // the one that guards them: pin the Load node
2446       p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile);
2447       // load value
2448       switch (type) {
2449       case T_BOOLEAN:
2450       case T_CHAR:
2451       case T_BYTE:
2452       case T_SHORT:
2453       case T_INT:
2454       case T_LONG:
2455       case T_FLOAT:
2456       case T_DOUBLE:
2457         break;
2458       case T_OBJECT:
2459         if (need_read_barrier) {
2460           insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2461         }
2462         break;
2463       case T_ADDRESS:
2464         // Cast to an int type.
2465         p = _gvn.transform(new CastP2XNode(NULL, p));
2466         p = ConvX2UL(p);


2473     // The load node has the control of the preceding MemBarCPUOrder.  All
2474     // following nodes will have the control of the MemBarCPUOrder inserted at
2475     // the end of this method.  So, pushing the load onto the stack at a later
2476     // point is fine.
2477     set_result(p);
2478   } else {
2479     // place effect of store into memory
2480     switch (type) {
2481     case T_DOUBLE:
2482       val = dstore_rounding(val);
2483       break;
2484     case T_ADDRESS:
2485       // Repackage the long as a pointer.
2486       val = ConvL2X(val);
2487       val = _gvn.transform(new CastX2PNode(val));
2488       break;
2489     }
2490 
2491     MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
2492     if (type != T_OBJECT ) {
2493       (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile);
2494     } else {

2495       // Possibly an oop being stored to Java heap or native memory
2496       if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop))) {
2497         // oop to Java heap.
2498         (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
2499       } else {
2500         // We can't tell at compile time if we are storing in the Java heap or outside
2501         // of it. So we need to emit code to conditionally do the proper type of
2502         // store.
2503 
2504         IdealKit ideal(this);
2505 #define __ ideal.
2506         // QQQ who knows what probability is here??
2507         __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
2508           // Sync IdealKit and graphKit.
2509           sync_kit(ideal);
2510           Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
2511           // Update IdealKit memory.
2512           __ sync_kit(this);
2513         } __ else_(); {
2514           __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile);




 218   bool inline_math_addExactL(bool is_increment);
 219   bool inline_math_multiplyExactI();
 220   bool inline_math_multiplyExactL();
 221   bool inline_math_negateExactI();
 222   bool inline_math_negateExactL();
 223   bool inline_math_subtractExactI(bool is_decrement);
 224   bool inline_math_subtractExactL(bool is_decrement);
 225   bool inline_exp();
 226   bool inline_pow();
 227   Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 228   bool inline_min_max(vmIntrinsics::ID id);
 229   bool inline_notify(vmIntrinsics::ID id);
 230   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 231   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 232   int classify_unsafe_addr(Node* &base, Node* &offset);
 233   Node* make_unsafe_address(Node* base, Node* offset);
 234   // Helper for inline_unsafe_access.
 235   // Generates the guards that check whether the result of
 236   // Unsafe.getObject should be recorded in an SATB log buffer.
 237   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 238   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
 239   static bool klass_needs_init_guard(Node* kls);
 240   bool inline_unsafe_allocate();
 241   bool inline_unsafe_copyMemory();
 242   bool inline_native_currentThread();
 243 #ifdef TRACE_HAVE_INTRINSICS
 244   bool inline_native_classID();
 245   bool inline_native_threadID();
 246 #endif
 247   bool inline_native_time_funcs(address method, const char* funcName);
 248   bool inline_native_isInterrupted();
 249   bool inline_native_Class_query(vmIntrinsics::ID id);
 250   bool inline_native_subtype_check();
 251 
 252   bool inline_native_newArray();
 253   bool inline_native_getLength();
 254   bool inline_array_copyOf(bool is_copyOfRange);
 255   bool inline_array_equals();
 256   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 257   bool inline_native_clone(bool is_virtual);
 258   bool inline_native_Reflection_getCallerClass();


 499 
 500   case vmIntrinsics::_addExactI:                return inline_math_addExactI(false /* add */);
 501   case vmIntrinsics::_addExactL:                return inline_math_addExactL(false /* add */);
 502   case vmIntrinsics::_decrementExactI:          return inline_math_subtractExactI(true /* decrement */);
 503   case vmIntrinsics::_decrementExactL:          return inline_math_subtractExactL(true /* decrement */);
 504   case vmIntrinsics::_incrementExactI:          return inline_math_addExactI(true /* increment */);
 505   case vmIntrinsics::_incrementExactL:          return inline_math_addExactL(true /* increment */);
 506   case vmIntrinsics::_multiplyExactI:           return inline_math_multiplyExactI();
 507   case vmIntrinsics::_multiplyExactL:           return inline_math_multiplyExactL();
 508   case vmIntrinsics::_negateExactI:             return inline_math_negateExactI();
 509   case vmIntrinsics::_negateExactL:             return inline_math_negateExactL();
 510   case vmIntrinsics::_subtractExactI:           return inline_math_subtractExactI(false /* subtract */);
 511   case vmIntrinsics::_subtractExactL:           return inline_math_subtractExactL(false /* subtract */);
 512 
 513   case vmIntrinsics::_arraycopy:                return inline_arraycopy();
 514 
 515   case vmIntrinsics::_compareTo:                return inline_string_compareTo();
 516   case vmIntrinsics::_indexOf:                  return inline_string_indexOf();
 517   case vmIntrinsics::_equals:                   return inline_string_equals();
 518 
 519   case vmIntrinsics::_getObject:                return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,  !is_volatile, false);
 520   case vmIntrinsics::_getBoolean:               return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile, false);
 521   case vmIntrinsics::_getByte:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,    !is_volatile, false);
 522   case vmIntrinsics::_getShort:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile, false);
 523   case vmIntrinsics::_getChar:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile, false);
 524   case vmIntrinsics::_getInt:                   return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile, false);
 525   case vmIntrinsics::_getLong:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile, false);
 526   case vmIntrinsics::_getFloat:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,   !is_volatile, false);
 527   case vmIntrinsics::_getDouble:                return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,  !is_volatile, false);
 528   case vmIntrinsics::_putObject:                return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,  !is_volatile, false);
 529   case vmIntrinsics::_putBoolean:               return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN, !is_volatile, false);
 530   case vmIntrinsics::_putByte:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,    !is_volatile, false);
 531   case vmIntrinsics::_putShort:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile, false);
 532   case vmIntrinsics::_putChar:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile, false);
 533   case vmIntrinsics::_putInt:                   return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile, false);
 534   case vmIntrinsics::_putLong:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile, false);
 535   case vmIntrinsics::_putFloat:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,   !is_volatile, false);
 536   case vmIntrinsics::_putDouble:                return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,  !is_volatile, false);
 537 
 538   case vmIntrinsics::_getByte_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE,    !is_volatile, false);
 539   case vmIntrinsics::_getShort_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT,   !is_volatile, false);
 540   case vmIntrinsics::_getChar_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR,    !is_volatile, false);
 541   case vmIntrinsics::_getInt_raw:               return inline_unsafe_access( is_native_ptr, !is_store, T_INT,     !is_volatile, false);
 542   case vmIntrinsics::_getLong_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_LONG,    !is_volatile, false);
 543   case vmIntrinsics::_getFloat_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT,   !is_volatile, false);
 544   case vmIntrinsics::_getDouble_raw:            return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE,  !is_volatile, false);
 545   case vmIntrinsics::_getAddress_raw:           return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile, false);
 546 
 547   case vmIntrinsics::_putByte_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_BYTE,    !is_volatile, false);
 548   case vmIntrinsics::_putShort_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_SHORT,   !is_volatile, false);
 549   case vmIntrinsics::_putChar_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_CHAR,    !is_volatile, false);
 550   case vmIntrinsics::_putInt_raw:               return inline_unsafe_access( is_native_ptr,  is_store, T_INT,     !is_volatile, false);
 551   case vmIntrinsics::_putLong_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_LONG,    !is_volatile, false);
 552   case vmIntrinsics::_putFloat_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_FLOAT,   !is_volatile, false);
 553   case vmIntrinsics::_putDouble_raw:            return inline_unsafe_access( is_native_ptr,  is_store, T_DOUBLE,  !is_volatile, false);
 554   case vmIntrinsics::_putAddress_raw:           return inline_unsafe_access( is_native_ptr,  is_store, T_ADDRESS, !is_volatile, false);
 555 
 556   case vmIntrinsics::_getObjectVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   is_volatile, false);
 557   case vmIntrinsics::_getBooleanVolatile:       return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  is_volatile, false);
 558   case vmIntrinsics::_getByteVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     is_volatile, false);
 559   case vmIntrinsics::_getShortVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    is_volatile, false);
 560   case vmIntrinsics::_getCharVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     is_volatile, false);
 561   case vmIntrinsics::_getIntVolatile:           return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      is_volatile, false);
 562   case vmIntrinsics::_getLongVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     is_volatile, false);
 563   case vmIntrinsics::_getFloatVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    is_volatile, false);
 564   case vmIntrinsics::_getDoubleVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   is_volatile, false);
 565 
 566   case vmIntrinsics::_putObjectVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   is_volatile, false);
 567   case vmIntrinsics::_putBooleanVolatile:       return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  is_volatile, false);
 568   case vmIntrinsics::_putByteVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     is_volatile, false);
 569   case vmIntrinsics::_putShortVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    is_volatile, false);
 570   case vmIntrinsics::_putCharVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     is_volatile, false);
 571   case vmIntrinsics::_putIntVolatile:           return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      is_volatile, false);
 572   case vmIntrinsics::_putLongVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     is_volatile, false);
 573   case vmIntrinsics::_putFloatVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    is_volatile, false);
 574   case vmIntrinsics::_putDoubleVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   is_volatile, false);
 575 
 576   case vmIntrinsics::_getShortUnaligned:        return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile, true);
 577   case vmIntrinsics::_getCharUnaligned:         return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile, true);
 578   case vmIntrinsics::_getIntUnaligned:          return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile, true);
 579   case vmIntrinsics::_getLongUnaligned:         return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile, true);
 580 
  // The trailing 'true' is the 'unaligned' argument of inline_unsafe_access:
  // these intrinsics may access addresses that are not naturally aligned for
  // their type, so the emitted load/store is flagged accordingly.
 581   case vmIntrinsics::_putShortUnaligned:        return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile, true);
 582   case vmIntrinsics::_putCharUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile, true);
 583   case vmIntrinsics::_putIntUnaligned:          return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile, true);
 584   case vmIntrinsics::_putLongUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile, true);
 585 
  // Atomic compare-and-swap intrinsics.
 586   case vmIntrinsics::_compareAndSwapObject:     return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
 587   case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
 588   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 589 
  // Ordered ("lazy") stores.
 590   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 591   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 592   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 593 
  // Atomic read-modify-write: fetch-and-add (LS_xadd) and exchange (LS_xchg).
 594   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 595   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 596   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 597   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 598   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 599 
  // All three fence intrinsics share one inliner, dispatched on intrinsic id.
 600   case vmIntrinsics::_loadFence:
 601   case vmIntrinsics::_storeFence:
 602   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 603 
 604   case vmIntrinsics::_currentThread:            return inline_native_currentThread();


2268     }
2269   }
2270 
2271   // The sharpened class might be unloaded if there is no class loader
2272   // constraint in place.
2273   if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2274     const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2275 
2276 #ifndef PRODUCT
2277     if (C->print_intrinsics() || C->print_inlining()) {
2278       tty->print("  from base type: ");  adr_type->dump();
2279       tty->print("  sharpened value: ");  tjp->dump();
2280     }
2281 #endif
2282     // Sharpen the value type.
2283     return tjp;
2284   }
  // No usable sharpened class: the caller keeps the unsharpened type.
2285   return NULL;
2286 }
2287 
// Inline an Unsafe.getX/putX family intrinsic.
//   is_native_ptr - raw address-based variant (single long address argument)
//                   rather than the (Object base, long offset) variant
//   is_store      - put* rather than get*
//   type          - BasicType of the Java-level access
//   is_volatile   - volatile variant; the access gets acquire/release ordering
//   unaligned     - the address may not be naturally aligned for 'type'
// Returns true if the intrinsic was inlined.
2288 bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool unaligned) {
2289   if (callee()->is_static())  return false;  // caller must have the capability!
2290 
  // Debug-only sanity checks that the callee's signature matches what the
  // intrinsic id promised (argument count/types and return type).
2291 #ifndef PRODUCT
2292   {
2293     ResourceMark rm;
2294     // Check the signatures.
2295     ciSignature* sig = callee()->signature();
2296 #ifdef ASSERT
2297     if (!is_store) {
2298       // Object getObject(Object base, int/long offset), etc.
2299       BasicType rtype = sig->return_type()->basic_type();
2300       if (rtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::getAddress_name())
2301           rtype = T_ADDRESS;  // it is really a C void*
2302       assert(rtype == type, "getter must return the expected value");
2303       if (!is_native_ptr) {
2304         assert(sig->count() == 2, "oop getter has 2 arguments");
2305         assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2306         assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2307       } else {
2308         assert(sig->count() == 1, "native getter has 1 argument");


2426   // so there's no problems making a strong assert about mixing users
2427   // of safe & unsafe memory.
2428   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2429 
2430    if (!is_store) {
2431     Node* p = NULL;
2432     // Try to constant fold a load from a constant field
2433     ciField* field = alias_type->field();
2434     if (heap_base_oop != top() &&
2435         field != NULL && field->is_constant() && field->layout_type() == type) {
2436       // final or stable field
2437       const Type* con_type = Type::make_constant(alias_type->field(), heap_base_oop);
2438       if (con_type != NULL) {
2439         p = makecon(con_type);
2440       }
2441     }
  // Not constant-foldable: emit an actual load.
2442     if (p == NULL) {
2443       MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
2444       // To be valid, unsafe loads may depend on other conditions than
2445       // the one that guards them: pin the Load node
2446       p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile, unaligned);
2447       // load value
2448       switch (type) {
2449       case T_BOOLEAN:
2450       case T_CHAR:
2451       case T_BYTE:
2452       case T_SHORT:
2453       case T_INT:
2454       case T_LONG:
2455       case T_FLOAT:
2456       case T_DOUBLE:
2457         break;
2458       case T_OBJECT:
  // An Unsafe.getObject result may need to be recorded in an SATB log
  // buffer; insert_pre_barrier emits the guards for that (see declaration).
2459         if (need_read_barrier) {
2460           insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2461         }
2462         break;
2463       case T_ADDRESS:
2464         // Cast to an int type.
2465         p = _gvn.transform(new CastP2XNode(NULL, p));
2466         p = ConvX2UL(p);


2473     // The load node has the control of the preceding MemBarCPUOrder.  All
2474     // following nodes will have the control of the MemBarCPUOrder inserted at
2475     // the end of this method.  So, pushing the load onto the stack at a later
2476     // point is fine.
2477     set_result(p);
2478   } else {
2479     // place effect of store into memory
2480     switch (type) {
2481     case T_DOUBLE:
  // Round the value to storable double precision before the store.
2482       val = dstore_rounding(val);
2483       break;
2484     case T_ADDRESS:
2485       // Repackage the long as a pointer.
2486       val = ConvL2X(val);
2487       val = _gvn.transform(new CastX2PNode(val));
2488       break;
2489     }
2490 
2491     MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
  // Non-oop stores need no GC barrier: store directly.
2492     if (type != T_OBJECT ) {
2493       (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile, unaligned);
2494     } else {
2495       assert(!unaligned, "unaligned access not supported with object type");
2496       // Possibly an oop being stored to Java heap or native memory
  // If the base oop is statically known to be non-null, this is certainly
  // a heap store and gets the barriered oop-store path unconditionally.
2497       if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop))) {
2498         // oop to Java heap.
2499         (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
2500       } else {
2501         // We can't tell at compile time if we are storing in the Java heap or outside
2502         // of it. So we need to emit code to conditionally do the proper type of
2503         // store.
2504 
2505         IdealKit ideal(this);
2506 #define __ ideal.
2507         // QQQ who knows what probability is here??
  // Runtime dispatch: non-null base => heap store with GC barriers,
  // null base => raw store to native memory (else branch below).
2508         __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
2509           // Sync IdealKit and graphKit.
2510           sync_kit(ideal);
2511           Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
2512           // Update IdealKit memory.
2513           __ sync_kit(this);
2514         } __ else_(); {
2515           __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile);


src/share/vm/opto/library_call.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File