
src/share/vm/opto/library_call.cpp

rev 12711 : 8181292: Backport Rename internal Unsafe.compare methods from 10 to 9
Reviewed-by: psandoz, dholmes, thartmann
Contributed-by: ron.pressler@oracle.com, claes.redestad@oracle.com


 632   case vmIntrinsics::_getObjectOpaque:          return inline_unsafe_access(!is_store, T_OBJECT,   Opaque, false);
 633   case vmIntrinsics::_getBooleanOpaque:         return inline_unsafe_access(!is_store, T_BOOLEAN,  Opaque, false);
 634   case vmIntrinsics::_getByteOpaque:            return inline_unsafe_access(!is_store, T_BYTE,     Opaque, false);
 635   case vmIntrinsics::_getShortOpaque:           return inline_unsafe_access(!is_store, T_SHORT,    Opaque, false);
 636   case vmIntrinsics::_getCharOpaque:            return inline_unsafe_access(!is_store, T_CHAR,     Opaque, false);
 637   case vmIntrinsics::_getIntOpaque:             return inline_unsafe_access(!is_store, T_INT,      Opaque, false);
 638   case vmIntrinsics::_getLongOpaque:            return inline_unsafe_access(!is_store, T_LONG,     Opaque, false);
 639   case vmIntrinsics::_getFloatOpaque:           return inline_unsafe_access(!is_store, T_FLOAT,    Opaque, false);
 640   case vmIntrinsics::_getDoubleOpaque:          return inline_unsafe_access(!is_store, T_DOUBLE,   Opaque, false);
 641 
 642   case vmIntrinsics::_putObjectOpaque:          return inline_unsafe_access( is_store, T_OBJECT,   Opaque, false);
 643   case vmIntrinsics::_putBooleanOpaque:         return inline_unsafe_access( is_store, T_BOOLEAN,  Opaque, false);
 644   case vmIntrinsics::_putByteOpaque:            return inline_unsafe_access( is_store, T_BYTE,     Opaque, false);
 645   case vmIntrinsics::_putShortOpaque:           return inline_unsafe_access( is_store, T_SHORT,    Opaque, false);
 646   case vmIntrinsics::_putCharOpaque:            return inline_unsafe_access( is_store, T_CHAR,     Opaque, false);
 647   case vmIntrinsics::_putIntOpaque:             return inline_unsafe_access( is_store, T_INT,      Opaque, false);
 648   case vmIntrinsics::_putLongOpaque:            return inline_unsafe_access( is_store, T_LONG,     Opaque, false);
 649   case vmIntrinsics::_putFloatOpaque:           return inline_unsafe_access( is_store, T_FLOAT,    Opaque, false);
 650   case vmIntrinsics::_putDoubleOpaque:          return inline_unsafe_access( is_store, T_DOUBLE,   Opaque, false);
 651 
 652   case vmIntrinsics::_compareAndSwapObject:             return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap,      Volatile);
 653   case vmIntrinsics::_compareAndSwapByte:               return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap,      Volatile);
 654   case vmIntrinsics::_compareAndSwapShort:              return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap,      Volatile);
 655   case vmIntrinsics::_compareAndSwapInt:                return inline_unsafe_load_store(T_INT,    LS_cmp_swap,      Volatile);
 656   case vmIntrinsics::_compareAndSwapLong:               return inline_unsafe_load_store(T_LONG,   LS_cmp_swap,      Volatile);
 657 
 658   case vmIntrinsics::_weakCompareAndSwapObject:         return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Relaxed);
 659   case vmIntrinsics::_weakCompareAndSwapObjectAcquire:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Acquire);
 660   case vmIntrinsics::_weakCompareAndSwapObjectRelease:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Release);
 661   case vmIntrinsics::_weakCompareAndSwapObjectVolatile: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Volatile);
 662   case vmIntrinsics::_weakCompareAndSwapByte:           return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap_weak, Relaxed);
 663   case vmIntrinsics::_weakCompareAndSwapByteAcquire:    return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap_weak, Acquire);
 664   case vmIntrinsics::_weakCompareAndSwapByteRelease:    return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap_weak, Release);
 665   case vmIntrinsics::_weakCompareAndSwapByteVolatile:   return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap_weak, Volatile);
 666   case vmIntrinsics::_weakCompareAndSwapShort:          return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap_weak, Relaxed);
 667   case vmIntrinsics::_weakCompareAndSwapShortAcquire:   return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap_weak, Acquire);
 668   case vmIntrinsics::_weakCompareAndSwapShortRelease:   return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap_weak, Release);
 669   case vmIntrinsics::_weakCompareAndSwapShortVolatile:  return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap_weak, Volatile);
 670   case vmIntrinsics::_weakCompareAndSwapInt:            return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Relaxed);
 671   case vmIntrinsics::_weakCompareAndSwapIntAcquire:     return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Acquire);
 672   case vmIntrinsics::_weakCompareAndSwapIntRelease:     return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Release);
 673   case vmIntrinsics::_weakCompareAndSwapIntVolatile:    return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Volatile);
 674   case vmIntrinsics::_weakCompareAndSwapLong:           return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Relaxed);
 675   case vmIntrinsics::_weakCompareAndSwapLongAcquire:    return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Acquire);
 676   case vmIntrinsics::_weakCompareAndSwapLongRelease:    return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Release);
 677   case vmIntrinsics::_weakCompareAndSwapLongVolatile:   return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Volatile);
 678 
 679   case vmIntrinsics::_compareAndExchangeObjectVolatile: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Volatile);
 680   case vmIntrinsics::_compareAndExchangeObjectAcquire:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Acquire);
 681   case vmIntrinsics::_compareAndExchangeObjectRelease:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Release);
 682   case vmIntrinsics::_compareAndExchangeByteVolatile:   return inline_unsafe_load_store(T_BYTE,   LS_cmp_exchange,  Volatile);
 683   case vmIntrinsics::_compareAndExchangeByteAcquire:    return inline_unsafe_load_store(T_BYTE,   LS_cmp_exchange,  Acquire);
 684   case vmIntrinsics::_compareAndExchangeByteRelease:    return inline_unsafe_load_store(T_BYTE,   LS_cmp_exchange,  Release);
 685   case vmIntrinsics::_compareAndExchangeShortVolatile:  return inline_unsafe_load_store(T_SHORT,  LS_cmp_exchange,  Volatile);
 686   case vmIntrinsics::_compareAndExchangeShortAcquire:   return inline_unsafe_load_store(T_SHORT,  LS_cmp_exchange,  Acquire);
 687   case vmIntrinsics::_compareAndExchangeShortRelease:   return inline_unsafe_load_store(T_SHORT,  LS_cmp_exchange,  Release);
 688   case vmIntrinsics::_compareAndExchangeIntVolatile:    return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Volatile);
 689   case vmIntrinsics::_compareAndExchangeIntAcquire:     return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Acquire);
 690   case vmIntrinsics::_compareAndExchangeIntRelease:     return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Release);
 691   case vmIntrinsics::_compareAndExchangeLongVolatile:   return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Volatile);
 692   case vmIntrinsics::_compareAndExchangeLongAcquire:    return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Acquire);
 693   case vmIntrinsics::_compareAndExchangeLongRelease:    return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Release);
 694 
 695   case vmIntrinsics::_getAndAddByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_add,       Volatile);
 696   case vmIntrinsics::_getAndAddShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_add,       Volatile);
 697   case vmIntrinsics::_getAndAddInt:                     return inline_unsafe_load_store(T_INT,    LS_get_add,       Volatile);
 698   case vmIntrinsics::_getAndAddLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_add,       Volatile);
 699 
 700   case vmIntrinsics::_getAndSetByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_set,       Volatile);
 701   case vmIntrinsics::_getAndSetShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_set,       Volatile);
 702   case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
 703   case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
 704   case vmIntrinsics::_getAndSetObject:                  return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 705 
 706   case vmIntrinsics::_loadFence:
 707   case vmIntrinsics::_storeFence:
 708   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 709 
 710   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 711 
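The cases above show the pre-backport intrinsic names: the strong CAS intrinsics (_compareAndSwap*) always get Volatile ordering, while the weak variants come in Relaxed, Acquire, Release and Volatile flavors. A minimal Java sketch of the corresponding jdk.internal.misc.Unsafe calls follows; it assumes a JDK 9 EA build from before this change, and it assumes the internal package is made accessible to the caller (for example with --add-exports java.base/jdk.internal.misc=ALL-UNNAMED). The class name is made up for illustration.

    // Sketch only: uses the old (pre-rename) jdk.internal.misc.Unsafe names that the
    // cases above dispatch on; build and export flag are assumptions, not requirements
    // stated by this patch.
    import jdk.internal.misc.Unsafe;
    import java.lang.reflect.Field;

    public class OldUnsafeNamesDemo {
        private volatile int counter;

        public static void main(String[] args) throws Exception {
            Unsafe u = Unsafe.getUnsafe();
            Field f = OldUnsafeNamesDemo.class.getDeclaredField("counter");
            long off = u.objectFieldOffset(f);
            OldUnsafeNamesDemo d = new OldUnsafeNamesDemo();

            // _compareAndSwapInt: strong CAS, Volatile ordering in the table above.
            boolean swapped = u.compareAndSwapInt(d, off, 0, 1);

            // _weakCompareAndSwapInt: Relaxed ordering, may fail spuriously, so retry in a loop.
            while (!u.weakCompareAndSwapInt(d, off, 1, 2)) { /* spurious failure, retry */ }

            System.out.println(swapped + " " + d.counter);   // true 2
        }
    }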


2567       } else {
2568         if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
2569           insert_mem_bar(Op_MemBarVolatile);
2570         }
2571       }
2572       break;
2573     default:
2574       ShouldNotReachHere();
2575   }
2576 
2577   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2578 
2579   return true;
2580 }
2581 
2582 //----------------------------inline_unsafe_load_store----------------------------
2583 // This method serves a couple of different customers (depending on LoadStoreKind):
2584 //
2585 // LS_cmp_swap:
2586 //
2587 //   boolean compareAndSwapObject(Object o, long offset, Object expected, Object x);
2588 //   boolean compareAndSwapInt(   Object o, long offset, int    expected, int    x);
2589 //   boolean compareAndSwapLong(  Object o, long offset, long   expected, long   x);
2590 //
2591 // LS_cmp_swap_weak:
2592 //
2593 //   boolean weakCompareAndSwapObject(       Object o, long offset, Object expected, Object x);
2594 //   boolean weakCompareAndSwapObjectAcquire(Object o, long offset, Object expected, Object x);
2595 //   boolean weakCompareAndSwapObjectRelease(Object o, long offset, Object expected, Object x);
2596 //
2597 //   boolean weakCompareAndSwapInt(          Object o, long offset, int    expected, int    x);
2598 //   boolean weakCompareAndSwapIntAcquire(   Object o, long offset, int    expected, int    x);
2599 //   boolean weakCompareAndSwapIntRelease(   Object o, long offset, int    expected, int    x);
2600 //
2601 //   boolean weakCompareAndSwapLong(         Object o, long offset, long   expected, long   x);
2602 //   boolean weakCompareAndSwapLongAcquire(  Object o, long offset, long   expected, long   x);
2603 //   boolean weakCompareAndSwapLongRelease(  Object o, long offset, long   expected, long   x);



2604 //
2605 // LS_cmp_exchange:
2606 //
2607 //   Object compareAndExchangeObjectVolatile(Object o, long offset, Object expected, Object x);
2608 //   Object compareAndExchangeObjectAcquire( Object o, long offset, Object expected, Object x);
2609 //   Object compareAndExchangeObjectRelease( Object o, long offset, Object expected, Object x);
2610 //
 2611 //   int    compareAndExchangeIntVolatile(   Object o, long offset, int    expected, int    x);
 2612 //   int    compareAndExchangeIntAcquire(    Object o, long offset, int    expected, int    x);
 2613 //   int    compareAndExchangeIntRelease(    Object o, long offset, int    expected, int    x);
 2614 //
 2615 //   long   compareAndExchangeLongVolatile(  Object o, long offset, long   expected, long   x);
 2616 //   long   compareAndExchangeLongAcquire(   Object o, long offset, long   expected, long   x);
 2617 //   long   compareAndExchangeLongRelease(   Object o, long offset, long   expected, long   x);
2618 //
2619 // LS_get_add:
2620 //
2621 //   int  getAndAddInt( Object o, long offset, int  delta)
2622 //   long getAndAddLong(Object o, long offset, long delta)
2623 //
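The LS_get_add customers listed above are atomic fetch-and-add operations: they return the value that was at the given offset before the addition. The usual way to reach this intrinsic is through the public java.util.concurrent.atomic classes, whose getAndAdd methods are (in the JDK sources) built on Unsafe.getAndAddInt/getAndAddLong. A short, self-contained illustration:

    import java.util.concurrent.atomic.AtomicInteger;

    public class GetAndAddDemo {
        public static void main(String[] args) {
            AtomicInteger n = new AtomicInteger(40);
            int before = n.getAndAdd(2);   // atomic fetch-and-add; returns the old value
            System.out.println(before);    // 40
            System.out.println(n.get());   // 42
        }
    }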


4945 //                                                      int length);
4946 bool LibraryCallKit::inline_arraycopy() {
4947   // Get the arguments.
4948   Node* src         = argument(0);  // type: oop
4949   Node* src_offset  = argument(1);  // type: int
4950   Node* dest        = argument(2);  // type: oop
4951   Node* dest_offset = argument(3);  // type: int
4952   Node* length      = argument(4);  // type: int
4953 
4954   uint new_idx = C->unique();
4955 
4956   // Check for allocation before we add nodes that would confuse
4957   // tightly_coupled_allocation()
4958   AllocateArrayNode* alloc = tightly_coupled_allocation(dest, NULL);
4959 
4960   int saved_reexecute_sp = -1;
4961   JVMState* saved_jvms = arraycopy_restore_alloc_state(alloc, saved_reexecute_sp);
4962   // See arraycopy_restore_alloc_state() comment
4963   // if alloc == NULL we don't have to worry about a tightly coupled allocation so we can emit all needed guards
4964   // if saved_jvms != NULL (then alloc != NULL) then we can handle guards and a tightly coupled allocation
 4965   // if saved_jvms == NULL and alloc != NULL, we can't emit any guards
4966   bool can_emit_guards = (alloc == NULL || saved_jvms != NULL);
4967 
4968   // The following tests must be performed
4969   // (1) src and dest are arrays.
4970   // (2) src and dest arrays must have elements of the same BasicType
4971   // (3) src and dest must not be null.
4972   // (4) src_offset must not be negative.
4973   // (5) dest_offset must not be negative.
4974   // (6) length must not be negative.
4975   // (7) src_offset + length must not exceed length of src.
4976   // (8) dest_offset + length must not exceed length of dest.
4977   // (9) each element of an oop array must be assignable
4978 
4979   // (3) src and dest must not be null.
4980   // always do this here because we need the JVM state for uncommon traps
4981   Node* null_ctl = top();
4982   src  = saved_jvms != NULL ? null_check_oop(src, &null_ctl, true, true) : null_check(src,  T_ARRAY);
4983   assert(null_ctl->is_top(), "no null control here");
4984   dest = null_check(dest, T_ARRAY);
4985 
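tightly_coupled_allocation() above looks for the pattern in which the destination array is allocated immediately before the copy, with nothing in between that could observe the uninitialized array; in that shape the compiler may avoid separately zeroing the copied region. That is exactly the common copy-and-grow idiom, sketched here in plain Java (the class and method names are only for illustration):

    import java.util.Arrays;

    public class TightlyCoupledCopy {
        // The allocation of 'bigger' feeds directly into the arraycopy, so nothing can
        // see the array before it is filled.
        static int[] grow(int[] src, int newLength) {
            int[] bigger = new int[newLength];
            System.arraycopy(src, 0, bigger, 0, src.length);
            return bigger;
        }

        public static void main(String[] args) {
            int[] grown = grow(new int[] {1, 2, 3}, 6);
            System.out.println(Arrays.toString(grown));   // [1, 2, 3, 0, 0, 0]
        }
    }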




 632   case vmIntrinsics::_getObjectOpaque:          return inline_unsafe_access(!is_store, T_OBJECT,   Opaque, false);
 633   case vmIntrinsics::_getBooleanOpaque:         return inline_unsafe_access(!is_store, T_BOOLEAN,  Opaque, false);
 634   case vmIntrinsics::_getByteOpaque:            return inline_unsafe_access(!is_store, T_BYTE,     Opaque, false);
 635   case vmIntrinsics::_getShortOpaque:           return inline_unsafe_access(!is_store, T_SHORT,    Opaque, false);
 636   case vmIntrinsics::_getCharOpaque:            return inline_unsafe_access(!is_store, T_CHAR,     Opaque, false);
 637   case vmIntrinsics::_getIntOpaque:             return inline_unsafe_access(!is_store, T_INT,      Opaque, false);
 638   case vmIntrinsics::_getLongOpaque:            return inline_unsafe_access(!is_store, T_LONG,     Opaque, false);
 639   case vmIntrinsics::_getFloatOpaque:           return inline_unsafe_access(!is_store, T_FLOAT,    Opaque, false);
 640   case vmIntrinsics::_getDoubleOpaque:          return inline_unsafe_access(!is_store, T_DOUBLE,   Opaque, false);
 641 
 642   case vmIntrinsics::_putObjectOpaque:          return inline_unsafe_access( is_store, T_OBJECT,   Opaque, false);
 643   case vmIntrinsics::_putBooleanOpaque:         return inline_unsafe_access( is_store, T_BOOLEAN,  Opaque, false);
 644   case vmIntrinsics::_putByteOpaque:            return inline_unsafe_access( is_store, T_BYTE,     Opaque, false);
 645   case vmIntrinsics::_putShortOpaque:           return inline_unsafe_access( is_store, T_SHORT,    Opaque, false);
 646   case vmIntrinsics::_putCharOpaque:            return inline_unsafe_access( is_store, T_CHAR,     Opaque, false);
 647   case vmIntrinsics::_putIntOpaque:             return inline_unsafe_access( is_store, T_INT,      Opaque, false);
 648   case vmIntrinsics::_putLongOpaque:            return inline_unsafe_access( is_store, T_LONG,     Opaque, false);
 649   case vmIntrinsics::_putFloatOpaque:           return inline_unsafe_access( is_store, T_FLOAT,    Opaque, false);
 650   case vmIntrinsics::_putDoubleOpaque:          return inline_unsafe_access( is_store, T_DOUBLE,   Opaque, false);
 651 
 652   case vmIntrinsics::_compareAndSetObject:              return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap,      Volatile);
 653   case vmIntrinsics::_compareAndSetByte:                return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap,      Volatile);
 654   case vmIntrinsics::_compareAndSetShort:               return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap,      Volatile);
 655   case vmIntrinsics::_compareAndSetInt:                 return inline_unsafe_load_store(T_INT,    LS_cmp_swap,      Volatile);
 656   case vmIntrinsics::_compareAndSetLong:                return inline_unsafe_load_store(T_LONG,   LS_cmp_swap,      Volatile);
 657 
 658   case vmIntrinsics::_weakCompareAndSetObjectPlain:     return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Relaxed);
 659   case vmIntrinsics::_weakCompareAndSetObjectAcquire:   return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Acquire);
 660   case vmIntrinsics::_weakCompareAndSetObjectRelease:   return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Release);
 661   case vmIntrinsics::_weakCompareAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Volatile);
 662   case vmIntrinsics::_weakCompareAndSetBytePlain:       return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap_weak, Relaxed);
 663   case vmIntrinsics::_weakCompareAndSetByteAcquire:     return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap_weak, Acquire);
 664   case vmIntrinsics::_weakCompareAndSetByteRelease:     return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap_weak, Release);
 665   case vmIntrinsics::_weakCompareAndSetByte:            return inline_unsafe_load_store(T_BYTE,   LS_cmp_swap_weak, Volatile);
 666   case vmIntrinsics::_weakCompareAndSetShortPlain:      return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap_weak, Relaxed);
 667   case vmIntrinsics::_weakCompareAndSetShortAcquire:    return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap_weak, Acquire);
 668   case vmIntrinsics::_weakCompareAndSetShortRelease:    return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap_weak, Release);
 669   case vmIntrinsics::_weakCompareAndSetShort:           return inline_unsafe_load_store(T_SHORT,  LS_cmp_swap_weak, Volatile);
 670   case vmIntrinsics::_weakCompareAndSetIntPlain:        return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Relaxed);
 671   case vmIntrinsics::_weakCompareAndSetIntAcquire:      return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Acquire);
 672   case vmIntrinsics::_weakCompareAndSetIntRelease:      return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Release);
 673   case vmIntrinsics::_weakCompareAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Volatile);
 674   case vmIntrinsics::_weakCompareAndSetLongPlain:       return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Relaxed);
 675   case vmIntrinsics::_weakCompareAndSetLongAcquire:     return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Acquire);
 676   case vmIntrinsics::_weakCompareAndSetLongRelease:     return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Release);
 677   case vmIntrinsics::_weakCompareAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Volatile);
 678 
 679   case vmIntrinsics::_compareAndExchangeObject:         return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Volatile);
 680   case vmIntrinsics::_compareAndExchangeObjectAcquire:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Acquire);
 681   case vmIntrinsics::_compareAndExchangeObjectRelease:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Release);
 682   case vmIntrinsics::_compareAndExchangeByte:           return inline_unsafe_load_store(T_BYTE,   LS_cmp_exchange,  Volatile);
 683   case vmIntrinsics::_compareAndExchangeByteAcquire:    return inline_unsafe_load_store(T_BYTE,   LS_cmp_exchange,  Acquire);
 684   case vmIntrinsics::_compareAndExchangeByteRelease:    return inline_unsafe_load_store(T_BYTE,   LS_cmp_exchange,  Release);
 685   case vmIntrinsics::_compareAndExchangeShort:          return inline_unsafe_load_store(T_SHORT,  LS_cmp_exchange,  Volatile);
 686   case vmIntrinsics::_compareAndExchangeShortAcquire:   return inline_unsafe_load_store(T_SHORT,  LS_cmp_exchange,  Acquire);
 687   case vmIntrinsics::_compareAndExchangeShortRelease:   return inline_unsafe_load_store(T_SHORT,  LS_cmp_exchange,  Release);
 688   case vmIntrinsics::_compareAndExchangeInt:            return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Volatile);
 689   case vmIntrinsics::_compareAndExchangeIntAcquire:     return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Acquire);
 690   case vmIntrinsics::_compareAndExchangeIntRelease:     return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Release);
 691   case vmIntrinsics::_compareAndExchangeLong:           return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Volatile);
 692   case vmIntrinsics::_compareAndExchangeLongAcquire:    return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Acquire);
 693   case vmIntrinsics::_compareAndExchangeLongRelease:    return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Release);
 694 
 695   case vmIntrinsics::_getAndAddByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_add,       Volatile);
 696   case vmIntrinsics::_getAndAddShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_add,       Volatile);
 697   case vmIntrinsics::_getAndAddInt:                     return inline_unsafe_load_store(T_INT,    LS_get_add,       Volatile);
 698   case vmIntrinsics::_getAndAddLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_add,       Volatile);
 699 
 700   case vmIntrinsics::_getAndSetByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_set,       Volatile);
 701   case vmIntrinsics::_getAndSetShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_set,       Volatile);
 702   case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
 703   case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
 704   case vmIntrinsics::_getAndSetObject:                  return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 705 
 706   case vmIntrinsics::_loadFence:
 707   case vmIntrinsics::_storeFence:
 708   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 709 
 710   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 711 
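After the rename, the intrinsic names follow the VarHandle-style scheme: compareAndSet* for the strong boolean CAS, weakCompareAndSet*{Plain,Acquire,Release,} for the weak flavors, and compareAndExchange*{,Acquire,Release} for the value-returning form. The public VarHandle API exposes the same names and orderings and, for field handles, is ultimately implemented on top of these Unsafe intrinsics. A small sketch using only standard JDK 9 API (the class and field names are illustrative):

    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.VarHandle;

    public class NewNamesDemo {
        private volatile int state;

        private static final VarHandle STATE;
        static {
            try {
                STATE = MethodHandles.lookup()
                        .findVarHandle(NewNamesDemo.class, "state", int.class);
            } catch (ReflectiveOperationException e) {
                throw new ExceptionInInitializerError(e);
            }
        }

        public static void main(String[] args) {
            NewNamesDemo d = new NewNamesDemo();

            // compareAndSet: strong CAS, Volatile ordering (_compareAndSetInt above).
            boolean swapped = STATE.compareAndSet(d, 0, 1);

            // weakCompareAndSetPlain: Relaxed ordering, may fail spuriously
            // (_weakCompareAndSetIntPlain above), so it is always retried in a loop.
            while (!STATE.weakCompareAndSetPlain(d, 1, 2)) { /* retry */ }

            System.out.println(swapped + " " + d.state);   // true 2
        }
    }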


2567       } else {
2568         if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
2569           insert_mem_bar(Op_MemBarVolatile);
2570         }
2571       }
2572       break;
2573     default:
2574       ShouldNotReachHere();
2575   }
2576 
2577   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2578 
2579   return true;
2580 }
2581 
2582 //----------------------------inline_unsafe_load_store----------------------------
2583 // This method serves a couple of different customers (depending on LoadStoreKind):
2584 //
2585 // LS_cmp_swap:
2586 //
2587 //   boolean compareAndSetObject(Object o, long offset, Object expected, Object x);
2588 //   boolean compareAndSetInt(   Object o, long offset, int    expected, int    x);
2589 //   boolean compareAndSetLong(  Object o, long offset, long   expected, long   x);
2590 //
2591 // LS_cmp_swap_weak:
2592 //
2593 //   boolean weakCompareAndSetObject(       Object o, long offset, Object expected, Object x);
2594 //   boolean weakCompareAndSetObjectPlain(  Object o, long offset, Object expected, Object x);
2595 //   boolean weakCompareAndSetObjectAcquire(Object o, long offset, Object expected, Object x);
2596 //   boolean weakCompareAndSetObjectRelease(Object o, long offset, Object expected, Object x);
2597 //
2598 //   boolean weakCompareAndSetInt(          Object o, long offset, int    expected, int    x);
2599 //   boolean weakCompareAndSetIntPlain(     Object o, long offset, int    expected, int    x);
2600 //   boolean weakCompareAndSetIntAcquire(   Object o, long offset, int    expected, int    x);
2601 //   boolean weakCompareAndSetIntRelease(   Object o, long offset, int    expected, int    x);
2602 //
2603 //   boolean weakCompareAndSetLong(         Object o, long offset, long   expected, long   x);
2604 //   boolean weakCompareAndSetLongPlain(    Object o, long offset, long   expected, long   x);
2605 //   boolean weakCompareAndSetLongAcquire(  Object o, long offset, long   expected, long   x);
2606 //   boolean weakCompareAndSetLongRelease(  Object o, long offset, long   expected, long   x);
2607 //
2608 // LS_cmp_exchange:
2609 //
2610 //   Object compareAndExchangeObjectVolatile(Object o, long offset, Object expected, Object x);
2611 //   Object compareAndExchangeObjectAcquire( Object o, long offset, Object expected, Object x);
2612 //   Object compareAndExchangeObjectRelease( Object o, long offset, Object expected, Object x);
2613 //
 2614 //   int    compareAndExchangeIntVolatile(   Object o, long offset, int    expected, int    x);
 2615 //   int    compareAndExchangeIntAcquire(    Object o, long offset, int    expected, int    x);
 2616 //   int    compareAndExchangeIntRelease(    Object o, long offset, int    expected, int    x);
 2617 //
 2618 //   long   compareAndExchangeLongVolatile(  Object o, long offset, long   expected, long   x);
 2619 //   long   compareAndExchangeLongAcquire(   Object o, long offset, long   expected, long   x);
 2620 //   long   compareAndExchangeLongRelease(   Object o, long offset, long   expected, long   x);
2621 //
2622 // LS_get_add:
2623 //
2624 //   int  getAndAddInt( Object o, long offset, int  delta)
2625 //   long getAndAddLong(Object o, long offset, long delta)
2626 //
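LS_cmp_exchange differs from the two CAS kinds above in its result: instead of a boolean, it returns the value actually found at the offset (the witness), so a failed attempt already tells the caller what to retry with. A sketch of that retry pattern using the equivalent public VarHandle method, with illustrative class and field names, standard JDK 9 API only:

    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.VarHandle;

    public class ExchangeLoopDemo {
        private volatile int value;

        private static final VarHandle VALUE;
        static {
            try {
                VALUE = MethodHandles.lookup()
                        .findVarHandle(ExchangeLoopDemo.class, "value", int.class);
            } catch (ReflectiveOperationException e) {
                throw new ExceptionInInitializerError(e);
            }
        }

        // Atomically doubles 'value'; the witness returned on failure replaces a re-read.
        static int atomicDouble(ExchangeLoopDemo d) {
            int cur = (int) VALUE.getVolatile(d);
            for (;;) {
                int witness = (int) VALUE.compareAndExchange(d, cur, cur * 2);
                if (witness == cur) {
                    return cur * 2;        // exchange succeeded
                }
                cur = witness;             // lost the race; retry from the value we saw
            }
        }

        public static void main(String[] args) {
            ExchangeLoopDemo d = new ExchangeLoopDemo();
            VALUE.setVolatile(d, 21);
            System.out.println(atomicDouble(d));   // 42
        }
    }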


4948 //                                                      int length);
4949 bool LibraryCallKit::inline_arraycopy() {
4950   // Get the arguments.
4951   Node* src         = argument(0);  // type: oop
4952   Node* src_offset  = argument(1);  // type: int
4953   Node* dest        = argument(2);  // type: oop
4954   Node* dest_offset = argument(3);  // type: int
4955   Node* length      = argument(4);  // type: int
4956 
4957   uint new_idx = C->unique();
4958 
4959   // Check for allocation before we add nodes that would confuse
4960   // tightly_coupled_allocation()
4961   AllocateArrayNode* alloc = tightly_coupled_allocation(dest, NULL);
4962 
4963   int saved_reexecute_sp = -1;
4964   JVMState* saved_jvms = arraycopy_restore_alloc_state(alloc, saved_reexecute_sp);
4965   // See arraycopy_restore_alloc_state() comment
4966   // if alloc == NULL we don't have to worry about a tightly coupled allocation so we can emit all needed guards
4967   // if saved_jvms != NULL (then alloc != NULL) then we can handle guards and a tightly coupled allocation
4968   // if saved_jvms == NULL and alloc != NULL, we can't emit any guards
4969   bool can_emit_guards = (alloc == NULL || saved_jvms != NULL);
4970 
4971   // The following tests must be performed
4972   // (1) src and dest are arrays.
4973   // (2) src and dest arrays must have elements of the same BasicType
4974   // (3) src and dest must not be null.
4975   // (4) src_offset must not be negative.
4976   // (5) dest_offset must not be negative.
4977   // (6) length must not be negative.
4978   // (7) src_offset + length must not exceed length of src.
4979   // (8) dest_offset + length must not exceed length of dest.
4980   // (9) each element of an oop array must be assignable
4981 
4982   // (3) src and dest must not be null.
4983   // always do this here because we need the JVM state for uncommon traps
4984   Node* null_ctl = top();
4985   src  = saved_jvms != NULL ? null_check_oop(src, &null_ctl, true, true) : null_check(src,  T_ARRAY);
4986   assert(null_ctl->is_top(), "no null control here");
4987   dest = null_check(dest, T_ARRAY);
4988 
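The nine tests listed above mirror the checks that System.arraycopy is specified to make; where the compiler cannot prove them statically, inline_arraycopy() typically emits guards (uncommon traps) and lets the interpreter raise the corresponding exception. A plain-Java illustration of the observable behavior behind guards (3), (4)-(8) and (9):

    public class ArraycopyGuardsDemo {
        public static void main(String[] args) {
            Object[] src = { "a", "b", Integer.valueOf(42) };
            String[] dst = new String[3];

            // (3) src and dest must not be null.
            try { System.arraycopy(null, 0, dst, 0, 1); }
            catch (NullPointerException e) { System.out.println("null source"); }

            // (4)-(8) offsets and length must be non-negative and in range.
            try { System.arraycopy(src, 1, dst, 0, 3); }
            catch (IndexOutOfBoundsException e) { System.out.println("out of range"); }

            // (9) each element of an oop array must be assignable to the destination:
            // the Integer at src[2] cannot be stored into a String[].
            try { System.arraycopy(src, 0, dst, 0, 3); }
            catch (ArrayStoreException e) { System.out.println("not assignable"); }
        }
    }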

