hotspot/src/share/vm/opto/library_call.cpp
changeset 36316 7a83de7aabca
parent 36314 31a4d71411b9
child 36337 d4b2f60ff5a9
comparing 36315:150a415079ae with 36316:7a83de7aabca
@@ -239 +239 @@
   Node* make_unsafe_address(Node* base, Node* offset);
   // Helper for inline_unsafe_access.
   // Generates the guards that check whether the result of
   // Unsafe.getObject should be recorded in an SATB log buffer.
   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
-  bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
+
+  typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
+  bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
   static bool klass_needs_init_guard(Node* kls);
   bool inline_unsafe_allocate();
   bool inline_unsafe_copyMemory();
   bool inline_native_currentThread();
 #ifdef TRACE_HAVE_INTRINSICS
@@ -272 +274 @@
   AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
                                                 RegionNode* slow_region);
   JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
   void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp);
 
-  typedef enum { LS_xadd, LS_xchg, LS_cmpxchg } LoadStoreKind;
-  bool inline_unsafe_load_store(BasicType type,  LoadStoreKind kind);
-  bool inline_unsafe_ordered_store(BasicType type);
+  typedef enum { LS_get_add, LS_get_set, LS_cmp_swap, LS_cmp_swap_weak, LS_cmp_exchange } LoadStoreKind;
+  MemNode::MemOrd access_kind_to_memord_LS(AccessKind access_kind, bool is_store);
+  MemNode::MemOrd access_kind_to_memord(AccessKind access_kind);
+  bool inline_unsafe_load_store(BasicType type,  LoadStoreKind kind, AccessKind access_kind);
   bool inline_unsafe_fence(vmIntrinsics::ID id);
   bool inline_fp_conversions(vmIntrinsics::ID id);
   bool inline_number_methods(vmIntrinsics::ID id);
   bool inline_reference_get();
   bool inline_Class_cast();
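
The AccessKind argument introduced above replaces the old is_volatile boolean throughout this file. As a rough guide to the five modes, inferred from how they are wired up in the hunks below (an editor's summary, not text from the changeset):

// Relaxed  -- plain unordered access; takes over the old "!is_volatile" path
// Opaque   -- like Relaxed, but bracketed with CPUOrder membars so the
//             compiler cannot move the access; no hardware ordering implied
// Acquire  -- loads only: later accesses may not be hoisted above the load
// Release  -- stores only: earlier accesses may not sink below the store
// Volatile -- full Java volatile semantics; takes over the old "is_volatile" path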
@@ -551 +554 @@
   case vmIntrinsics::_compressStringC:
   case vmIntrinsics::_compressStringB:          return inline_string_copy( is_compress);
   case vmIntrinsics::_inflateStringC:
   case vmIntrinsics::_inflateStringB:           return inline_string_copy(!is_compress);
 
-  case vmIntrinsics::_getObject:                return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,  !is_volatile, false);
-  case vmIntrinsics::_getBoolean:               return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile, false);
-  case vmIntrinsics::_getByte:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,    !is_volatile, false);
-  case vmIntrinsics::_getShort:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile, false);
-  case vmIntrinsics::_getChar:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile, false);
-  case vmIntrinsics::_getInt:                   return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile, false);
-  case vmIntrinsics::_getLong:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile, false);
-  case vmIntrinsics::_getFloat:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,   !is_volatile, false);
-  case vmIntrinsics::_getDouble:                return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,  !is_volatile, false);
-  case vmIntrinsics::_putObject:                return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,  !is_volatile, false);
-  case vmIntrinsics::_putBoolean:               return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN, !is_volatile, false);
-  case vmIntrinsics::_putByte:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,    !is_volatile, false);
-  case vmIntrinsics::_putShort:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile, false);
-  case vmIntrinsics::_putChar:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile, false);
-  case vmIntrinsics::_putInt:                   return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile, false);
-  case vmIntrinsics::_putLong:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile, false);
-  case vmIntrinsics::_putFloat:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,   !is_volatile, false);
-  case vmIntrinsics::_putDouble:                return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,  !is_volatile, false);
-
-  case vmIntrinsics::_getByte_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE,    !is_volatile, false);
-  case vmIntrinsics::_getShort_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT,   !is_volatile, false);
-  case vmIntrinsics::_getChar_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR,    !is_volatile, false);
-  case vmIntrinsics::_getInt_raw:               return inline_unsafe_access( is_native_ptr, !is_store, T_INT,     !is_volatile, false);
-  case vmIntrinsics::_getLong_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_LONG,    !is_volatile, false);
-  case vmIntrinsics::_getFloat_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT,   !is_volatile, false);
-  case vmIntrinsics::_getDouble_raw:            return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE,  !is_volatile, false);
-  case vmIntrinsics::_getAddress_raw:           return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile, false);
-
-  case vmIntrinsics::_putByte_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_BYTE,    !is_volatile, false);
-  case vmIntrinsics::_putShort_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_SHORT,   !is_volatile, false);
-  case vmIntrinsics::_putChar_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_CHAR,    !is_volatile, false);
-  case vmIntrinsics::_putInt_raw:               return inline_unsafe_access( is_native_ptr,  is_store, T_INT,     !is_volatile, false);
-  case vmIntrinsics::_putLong_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_LONG,    !is_volatile, false);
-  case vmIntrinsics::_putFloat_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_FLOAT,   !is_volatile, false);
-  case vmIntrinsics::_putDouble_raw:            return inline_unsafe_access( is_native_ptr,  is_store, T_DOUBLE,  !is_volatile, false);
-  case vmIntrinsics::_putAddress_raw:           return inline_unsafe_access( is_native_ptr,  is_store, T_ADDRESS, !is_volatile, false);
-
-  case vmIntrinsics::_getObjectVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   is_volatile, false);
-  case vmIntrinsics::_getBooleanVolatile:       return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  is_volatile, false);
-  case vmIntrinsics::_getByteVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     is_volatile, false);
-  case vmIntrinsics::_getShortVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    is_volatile, false);
-  case vmIntrinsics::_getCharVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     is_volatile, false);
-  case vmIntrinsics::_getIntVolatile:           return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      is_volatile, false);
-  case vmIntrinsics::_getLongVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     is_volatile, false);
-  case vmIntrinsics::_getFloatVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    is_volatile, false);
-  case vmIntrinsics::_getDoubleVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   is_volatile, false);
-
-  case vmIntrinsics::_putObjectVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   is_volatile, false);
-  case vmIntrinsics::_putBooleanVolatile:       return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  is_volatile, false);
-  case vmIntrinsics::_putByteVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     is_volatile, false);
-  case vmIntrinsics::_putShortVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    is_volatile, false);
-  case vmIntrinsics::_putCharVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     is_volatile, false);
-  case vmIntrinsics::_putIntVolatile:           return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      is_volatile, false);
-  case vmIntrinsics::_putLongVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     is_volatile, false);
-  case vmIntrinsics::_putFloatVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    is_volatile, false);
-  case vmIntrinsics::_putDoubleVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   is_volatile, false);
-
-  case vmIntrinsics::_getShortUnaligned:        return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile, true);
-  case vmIntrinsics::_getCharUnaligned:         return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile, true);
-  case vmIntrinsics::_getIntUnaligned:          return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile, true);
-  case vmIntrinsics::_getLongUnaligned:         return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile, true);
-
-  case vmIntrinsics::_putShortUnaligned:        return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile, true);
-  case vmIntrinsics::_putCharUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile, true);
-  case vmIntrinsics::_putIntUnaligned:          return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile, true);
-  case vmIntrinsics::_putLongUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile, true);
-
-  case vmIntrinsics::_compareAndSwapObject:     return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
-  case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
-  case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
-
-  case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
-  case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
-  case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
-
-  case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
-  case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
-  case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
-  case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
-  case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
+  case vmIntrinsics::_getObject:                return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   Relaxed, false);
+  case vmIntrinsics::_getBoolean:               return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  Relaxed, false);
+  case vmIntrinsics::_getByte:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     Relaxed, false);
+  case vmIntrinsics::_getShort:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    Relaxed, false);
+  case vmIntrinsics::_getChar:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     Relaxed, false);
+  case vmIntrinsics::_getInt:                   return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      Relaxed, false);
+  case vmIntrinsics::_getLong:                  return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     Relaxed, false);
+  case vmIntrinsics::_getFloat:                 return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    Relaxed, false);
+  case vmIntrinsics::_getDouble:                return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   Relaxed, false);
+
+  case vmIntrinsics::_putObject:                return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   Relaxed, false);
+  case vmIntrinsics::_putBoolean:               return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  Relaxed, false);
+  case vmIntrinsics::_putByte:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     Relaxed, false);
+  case vmIntrinsics::_putShort:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    Relaxed, false);
+  case vmIntrinsics::_putChar:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     Relaxed, false);
+  case vmIntrinsics::_putInt:                   return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      Relaxed, false);
+  case vmIntrinsics::_putLong:                  return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     Relaxed, false);
+  case vmIntrinsics::_putFloat:                 return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    Relaxed, false);
+  case vmIntrinsics::_putDouble:                return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   Relaxed, false);
+
+  case vmIntrinsics::_getByte_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE,     Relaxed, false);
+  case vmIntrinsics::_getShort_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT,    Relaxed, false);
+  case vmIntrinsics::_getChar_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR,     Relaxed, false);
+  case vmIntrinsics::_getInt_raw:               return inline_unsafe_access( is_native_ptr, !is_store, T_INT,      Relaxed, false);
+  case vmIntrinsics::_getLong_raw:              return inline_unsafe_access( is_native_ptr, !is_store, T_LONG,     Relaxed, false);
+  case vmIntrinsics::_getFloat_raw:             return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT,    Relaxed, false);
+  case vmIntrinsics::_getDouble_raw:            return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE,   Relaxed, false);
+  case vmIntrinsics::_getAddress_raw:           return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS,  Relaxed, false);
+
+  case vmIntrinsics::_putByte_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_BYTE,     Relaxed, false);
+  case vmIntrinsics::_putShort_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_SHORT,    Relaxed, false);
+  case vmIntrinsics::_putChar_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_CHAR,     Relaxed, false);
+  case vmIntrinsics::_putInt_raw:               return inline_unsafe_access( is_native_ptr,  is_store, T_INT,      Relaxed, false);
+  case vmIntrinsics::_putLong_raw:              return inline_unsafe_access( is_native_ptr,  is_store, T_LONG,     Relaxed, false);
+  case vmIntrinsics::_putFloat_raw:             return inline_unsafe_access( is_native_ptr,  is_store, T_FLOAT,    Relaxed, false);
+  case vmIntrinsics::_putDouble_raw:            return inline_unsafe_access( is_native_ptr,  is_store, T_DOUBLE,   Relaxed, false);
+  case vmIntrinsics::_putAddress_raw:           return inline_unsafe_access( is_native_ptr,  is_store, T_ADDRESS,  Relaxed, false);
+
+  case vmIntrinsics::_getObjectVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   Volatile, false);
+  case vmIntrinsics::_getBooleanVolatile:       return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  Volatile, false);
+  case vmIntrinsics::_getByteVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     Volatile, false);
+  case vmIntrinsics::_getShortVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    Volatile, false);
+  case vmIntrinsics::_getCharVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     Volatile, false);
+  case vmIntrinsics::_getIntVolatile:           return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      Volatile, false);
+  case vmIntrinsics::_getLongVolatile:          return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     Volatile, false);
+  case vmIntrinsics::_getFloatVolatile:         return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    Volatile, false);
+  case vmIntrinsics::_getDoubleVolatile:        return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   Volatile, false);
+
+  case vmIntrinsics::_putObjectVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   Volatile, false);
+  case vmIntrinsics::_putBooleanVolatile:       return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  Volatile, false);
+  case vmIntrinsics::_putByteVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     Volatile, false);
+  case vmIntrinsics::_putShortVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    Volatile, false);
+  case vmIntrinsics::_putCharVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     Volatile, false);
+  case vmIntrinsics::_putIntVolatile:           return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      Volatile, false);
+  case vmIntrinsics::_putLongVolatile:          return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     Volatile, false);
+  case vmIntrinsics::_putFloatVolatile:         return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    Volatile, false);
+  case vmIntrinsics::_putDoubleVolatile:        return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   Volatile, false);
+
+  case vmIntrinsics::_getShortUnaligned:        return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    Relaxed, true);
+  case vmIntrinsics::_getCharUnaligned:         return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     Relaxed, true);
+  case vmIntrinsics::_getIntUnaligned:          return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      Relaxed, true);
+  case vmIntrinsics::_getLongUnaligned:         return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     Relaxed, true);
+
+  case vmIntrinsics::_putShortUnaligned:        return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    Relaxed, true);
+  case vmIntrinsics::_putCharUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     Relaxed, true);
+  case vmIntrinsics::_putIntUnaligned:          return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      Relaxed, true);
+  case vmIntrinsics::_putLongUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     Relaxed, true);
+
+  case vmIntrinsics::_putOrderedObject:         return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   Release, false);
+  case vmIntrinsics::_putOrderedInt:            return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      Release, false);
+  case vmIntrinsics::_putOrderedLong:           return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     Release, false);
+
+  case vmIntrinsics::_getObjectAcquire:         return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   Acquire, false);
+  case vmIntrinsics::_getBooleanAcquire:        return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  Acquire, false);
+  case vmIntrinsics::_getByteAcquire:           return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     Acquire, false);
+  case vmIntrinsics::_getShortAcquire:          return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    Acquire, false);
+  case vmIntrinsics::_getCharAcquire:           return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     Acquire, false);
+  case vmIntrinsics::_getIntAcquire:            return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      Acquire, false);
+  case vmIntrinsics::_getLongAcquire:           return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     Acquire, false);
+  case vmIntrinsics::_getFloatAcquire:          return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    Acquire, false);
+  case vmIntrinsics::_getDoubleAcquire:         return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   Acquire, false);
+
+  case vmIntrinsics::_putObjectRelease:         return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   Release, false);
+  case vmIntrinsics::_putBooleanRelease:        return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  Release, false);
+  case vmIntrinsics::_putByteRelease:           return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     Release, false);
+  case vmIntrinsics::_putShortRelease:          return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    Release, false);
+  case vmIntrinsics::_putCharRelease:           return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     Release, false);
+  case vmIntrinsics::_putIntRelease:            return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      Release, false);
+  case vmIntrinsics::_putLongRelease:           return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     Release, false);
+  case vmIntrinsics::_putFloatRelease:          return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    Release, false);
+  case vmIntrinsics::_putDoubleRelease:         return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   Release, false);
+
+  case vmIntrinsics::_getObjectOpaque:          return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   Opaque, false);
+  case vmIntrinsics::_getBooleanOpaque:         return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  Opaque, false);
+  case vmIntrinsics::_getByteOpaque:            return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     Opaque, false);
+  case vmIntrinsics::_getShortOpaque:           return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    Opaque, false);
+  case vmIntrinsics::_getCharOpaque:            return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     Opaque, false);
+  case vmIntrinsics::_getIntOpaque:             return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      Opaque, false);
+  case vmIntrinsics::_getLongOpaque:            return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     Opaque, false);
+  case vmIntrinsics::_getFloatOpaque:           return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    Opaque, false);
+  case vmIntrinsics::_getDoubleOpaque:          return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   Opaque, false);
+
+  case vmIntrinsics::_putObjectOpaque:          return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   Opaque, false);
+  case vmIntrinsics::_putBooleanOpaque:         return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  Opaque, false);
+  case vmIntrinsics::_putByteOpaque:            return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     Opaque, false);
+  case vmIntrinsics::_putShortOpaque:           return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    Opaque, false);
+  case vmIntrinsics::_putCharOpaque:            return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     Opaque, false);
+  case vmIntrinsics::_putIntOpaque:             return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      Opaque, false);
+  case vmIntrinsics::_putLongOpaque:            return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     Opaque, false);
+  case vmIntrinsics::_putFloatOpaque:           return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    Opaque, false);
+  case vmIntrinsics::_putDoubleOpaque:          return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   Opaque, false);
+
+  case vmIntrinsics::_compareAndSwapObject:             return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap,      Volatile);
+  case vmIntrinsics::_compareAndSwapInt:                return inline_unsafe_load_store(T_INT,    LS_cmp_swap,      Volatile);
+  case vmIntrinsics::_compareAndSwapLong:               return inline_unsafe_load_store(T_LONG,   LS_cmp_swap,      Volatile);
+
+  case vmIntrinsics::_weakCompareAndSwapObject:         return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Relaxed);
+  case vmIntrinsics::_weakCompareAndSwapObjectAcquire:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Acquire);
+  case vmIntrinsics::_weakCompareAndSwapObjectRelease:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap_weak, Release);
+  case vmIntrinsics::_weakCompareAndSwapInt:            return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Relaxed);
+  case vmIntrinsics::_weakCompareAndSwapIntAcquire:     return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Acquire);
+  case vmIntrinsics::_weakCompareAndSwapIntRelease:     return inline_unsafe_load_store(T_INT,    LS_cmp_swap_weak, Release);
+  case vmIntrinsics::_weakCompareAndSwapLong:           return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Relaxed);
+  case vmIntrinsics::_weakCompareAndSwapLongAcquire:    return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Acquire);
+  case vmIntrinsics::_weakCompareAndSwapLongRelease:    return inline_unsafe_load_store(T_LONG,   LS_cmp_swap_weak, Release);
+
+  case vmIntrinsics::_compareAndExchangeObjectVolatile: return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Volatile);
+  case vmIntrinsics::_compareAndExchangeObjectAcquire:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Acquire);
+  case vmIntrinsics::_compareAndExchangeObjectRelease:  return inline_unsafe_load_store(T_OBJECT, LS_cmp_exchange,  Release);
+  case vmIntrinsics::_compareAndExchangeIntVolatile:    return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Volatile);
+  case vmIntrinsics::_compareAndExchangeIntAcquire:     return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Acquire);
+  case vmIntrinsics::_compareAndExchangeIntRelease:     return inline_unsafe_load_store(T_INT,    LS_cmp_exchange,  Release);
+  case vmIntrinsics::_compareAndExchangeLongVolatile:   return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Volatile);
+  case vmIntrinsics::_compareAndExchangeLongAcquire:    return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Acquire);
+  case vmIntrinsics::_compareAndExchangeLongRelease:    return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Release);
+
+  case vmIntrinsics::_getAndAddInt:                     return inline_unsafe_load_store(T_INT,    LS_get_add,       Volatile);
+  case vmIntrinsics::_getAndAddLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_add,       Volatile);
+  case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
+  case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
+  case vmIntrinsics::_getAndSetObject:                  return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 
   case vmIntrinsics::_loadFence:
   case vmIntrinsics::_storeFence:
   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 
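Condensing the rewritten dispatch above, the old flag/kind arguments map onto the new AccessKind and LoadStoreKind values roughly as follows (editor's summary of the cases shown):

// getX/putX, the _raw variants, and the Unaligned variants  -> Relaxed
// getXVolatile/putXVolatile                                 -> Volatile
// putOrderedX (was inline_unsafe_ordered_store)             -> Release stores
// getXAcquire / putXRelease / getXOpaque / putXOpaque (new) -> Acquire / Release / Opaque
// compareAndSwapX (was LS_cmpxchg)                          -> LS_cmp_swap, Volatile
// weakCompareAndSwapX[Acquire|Release] (new)                -> LS_cmp_swap_weak, Relaxed/Acquire/Release
// compareAndExchangeX[Volatile|Acquire|Release] (new)       -> LS_cmp_exchange, Volatile/Acquire/Release
// getAndAddX / getAndSetX (were LS_xadd / LS_xchg)          -> LS_get_add / LS_get_set, Volatile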
@@ -2282 +2346 @@
     return tjp;
   }
   return NULL;
 }
 
-bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool unaligned) {
+bool LibraryCallKit::inline_unsafe_access(const bool is_native_ptr, bool is_store, const BasicType type, const AccessKind kind, const bool unaligned) {
   if (callee()->is_static())  return false;  // caller must have the capability!
+  guarantee(!is_store || kind != Acquire, "Acquire accesses can be produced only for loads");
+  guarantee( is_store || kind != Release, "Release accesses can be produced only for stores");
 
 #ifndef PRODUCT
   {
     ResourceMark rm;
     // Check the signatures.
@@ -2372 +2438 @@
   // We will need memory barriers unless we can determine a unique
   // alias category for this reference.  (Note:  If for some reason
   // the barriers get omitted and the unsafe reference begins to "pollute"
   // the alias analysis of the rest of the graph, either Compile::can_alias
   // or Compile::must_alias will throw a diagnostic assert.)
-  bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
+  bool need_mem_bar;
+  switch (kind) {
+      case Relaxed:
+          need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
+          break;
+      case Opaque:
+          // Opaque uses CPUOrder membars for protection against code movement.
+      case Acquire:
+      case Release:
+      case Volatile:
+          need_mem_bar = true;
+          break;
+      default:
+          ShouldNotReachHere();
+  }
+
+  // Some accesses require access atomicity for all types, notably longs and doubles.
+  // When AlwaysAtomicAccesses is enabled, all accesses are atomic.
+  bool requires_atomic_access = false;
+  switch (kind) {
+      case Relaxed:
+      case Opaque:
+          requires_atomic_access = AlwaysAtomicAccesses;
+          break;
+      case Acquire:
+      case Release:
+      case Volatile:
+          requires_atomic_access = true;
+          break;
+      default:
+          ShouldNotReachHere();
+  }
+
+  // Figure out the memory ordering.
+  // Acquire/Release/Volatile accesses require marking the loads/stores with MemOrd
+  MemNode::MemOrd mo = access_kind_to_memord_LS(kind, is_store);
 
   // If we are reading the value of the referent field of a Reference
   // object (either by using Unsafe directly or through reflection)
   // then, if G1 is enabled, we need to record the referent in an
   // SATB log buffer using the pre-barrier mechanism.
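The hunk above ends by computing mo via access_kind_to_memord_LS(), which is declared in the header section but whose body is outside this excerpt. A plausible sketch of what it computes, consistent with the call site here (an editor's reconstruction, not the changeset's actual body):

// Editor's sketch: pick the MemOrd to stamp on the load or store.
// For Volatile, the relevant ordering differs between the load side
// (acquire) and the store side (release); the CPU membars emitted
// around the access supply the rest of the volatile semantics.
MemNode::MemOrd LibraryCallKit::access_kind_to_memord_LS(AccessKind kind, bool is_store) {
  switch (kind) {
    case Opaque:   // fall through: ordering is enforced by membars, not MemOrd
    case Relaxed:  return MemNode::unordered;
    case Acquire:  return MemNode::acquire;
    case Release:  return MemNode::release;
    case Volatile: return is_store ? MemNode::release : MemNode::acquire;
    default:       ShouldNotReachHere(); return MemNode::unordered;
  }
}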
@@ -2399 +2500 @@
   // Heap pointers get a null-check from the interpreter,
   // as a courtesy.  However, this is not guaranteed by Unsafe,
   // and it is not possible to fully distinguish unintended nulls
   // from intended ones in this API.
 
-  if (is_volatile) {
-    // We need to emit leading and trailing CPU membars (see below) in
-    // addition to memory membars when is_volatile. This is a little
-    // too strong, but avoids the need to insert per-alias-type
-    // volatile membars (for stores; compare Parse::do_put_xxx), which
-    // we cannot do effectively here because we probably only have a
-    // rough approximation of type.
-    need_mem_bar = true;
-    // For Stores, place a memory ordering barrier now.
-    if (is_store) {
-      insert_mem_bar(Op_MemBarRelease);
-    } else {
-      if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
-        insert_mem_bar(Op_MemBarVolatile);
-      }
-    }
-  }
+  // We need to emit leading and trailing CPU membars (see below) in
+  // addition to memory membars for special access modes. This is a little
+  // too strong, but avoids the need to insert per-alias-type
+  // volatile membars (for stores; compare Parse::do_put_xxx), which
+  // we cannot do effectively here because we probably only have a
+  // rough approximation of type.
+
+  switch(kind) {
+    case Relaxed:
+    case Opaque:
+    case Acquire:
+      break;
+    case Release:
+    case Volatile:
+      if (is_store) {
+        insert_mem_bar(Op_MemBarRelease);
+      } else {
+        if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
+          insert_mem_bar(Op_MemBarVolatile);
+        }
+      }
+      break;
+    default:
+      ShouldNotReachHere();
   }
 
   // Memory barrier to prevent normal and 'unsafe' accesses from
   // bypassing each other.  Happens after null checks, so the
   // exception paths do not take memory state from the memory barrier,
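Combined with the trailing-barrier switch emitted after the access (later in this function), the overall membar placement per access kind works out roughly as follows (editor's summary; "IRIW" refers to support_IRIW_for_not_multiple_copy_atomic_cpu):

// kind             leading membar               trailing membar
// Relaxed/Opaque   -                            -
// Acquire (load)   -                            MemBarAcquire
// Release (store)  MemBarRelease                -
// Volatile load    MemBarVolatile (IRIW CPUs)   MemBarAcquire
// Volatile store   MemBarRelease                MemBarVolatile (non-IRIW CPUs)
// plus a MemBarCPUOrder before and after the access whenever need_mem_bar is set.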
@@ -2458 +2567 @@
       if (con_type != NULL) {
         p = makecon(con_type);
       }
     }
     if (p == NULL) {
-      MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
       // To be valid, unsafe loads may depend on other conditions than
       // the one that guards them: pin the Load node
-      p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile, unaligned, mismatched);
+      p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, requires_atomic_access, unaligned, mismatched);
       // load value
       switch (type) {
       case T_BOOLEAN:
       case T_CHAR:
       case T_BYTE:
@@ -2475 +2583 @@
       case T_FLOAT:
       case T_DOUBLE:
         break;
       case T_OBJECT:
         if (need_read_barrier) {
-          insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
+          // We do not require a mem bar inside pre_barrier if need_mem_bar
+          // is set: the barriers would be emitted by us.
+          insert_pre_barrier(heap_base_oop, offset, p, !need_mem_bar);
         }
         break;
       case T_ADDRESS:
         // Cast to an int type.
         p = _gvn.transform(new CastP2XNode(NULL, p));
@@ -2506 +2616 @@
       val = ConvL2X(val);
       val = _gvn.transform(new CastX2PNode(val));
       break;
     }
 
-    MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
-    if (type != T_OBJECT ) {
-      (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile, unaligned, mismatched);
+    if (type != T_OBJECT) {
+      (void) store_to_memory(control(), adr, val, type, adr_type, mo, requires_atomic_access, unaligned, mismatched);
     } else {
       // Possibly an oop being stored to Java heap or native memory
       if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop))) {
         // oop to Java heap.
         (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
@@ -2529 +2638 @@
           sync_kit(ideal);
           Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
           // Update IdealKit memory.
           __ sync_kit(this);
         } __ else_(); {
-          __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile, mismatched);
+          __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, requires_atomic_access, mismatched);
         } __ end_if();
         // Final sync IdealKit and GraphKit.
         final_sync(ideal);
 #undef __
       }
     }
   }
 
-  if (is_volatile) {
-    if (!is_store) {
-      insert_mem_bar(Op_MemBarAcquire);
-    } else {
-      if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
-        insert_mem_bar(Op_MemBarVolatile);
-      }
-    }
-  }
+  switch(kind) {
+    case Relaxed:
+    case Opaque:
+    case Release:
+      break;
+    case Acquire:
+    case Volatile:
+      if (!is_store) {
+        insert_mem_bar(Op_MemBarAcquire);
+      } else {
+        if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
+          insert_mem_bar(Op_MemBarVolatile);
+        }
+      }
+      break;
+    default:
+      ShouldNotReachHere();
+  }
 
   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
 
   return true;
 }
 
 //----------------------------inline_unsafe_load_store----------------------------
 // This method serves a couple of different customers (depending on LoadStoreKind):
 //
-// LS_cmpxchg:
-//   public final native boolean compareAndSwapObject(Object o, long offset, Object expected, Object x);
-//   public final native boolean compareAndSwapInt(   Object o, long offset, int    expected, int    x);
-//   public final native boolean compareAndSwapLong(  Object o, long offset, long   expected, long   x);
-//
-// LS_xadd:
-//   public int  getAndAddInt( Object o, long offset, int  delta)
-//   public long getAndAddLong(Object o, long offset, long delta)
-//
-// LS_xchg:
+// LS_cmp_swap:
+//
+//   boolean compareAndSwapObject(Object o, long offset, Object expected, Object x);
+//   boolean compareAndSwapInt(   Object o, long offset, int    expected, int    x);
+//   boolean compareAndSwapLong(  Object o, long offset, long   expected, long   x);
+//
+// LS_cmp_swap_weak:
+//
+//   boolean weakCompareAndSwapObject(       Object o, long offset, Object expected, Object x);
+//   boolean weakCompareAndSwapObjectAcquire(Object o, long offset, Object expected, Object x);
+//   boolean weakCompareAndSwapObjectRelease(Object o, long offset, Object expected, Object x);
+//
+//   boolean weakCompareAndSwapInt(          Object o, long offset, int    expected, int    x);
+//   boolean weakCompareAndSwapIntAcquire(   Object o, long offset, int    expected, int    x);
+//   boolean weakCompareAndSwapIntRelease(   Object o, long offset, int    expected, int    x);
+//
+//   boolean weakCompareAndSwapLong(         Object o, long offset, long   expected, long   x);
+//   boolean weakCompareAndSwapLongAcquire(  Object o, long offset, long   expected, long   x);
+//   boolean weakCompareAndSwapLongRelease(  Object o, long offset, long   expected, long   x);
+//
+// LS_cmp_exchange:
+//
+//   Object compareAndExchangeObjectVolatile(Object o, long offset, Object expected, Object x);
+//   Object compareAndExchangeObjectAcquire( Object o, long offset, Object expected, Object x);
+//   Object compareAndExchangeObjectRelease( Object o, long offset, Object expected, Object x);
+//
+//   Object compareAndExchangeIntVolatile(   Object o, long offset, Object expected, Object x);
+//   Object compareAndExchangeIntAcquire(    Object o, long offset, Object expected, Object x);
+//   Object compareAndExchangeIntRelease(    Object o, long offset, Object expected, Object x);
+//
+//   Object compareAndExchangeLongVolatile(  Object o, long offset, Object expected, Object x);
+//   Object compareAndExchangeLongAcquire(   Object o, long offset, Object expected, Object x);
+//   Object compareAndExchangeLongRelease(   Object o, long offset, Object expected, Object x);
+//
+// LS_get_add:
+//
+//   int  getAndAddInt( Object o, long offset, int  delta)
+//   long getAndAddLong(Object o, long offset, long delta)
+//
+// LS_get_set:
+//
 //   int    getAndSet(Object o, long offset, int    newValue)
 //   long   getAndSet(Object o, long offset, long   newValue)
 //   Object getAndSet(Object o, long offset, Object newValue)
 //
-bool LibraryCallKit::inline_unsafe_load_store(BasicType type, LoadStoreKind kind) {
+bool LibraryCallKit::inline_unsafe_load_store(const BasicType type, const LoadStoreKind kind, const AccessKind access_kind) {
   // This basic scheme here is the same as inline_unsafe_access, but
   // differs in enough details that combining them would make the code
   // overly confusing.  (This is a true fact! I originally combined
   // them, but even I was confused by it!) As much code/comments as
   // possible are retained from inline_unsafe_access though to make
@@ -2587 +2736 @@
   {
     ResourceMark rm;
     // Check the signatures.
     ciSignature* sig = callee()->signature();
     rtype = sig->return_type()->basic_type();
-    if (kind == LS_xadd || kind == LS_xchg) {
+    switch(kind) {
+      case LS_get_add:
+      case LS_get_set: {
       // Check the signatures.
 #ifdef ASSERT
       assert(rtype == type, "get and set must return the expected type");
       assert(sig->count() == 3, "get and set has 3 arguments");
       assert(sig->type_at(0)->basic_type() == T_OBJECT, "get and set base is object");
       assert(sig->type_at(1)->basic_type() == T_LONG, "get and set offset is long");
       assert(sig->type_at(2)->basic_type() == type, "get and set must take expected type as new value/delta");
 #endif // ASSERT
-    } else if (kind == LS_cmpxchg) {
+        break;
+      }
+      case LS_cmp_swap:
+      case LS_cmp_swap_weak: {
       // Check the signatures.
 #ifdef ASSERT
       assert(rtype == T_BOOLEAN, "CAS must return boolean");
       assert(sig->count() == 4, "CAS has 4 arguments");
       assert(sig->type_at(0)->basic_type() == T_OBJECT, "CAS base is object");
       assert(sig->type_at(1)->basic_type() == T_LONG, "CAS offset is long");
 #endif // ASSERT
-    } else {
-      ShouldNotReachHere();
+        break;
+      }
+      case LS_cmp_exchange: {
+      // Check the signatures.
+#ifdef ASSERT
+      assert(rtype == type, "CAS must return the expected type");
+      assert(sig->count() == 4, "CAS has 4 arguments");
+      assert(sig->type_at(0)->basic_type() == T_OBJECT, "CAS base is object");
+      assert(sig->type_at(1)->basic_type() == T_LONG, "CAS offset is long");
+#endif // ASSERT
+        break;
+      }
+      default:
+        ShouldNotReachHere();
     }
   }
 #endif //PRODUCT
 
   C->set_has_unsafe_access(true);  // Mark eventual nmethod as "unsafe".
@@ -2618 +2784 @@
   Node* receiver = NULL;
   Node* base     = NULL;
   Node* offset   = NULL;
   Node* oldval   = NULL;
   Node* newval   = NULL;
-  if (kind == LS_cmpxchg) {
-    const bool two_slot_type = type2size[type] == 2;
-    receiver = argument(0);  // type: oop
-    base     = argument(1);  // type: oop
-    offset   = argument(2);  // type: long
-    oldval   = argument(4);  // type: oop, int, or long
-    newval   = argument(two_slot_type ? 6 : 5);  // type: oop, int, or long
-  } else if (kind == LS_xadd || kind == LS_xchg){
-    receiver = argument(0);  // type: oop
-    base     = argument(1);  // type: oop
-    offset   = argument(2);  // type: long
-    oldval   = NULL;
-    newval   = argument(4);  // type: oop, int, or long
-  }
+  switch(kind) {
+    case LS_cmp_swap:
+    case LS_cmp_swap_weak:
+    case LS_cmp_exchange: {
+      const bool two_slot_type = type2size[type] == 2;
+      receiver = argument(0);  // type: oop
+      base     = argument(1);  // type: oop
+      offset   = argument(2);  // type: long
+      oldval   = argument(4);  // type: oop, int, or long
+      newval   = argument(two_slot_type ? 6 : 5);  // type: oop, int, or long
+      break;
+    }
+    case LS_get_add:
+    case LS_get_set: {
+      receiver = argument(0);  // type: oop
+      base     = argument(1);  // type: oop
+      offset   = argument(2);  // type: long
+      oldval   = NULL;
+      newval   = argument(4);  // type: oop, int, or long
+      break;
+    }
+    default:
+      ShouldNotReachHere();
+  }
 
   // Null check receiver.
   receiver = null_check(receiver);
   if (stopped()) {
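The slot arithmetic above (argument(4), argument(two_slot_type ? 6 : 5)) follows the JVM calling convention, where long and double arguments occupy two slots each. For illustration (editor's example), the incoming argument slots for compareAndSwapLong(Object o, long offset, long expected, long x):

// slot 0    : receiver (the Unsafe instance)
// slot 1    : o        (base oop)
// slots 2-3 : offset   (long)
// slots 4-5 : expected (long)   -> oldval = argument(4)
// slots 6-7 : x        (long)   -> newval = argument(6), since two_slot_type
// For the int variant, expected sits in slot 4 and x in slot 5.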
  2655   // trying to refine types. Just use the coarse types here.
  2831   // trying to refine types. Just use the coarse types here.
  2656   const Type *value_type = Type::get_const_basic_type(type);
  2832   const Type *value_type = Type::get_const_basic_type(type);
  2657   Compile::AliasType* alias_type = C->alias_type(adr_type);
  2833   Compile::AliasType* alias_type = C->alias_type(adr_type);
  2658   assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
  2834   assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
  2659 
  2835 
  2660   if (kind == LS_xchg && type == T_OBJECT) {
  2836   switch (kind) {
  2661     const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
  2837     case LS_get_set:
  2662     if (tjp != NULL) {
  2838     case LS_cmp_exchange: {
  2663       value_type = tjp;
  2839       if (type == T_OBJECT) {
  2664     }
  2840         const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
       
  2841         if (tjp != NULL) {
       
  2842           value_type = tjp;
       
  2843         }
       
  2844       }
       
  2845       break;
       
  2846     }
       
  2847     case LS_cmp_swap:
       
  2848     case LS_cmp_swap_weak:
       
  2849     case LS_get_add:
       
  2850       break;
       
  2851     default:
       
  2852       ShouldNotReachHere();
  2665   }
  2853   }
  2666 
  2854 
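Why only LS_get_set and LS_cmp_exchange are sharpened: they alone return the object value itself (the same pair is tested again further down when the result is decoded), whereas the cmp_swap kinds return a boolean and get_add a number. A one-function sketch of that invariant, under illustrative names:

enum LoadStoreKind { LS_get_add, LS_get_set, LS_cmp_swap,
                     LS_cmp_swap_weak, LS_cmp_exchange };

// Only these kinds return the object value itself, so only they can profit
// from a sharpened oop type.
static bool result_is_object_value(LoadStoreKind k) {
  return k == LS_get_set || k == LS_cmp_exchange;
}

int main() { return result_is_object_value(LS_cmp_swap) ? 1 : 0; }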
  2667   int alias_idx = C->get_alias_index(adr_type);
  2855   int alias_idx = C->get_alias_index(adr_type);
  2668 
  2856 
  2669   // Memory-model-wise, a LoadStore acts like a little synchronized
  2857   // Memory-model-wise, a LoadStore acts like a little synchronized
   2670   // block, so it needs barriers on each side.  These don't translate
   2858   // block, so it needs barriers on each side.  These don't translate
   2671   // into actual barriers on most machines, but we still need the rest
   2859   // into actual barriers on most machines, but we still need the rest
   2672   // of the compiler to respect ordering.
   2860   // of the compiler to respect ordering.
  2673 
  2861 
  2674   insert_mem_bar(Op_MemBarRelease);
  2862   switch (access_kind) {
       
  2863     case Relaxed:
       
  2864     case Acquire:
       
  2865       break;
       
  2866     case Release:
       
  2867     case Volatile:
       
  2868       insert_mem_bar(Op_MemBarRelease);
       
  2869       break;
       
  2870     default:
       
  2871       ShouldNotReachHere();
       
  2872   }
  2675   insert_mem_bar(Op_MemBarCPUOrder);
  2873   insert_mem_bar(Op_MemBarCPUOrder);
       
  2874 
       
  2875   // Figure out the memory ordering.
       
  2876   MemNode::MemOrd mo = access_kind_to_memord(access_kind);
  2676 
  2877 
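The leading barrier switch above and the trailing one near the end of this function are easiest to read side by side. A standalone sketch of the combined mapping, with modeled names (this is not the HotSpot API, and it covers only the four kinds these switches accept; Opaque hits the ShouldNotReachHere default in the real code):

#include <cstdio>

enum AccessKind { Relaxed, Opaque, Volatile, Acquire, Release };

struct Fences { bool leading_release; bool trailing_acquire; };

// Mirrors the two switches: Release/Volatile fence before the LoadStore,
// Acquire/Volatile fence after it; Relaxed fences on neither side.
static Fences fences_for(AccessKind k) {
  switch (k) {
    case Relaxed:  return { false, false };
    case Acquire:  return { false, true  };
    case Release:  return { true,  false };
    case Volatile: return { true,  true  };
    default:       return { false, false };  // Opaque: not expected here
  }
}

int main() {
  Fences f = fences_for(Volatile);
  std::printf("leading=%d trailing=%d\n", f.leading_release, f.trailing_acquire);
  return 0;
}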
  2677   // 4984716: MemBars must be inserted before this
  2878   // 4984716: MemBars must be inserted before this
  2678   //          memory node in order to avoid a false
  2879   //          memory node in order to avoid a false
  2679   //          dependency which will confuse the scheduler.
  2880   //          dependency which will confuse the scheduler.
  2680   Node *mem = memory(alias_idx);
  2881   Node *mem = memory(alias_idx);
  2682   // For now, we handle only those cases that actually exist: ints,
  2883   // For now, we handle only those cases that actually exist: ints,
  2683   // longs, and Object. Adding others should be straightforward.
  2884   // longs, and Object. Adding others should be straightforward.
  2684   Node* load_store = NULL;
  2885   Node* load_store = NULL;
  2685   switch(type) {
  2886   switch(type) {
  2686   case T_INT:
  2887   case T_INT:
  2687     if (kind == LS_xadd) {
  2888     switch(kind) {
  2688       load_store = _gvn.transform(new GetAndAddINode(control(), mem, adr, newval, adr_type));
  2889       case LS_get_add:
  2689     } else if (kind == LS_xchg) {
  2890         load_store = _gvn.transform(new GetAndAddINode(control(), mem, adr, newval, adr_type));
  2690       load_store = _gvn.transform(new GetAndSetINode(control(), mem, adr, newval, adr_type));
  2891         break;
  2691     } else if (kind == LS_cmpxchg) {
  2892       case LS_get_set:
  2692       load_store = _gvn.transform(new CompareAndSwapINode(control(), mem, adr, newval, oldval));
  2893         load_store = _gvn.transform(new GetAndSetINode(control(), mem, adr, newval, adr_type));
  2693     } else {
  2894         break;
  2694       ShouldNotReachHere();
  2895       case LS_cmp_swap_weak:
       
  2896         load_store = _gvn.transform(new WeakCompareAndSwapINode(control(), mem, adr, newval, oldval, mo));
       
  2897         break;
       
  2898       case LS_cmp_swap:
       
  2899         load_store = _gvn.transform(new CompareAndSwapINode(control(), mem, adr, newval, oldval, mo));
       
  2900         break;
       
  2901       case LS_cmp_exchange:
       
  2902         load_store = _gvn.transform(new CompareAndExchangeINode(control(), mem, adr, newval, oldval, adr_type, mo));
       
  2903         break;
       
  2904       default:
       
  2905         ShouldNotReachHere();
  2695     }
  2906     }
  2696     break;
  2907     break;
  2697   case T_LONG:
  2908   case T_LONG:
  2698     if (kind == LS_xadd) {
  2909     switch(kind) {
  2699       load_store = _gvn.transform(new GetAndAddLNode(control(), mem, adr, newval, adr_type));
  2910       case LS_get_add:
  2700     } else if (kind == LS_xchg) {
  2911         load_store = _gvn.transform(new GetAndAddLNode(control(), mem, adr, newval, adr_type));
  2701       load_store = _gvn.transform(new GetAndSetLNode(control(), mem, adr, newval, adr_type));
  2912         break;
  2702     } else if (kind == LS_cmpxchg) {
  2913       case LS_get_set:
  2703       load_store = _gvn.transform(new CompareAndSwapLNode(control(), mem, adr, newval, oldval));
  2914         load_store = _gvn.transform(new GetAndSetLNode(control(), mem, adr, newval, adr_type));
  2704     } else {
  2915         break;
  2705       ShouldNotReachHere();
  2916       case LS_cmp_swap_weak:
       
  2917         load_store = _gvn.transform(new WeakCompareAndSwapLNode(control(), mem, adr, newval, oldval, mo));
       
  2918         break;
       
  2919       case LS_cmp_swap:
       
  2920         load_store = _gvn.transform(new CompareAndSwapLNode(control(), mem, adr, newval, oldval, mo));
       
  2921         break;
       
  2922       case LS_cmp_exchange:
       
  2923         load_store = _gvn.transform(new CompareAndExchangeLNode(control(), mem, adr, newval, oldval, adr_type, mo));
       
  2924         break;
       
  2925       default:
       
  2926         ShouldNotReachHere();
  2706     }
  2927     }
  2707     break;
  2928     break;
  2708   case T_OBJECT:
  2929   case T_OBJECT:
  2709     // Transformation of a value which could be NULL pointer (CastPP #NULL)
  2930     // Transformation of a value which could be NULL pointer (CastPP #NULL)
  2710     // could be delayed during Parse (for example, in adjust_map_after_if()).
  2931     // could be delayed during Parse (for example, in adjust_map_after_if()).
  2711     // Execute transformation here to avoid barrier generation in such case.
  2932     // Execute transformation here to avoid barrier generation in such case.
  2712     if (_gvn.type(newval) == TypePtr::NULL_PTR)
  2933     if (_gvn.type(newval) == TypePtr::NULL_PTR)
  2713       newval = _gvn.makecon(TypePtr::NULL_PTR);
  2934       newval = _gvn.makecon(TypePtr::NULL_PTR);
  2714 
  2935 
  2715     // Reference stores need a store barrier.
  2936     // Reference stores need a store barrier.
  2716     if (kind == LS_xchg) {
  2937     switch(kind) {
   2717       // If the pre-barrier must execute before the oop store, the old value will require a do_load here.
  2938       case LS_get_set: {
  2718       if (!can_move_pre_barrier()) {
   2939         // If the pre-barrier must execute before the oop store, the old value will require a do_load here.
  2719         pre_barrier(true /* do_load*/,
  2940         if (!can_move_pre_barrier()) {
  2720                     control(), base, adr, alias_idx, newval, value_type->make_oopptr(),
  2941           pre_barrier(true /* do_load*/,
  2721                     NULL /* pre_val*/,
  2942                       control(), base, adr, alias_idx, newval, value_type->make_oopptr(),
       
  2943                       NULL /* pre_val*/,
       
  2944                       T_OBJECT);
       
  2945         } // Else move pre_barrier to use load_store value, see below.
       
  2946         break;
       
  2947       }
       
  2948       case LS_cmp_swap_weak:
       
  2949       case LS_cmp_swap:
       
  2950       case LS_cmp_exchange: {
       
  2951         // Same as for newval above:
       
  2952         if (_gvn.type(oldval) == TypePtr::NULL_PTR) {
       
  2953           oldval = _gvn.makecon(TypePtr::NULL_PTR);
       
  2954         }
       
  2955         // The only known value which might get overwritten is oldval.
       
  2956         pre_barrier(false /* do_load */,
       
  2957                     control(), NULL, NULL, max_juint, NULL, NULL,
       
  2958                     oldval /* pre_val */,
  2722                     T_OBJECT);
  2959                     T_OBJECT);
  2723       } // Else move pre_barrier to use load_store value, see below.
  2960         break;
  2724     } else if (kind == LS_cmpxchg) {
       
  2725       // Same as for newval above:
       
  2726       if (_gvn.type(oldval) == TypePtr::NULL_PTR) {
       
  2727         oldval = _gvn.makecon(TypePtr::NULL_PTR);
       
  2728       }
  2961       }
  2729       // The only known value which might get overwritten is oldval.
  2962       default:
  2730       pre_barrier(false /* do_load */,
  2963         ShouldNotReachHere();
  2731                   control(), NULL, NULL, max_juint, NULL, NULL,
       
  2732                   oldval /* pre_val */,
       
  2733                   T_OBJECT);
       
  2734     } else {
       
  2735       ShouldNotReachHere();
       
  2736     }
  2964     }
  2737 
  2965 
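A rough model of the SATB pre-barrier decision above, as a compilable sketch: g1_enqueue() is a stand-in name and std::atomic stands in for the LoadStore node. Get-and-set must load the value it is about to overwrite, while the compare-and-* kinds already hold the only value that can be overwritten, namely oldval:

#include <atomic>

// Stand-in for pushing a to-be-overwritten oop onto the SATB log buffer.
static void g1_enqueue(void* /*pre_val*/) { /* GC bookkeeping elided */ }

// get-and-set: the previous value is unknown, so the barrier loads it
// (do_load == true), unless the barrier can be moved to use the exchange
// result instead (can_move_pre_barrier).
static void* xchg_with_pre_barrier(std::atomic<void*>& field, void* newval) {
  g1_enqueue(field.load());
  return field.exchange(newval);
}

// compare-and-*: only oldval can be overwritten (do_load == false,
// pre_val == oldval), so no extra load is needed.
static bool cas_with_pre_barrier(std::atomic<void*>& field,
                                 void* oldval, void* newval) {
  g1_enqueue(oldval);
  return field.compare_exchange_strong(oldval, newval);
}

int main() {
  std::atomic<void*> f{nullptr};
  int x;
  xchg_with_pre_barrier(f, &x);
  return cas_with_pre_barrier(f, &x, nullptr) ? 0 : 1;
}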
  2738 #ifdef _LP64
  2966 #ifdef _LP64
  2739     if (adr->bottom_type()->is_ptr_to_narrowoop()) {
  2967     if (adr->bottom_type()->is_ptr_to_narrowoop()) {
  2740       Node *newval_enc = _gvn.transform(new EncodePNode(newval, newval->bottom_type()->make_narrowoop()));
  2968       Node *newval_enc = _gvn.transform(new EncodePNode(newval, newval->bottom_type()->make_narrowoop()));
  2741       if (kind == LS_xchg) {
  2969 
  2742         load_store = _gvn.transform(new GetAndSetNNode(control(), mem, adr,
  2970       switch(kind) {
  2743                                                        newval_enc, adr_type, value_type->make_narrowoop()));
  2971         case LS_get_set:
  2744       } else {
  2972           load_store = _gvn.transform(new GetAndSetNNode(control(), mem, adr, newval_enc, adr_type, value_type->make_narrowoop()));
  2745         assert(kind == LS_cmpxchg, "wrong LoadStore operation");
  2973           break;
  2746         Node *oldval_enc = _gvn.transform(new EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
  2974         case LS_cmp_swap_weak: {
  2747         load_store = _gvn.transform(new CompareAndSwapNNode(control(), mem, adr,
  2975           Node *oldval_enc = _gvn.transform(new EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
  2748                                                                 newval_enc, oldval_enc));
  2976           load_store = _gvn.transform(new WeakCompareAndSwapNNode(control(), mem, adr, newval_enc, oldval_enc, mo));
       
  2977           break;
       
  2978         }
       
  2979         case LS_cmp_swap: {
       
  2980           Node *oldval_enc = _gvn.transform(new EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
       
  2981           load_store = _gvn.transform(new CompareAndSwapNNode(control(), mem, adr, newval_enc, oldval_enc, mo));
       
  2982           break;
       
  2983         }
       
  2984         case LS_cmp_exchange: {
       
  2985           Node *oldval_enc = _gvn.transform(new EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
       
  2986           load_store = _gvn.transform(new CompareAndExchangeNNode(control(), mem, adr, newval_enc, oldval_enc, adr_type, value_type->make_narrowoop(), mo));
       
  2987           break;
       
  2988         }
       
  2989         default:
       
  2990           ShouldNotReachHere();
  2749       }
  2991       }
  2750     } else
  2992     } else
  2751 #endif
  2993 #endif
  2752     {
  2994     switch (kind) {
  2753       if (kind == LS_xchg) {
  2995       case LS_get_set:
  2754         load_store = _gvn.transform(new GetAndSetPNode(control(), mem, adr, newval, adr_type, value_type->is_oopptr()));
  2996         load_store = _gvn.transform(new GetAndSetPNode(control(), mem, adr, newval, adr_type, value_type->is_oopptr()));
  2755       } else {
  2997         break;
  2756         assert(kind == LS_cmpxchg, "wrong LoadStore operation");
  2998       case LS_cmp_swap_weak:
  2757         load_store = _gvn.transform(new CompareAndSwapPNode(control(), mem, adr, newval, oldval));
  2999         load_store = _gvn.transform(new WeakCompareAndSwapPNode(control(), mem, adr, newval, oldval, mo));
       
  3000         break;
       
  3001       case LS_cmp_swap:
       
  3002         load_store = _gvn.transform(new CompareAndSwapPNode(control(), mem, adr, newval, oldval, mo));
       
  3003         break;
       
  3004       case LS_cmp_exchange:
       
  3005         load_store = _gvn.transform(new CompareAndExchangePNode(control(), mem, adr, newval, oldval, adr_type, value_type->is_oopptr(), mo));
       
  3006         break;
       
  3007       default:
       
  3008         ShouldNotReachHere();
       
  3009     }
       
  3010 
       
   3011     // Emit the post barrier only when the actual store happened. It makes sense

   3012     // to do this check only for the LS_cmp_* kinds, which can fail to set the value.
       
  3013     // LS_cmp_exchange does not produce any branches by default, so there is no
       
  3014     // boolean result to piggyback on. TODO: When we merge CompareAndSwap with
       
  3015     // CompareAndExchange and move branches here, it would make sense to conditionalize
       
  3016     // post_barriers for LS_cmp_exchange as well.
       
  3017     //
       
   3018     // The CAS success path is marked more likely since we anticipate this is a

   3019     // performance-critical path, while the CAS failure path can use the penalty of

   3020     // taking the unlikely path as backoff, which is still better than doing a store barrier there.
       
  3021     switch (kind) {
       
  3022       case LS_get_set:
       
  3023       case LS_cmp_exchange: {
       
  3024         post_barrier(control(), load_store, base, adr, alias_idx, newval, T_OBJECT, true);
       
  3025         break;
  2758       }
  3026       }
  2759     }
  3027       case LS_cmp_swap_weak:
  2760     if (kind == LS_cmpxchg) {
  3028       case LS_cmp_swap: {
  2761       // Emit the post barrier only when the actual store happened.
  3029         IdealKit ideal(this);
   2762       // It makes sense to do this check only for compareAndSet, which can fail to set the value.
  3030         ideal.if_then(load_store, BoolTest::ne, ideal.ConI(0), PROB_STATIC_FREQUENT); {
  2763       // CAS success path is marked more likely since we anticipate this is a performance
  3031           sync_kit(ideal);
  2764       // critical path, while CAS failure path can use the penalty for going through unlikely
  3032           post_barrier(ideal.ctrl(), load_store, base, adr, alias_idx, newval, T_OBJECT, true);
   2765       // path as backoff, which is still better than doing a store barrier there.
  3033           ideal.sync_kit(this);
  2766       IdealKit ideal(this);
  3034         } ideal.end_if();
  2767       ideal.if_then(load_store, BoolTest::ne, ideal.ConI(0), PROB_STATIC_FREQUENT); {
  3035         final_sync(ideal);
  2768         sync_kit(ideal);
  3036         break;
  2769         post_barrier(ideal.ctrl(), load_store, base, adr, alias_idx, newval, T_OBJECT, true);
  3037       }
  2770         ideal.sync_kit(this);
  3038       default:
  2771       } ideal.end_if();
  3039         ShouldNotReachHere();
  2772       final_sync(ideal);
       
  2773     } else {
       
  2774       post_barrier(control(), load_store, base, adr, alias_idx, newval, T_OBJECT, true);
       
  2775     }
  3040     }
  2776     break;
  3041     break;
  2777   default:
  3042   default:
  2778     fatal("unexpected type %d: %s", type, type2name(type));
  3043     fatal("unexpected type %d: %s", type, type2name(type));
  2779     break;
  3044     break;
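The conditional post barrier emitted through IdealKit for the cmp_swap kinds has roughly the following shape; card_mark() and std::atomic are stand-ins, not HotSpot API, so read this as a sketch of the emitted control flow rather than the implementation:

#include <atomic>

// Stand-in for the GC post barrier (e.g. a card-table mark).
static void card_mark(std::atomic<void*>* /*field*/) { /* elided */ }

static bool cas_with_post_barrier(std::atomic<void*>& field,
                                  void* oldval, void* newval) {
  bool ok = field.compare_exchange_strong(oldval, newval);  // the LoadStore
  if (ok) {              // success marked likely (PROB_STATIC_FREQUENT)
    card_mark(&field);   // post barrier only when the store happened
  }
  return ok;             // the failure path pays no barrier cost
}

int main() {
  std::atomic<void*> f{nullptr};
  int x;
  return cas_with_post_barrier(f, nullptr, &x) ? 0 : 1;
}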
  2783   // main role is to prevent LoadStore nodes from being optimized away
  3048   // main role is to prevent LoadStore nodes from being optimized away
  2784   // when their results aren't used.
  3049   // when their results aren't used.
  2785   Node* proj = _gvn.transform(new SCMemProjNode(load_store));
  3050   Node* proj = _gvn.transform(new SCMemProjNode(load_store));
  2786   set_memory(proj, alias_idx);
  3051   set_memory(proj, alias_idx);
  2787 
  3052 
  2788   if (type == T_OBJECT && kind == LS_xchg) {
  3053   if (type == T_OBJECT && (kind == LS_get_set || kind == LS_cmp_exchange)) {
  2789 #ifdef _LP64
  3054 #ifdef _LP64
  2790     if (adr->bottom_type()->is_ptr_to_narrowoop()) {
  3055     if (adr->bottom_type()->is_ptr_to_narrowoop()) {
  2791       load_store = _gvn.transform(new DecodeNNode(load_store, load_store->get_ptr_type()));
  3056       load_store = _gvn.transform(new DecodeNNode(load_store, load_store->get_ptr_type()));
  2792     }
  3057     }
  2793 #endif
  3058 #endif
  2802     }
  3067     }
  2803   }
  3068   }
  2804 
  3069 
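The _LP64 branches above, together with the DecodeNNode emitted here, handle compressed oops. A standalone sketch of the idea, assuming zero-based compressed oops with 8-byte alignment (the shift, base, and names are assumptions): both inputs are narrowed before the 32-bit CAS, and an object result is widened again afterwards:

#include <atomic>
#include <cstdint>

// Illustrative zero-based compressed-oop encoding (8-byte alignment).
static uint32_t encode(void* oop)  { return (uint32_t)((uintptr_t)oop >> 3); }
static void*    decode(uint32_t n) { return (void*)((uintptr_t)n << 3); }

// Compare-and-exchange on a 32-bit compressed-oop field: encode both inputs
// (EncodePNode), CAS the narrow value, decode the witness (DecodeNNode).
static void* cmpxchg_narrow_oop(std::atomic<uint32_t>& field,
                                void* oldval, void* newval) {
  uint32_t expected = encode(oldval);
  field.compare_exchange_strong(expected, encode(newval));
  return decode(expected);  // value found in the field, widened back
}

int main() {
  std::atomic<uint32_t> f{0};
  int x;
  return cmpxchg_narrow_oop(f, nullptr, &x) == nullptr ? 0 : 1;
}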
  2805   // Add the trailing membar surrounding the access
  3070   // Add the trailing membar surrounding the access
  2806   insert_mem_bar(Op_MemBarCPUOrder);
  3071   insert_mem_bar(Op_MemBarCPUOrder);
  2807   insert_mem_bar(Op_MemBarAcquire);
  3072 
       
  3073   switch (access_kind) {
       
  3074     case Relaxed:
       
  3075     case Release:
       
  3076       break; // do nothing
       
  3077     case Acquire:
       
  3078     case Volatile:
       
  3079       insert_mem_bar(Op_MemBarAcquire);
       
  3080       break;
       
  3081     default:
       
  3082       ShouldNotReachHere();
       
  3083   }
  2808 
  3084 
  2809   assert(type2size[load_store->bottom_type()->basic_type()] == type2size[rtype], "result type should match");
  3085   assert(type2size[load_store->bottom_type()->basic_type()] == type2size[rtype], "result type should match");
  2810   set_result(load_store);
  3086   set_result(load_store);
  2811   return true;
  3087   return true;
  2812 }
  3088 }
  2813 
  3089 
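For orientation, the (type, kind, access_kind) triples below show how the intrinsic dispatcher, which lies outside this hunk, plausibly invokes inline_unsafe_load_store; the mappings are inferred from the Unsafe method names, not quoted from the dispatch code:

#include <cstdio>

enum BasicType     { T_INT, T_LONG, T_OBJECT };
enum LoadStoreKind { LS_get_add, LS_get_set, LS_cmp_swap,
                     LS_cmp_swap_weak, LS_cmp_exchange };
enum AccessKind    { Relaxed, Opaque, Volatile, Acquire, Release };

struct Mapping { const char* method; BasicType t; LoadStoreKind k; AccessKind a; };

// Hypothetical dispatch table, inferred from method names.
static const Mapping table[] = {
  { "compareAndSwapInt",     T_INT,    LS_cmp_swap,      Volatile },
  { "weakCompareAndSwapInt", T_INT,    LS_cmp_swap_weak, Relaxed  },
  { "getAndAddLong",         T_LONG,   LS_get_add,       Volatile },
  { "getAndSetObject",       T_OBJECT, LS_get_set,       Volatile },
};

int main() {
  for (const Mapping& m : table) std::printf("%s\n", m.method);
  return 0;
}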
  2814 //----------------------------inline_unsafe_ordered_store----------------------
  3090 MemNode::MemOrd LibraryCallKit::access_kind_to_memord_LS(AccessKind kind, bool is_store) {
  2815 // public native void Unsafe.putOrderedObject(Object o, long offset, Object x);
  3091   MemNode::MemOrd mo = MemNode::unset;
  2816 // public native void Unsafe.putOrderedInt(Object o, long offset, int x);
  3092   switch(kind) {
  2817 // public native void Unsafe.putOrderedLong(Object o, long offset, long x);
  3093     case Opaque:
  2818 bool LibraryCallKit::inline_unsafe_ordered_store(BasicType type) {
  3094     case Relaxed:  mo = MemNode::unordered; break;
  2819   // This is another variant of inline_unsafe_access, differing in
  3095     case Acquire:  mo = MemNode::acquire;   break;
  2820   // that it always issues store-store ("release") barrier and ensures
  3096     case Release:  mo = MemNode::release;   break;
  2821   // store-atomicity (which only matters for "long").
  3097     case Volatile: mo = is_store ? MemNode::release : MemNode::acquire; break;
  2822 
  3098     default:
  2823   if (callee()->is_static())  return false;  // caller must have the capability!
  3099       ShouldNotReachHere();
  2824 
  3100   }
  2825 #ifndef PRODUCT
  3101   guarantee(mo != MemNode::unset, "Should select memory ordering");
  2826   {
  3102   return mo;
  2827     ResourceMark rm;
  3103 }
  2828     // Check the signatures.
  3104 
  2829     ciSignature* sig = callee()->signature();
  3105 MemNode::MemOrd LibraryCallKit::access_kind_to_memord(AccessKind kind) {
  2830 #ifdef ASSERT
  3106   MemNode::MemOrd mo = MemNode::unset;
  2831     BasicType rtype = sig->return_type()->basic_type();
  3107   switch(kind) {
  2832     assert(rtype == T_VOID, "must return void");
  3108     case Opaque:
  2833     assert(sig->count() == 3, "has 3 arguments");
  3109     case Relaxed:  mo = MemNode::unordered; break;
  2834     assert(sig->type_at(0)->basic_type() == T_OBJECT, "base is object");
  3110     case Acquire:  mo = MemNode::acquire;   break;
  2835     assert(sig->type_at(1)->basic_type() == T_LONG, "offset is long");
  3111     case Release:  mo = MemNode::release;   break;
  2836 #endif // ASSERT
  3112     case Volatile: mo = MemNode::seqcst;    break;
  2837   }
  3113     default:
  2838 #endif //PRODUCT
  3114       ShouldNotReachHere();
  2839 
  3115   }
  2840   C->set_has_unsafe_access(true);  // Mark eventual nmethod as "unsafe".
  3116   guarantee(mo != MemNode::unset, "Should select memory ordering");
  2841 
  3117   return mo;
  2842   // Get arguments:
       
  2843   Node* receiver = argument(0);  // type: oop
       
  2844   Node* base     = argument(1);  // type: oop
       
  2845   Node* offset   = argument(2);  // type: long
       
  2846   Node* val      = argument(4);  // type: oop, int, or long
       
  2847 
       
  2848   // Null check receiver.
       
  2849   receiver = null_check(receiver);
       
  2850   if (stopped()) {
       
  2851     return true;
       
  2852   }
       
  2853 
       
  2854   // Build field offset expression.
       
  2855   assert(Unsafe_field_offset_to_byte_offset(11) == 11, "fieldOffset must be byte-scaled");
       
  2856   // 32-bit machines ignore the high half of long offsets
       
  2857   offset = ConvL2X(offset);
       
  2858   Node* adr = make_unsafe_address(base, offset);
       
  2859   const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
       
  2860   const Type *value_type = Type::get_const_basic_type(type);
       
  2861   Compile::AliasType* alias_type = C->alias_type(adr_type);
       
  2862 
       
  2863   insert_mem_bar(Op_MemBarRelease);
       
  2864   insert_mem_bar(Op_MemBarCPUOrder);
       
  2865   // Ensure that the store is atomic for longs:
       
  2866   const bool require_atomic_access = true;
       
  2867   Node* store;
       
  2868   if (type == T_OBJECT) // reference stores need a store barrier.
       
  2869     store = store_oop_to_unknown(control(), base, adr, adr_type, val, type, MemNode::release);
       
  2870   else {
       
  2871     store = store_to_memory(control(), adr, val, type, adr_type, MemNode::release, require_atomic_access);
       
  2872   }
       
  2873   insert_mem_bar(Op_MemBarCPUOrder);
       
  2874   return true;
       
  2875 }
  3118 }
  2876 
  3119 
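The two mappings exist because a LoadStore node is both a load and a store: Volatile splits into release-on-store / acquire-on-load in the _LS variant, while a plain access can carry a single seq-cst ordering. A compilable sketch using modeled MemOrd values (not the HotSpot enum):

#include <cassert>

enum MemOrd     { unordered, acquire, release, seqcst };
enum AccessKind { Relaxed, Opaque, Volatile, Acquire, Release };

// Mirrors access_kind_to_memord_LS: direction-dependent for Volatile.
static MemOrd memord_LS(AccessKind k, bool is_store) {
  switch (k) {
    case Opaque:
    case Relaxed:  return unordered;
    case Acquire:  return acquire;
    case Release:  return release;
    case Volatile: return is_store ? release : acquire;
  }
  return unordered;
}

// Mirrors access_kind_to_memord: Volatile becomes a single seq-cst ordering.
static MemOrd memord_plain(AccessKind k) {
  return k == Volatile ? seqcst : memord_LS(k, /*is_store=*/false);
}

int main() {
  assert(memord_LS(Volatile, true)  == release);
  assert(memord_LS(Volatile, false) == acquire);
  assert(memord_plain(Volatile) == seqcst);
  return 0;
}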
  2877 bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
  3120 bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
  2878   // Regardless of form, don't allow previous ld/st to move down,
  3121   // Regardless of form, don't allow previous ld/st to move down,
  2879   // then issue acquire, release, or volatile mem_bar.
  3122   // then issue acquire, release, or volatile mem_bar.