src/hotspot/share/opto/library_call.cpp
branch:      datagramsocketimpl-branch
changeset:   58678:9cf78a70fa4f
parent:      54750:1851a532ddfe
child:       58679:9c3209ff7550
diff between 58677:13588c901957 and 58678:9cf78a70fa4f
@@ -30,10 +30,11 @@
 #include "compiler/compileBroker.hpp"
 #include "compiler/compileLog.hpp"
 #include "gc/shared/barrierSet.hpp"
 #include "jfr/support/jfrIntrinsics.hpp"
 #include "memory/resourceArea.hpp"
+#include "oops/klass.inline.hpp"
 #include "oops/objArrayKlass.hpp"
 #include "opto/addnode.hpp"
 #include "opto/arraycopynode.hpp"
 #include "opto/c2compiler.hpp"
 #include "opto/callGenerator.hpp"
@@ -251,10 +252,12 @@
   DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
   bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
   static bool klass_needs_init_guard(Node* kls);
   bool inline_unsafe_allocate();
   bool inline_unsafe_newArray(bool uninitialized);
+  bool inline_unsafe_writeback0();
+  bool inline_unsafe_writebackSync0(bool is_pre);
   bool inline_unsafe_copyMemory();
   bool inline_native_currentThread();
 
   bool inline_native_time_funcs(address method, const char* funcName);
 #ifdef JFR_HAVE_INTRINSICS
@@ -291,12 +294,14 @@
   bool inline_number_methods(vmIntrinsics::ID id);
   bool inline_reference_get();
   bool inline_Class_cast();
   bool inline_aescrypt_Block(vmIntrinsics::ID id);
   bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
+  bool inline_electronicCodeBook_AESCrypt(vmIntrinsics::ID id);
   bool inline_counterMode_AESCrypt(vmIntrinsics::ID id);
   Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
+  Node* inline_electronicCodeBook_AESCrypt_predicate(bool decrypting);
   Node* inline_counterMode_AESCrypt_predicate();
   Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
   Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
   bool inline_ghash_processBlocks();
   bool inline_base64_encodeBlock();
@@ -528,10 +533,13 @@
   switch (intrinsic_id()) {
   case vmIntrinsics::_hashCode:                 return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
   case vmIntrinsics::_identityHashCode:         return inline_native_hashcode(/*!virtual*/ false,         is_static);
   case vmIntrinsics::_getClass:                 return inline_native_getClass();
 
+  case vmIntrinsics::_ceil:
+  case vmIntrinsics::_floor:
+  case vmIntrinsics::_rint:
   case vmIntrinsics::_dsin:
   case vmIntrinsics::_dcos:
   case vmIntrinsics::_dtan:
   case vmIntrinsics::_dabs:
   case vmIntrinsics::_fabs:
@@ -751,10 +759,13 @@
   case vmIntrinsics::_getClassId:               return inline_native_classID();
   case vmIntrinsics::_getEventWriter:           return inline_native_getEventWriter();
 #endif
   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
+  case vmIntrinsics::_writeback0:               return inline_unsafe_writeback0();
+  case vmIntrinsics::_writebackPreSync0:        return inline_unsafe_writebackSync0(true);
+  case vmIntrinsics::_writebackPostSync0:       return inline_unsafe_writebackSync0(false);
   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
   case vmIntrinsics::_getLength:                return inline_native_getLength();
   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
@@ -805,10 +816,14 @@
 
   case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
   case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
     return inline_cipherBlockChaining_AESCrypt(intrinsic_id());
 
+  case vmIntrinsics::_electronicCodeBook_encryptAESCrypt:
+  case vmIntrinsics::_electronicCodeBook_decryptAESCrypt:
+    return inline_electronicCodeBook_AESCrypt(intrinsic_id());
+
   case vmIntrinsics::_counterMode_AESCrypt:
     return inline_counterMode_AESCrypt(intrinsic_id());
 
   case vmIntrinsics::_sha_implCompress:
   case vmIntrinsics::_sha2_implCompress:
@@ -910,10 +925,14 @@
   switch (intrinsic_id()) {
   case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
     return inline_cipherBlockChaining_AESCrypt_predicate(false);
   case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
     return inline_cipherBlockChaining_AESCrypt_predicate(true);
+  case vmIntrinsics::_electronicCodeBook_encryptAESCrypt:
+    return inline_electronicCodeBook_AESCrypt_predicate(false);
+  case vmIntrinsics::_electronicCodeBook_decryptAESCrypt:
+    return inline_electronicCodeBook_AESCrypt_predicate(true);
   case vmIntrinsics::_counterMode_AESCrypt:
     return inline_counterMode_AESCrypt_predicate();
   case vmIntrinsics::_digestBase_implCompressMB:
     return inline_digestBase_implCompressMB_predicate(predicate);
 
@@ -1801,10 +1820,13 @@
   Node* arg = round_double_node(argument(0));
   Node* n = NULL;
   switch (id) {
   case vmIntrinsics::_dabs:   n = new AbsDNode(                arg);  break;
   case vmIntrinsics::_dsqrt:  n = new SqrtDNode(C, control(),  arg);  break;
+  case vmIntrinsics::_ceil:   n = new RoundDoubleModeNode(arg, makecon(TypeInt::make(2))); break;
+  case vmIntrinsics::_floor:  n = new RoundDoubleModeNode(arg, makecon(TypeInt::make(1))); break;
+  case vmIntrinsics::_rint:   n = new RoundDoubleModeNode(arg, makecon(TypeInt::make(0))); break;
   default:  fatal_unexpected_iid(id);  break;
   }
   set_result(_gvn.transform(n));
   return true;
 }
@@ -1874,10 +1896,13 @@
     return StubRoutines::dlog10() != NULL ?
       runtime_math(OptoRuntime::Math_D_D_Type(), StubRoutines::dlog10(), "dlog10") :
       runtime_math(OptoRuntime::Math_D_D_Type(), FN_PTR(SharedRuntime::dlog10), "LOG10");
 
     // These intrinsics are supported on all hardware
+  case vmIntrinsics::_ceil:
+  case vmIntrinsics::_floor:
+  case vmIntrinsics::_rint:   return Matcher::match_rule_supported(Op_RoundDoubleMode) ? inline_double_math(id) : false;
   case vmIntrinsics::_dsqrt:  return Matcher::match_rule_supported(Op_SqrtD) ? inline_double_math(id) : false;
   case vmIntrinsics::_dabs:   return Matcher::has_match_rule(Op_AbsD)   ? inline_double_math(id) : false;
   case vmIntrinsics::_fabs:   return Matcher::match_rule_supported(Op_AbsF)   ? inline_math(id) : false;
   case vmIntrinsics::_iabs:   return Matcher::match_rule_supported(Op_AbsI)   ? inline_math(id) : false;
   case vmIntrinsics::_labs:   return Matcher::match_rule_supported(Op_AbsL)   ? inline_math(id) : false;
@@ -2365,11 +2390,11 @@
   DecoratorSet decorators = C2_UNSAFE_ACCESS;
   guarantee(!is_store || kind != Acquire, "Acquire accesses can be produced only for loads");
   guarantee( is_store || kind != Release, "Release accesses can be produced only for stores");
   assert(type != T_OBJECT || !unaligned, "unaligned access not supported with object type");
 
-  if (type == T_OBJECT || type == T_ARRAY) {
+  if (is_reference_type(type)) {
     decorators |= ON_UNKNOWN_OOP_REF;
   }
 
   if (unaligned) {
     decorators |= C2_UNALIGNED;
@@ -2437,11 +2462,14 @@
     decorators |= IN_HEAP;
   }
 
   val = is_store ? argument(4) : NULL;
 
-  const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
+  const TypePtr* adr_type = _gvn.type(adr)->isa_ptr();
+  if (adr_type == TypePtr::NULL_PTR) {
+    return false; // off-heap access with zero address
+  }
 
   // Try to categorize the address.
   Compile::AliasType* alias_type = C->alias_type(adr_type);
   assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
 
@@ -2710,11 +2738,11 @@
   const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
 
   Compile::AliasType* alias_type = C->alias_type(adr_type);
   BasicType bt = alias_type->basic_type();
   if (bt != T_ILLEGAL &&
-      ((bt == T_OBJECT || bt == T_ARRAY) != (type == T_OBJECT))) {
+      (is_reference_type(bt) != (type == T_OBJECT))) {
     // Don't intrinsify mismatched object accesses.
     return false;
   }
 
   // For CAS, unlike inline_unsafe_access, there seems no point in
@@ -2747,11 +2775,11 @@
     return true;
   }
 
   int alias_idx = C->get_alias_index(adr_type);
 
-  if (type == T_OBJECT || type == T_ARRAY) {
+  if (is_reference_type(type)) {
     decorators |= IN_HEAP | ON_UNKNOWN_OOP_REF;
 
     // Transformation of a value which could be NULL pointer (CastPP #NULL)
     // could be delayed during Parse (for example, in adjust_map_after_if()).
     // Execute transformation here to avoid barrier generation in such case.
@@ -2831,10 +2859,59 @@
     return true;
   }
   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
   // don't need a guard for a klass that is already initialized
   return !ik->is_initialized();
+}
+
+//----------------------------inline_unsafe_writeback0-------------------------
+// public native void Unsafe.writeback0(long address)
+bool LibraryCallKit::inline_unsafe_writeback0() {
+  if (!Matcher::has_match_rule(Op_CacheWB)) {
+    return false;
+  }
+#ifndef PRODUCT
+  assert(Matcher::has_match_rule(Op_CacheWBPreSync), "found match rule for CacheWB but not CacheWBPreSync");
+  assert(Matcher::has_match_rule(Op_CacheWBPostSync), "found match rule for CacheWB but not CacheWBPostSync");
+  ciSignature* sig = callee()->signature();
+  assert(sig->type_at(0)->basic_type() == T_LONG, "Unsafe_writeback0 address is long!");
+#endif
+  null_check_receiver();  // null-check, then ignore
+  Node *addr = argument(1);
+  addr = new CastX2PNode(addr);
+  addr = _gvn.transform(addr);
+  Node *flush = new CacheWBNode(control(), memory(TypeRawPtr::BOTTOM), addr);
+  flush = _gvn.transform(flush);
+  set_memory(flush, TypeRawPtr::BOTTOM);
+  return true;
+}
+
+//----------------------------inline_unsafe_writeback0-------------------------
+// public native void Unsafe.writeback0(long address)
+bool LibraryCallKit::inline_unsafe_writebackSync0(bool is_pre) {
+  if (is_pre && !Matcher::has_match_rule(Op_CacheWBPreSync)) {
+    return false;
+  }
+  if (!is_pre && !Matcher::has_match_rule(Op_CacheWBPostSync)) {
+    return false;
+  }
+#ifndef PRODUCT
+  assert(Matcher::has_match_rule(Op_CacheWB),
+         (is_pre ? "found match rule for CacheWBPreSync but not CacheWB"
+                : "found match rule for CacheWBPostSync but not CacheWB"));
+
+#endif
+  null_check_receiver();  // null-check, then ignore
+  Node *sync;
+  if (is_pre) {
+    sync = new CacheWBPreSyncNode(control(), memory(TypeRawPtr::BOTTOM));
+  } else {
+    sync = new CacheWBPostSyncNode(control(), memory(TypeRawPtr::BOTTOM));
+  }
+  sync = _gvn.transform(sync);
+  set_memory(sync, TypeRawPtr::BOTTOM);
+  return true;
 }
 
 //----------------------------inline_unsafe_allocate---------------------------
 // public native Object Unsafe.allocateInstance(Class<?> cls);
 bool LibraryCallKit::inline_unsafe_allocate() {
@@ -3950,33 +4027,33 @@
   // the null check after castPP removal.
   Node* no_ctrl = NULL;
   Node* header = make_load(no_ctrl, header_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
 
   // Test the header to see if it is unlocked.
-  Node *lock_mask      = _gvn.MakeConX(markOopDesc::biased_lock_mask_in_place);
+  Node *lock_mask      = _gvn.MakeConX(markWord::biased_lock_mask_in_place);
   Node *lmasked_header = _gvn.transform(new AndXNode(header, lock_mask));
-  Node *unlocked_val   = _gvn.MakeConX(markOopDesc::unlocked_value);
+  Node *unlocked_val   = _gvn.MakeConX(markWord::unlocked_value);
   Node *chk_unlocked   = _gvn.transform(new CmpXNode( lmasked_header, unlocked_val));
   Node *test_unlocked  = _gvn.transform(new BoolNode( chk_unlocked, BoolTest::ne));
 
   generate_slow_guard(test_unlocked, slow_region);
 
   // Get the hash value and check to see that it has been properly assigned.
   // We depend on hash_mask being at most 32 bits and avoid the use of
   // hash_mask_in_place because it could be larger than 32 bits in a 64-bit
-  // vm: see markOop.hpp.
-  Node *hash_mask      = _gvn.intcon(markOopDesc::hash_mask);
-  Node *hash_shift     = _gvn.intcon(markOopDesc::hash_shift);
+  // vm: see markWord.hpp.
+  Node *hash_mask      = _gvn.intcon(markWord::hash_mask);
+  Node *hash_shift     = _gvn.intcon(markWord::hash_shift);
   Node *hshifted_header= _gvn.transform(new URShiftXNode(header, hash_shift));
   // This hack lets the hash bits live anywhere in the mark object now, as long
   // as the shift drops the relevant bits into the low 32 bits.  Note that
   // Java spec says that HashCode is an int so there's no point in capturing
   // an 'X'-sized hashcode (32 in 32-bit build or 64 in 64-bit build).
   hshifted_header      = ConvX2I(hshifted_header);
   Node *hash_val       = _gvn.transform(new AndINode(hshifted_header, hash_mask));
 
-  Node *no_hash_val    = _gvn.intcon(markOopDesc::no_hash);
+  Node *no_hash_val    = _gvn.intcon(markWord::no_hash);
   Node *chk_assigned   = _gvn.transform(new CmpINode( hash_val, no_hash_val));
   Node *test_assigned  = _gvn.transform(new BoolNode( chk_assigned, BoolTest::eq));
 
   generate_slow_guard(test_assigned, slow_region);
 
@@ -4235,17 +4312,27 @@
 
   // Conservatively insert a memory barrier on all memory slices.
   // Do not let writes of the copy source or destination float below the copy.
   insert_mem_bar(Op_MemBarCPUOrder);
 
+  Node* thread = _gvn.transform(new ThreadLocalNode());
+  Node* doing_unsafe_access_addr = basic_plus_adr(top(), thread, in_bytes(JavaThread::doing_unsafe_access_offset()));
+  BasicType doing_unsafe_access_bt = T_BYTE;
+  assert((sizeof(bool) * CHAR_BIT) == 8, "not implemented");
+
+  // update volatile field
+  store_to_memory(control(), doing_unsafe_access_addr, intcon(1), doing_unsafe_access_bt, Compile::AliasIdxRaw, MemNode::unordered);
+
   // Call it.  Note that the length argument is not scaled.
   make_runtime_call(RC_LEAF|RC_NO_FP,
                     OptoRuntime::fast_arraycopy_Type(),
                     StubRoutines::unsafe_arraycopy(),
                     "unsafe_arraycopy",
                     TypeRawPtr::BOTTOM,
                     src, dst, size XTOP);
+
+  store_to_memory(control(), doing_unsafe_access_addr, intcon(0), doing_unsafe_access_bt, Compile::AliasIdxRaw, MemNode::unordered);
 
   // Do not let reads of the copy destination float above the copy.
   insert_mem_bar(Op_MemBarCPUOrder);
 
   return true;
@@ -4496,12 +4583,12 @@
 JVMState* LibraryCallKit::arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp) {
   if (alloc != NULL) {
     ciMethod* trap_method = alloc->jvms()->method();
     int trap_bci = alloc->jvms()->bci();
 
-    if (!C->too_many_traps(trap_method, trap_bci, Deoptimization::Reason_intrinsic) &
-          !C->too_many_traps(trap_method, trap_bci, Deoptimization::Reason_null_check)) {
+    if (!C->too_many_traps(trap_method, trap_bci, Deoptimization::Reason_intrinsic) &&
+        !C->too_many_traps(trap_method, trap_bci, Deoptimization::Reason_null_check)) {
       // Make sure there's no store between the allocation and the
       // arraycopy otherwise visible side effects could be rexecuted
       // in case of deoptimization and cause incorrect execution.
       bool no_interfering_store = true;
       Node* mem = alloc->in(TypeFunc::Memory);
@@ -4738,12 +4825,12 @@
   }
 
   if (has_src && has_dest && can_emit_guards) {
     BasicType src_elem  = top_src->klass()->as_array_klass()->element_type()->basic_type();
     BasicType dest_elem = top_dest->klass()->as_array_klass()->element_type()->basic_type();
-    if (src_elem  == T_ARRAY)  src_elem  = T_OBJECT;
-    if (dest_elem == T_ARRAY)  dest_elem = T_OBJECT;
+    if (is_reference_type(src_elem))   src_elem  = T_OBJECT;
+    if (is_reference_type(dest_elem))  dest_elem = T_OBJECT;
 
     if (src_elem == dest_elem && src_elem == T_OBJECT) {
       // If both arrays are object arrays then having the exact types
       // for both will remove the need for a subtype check at runtime
       // before the call and may make it possible to pick a faster copy
@@ -6004,10 +6091,98 @@
   Node* retvalue = _gvn.transform(new ProjNode(cbcCrypt, TypeFunc::Parms));
   set_result(retvalue);
   return true;
 }
 
+//------------------------------inline_electronicCodeBook_AESCrypt-----------------------
+bool LibraryCallKit::inline_electronicCodeBook_AESCrypt(vmIntrinsics::ID id) {
+  address stubAddr = NULL;
+  const char *stubName = NULL;
+
+  assert(UseAES, "need AES instruction support");
+
+  switch (id) {
+  case vmIntrinsics::_electronicCodeBook_encryptAESCrypt:
+    stubAddr = StubRoutines::electronicCodeBook_encryptAESCrypt();
+    stubName = "electronicCodeBook_encryptAESCrypt";
+    break;
+  case vmIntrinsics::_electronicCodeBook_decryptAESCrypt:
+    stubAddr = StubRoutines::electronicCodeBook_decryptAESCrypt();
+    stubName = "electronicCodeBook_decryptAESCrypt";
+    break;
+  default:
+    break;
+  }
+
+  if (stubAddr == NULL) return false;
+
+  Node* electronicCodeBook_object = argument(0);
+  Node* src                       = argument(1);
+  Node* src_offset                = argument(2);
+  Node* len                       = argument(3);
+  Node* dest                      = argument(4);
+  Node* dest_offset               = argument(5);
+
+  // (1) src and dest are arrays.
+  const Type* src_type = src->Value(&_gvn);
+  const Type* dest_type = dest->Value(&_gvn);
+  const TypeAryPtr* top_src = src_type->isa_aryptr();
+  const TypeAryPtr* top_dest = dest_type->isa_aryptr();
+  assert(top_src != NULL && top_src->klass() != NULL
+         &&  top_dest != NULL && top_dest->klass() != NULL, "args are strange");
+
+  // checks are the responsibility of the caller
+  Node* src_start = src;
+  Node* dest_start = dest;
+  if (src_offset != NULL || dest_offset != NULL) {
+    assert(src_offset != NULL && dest_offset != NULL, "");
+    src_start = array_element_address(src, src_offset, T_BYTE);
+    dest_start = array_element_address(dest, dest_offset, T_BYTE);
+  }
+
+  // if we are in this set of code, we "know" the embeddedCipher is an AESCrypt object
+  // (because of the predicated logic executed earlier).
+  // so we cast it here safely.
+  // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java
+
+  Node* embeddedCipherObj = load_field_from_object(electronicCodeBook_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;", /*is_exact*/ false);
+  if (embeddedCipherObj == NULL) return false;
+
+  // cast it to what we know it will be at runtime
+  const TypeInstPtr* tinst = _gvn.type(electronicCodeBook_object)->isa_instptr();
+  assert(tinst != NULL, "ECB obj is null");
+  assert(tinst->klass()->is_loaded(), "ECB obj is not loaded");
+  ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
+  assert(klass_AESCrypt->is_loaded(), "predicate checks that this class is loaded");
+
+  ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
+  const TypeKlassPtr* aklass = TypeKlassPtr::make(instklass_AESCrypt);
+  const TypeOopPtr* xtype = aklass->as_instance_type();
+  Node* aescrypt_object = new CheckCastPPNode(control(), embeddedCipherObj, xtype);
+  aescrypt_object = _gvn.transform(aescrypt_object);
+
+  // we need to get the start of the aescrypt_object's expanded key array
+  Node* k_start = get_key_start_from_aescrypt_object(aescrypt_object);
+  if (k_start == NULL) return false;
+
+  Node* ecbCrypt;
+  if (Matcher::pass_original_key_for_aes()) {
+    // no SPARC version for AES/ECB intrinsics now.
+    return false;
+  }
+  // Call the stub, passing src_start, dest_start, k_start, r_start and src_len
+  ecbCrypt = make_runtime_call(RC_LEAF | RC_NO_FP,
+                               OptoRuntime::electronicCodeBook_aescrypt_Type(),
+                               stubAddr, stubName, TypePtr::BOTTOM,
+                               src_start, dest_start, k_start, len);
+
+  // return cipher length (int)
+  Node* retvalue = _gvn.transform(new ProjNode(ecbCrypt, TypeFunc::Parms));
+  set_result(retvalue);
+  return true;
+}
+
 //------------------------------inline_counterMode_AESCrypt-----------------------
 bool LibraryCallKit::inline_counterMode_AESCrypt(vmIntrinsics::ID id) {
   assert(UseAES, "need AES instruction support");
   if (!UseAESCTRIntrinsics) return false;
 
@@ -6200,10 +6375,69 @@
 
   record_for_igvn(region);
   return _gvn.transform(region);
 }
 
+//----------------------------inline_electronicCodeBook_AESCrypt_predicate----------------------------
+// Return node representing slow path of predicate check.
+// the pseudo code we want to emulate with this predicate is:
+// for encryption:
+//    if (embeddedCipherObj instanceof AESCrypt) do_intrinsic, else do_javapath
+// for decryption:
+//    if ((embeddedCipherObj instanceof AESCrypt) && (cipher!=plain)) do_intrinsic, else do_javapath
+//    note cipher==plain is more conservative than the original java code but that's OK
+//
+Node* LibraryCallKit::inline_electronicCodeBook_AESCrypt_predicate(bool decrypting) {
+  // The receiver was checked for NULL already.
+  Node* objECB = argument(0);
+
+  // Load embeddedCipher field of ElectronicCodeBook object.
+  Node* embeddedCipherObj = load_field_from_object(objECB, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;", /*is_exact*/ false);
+
+  // get AESCrypt klass for instanceOf check
+  // AESCrypt might not be loaded yet if some other SymmetricCipher got us to this compile point
+  // will have same classloader as ElectronicCodeBook object
+  const TypeInstPtr* tinst = _gvn.type(objECB)->isa_instptr();
+  assert(tinst != NULL, "ECBobj is null");
+  assert(tinst->klass()->is_loaded(), "ECBobj is not loaded");
+
+  // we want to do an instanceof comparison against the AESCrypt class
+  ciKlass* klass_AESCrypt = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make("com/sun/crypto/provider/AESCrypt"));
+  if (!klass_AESCrypt->is_loaded()) {
+    // if AESCrypt is not even loaded, we never take the intrinsic fast path
+    Node* ctrl = control();
+    set_control(top()); // no regular fast path
+    return ctrl;
+  }
+  ciInstanceKlass* instklass_AESCrypt = klass_AESCrypt->as_instance_klass();
+
+  Node* instof = gen_instanceof(embeddedCipherObj, makecon(TypeKlassPtr::make(instklass_AESCrypt)));
+  Node* cmp_instof = _gvn.transform(new CmpINode(instof, intcon(1)));
+  Node* bool_instof = _gvn.transform(new BoolNode(cmp_instof, BoolTest::ne));
+
+  Node* instof_false = generate_guard(bool_instof, NULL, PROB_MIN);
+
+  // for encryption, we are done
+  if (!decrypting)
+    return instof_false;  // even if it is NULL
+
+  // for decryption, we need to add a further check to avoid
+  // taking the intrinsic path when cipher and plain are the same
+  // see the original java code for why.
+  RegionNode* region = new RegionNode(3);
+  region->init_req(1, instof_false);
+  Node* src = argument(1);
+  Node* dest = argument(4);
+  Node* cmp_src_dest = _gvn.transform(new CmpPNode(src, dest));
+  Node* bool_src_dest = _gvn.transform(new BoolNode(cmp_src_dest, BoolTest::eq));
+  Node* src_dest_conjoint = generate_guard(bool_src_dest, NULL, PROB_MIN);
+  region->init_req(2, src_dest_conjoint);
+
+  record_for_igvn(region);
+  return _gvn.transform(region);
+}
+
 //----------------------------inline_counterMode_AESCrypt_predicate----------------------------
 // Return node representing slow path of predicate check.
 // the pseudo code we want to emulate with this predicate is:
 // for encryption:
 //    if (embeddedCipherObj instanceof AESCrypt) do_intrinsic, else do_javapath
@@ -6691,13 +6925,10 @@
     break;
   default:
     fatal_unexpected_iid(id);
     break;
   }
-  if (a->is_Con() || b->is_Con()) {
-    return false;
-  }
   switch (id) {
   case vmIntrinsics::_maxF:  n = new MaxFNode(a, b);  break;
   case vmIntrinsics::_minF:  n = new MinFNode(a, b);  break;
   case vmIntrinsics::_maxD:  n = new MaxDNode(a, b);  break;
   case vmIntrinsics::_minD:  n = new MinDNode(a, b);  break;