src/hotspot/share/code/compiledIC.cpp
changeset 52384 d6dc479bcdd3
parent 51314 82adcc8ad853
child 52857 7e268f863ff0
52383:71564a544d4c 52384:d6dc479bcdd3
    22  *
    22  *
    23  */
    23  */
    24 
    24 
    25 #include "precompiled.hpp"
    25 #include "precompiled.hpp"
    26 #include "classfile/systemDictionary.hpp"
    26 #include "classfile/systemDictionary.hpp"
       
    27 #include "code/codeBehaviours.hpp"
    27 #include "code/codeCache.hpp"
    28 #include "code/codeCache.hpp"
    28 #include "code/compiledIC.hpp"
    29 #include "code/compiledIC.hpp"
    29 #include "code/icBuffer.hpp"
    30 #include "code/icBuffer.hpp"
    30 #include "code/nmethod.hpp"
    31 #include "code/nmethod.hpp"
    31 #include "code/vtableStubs.hpp"
    32 #include "code/vtableStubs.hpp"
    45 
    46 
    46 
    47 
    47 // Every time a compiled IC is changed or its type is being accessed,
    48 // Every time a compiled IC is changed or its type is being accessed,
    48 // either the CompiledIC_lock must be set or we must be at a safe point.
    49 // either the CompiledIC_lock must be set or we must be at a safe point.
    49 
    50 
       
    51 CompiledICLocker::CompiledICLocker(CompiledMethod* method)
       
    52   : _method(method),
       
    53     _behaviour(CompiledICProtectionBehaviour::current()),
       
     54     _locked(_behaviour->lock(_method)) {
       
    55 }
       
    56 
       
    57 CompiledICLocker::~CompiledICLocker() {
       
    58   if (_locked) {
       
    59     _behaviour->unlock(_method);
       
    60   }
       
    61 }
       
    62 
       
    63 bool CompiledICLocker::is_safe(CompiledMethod* method) {
       
    64   return CompiledICProtectionBehaviour::current()->is_safe(method);
       
    65 }
       
    66 
       
    67 bool CompiledICLocker::is_safe(address code) {
       
    68   CodeBlob* cb = CodeCache::find_blob_unsafe(code);
       
    69   assert(cb != NULL && cb->is_compiled(), "must be compiled");
       
    70   CompiledMethod* cm = cb->as_compiled_method();
       
    71   return CompiledICProtectionBehaviour::current()->is_safe(cm);
       
    72 }
       
    73 
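
The locker above delegates every protection decision to a CompiledICProtectionBehaviour singleton, pulled in through the new codeBehaviours.hpp include. That header is not part of this hunk, so the following is only a sketch of what the interface plausibly looks like, inferred from the lock/unlock/is_safe calls CompiledICLocker makes; the default behaviour falling back to the global CompiledIC_lock is likewise an assumption.

// Sketch only (not part of this hunk): the real declaration lives in
// code/codeBehaviours.hpp. Inferred from the calls CompiledICLocker makes above.
class CompiledICProtectionBehaviour {
public:
  // Returns true if a lock was actually taken and must be released in unlock().
  virtual bool lock(CompiledMethod* method) = 0;
  virtual void unlock(CompiledMethod* method) = 0;
  // True if the current thread may inspect or patch ICs of 'method' right now.
  virtual bool is_safe(CompiledMethod* method) = 0;

  static CompiledICProtectionBehaviour* current();
};

// Assumed default: fall back to the global CompiledIC_lock, preserving the old
// discipline (safe when at a safepoint or when the lock is already owned).
class DefaultICProtectionBehaviour : public CompiledICProtectionBehaviour {
public:
  virtual bool lock(CompiledMethod* method) {
    if (is_safe(method)) {
      return false;            // already protected; nothing to release later
    }
    CompiledIC_lock->lock();
    return true;
  }
  virtual void unlock(CompiledMethod* method) {
    CompiledIC_lock->unlock();
  }
  virtual bool is_safe(CompiledMethod* method) {
    return SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->owned_by_self();
  }
};
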
    50 //-----------------------------------------------------------------------------
    74 //-----------------------------------------------------------------------------
    51 // Low-level access to an inline cache. Private, since they might not be
    75 // Low-level access to an inline cache. Private, since they might not be
    52 // MT-safe to use.
    76 // MT-safe to use.
    53 
    77 
    54 void* CompiledIC::cached_value() const {
    78 void* CompiledIC::cached_value() const {
    55   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
    79   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
    56   assert (!is_optimized(), "an optimized virtual call does not have a cached metadata");
    80   assert (!is_optimized(), "an optimized virtual call does not have a cached metadata");
    57 
    81 
    58   if (!is_in_transition_state()) {
    82   if (!is_in_transition_state()) {
    59     void* data = get_data();
    83     void* data = get_data();
    60     // If we let the metadata value here be initialized to zero...
    84     // If we let the metadata value here be initialized to zero...
    67 }
    91 }
    68 
    92 
    69 
    93 
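
With this change the accessors in this file no longer require the caller to hold the global CompiledIC_lock or to be at a safepoint; they assert CompiledICLocker::is_safe(_method) instead. A caller would therefore scope its IC work with the RAII locker; a minimal sketch follows, where the helper name and its parameters are made up for illustration.

// Hypothetical helper, for illustration only: protect the owning CompiledMethod
// with a CompiledICLocker while one of its inline caches is inspected or patched.
static void clean_one_ic(CompiledMethod* cm, CompiledIC* ic, bool in_use) {
  CompiledICLocker ml(cm);      // lock() through the current protection behaviour
  if (!ic->is_clean()) {        // is_clean() asserts CompiledICLocker::is_safe(_method)
    ic->set_to_clean(in_use);   // and so does set_to_clean()
  }
}                               // ~CompiledICLocker() unlocks only if lock() took the lock
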
    70 void CompiledIC::internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder) {
    94 void CompiledIC::internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder) {
    71   assert(entry_point != NULL, "must set legal entry point");
    95   assert(entry_point != NULL, "must set legal entry point");
    72   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
    96   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
    73   assert (!is_optimized() || cache == NULL, "an optimized virtual call does not have a cached metadata");
    97   assert (!is_optimized() || cache == NULL, "an optimized virtual call does not have a cached metadata");
    74   assert (cache == NULL || cache != (Metadata*)badOopVal, "invalid metadata");
    98   assert (cache == NULL || cache != (Metadata*)badOopVal, "invalid metadata");
    75 
    99 
    76   assert(!is_icholder || is_icholder_entry(entry_point), "must be");
   100   assert(!is_icholder || is_icholder_entry(entry_point), "must be");
    77 
   101 
    99     }
   123     }
   100     tty->cr();
   124     tty->cr();
   101   }
   125   }
   102 
   126 
   103   {
   127   {
   104     MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
       
   105 #ifdef ASSERT
       
   106     CodeBlob* cb = CodeCache::find_blob_unsafe(_call->instruction_address());
   128     CodeBlob* cb = CodeCache::find_blob_unsafe(_call->instruction_address());
       
   129     MutexLockerEx pl(CompiledICLocker::is_safe(cb->as_compiled_method()) ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
   107     assert(cb != NULL && cb->is_compiled(), "must be compiled");
   130     assert(cb != NULL && cb->is_compiled(), "must be compiled");
   108 #endif
       
   109     _call->set_destination_mt_safe(entry_point);
   131     _call->set_destination_mt_safe(entry_point);
   110   }
   132   }
   111 
   133 
   112   if (is_optimized() || is_icstub) {
   134   if (is_optimized() || is_icstub) {
   113     // Optimized call sites don't have a cache value and ICStub call
   135     // Optimized call sites don't have a cache value and ICStub call
   128 }
   150 }
   129 
   151 
   130 
   152 
   131 
   153 
   132 address CompiledIC::ic_destination() const {
   154 address CompiledIC::ic_destination() const {
   133  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   155   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   134  if (!is_in_transition_state()) {
   156   if (!is_in_transition_state()) {
   135    return _call->destination();
   157     return _call->destination();
   136  } else {
   158   } else {
   137    return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
   159     return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
   138  }
   160   }
   139 }
   161 }
   140 
   162 
   141 
   163 
   142 bool CompiledIC::is_in_transition_state() const {
   164 bool CompiledIC::is_in_transition_state() const {
   143   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   165   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
    144   return InlineCacheBuffer::contains(_call->destination());
    166   return InlineCacheBuffer::contains(_call->destination());
   145 }
   167 }
   146 
   168 
   147 
   169 
   148 bool CompiledIC::is_icholder_call() const {
   170 bool CompiledIC::is_icholder_call() const {
   149   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   171   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   150   return !_is_optimized && is_icholder_entry(ic_destination());
   172   return !_is_optimized && is_icholder_entry(ic_destination());
   151 }
   173 }
   152 
   174 
   153 // Returns native address of 'call' instruction in inline-cache. Used by
   175 // Returns native address of 'call' instruction in inline-cache. Used by
   154 // the InlineCacheBuffer when it needs to find the stub.
   176 // the InlineCacheBuffer when it needs to find the stub.
   214 
   236 
   215   initialize_from_iter(iter);
   237   initialize_from_iter(iter);
   216 }
   238 }
   217 
   239 
   218 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
   240 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
   219   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   241   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   220   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
   242   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
   221   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
   243   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
   222 
   244 
   223   address entry;
   245   address entry;
   224   if (call_info->call_kind() == CallInfo::itable_call) {
   246   if (call_info->call_kind() == CallInfo::itable_call) {
   268 }
   290 }
   269 
   291 
   270 
   292 
   271 // true if destination is megamorphic stub
   293 // true if destination is megamorphic stub
   272 bool CompiledIC::is_megamorphic() const {
   294 bool CompiledIC::is_megamorphic() const {
   273   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   295   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   274   assert(!is_optimized(), "an optimized call cannot be megamorphic");
   296   assert(!is_optimized(), "an optimized call cannot be megamorphic");
   275 
   297 
   276   // Cannot rely on cached_value. It is either an interface or a method.
   298   // Cannot rely on cached_value. It is either an interface or a method.
   277   return VtableStubs::entry_point(ic_destination()) != NULL;
   299   return VtableStubs::entry_point(ic_destination()) != NULL;
   278 }
   300 }
   279 
   301 
   280 bool CompiledIC::is_call_to_compiled() const {
   302 bool CompiledIC::is_call_to_compiled() const {
   281   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   303   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   282 
   304 
   283   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
   305   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
   284   // method is guaranteed to still exist, since we only remove methods after all inline caches
   306   // method is guaranteed to still exist, since we only remove methods after all inline caches
    285   // have been cleaned up
    307   // have been cleaned up
   286   CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
   308   CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
   302   return is_monomorphic;
   324   return is_monomorphic;
   303 }
   325 }
   304 
   326 
   305 
   327 
   306 bool CompiledIC::is_call_to_interpreted() const {
   328 bool CompiledIC::is_call_to_interpreted() const {
   307   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   329   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
    308   // It is a call to the interpreter if the destination is either a stub (when
    330   // It is a call to the interpreter if the destination is either a stub (when
    309   // the call is optimized) or an I2C blob.
    331   // the call is optimized) or an I2C blob.
   310   bool is_call_to_interpreted = false;
   332   bool is_call_to_interpreted = false;
   311   if (!is_optimized()) {
   333   if (!is_optimized()) {
   312     // must use unsafe because the destination can be a zombie (and we're cleaning)
   334     // must use unsafe because the destination can be a zombie (and we're cleaning)
   327   }
   349   }
   328   return is_call_to_interpreted;
   350   return is_call_to_interpreted;
   329 }
   351 }
   330 
   352 
   331 void CompiledIC::set_to_clean(bool in_use) {
   353 void CompiledIC::set_to_clean(bool in_use) {
   332   assert(SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->is_locked() , "MT-unsafe call");
   354   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   333   if (TraceInlineCacheClearing || TraceICs) {
   355   if (TraceInlineCacheClearing || TraceICs) {
   334     tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", p2i(instruction_address()));
   356     tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", p2i(instruction_address()));
   335     print();
   357     print();
   336   }
   358   }
   337 
   359 
   338   address entry = _call->get_resolve_call_stub(is_optimized());
   360   address entry = _call->get_resolve_call_stub(is_optimized());
   339 
   361 
   340   // A zombie transition will always be safe, since the metadata has already been set to NULL, so
   362   // A zombie transition will always be safe, since the metadata has already been set to NULL, so
   341   // we only need to patch the destination
   363   // we only need to patch the destination
   342   bool safe_transition = _call->is_safe_for_patching() || !in_use || is_optimized() || SafepointSynchronize::is_at_safepoint();
   364   bool safe_transition = _call->is_safe_for_patching() || !in_use || is_optimized() || CompiledICLocker::is_safe(_method);
   343 
   365 
   344   if (safe_transition) {
   366   if (safe_transition) {
   345     // Kill any leftover stub we might have too
   367     // Kill any leftover stub we might have too
   346     clear_ic_stub();
   368     clear_ic_stub();
   347     if (is_optimized()) {
   369     if (is_optimized()) {
   361   // cleaning it immediately is harmless.
   383   // cleaning it immediately is harmless.
   362   // assert(is_clean(), "sanity check");
   384   // assert(is_clean(), "sanity check");
   363 }
   385 }
   364 
   386 
   365 bool CompiledIC::is_clean() const {
   387 bool CompiledIC::is_clean() const {
   366   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   388   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   367   bool is_clean = false;
   389   bool is_clean = false;
   368   address dest = ic_destination();
   390   address dest = ic_destination();
   369   is_clean = dest == _call->get_resolve_call_stub(is_optimized());
   391   is_clean = dest == _call->get_resolve_call_stub(is_optimized());
   370   assert(!is_clean || is_optimized() || cached_value() == NULL, "sanity check");
   392   assert(!is_clean || is_optimized() || cached_value() == NULL, "sanity check");
   371   return is_clean;
   393   return is_clean;
   372 }
   394 }
   373 
   395 
   374 void CompiledIC::set_to_monomorphic(CompiledICInfo& info) {
   396 void CompiledIC::set_to_monomorphic(CompiledICInfo& info) {
   375   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   397   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   376   // Updating a cache to the wrong entry can cause bugs that are very hard
   398   // Updating a cache to the wrong entry can cause bugs that are very hard
    377   // to track down: if a cache entry becomes invalid, we just clean it. In
    399   // to track down: if a cache entry becomes invalid, we just clean it. In
   378   // this way it is always the same code path that is responsible for
   400   // this way it is always the same code path that is responsible for
   379   // updating and resolving an inline cache
   401   // updating and resolving an inline cache
   380   //
   402   //
   553 
   575 
   554 // ----------------------------------------------------------------------------
   576 // ----------------------------------------------------------------------------
   555 
   577 
   556 void CompiledStaticCall::set_to_clean(bool in_use) {
   578 void CompiledStaticCall::set_to_clean(bool in_use) {
   557   // in_use is unused but needed to match template function in CompiledMethod
   579   // in_use is unused but needed to match template function in CompiledMethod
   558   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   580   assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
   559   // Reset call site
   581   // Reset call site
   560   MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
   582   MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
   561 #ifdef ASSERT
       
   562   CodeBlob* cb = CodeCache::find_blob_unsafe(instruction_address());
       
   563   assert(cb != NULL && cb->is_compiled(), "must be compiled");
       
   564 #endif
       
   565 
       
   566   set_destination_mt_safe(resolve_call_stub());
   583   set_destination_mt_safe(resolve_call_stub());
   567 
   584 
   568   // Do not reset stub here:  It is too expensive to call find_stub.
   585   // Do not reset stub here:  It is too expensive to call find_stub.
   569   // Instead, rely on caller (nmethod::clear_inline_caches) to clear
   586   // Instead, rely on caller (nmethod::clear_inline_caches) to clear
   570   // both the call and its stub.
   587   // both the call and its stub.
   604   assert(CodeCache::contains(entry), "wrong entry point");
   621   assert(CodeCache::contains(entry), "wrong entry point");
   605   set_destination_mt_safe(entry);
   622   set_destination_mt_safe(entry);
   606 }
   623 }
   607 
   624 
   608 void CompiledStaticCall::set(const StaticCallInfo& info) {
   625 void CompiledStaticCall::set(const StaticCallInfo& info) {
   609   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   626   assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
   610   MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
   627   MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
   611   // Updating a cache to the wrong entry can cause bugs that are very hard
   628   // Updating a cache to the wrong entry can cause bugs that are very hard
    612   // to track down: if a cache entry becomes invalid, we just clean it. In
    629   // to track down: if a cache entry becomes invalid, we just clean it. In
   613   // this way it is always the same code path that is responsible for
   630   // this way it is always the same code path that is responsible for
   614   // updating and resolving an inline cache
   631   // updating and resolving an inline cache