hotspot/src/share/vm/code/compiledMethod.cpp
changeset 38133 78b95467b9f1
child 39430 ab7acda033f4

/*
 * Copyright (c) 2015, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "code/compiledIC.hpp"
#include "code/scopeDesc.hpp"
#include "code/codeCache.hpp"
#include "prims/methodHandles.hpp"
#include "interpreter/bytecode.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"

CompiledMethod::CompiledMethod(Method* method, const char* name, const CodeBlobLayout& layout, int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments)
  : CodeBlob(name, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
  _method(method), _mark_for_deoptimization_status(not_marked) {
  init_defaults();
}

CompiledMethod::CompiledMethod(Method* method, const char* name, int size, int header_size, CodeBuffer* cb, int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : CodeBlob(name, CodeBlobLayout((address) this, size, header_size, cb), cb, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
  _method(method), _mark_for_deoptimization_status(not_marked) {
  init_defaults();
}
       
void CompiledMethod::init_defaults() {
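  // A fresh CompiledMethod starts with all feature flags cleared and its
  // unloading clock at zero, i.e. not yet visited by any unloading cycle.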
       
  _has_unsafe_access          = 0;
  _has_method_handle_invokes  = 0;
  _lazy_critical_native       = 0;
  _has_wide_vectors           = 0;
  _unloading_clock            = 0;
}
       
bool CompiledMethod::is_method_handle_return(address return_pc) {
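  // Answered from the PcDesc recorded for the return site; trivially false
  // if this method contains no method handle invokes at all.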
       
  if (!has_method_handle_invokes())  return false;
  PcDesc* pd = pc_desc_at(return_pc);
  if (pd == NULL)
    return false;
  return pd->is_method_handle_invoke();
}

// When using JVMCI the address might be off by the size of a call instruction.
bool CompiledMethod::is_deopt_entry(address pc) {
  return pc == deopt_handler_begin()
#if INCLUDE_JVMCI
    || pc == (deopt_handler_begin() + NativeCall::instruction_size)
#endif
    ;
}
       
// Returns a string version of the method state.
const char* CompiledMethod::state() const {
  int state = get_state();
  switch (state) {
  case in_use:
    return "in use";
  case not_used:
    return "not_used";
  case not_entrant:
    return "not_entrant";
  case zombie:
    return "zombie";
  case unloaded:
    return "unloaded";
  default:
    fatal("unexpected method state: %d", state);
    return NULL;
  }
}

//-----------------------------------------------------------------------------
       
void CompiledMethod::add_exception_cache_entry(ExceptionCache* new_entry) {
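  // New entries go at the head of the list. The release store below is what
  // makes a fully initialized entry visible to the lock-free readers in
  // handler_for_exception_and_pc before the head pointer is updated.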
       
  assert(ExceptionCache_lock->owned_by_self(), "Must hold the ExceptionCache_lock");
  assert(new_entry != NULL, "Must be non null");
  assert(new_entry->next() == NULL, "Must be null");

  ExceptionCache *ec = exception_cache();
  if (ec != NULL) {
    new_entry->set_next(ec);
  }
  release_set_exception_cache(new_entry);
}
       
void CompiledMethod::clean_exception_cache(BoolObjectClosure* is_alive) {
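  // Unlink and delete entries whose exception Klass has a dead class loader.
  // Called during GC, when class unloading may have invalidated entries.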
       
  ExceptionCache* prev = NULL;
  ExceptionCache* curr = exception_cache();

  while (curr != NULL) {
    ExceptionCache* next = curr->next();

    Klass* ex_klass = curr->exception_type();
    if (ex_klass != NULL && !ex_klass->is_loader_alive(is_alive)) {
      if (prev == NULL) {
        set_exception_cache(next);
      } else {
        prev->set_next(next);
      }
      delete curr;
      // prev stays the same.
    } else {
      prev = curr;
    }

    curr = next;
  }
}
       
// Public access methods for the exception cache.
address CompiledMethod::handler_for_exception_and_pc(Handle exception, address pc) {
  // We never grab a lock to read the exception cache, so we may
  // have false negatives. This is okay, as it can only happen during
  // the first few exception lookups for a given nmethod.
  ExceptionCache* ec = exception_cache();
  while (ec != NULL) {
    address ret_val;
    if ((ret_val = ec->match(exception, pc)) != NULL) {
      return ret_val;
    }
    ec = ec->next();
  }
  return NULL;
}
       
void CompiledMethod::add_handler_for_exception_and_pc(Handle exception, address pc, address handler) {
  // There are potential race conditions during exception cache updates, so we
  // must own the ExceptionCache_lock before doing ANY modifications. Because
  // we don't lock during reads, it is possible to have several threads attempt
  // to update the cache with the same data. We need to check for already inserted
  // copies of the current data before adding it.

  MutexLocker ml(ExceptionCache_lock);
  ExceptionCache* target_entry = exception_cache_entry_for_exception(exception);

  if (target_entry == NULL || !target_entry->add_address_and_handler(pc, handler)) {
    target_entry = new ExceptionCache(exception, pc, handler);
    add_exception_cache_entry(target_entry);
  }
}

//-------------end of code for ExceptionCache--------------
       
// Private method for manipulating the exception cache directly.
ExceptionCache* CompiledMethod::exception_cache_entry_for_exception(Handle exception) {
  ExceptionCache* ec = exception_cache();
  while (ec != NULL) {
    if (ec->match_exception_with_space(exception)) {
      return ec;
    }
    ec = ec->next();
  }
  return NULL;
}
       
bool CompiledMethod::is_at_poll_return(address pc) {
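  // Scan the relocations covering just the instruction at pc for a
  // poll_return relocation.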
       
  RelocIterator iter(this, pc, pc+1);
  while (iter.next()) {
    if (iter.type() == relocInfo::poll_return_type)
      return true;
  }
  return false;
}

bool CompiledMethod::is_at_poll_or_poll_return(address pc) {
  RelocIterator iter(this, pc, pc+1);
  while (iter.next()) {
    relocInfo::relocType t = iter.type();
    if (t == relocInfo::poll_return_type || t == relocInfo::poll_type)
      return true;
  }
  return false;
}
       
void CompiledMethod::verify_oop_relocations() {
  // Ensure that the code matches the current oop values
  RelocIterator iter(this, NULL, NULL);
  while (iter.next()) {
    if (iter.type() == relocInfo::oop_type) {
      oop_Relocation* reloc = iter.oop_reloc();
      if (!reloc->oop_is_immediate()) {
        reloc->verify_oop_relocation();
      }
    }
  }
}
       
ScopeDesc* CompiledMethod::scope_desc_at(address pc) {
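  // Decode the debug info recorded at pc into a ScopeDesc; a PcDesc must
  // be present for pc.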
       
  PcDesc* pd = pc_desc_at(pc);
  guarantee(pd != NULL, "scope must be present");
  return new ScopeDesc(this, pd->scope_decode_offset(),
                       pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
                       pd->return_oop());
}
       
void CompiledMethod::cleanup_inline_caches(bool clean_all/*=false*/) {
  assert_locked_or_safepoint(CompiledIC_lock);

  // If the method is not entrant or zombie then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (!is_in_use() && is_nmethod()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // This means that the low_boundary is going to be a little too high.
    // This shouldn't matter, since oops of non-entrant methods are never used.
    // In fact, why are we bothering to look at oops in a non-entrant method??
  }

  // Find all calls in an nmethod and clear the ones that point to non-entrant,
  // zombie and unloaded nmethods.
  ResourceMark rm;
  RelocIterator iter(this, low_boundary);
  while (iter.next()) {
    switch (iter.type()) {
      case relocInfo::virtual_call_type:
      case relocInfo::opt_virtual_call_type: {
        CompiledIC *ic = CompiledIC_at(&iter);
        // It is OK to look up references to zombies here.
        CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
        if (cb != NULL && cb->is_compiled()) {
          CompiledMethod* nm = cb->as_compiled_method();
          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
          if (clean_all || !nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean(is_alive());
        }
        break;
      }
      case relocInfo::static_call_type: {
        CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
        CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
        if (cb != NULL && cb->is_compiled()) {
          CompiledMethod* cm = cb->as_compiled_method();
          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
          if (clean_all || !cm->is_in_use() || (cm->method()->code() != cm)) {
            csc->set_to_clean();
          }
        }
        break;
      }
    }
  }
}
       
int CompiledMethod::verify_icholder_relocations() {
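  // Debug aid: count the virtual call sites whose inline cache currently
  // holds a CompiledICHolder, tracing each one if TraceCompiledIC is set.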
       
  ResourceMark rm;
  int count = 0;

  RelocIterator iter(this);
  while (iter.next()) {
    if (iter.type() == relocInfo::virtual_call_type) {
      if (CompiledIC::is_icholder_call_site(iter.virtual_call_reloc())) {
        CompiledIC *ic = CompiledIC_at(&iter);
        if (TraceCompiledIC) {
          tty->print("noticed icholder " INTPTR_FORMAT " ", p2i(ic->cached_icholder()));
          ic->print();
        }
        assert(ic->cached_icholder() != NULL, "must be non-NULL");
        count++;
      }
    }
  }

  return count;
}
       
// Method that knows how to preserve outgoing arguments at call. This method must be
// called with a frame corresponding to a Java invoke.
void CompiledMethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
#ifndef SHARK
  if (method() != NULL && !method()->is_native()) {
    address pc = fr.pc();
    SimpleScopeDesc ssd(this, pc);
    Bytecode_invoke call(ssd.method(), ssd.bci());
    bool has_receiver = call.has_receiver();
    bool has_appendix = call.has_appendix();
    Symbol* signature = call.signature();

    // The method attached by JIT-compilers should be used, if present.
    // Bytecode can be inaccurate in such a case.
    Method* callee = attached_method_before_pc(pc);
    if (callee != NULL) {
      has_receiver = !(callee->access_flags().is_static());
      has_appendix = false;
      signature = callee->signature();
    }

    fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);
  }
#endif // !SHARK
}

// -----------------------------------------------------------------------------
// CompiledMethod::get_deopt_original_pc
//
// Return the original PC for the given PC if:
// (a) the given PC belongs to an nmethod and
// (b) it is a deopt PC
address CompiledMethod::get_deopt_original_pc(const frame* fr) {
  if (fr->cb() == NULL)  return NULL;

  CompiledMethod* cm = fr->cb()->as_compiled_method_or_null();
  if (cm != NULL && cm->is_deopt_pc(fr->pc()))
    return cm->get_original_pc(fr);

  return NULL;
}
       
Method* CompiledMethod::attached_method(address call_instr) {
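  // Return the Method* that the compiler attached to the call relocation
  // at call_instr, or NULL if the call site has no attached method.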
       
  assert(code_contains(call_instr), "not part of the nmethod");
  RelocIterator iter(this, call_instr, call_instr + 1);
  while (iter.next()) {
    if (iter.addr() == call_instr) {
      switch (iter.type()) {
        case relocInfo::static_call_type:      return iter.static_call_reloc()->method_value();
        case relocInfo::opt_virtual_call_type: return iter.opt_virtual_call_reloc()->method_value();
        case relocInfo::virtual_call_type:     return iter.virtual_call_reloc()->method_value();
      }
    }
  }
  return NULL; // not found
}

Method* CompiledMethod::attached_method_before_pc(address pc) {
  if (NativeCall::is_call_before(pc)) {
    NativeCall* ncall = nativeCall_before(pc);
    return attached_method(ncall->instruction_address());
  }
  return NULL; // not a call
}
       
void CompiledMethod::clear_inline_caches() {
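  // Reset every inline cache in this method by clearing its IC-related
  // relocations; only legal at a safepoint, when no call site is being
  // patched concurrently.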
       
  assert(SafepointSynchronize::is_at_safepoint(), "cleaning of IC's only allowed at safepoint");
  if (is_zombie()) {
    return;
  }

  RelocIterator iter(this);
  while (iter.next()) {
    iter.reloc()->clear_inline_cache();
  }
}

// Clear ICStubs of all compiled ICs
void CompiledMethod::clear_ic_stubs() {
  assert_locked_or_safepoint(CompiledIC_lock);
  RelocIterator iter(this);
  while (iter.next()) {
    if (iter.type() == relocInfo::virtual_call_type) {
      CompiledIC* ic = CompiledIC_at(&iter);
      ic->clear_ic_stub();
    }
  }
}
       
#ifdef ASSERT

class CheckClass : AllStatic {
  static BoolObjectClosure* _is_alive;

  // Check that the class loader is alive for this bit of metadata.
  static void check_class(Metadata* md) {
    Klass* klass = NULL;
    if (md->is_klass()) {
      klass = ((Klass*)md);
    } else if (md->is_method()) {
      klass = ((Method*)md)->method_holder();
    } else if (md->is_methodData()) {
      klass = ((MethodData*)md)->method()->method_holder();
    } else {
      md->print();
      ShouldNotReachHere();
    }
    assert(klass->is_loader_alive(_is_alive), "must be alive");
  }
 public:
  static void do_check_class(BoolObjectClosure* is_alive, CompiledMethod* nm) {
    assert(SafepointSynchronize::is_at_safepoint(), "this is only ok at safepoint");
    _is_alive = is_alive;
    nm->metadata_do(check_class);
  }
};

// This is called during a safepoint so can use static data
BoolObjectClosure* CheckClass::_is_alive = NULL;
#endif // ASSERT

void CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive) {
  if (ic->is_icholder_call()) {
    // A CompiledICHolder call site: keep the inline cache only if both the
    // holder method's class loader and the holder klass's class loader are
    // still alive (checked just below).
    CompiledICHolder* cichk_oop = ic->cached_icholder();

    if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
        cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
      return;
    }
  } else {
    Metadata* ic_oop = ic->cached_metadata();
    if (ic_oop != NULL) {
      if (ic_oop->is_klass()) {
        if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
          return;
        }
      } else if (ic_oop->is_method()) {
        if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
          return;
        }
      } else {
        ShouldNotReachHere();
      }
    }
  }

  ic->set_to_clean();
}
       
unsigned char CompiledMethod::_global_unloading_clock = 0;

void CompiledMethod::increase_unloading_clock() {
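  // Bumped once per code-cache unloading cycle. A rough sketch of a cycle,
  // assuming the GC drives it over each nmethod nm:
  //   CompiledMethod::increase_unloading_clock();   // start of cycle
  //   postponed |= nm->do_unloading_parallel(is_alive, unloading_occurred);
  //   nm->set_unloading_clock(CompiledMethod::global_unloading_clock());
  //   nm->do_unloading_parallel_postponed(is_alive, unloading_occurred);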
       
  _global_unloading_clock++;
  if (_global_unloading_clock == 0) {
    // nmethods are allocated with _unloading_clock == 0,
    // so 0 is never used as a clock value.
    _global_unloading_clock = 1;
  }
}

void CompiledMethod::set_unloading_clock(unsigned char unloading_clock) {
  OrderAccess::release_store((volatile jubyte*)&_unloading_clock, unloading_clock);
}

unsigned char CompiledMethod::unloading_clock() {
  return (unsigned char)OrderAccess::load_acquire((volatile jubyte*)&_unloading_clock);
}
       
// Processing of oop references should have been sufficient to keep
// all strong references alive.  Any weak references should have been
// cleared as well.  Visit all the metadata and ensure that it's
// really alive.
void CompiledMethod::verify_metadata_loaders(address low_boundary, BoolObjectClosure* is_alive) {
#ifdef ASSERT
  RelocIterator iter(this, low_boundary);
  while (iter.next()) {
    // static_stub_Relocations may have dangling references to
    // Method*s so trim them out here.  Otherwise it looks like
    // compiled code is maintaining a link to dead metadata.
    address static_call_addr = NULL;
    if (iter.type() == relocInfo::opt_virtual_call_type) {
      CompiledIC* cic = CompiledIC_at(&iter);
      if (!cic->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    } else if (iter.type() == relocInfo::static_call_type) {
      CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
      if (!csc->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    }
    if (static_call_addr != NULL) {
      RelocIterator sciter(this, low_boundary);
      while (sciter.next()) {
        if (sciter.type() == relocInfo::static_stub_type &&
            sciter.static_stub_reloc()->static_call() == static_call_addr) {
          sciter.static_stub_reloc()->clear_inline_cache();
        }
      }
    }
  }
  // Check that the metadata embedded in the nmethod is alive
  CheckClass::do_check_class(is_alive, this);
#endif
}

// This is called at the end of the strong tracing/marking phase of a
// GC to unload an nmethod if it contains otherwise unreachable
// oops.

void CompiledMethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
  // Make sure this nmethod is ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  if (JvmtiExport::has_redefined_a_class()) {
    // This set of the unloading_occurred flag is done before the
    // call to post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache(is_alive);

  // If class unloading occurred we first iterate over all inline caches and
  // clear ICs where the cached oop is referring to an unloaded klass or method.
  // The remaining live cached oops will be traversed in the relocInfo::oop_type
  // iteration below.
  if (unloading_occurred) {
    RelocIterator iter(this, low_boundary);
    while (iter.next()) {
      if (iter.type() == relocInfo::virtual_call_type) {
        CompiledIC *ic = CompiledIC_at(&iter);
        clean_ic_if_metadata_is_dead(ic, is_alive);
      }
    }
  }

  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
    return;
  }

#if INCLUDE_JVMCI
  if (do_unloading_jvmci(is_alive, unloading_occurred)) {
    return;
  }
#endif

  // Ensure that all metadata is still alive
  verify_metadata_loaders(low_boundary, is_alive);
}
       
template <class CompiledICorStaticCall>
static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, BoolObjectClosure *is_alive, CompiledMethod* from) {
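  // Clean the call site ic if its destination is an nmethod that is not in
  // use or is no longer the current code of its method. Returns true when
  // cleaning must be postponed because the target has not yet been processed
  // in this unloading cycle (its unloading clock differs from the global
  // clock).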
       
  // It is OK to look up references to zombies here.
  CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
  CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
  if (nm != NULL) {
    if (nm->unloading_clock() != CompiledMethod::global_unloading_clock()) {
      // The nmethod has not been processed yet.
      return true;
    }

    // Clean inline caches pointing to both zombie and not_entrant methods
    if (!nm->is_in_use() || (nm->method()->code() != nm)) {
      ic->set_to_clean();
      assert(ic->is_clean(), "nmethod " PTR_FORMAT " not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
    }
  }

  return false;
}
       
static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, BoolObjectClosure *is_alive, CompiledMethod* from) {
  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), is_alive, from);
}

static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, BoolObjectClosure *is_alive, CompiledMethod* from) {
  return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from);
}
       
bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
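  // Parallel counterpart of do_unloading: cleans what it safely can and
  // returns true if some call-site cleaning had to be postponed to
  // do_unloading_parallel_postponed, once all unloading clocks are current.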
       
  ResourceMark rm;

  // Make sure this nmethod is ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  if (JvmtiExport::has_redefined_a_class()) {
    // This set of the unloading_occurred flag is done before the
    // call to post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache(is_alive);

  bool postponed = false;

  RelocIterator iter(this, low_boundary);
  while (iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first iterate over all inline caches and
        // clear ICs where the cached oop is referring to an unloaded klass or method.
        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
      }

      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::opt_virtual_call_type:
      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::static_call_type:
      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
      break;

    case relocInfo::oop_type:
      // handled by do_unloading_oops below
      break;

    case relocInfo::metadata_type:
      break; // nothing to do.
    }
  }

  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
    return postponed;
  }

#if INCLUDE_JVMCI
  if (do_unloading_jvmci(is_alive, unloading_occurred)) {
    return postponed;
  }
#endif

  // Ensure that all metadata is still alive
  verify_metadata_loaders(low_boundary, is_alive);

  return postponed;
}
       
void CompiledMethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred) {
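  // Second pass of parallel unloading: clean the call sites whose cleaning
  // was postponed above, now that every nmethod's unloading clock is
  // up to date.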
       
  ResourceMark rm;

  // Make sure this nmethod is ready to receive visitors
  assert(!is_zombie(),
         "should not call follow on zombie nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  RelocIterator iter(this, low_boundary);
  while (iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::opt_virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::static_call_type:
      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
      break;
    }
  }
}