src/hotspot/share/code/compiledMethod.cpp
changeset 50416 ef980b9ac191
parent 49969 8624981f1ffa
child 50995 3c59afe1afc9
comparison: 50415:e4301f8c3aaa vs. 50416:ef980b9ac191
@@ old 26-35, new 26-37 @@
 #include "code/compiledIC.hpp"
 #include "code/compiledMethod.inline.hpp"
 #include "code/scopeDesc.hpp"
 #include "code/codeCache.hpp"
 #include "interpreter/bytecode.inline.hpp"
+#include "logging/log.hpp"
+#include "logging/logTag.hpp"
 #include "memory/resourceArea.hpp"
 #include "oops/methodData.hpp"
 #include "oops/method.inline.hpp"
 #include "prims/methodHandles.hpp"
 #include "runtime/handles.inline.hpp"
@@ old 220-232, new 222-232 @@
   return new ScopeDesc(this, pd->scope_decode_offset(),
                        pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
                        pd->return_oop());
 }
 
-void CompiledMethod::cleanup_inline_caches(bool clean_all/*=false*/) {
-  assert_locked_or_safepoint(CompiledIC_lock);
-
+address CompiledMethod::oops_reloc_begin() const {
   // If the method is not entrant or zombie then a JMP is plastered over the
   // first few bytes.  If an oop in the old code was there, that oop
   // should not get GC'd.  Skip the first few bytes of oops on
   // not-entrant methods.
   address low_boundary = verified_entry_point();
@@ old 235-279, new 235-245 @@
     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
     // This means that the low_boundary is going to be a little too high.
     // This shouldn't matter, since oops of non-entrant methods are never used.
     // In fact, why are we bothering to look at oops in a non-entrant method??
   }
-
-  // Find all calls in an nmethod and clear the ones that point to non-entrant,
-  // zombie and unloaded nmethods.
-  ResourceMark rm;
-  RelocIterator iter(this, low_boundary);
-  while(iter.next()) {
-    switch(iter.type()) {
-      case relocInfo::virtual_call_type:
-      case relocInfo::opt_virtual_call_type: {
-        CompiledIC *ic = CompiledIC_at(&iter);
-        // Ok, to lookup references to zombies here
-        CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
-        if( cb != NULL && cb->is_compiled() ) {
-          CompiledMethod* nm = cb->as_compiled_method();
-          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
-          if (clean_all || !nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean(is_alive());
-        }
-        break;
-      }
-      case relocInfo::static_call_type: {
-          CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
-          CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
-          if( cb != NULL && cb->is_compiled() ) {
-            CompiledMethod* cm = cb->as_compiled_method();
-            // Clean inline caches pointing to zombie, non-entrant and unloaded methods
-            if (clean_all || !cm->is_in_use() || (cm->method()->code() != cm)) {
-              csc->set_to_clean();
-            }
-          }
-        break;
-      }
-      default:
-        break;
-    }
-  }
+  return low_boundary;
 }
 
 int CompiledMethod::verify_icholder_relocations() {
   ResourceMark rm;
   int count = 0;
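
With the cleaning loop gone, oops_reloc_begin() only computes the boundary below which oop relocations must be ignored in a not-entrant method. For context, a minimal sketch of the kind of oop walk that consumes such a boundary; the helper name, the closure parameter and the loop shape are illustrative and not part of this change, while RelocIterator, oop_Relocation and oops_reloc_begin() are the HotSpot names used above:

// Sketch: visit the oops embedded in a CompiledMethod's instruction stream,
// starting at oops_reloc_begin() so that an oop overwritten by the
// not-entrant jump patch is never touched.
static void visit_embedded_oops(CompiledMethod* cm, OopClosure* cl) {
  RelocIterator iter(cm, cm->oops_reloc_begin());
  while (iter.next()) {
    if (iter.type() == relocInfo::oop_type) {
      oop_Relocation* r = iter.oop_reloc();
      if (r->oop_is_immediate()) {  // only immediate oops live in the code itself
        cl->do_oop(r->oop_addr());
      }
    }
  }
}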
@@ old 435-455, new 401-419 @@
 
 unsigned char CompiledMethod::unloading_clock() {
   return OrderAccess::load_acquire(&_unloading_clock);
 }
 
-// Processing of oop references should have been sufficient to keep
-// all strong references alive.  Any weak references should have been
-// cleared as well.  Visit all the metadata and ensure that it's
-// really alive.
-void CompiledMethod::verify_metadata_loaders(address low_boundary) {
+
+// static_stub_Relocations may have dangling references to
+// nmethods so trim them out here.  Otherwise it looks like
+// compiled code is maintaining a link to dead metadata.
+void CompiledMethod::clean_ic_stubs() {
 #ifdef ASSERT
-    RelocIterator iter(this, low_boundary);
-    while (iter.next()) {
-    // static_stub_Relocations may have dangling references to
-    // Method*s so trim them out here.  Otherwise it looks like
-    // compiled code is maintaining a link to dead metadata.
+  address low_boundary = oops_reloc_begin();
+  RelocIterator iter(this, low_boundary);
+  while (iter.next()) {
     address static_call_addr = NULL;
     if (iter.type() == relocInfo::opt_virtual_call_type) {
       CompiledIC* cic = CompiledIC_at(&iter);
       if (!cic->is_call_to_interpreted()) {
         static_call_addr = iter.addr();
@@ old 468-632, new 432-591 @@
           sciter.static_stub_reloc()->clear_inline_cache();
         }
       }
     }
   }
-  // Check that the metadata embedded in the nmethod is alive
-  metadata_do(check_class);
 #endif
 }
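
The ASSERT-only body above (its middle is elided by the comparison view) pairs every optimized virtual call that no longer dispatches to the interpreter with its static stub and clears that stub. A rough sketch of that matching step, assuming static_stub_Relocation::static_call() returns the call site a stub belongs to; the standalone helper is illustrative only:

// Sketch: clear the static stub that belongs to one call site so it stops
// holding a dangling Method*/nmethod reference.
static void clear_stub_for_call_site(CompiledMethod* cm, address static_call_addr) {
  RelocIterator sciter(cm, cm->oops_reloc_begin());
  while (sciter.next()) {
    if (sciter.type() == relocInfo::static_stub_type &&
        sciter.static_stub_reloc()->static_call() == static_call_addr) {
      sciter.static_stub_reloc()->clear_inline_cache();
    }
  }
}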
 
 // This is called at the end of the strong tracing/marking phase of a
 // GC to unload an nmethod if it contains otherwise unreachable
 // oops.
 
-void CompiledMethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
+void CompiledMethod::do_unloading(BoolObjectClosure* is_alive) {
   // Make sure the oop's ready to receive visitors
   assert(!is_zombie() && !is_unloaded(),
          "should not call follow on zombie or unloaded nmethod");
 
-  // If the method is not entrant then a JMP is plastered over the
-  // first few bytes.  If an oop in the old code was there, that oop
-  // should not get GC'd.  Skip the first few bytes of oops on
-  // not-entrant methods.
-  address low_boundary = verified_entry_point();
-  if (is_not_entrant()) {
-    low_boundary += NativeJump::instruction_size;
-    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
-    // (See comment above.)
-  }
-
-  // Exception cache
-  clean_exception_cache();
-
-  // If class unloading occurred we first iterate over all inline caches and
-  // clear ICs where the cached oop is referring to an unloaded klass or method.
-  // The remaining live cached oops will be traversed in the relocInfo::oop_type
-  // iteration below.
-  if (unloading_occurred) {
-    RelocIterator iter(this, low_boundary);
-    while(iter.next()) {
-      if (iter.type() == relocInfo::virtual_call_type) {
-        CompiledIC *ic = CompiledIC_at(&iter);
-        clean_ic_if_metadata_is_dead(ic);
-      }
-    }
-  }
-
-  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
+  address low_boundary = oops_reloc_begin();
+
+  if (do_unloading_oops(low_boundary, is_alive)) {
     return;
   }
 
 #if INCLUDE_JVMCI
-  if (do_unloading_jvmci(unloading_occurred)) {
+  if (do_unloading_jvmci()) {
     return;
   }
 #endif
 
-  // Ensure that all metadata is still alive
-  verify_metadata_loaders(low_boundary);
+  // Cleanup exception cache and inline caches happens
+  // after all the unloaded methods are found.
 }
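
After this change do_unloading() only decides whether the nmethod has to be unloaded (a dead embedded oop, or dead JVMCI installed code); clearing the exception cache and the inline caches is deferred until every unloaded nmethod is known. A sketch of the intended two-phase driver under a serial collector; the iterator loop and the helper name are illustrative, only do_unloading() and unload_nmethod_caches() come from this file:

// Sketch: phase 1 flags nmethods that embed otherwise-unreachable oops,
// phase 2 purges the caches of the survivors once the unloaded set is complete.
static void unload_dead_nmethods(BoolObjectClosure* is_alive, bool unloading_occurred) {
  for (CompiledMethodIterator iter; iter.next(); ) {
    iter.method()->do_unloading(is_alive);
  }
  for (CompiledMethodIterator iter; iter.next(); ) {
    CompiledMethod* cm = iter.method();
    if (!cm->is_unloaded()) {
      cm->unload_nmethod_caches(/*parallel*/ false, unloading_occurred);
    }
  }
}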
 
+// Clean references to unloaded nmethods at addr from this one, which is not unloaded.
 template <class CompiledICorStaticCall>
-static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from) {
+static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from,
+                                         bool parallel, bool clean_all) {
   // Ok, to lookup references to zombies here
   CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
   CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
   if (nm != NULL) {
-    if (nm->unloading_clock() != CompiledMethod::global_unloading_clock()) {
+    if (parallel && nm->unloading_clock() != CompiledMethod::global_unloading_clock()) {
       // The nmethod has not been processed yet.
       return true;
     }
 
     // Clean inline caches pointing to both zombie and not_entrant methods
-    if (!nm->is_in_use() || (nm->method()->code() != nm)) {
-      ic->set_to_clean();
+    if (clean_all || !nm->is_in_use() || (nm->method()->code() != nm)) {
+      ic->set_to_clean(from->is_alive());
       assert(ic->is_clean(), "nmethod " PTR_FORMAT "not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
     }
   }
 
   return false;
 }
 
-static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from) {
-  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from);
-}
-
-static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from) {
-  return clean_if_nmethod_is_unloaded(csc, csc->destination(), from);
+static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from,
+                                         bool parallel, bool clean_all = false) {
+  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from, parallel, clean_all);
+}
+
+static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from,
+                                         bool parallel, bool clean_all = false) {
+  return clean_if_nmethod_is_unloaded(csc, csc->destination(), from, parallel, clean_all);
 }
 
 bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
   ResourceMark rm;
 
   // Make sure the oop's ready to receive visitors
   assert(!is_zombie() && !is_unloaded(),
          "should not call follow on zombie or unloaded nmethod");
 
-  // If the method is not entrant then a JMP is plastered over the
-  // first few bytes.  If an oop in the old code was there, that oop
-  // should not get GC'd.  Skip the first few bytes of oops on
-  // not-entrant methods.
-  address low_boundary = verified_entry_point();
-  if (is_not_entrant()) {
-    low_boundary += NativeJump::instruction_size;
-    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
-    // (See comment above.)
-  }
-
-  // Exception cache
-  clean_exception_cache();
-
+  address low_boundary = oops_reloc_begin();
+
+  if (do_unloading_oops(low_boundary, is_alive)) {
+    return false;
+  }
+
+#if INCLUDE_JVMCI
+  if (do_unloading_jvmci()) {
+    return false;
+  }
+#endif
+
+  return unload_nmethod_caches(/*parallel*/true, unloading_occurred);
+}
+
+// Cleans caches in nmethods that point to either classes that are unloaded
+// or nmethods that are unloaded.
+//
+// Can be called either in parallel by G1 currently or after all
+// nmethods are unloaded.  Return postponed=true in the parallel case for
+// inline caches found that point to nmethods that are not yet visited during
+// the do_unloading walk.
+bool CompiledMethod::unload_nmethod_caches(bool parallel, bool unloading_occurred) {
+
+  // Exception cache only needs to be called if unloading occurred
+  if (unloading_occurred) {
+    clean_exception_cache();
+  }
+
+  bool postponed = cleanup_inline_caches_impl(parallel, unloading_occurred, /*clean_all*/false);
+
+  // All static stubs need to be cleaned.
+  clean_ic_stubs();
+
+  // Check that the metadata embedded in the nmethod is alive
+  DEBUG_ONLY(metadata_do(check_class));
+
+  return postponed;
+}
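
unload_nmethod_caches() makes the postponed=true contract concrete: in the parallel (G1) case an inline cache whose target nmethod has not been visited by any worker yet is reported back instead of being cleaned. A sketch of the two-pass protocol that return value supports; the worker loop and the claim_* helpers are hypothetical, only the CompiledMethod entry points are real:

// Sketch: first pass over the code cache per worker, then a postponed pass for
// inline caches whose target had not been processed during the first pass.
static void parallel_unloading_worker(BoolObjectClosure* is_alive, bool unloading_occurred) {
  bool any_postponed = false;
  for (CompiledMethod* cm = claim_first_pass(); cm != NULL; cm = claim_first_pass()) {
    any_postponed |= cm->do_unloading_parallel(is_alive, unloading_occurred);
  }
  // A barrier between the passes is assumed: all workers must finish pass one,
  // and any_postponed must be combined across workers.
  if (any_postponed) {
    for (CompiledMethod* cm = claim_second_pass(); cm != NULL; cm = claim_second_pass()) {
      cm->do_unloading_parallel_postponed();
    }
  }
}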
       
+
+// Called to clean up after class unloading for live nmethods and from the sweeper
+// for all methods.
+bool CompiledMethod::cleanup_inline_caches_impl(bool parallel, bool unloading_occurred, bool clean_all) {
+  assert_locked_or_safepoint(CompiledIC_lock);
   bool postponed = false;
 
-  RelocIterator iter(this, low_boundary);
+  // Find all calls in an nmethod and clear the ones that point to non-entrant,
+  // zombie and unloaded nmethods.
+  RelocIterator iter(this, oops_reloc_begin());
   while(iter.next()) {
 
     switch (iter.type()) {
 
     case relocInfo::virtual_call_type:
       if (unloading_occurred) {
-        // If class unloading occurred we first iterate over all inline caches and
-        // clear ICs where the cached oop is referring to an unloaded klass or method.
+        // If class unloading occurred we first clear ICs where the cached metadata
+        // is referring to an unloaded klass or method.
         clean_ic_if_metadata_is_dead(CompiledIC_at(&iter));
       }
 
-      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
+      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, parallel, clean_all);
       break;
 
     case relocInfo::opt_virtual_call_type:
-      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
+      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, parallel, clean_all);
       break;
 
     case relocInfo::static_call_type:
-      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this);
+      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this, parallel, clean_all);
       break;
 
     case relocInfo::oop_type:
-      // handled by do_unloading_oops below
+      // handled by do_unloading_oops already
       break;
 
     case relocInfo::metadata_type:
       break; // nothing to do.
 
     default:
       break;
     }
   }
-
-  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
-    return postponed;
-  }
-
-#if INCLUDE_JVMCI
-  if (do_unloading_jvmci(unloading_occurred)) {
-    return postponed;
-  }
-#endif
-
-  // Ensure that all metadata is still alive
-  verify_metadata_loaders(low_boundary);
 
   return postponed;
 }
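
cleanup_inline_caches_impl() is now the single relocation walk shared by the GC path (clean_all=false: only caches pointing at dead or not-entrant code are reset) and the sweeper path (clean_all=true: every cache that no longer points at the current code of its target method is reset). A sketch of how a sweeper-facing cleanup_inline_caches(bool clean_all) wrapper could forward to it; the wrapper shown is an assumed shape, the real one is defined outside this hunk:

// Sketch: sweeper entry point, no class unloading in progress, nothing to postpone.
void CompiledMethod::cleanup_inline_caches(bool clean_all) {
  cleanup_inline_caches_impl(/*parallel*/ false, /*unloading_occurred*/ false, clean_all);
}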
 
 void CompiledMethod::do_unloading_parallel_postponed() {
@@ old 634-669, new 593-617 @@
 
   // Make sure the oop's ready to receive visitors
   assert(!is_zombie(),
          "should not call follow on zombie nmethod");
 
-  // If the method is not entrant then a JMP is plastered over the
-  // first few bytes.  If an oop in the old code was there, that oop
-  // should not get GC'd.  Skip the first few bytes of oops on
-  // not-entrant methods.
-  address low_boundary = verified_entry_point();
-  if (is_not_entrant()) {
-    low_boundary += NativeJump::instruction_size;
-    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
-    // (See comment above.)
-  }
-
-  RelocIterator iter(this, low_boundary);
+  RelocIterator iter(this, oops_reloc_begin());
   while(iter.next()) {
 
     switch (iter.type()) {
 
     case relocInfo::virtual_call_type:
-      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
+      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, true);
       break;
 
     case relocInfo::opt_virtual_call_type:
-      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
+      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, true);
       break;
 
     case relocInfo::static_call_type:
-      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this);
+      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this, true);
       break;
 
     default:
       break;
     }