hotspot/src/share/vm/runtime/sharedRuntime.cpp
changeset 33160 c59f1676d27e
parent 32582 56619bb8bcaa
child 33198 b37ad9fbf681
old: 33159:89b942323bd1   new: 33160:c59f1676d27e   (unchanged lines show both line numbers; - = deleted, + = inserted)
    21     21    * questions.
    22     22    *
    23     23    */
    24     24   
    25     25   #include "precompiled.hpp"
           26 + #include "classfile/stringTable.hpp"
    26     27   #include "classfile/systemDictionary.hpp"
    27     28   #include "classfile/vmSymbols.hpp"
    28     29   #include "code/codeCache.hpp"
    29     30   #include "code/compiledIC.hpp"
    30     31   #include "code/codeCacheExtensions.hpp"
    44     45   #include "prims/methodHandles.hpp"
    45     46   #include "prims/nativeLookup.hpp"
    46     47   #include "runtime/arguments.hpp"
    47     48   #include "runtime/atomic.inline.hpp"
    48     49   #include "runtime/biasedLocking.hpp"
           50 + #include "runtime/compilationPolicy.hpp"
    49     51   #include "runtime/handles.inline.hpp"
    50     52   #include "runtime/init.hpp"
    51     53   #include "runtime/interfaceSupport.hpp"
    52     54   #include "runtime/javaCalls.hpp"
    53     55   #include "runtime/sharedRuntime.hpp"
    91     93     _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
    92     94     _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
    93     95     _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
    94     96     _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
    95     97   
    96        - #ifdef COMPILER2
           98 + #if defined(COMPILER2) || INCLUDE_JVMCI
    97        -   // Vectors are generated only by C2.
           99 +   // Vectors are generated only by C2 and JVMCI.
    98        -   if (is_wide_vector(MaxVectorSize)) {
          100 +   bool support_wide = is_wide_vector(MaxVectorSize);
          101 +   if (support_wide) {
    99    102       _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
   100    103     }
   101        - #endif // COMPILER2
          104 + #endif // COMPILER2 || INCLUDE_JVMCI
   102    105     _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
   103    106     _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
   104    107   
   105    108     generate_deopt_blob();
   106    109   
   459    462     assert(thread->frames_to_pop_failed_realloc() == 0 || Interpreter::contains(return_address), "missed frames to pop?");
   460    463   
   461    464     // Reset method handle flag.
   462    465     thread->set_is_method_handle_return(false);
   463    466   
          467 + #if INCLUDE_JVMCI
          468 +   // JVMCI's ExceptionHandlerStub expects the thread local exception PC to be clear
          469 +   // and other exception handler continuations do not read it
          470 +   thread->set_exception_pc(NULL);
          471 + #endif
          472 + 
   464    473     // The fastest case first
   465    474     CodeBlob* blob = CodeCache::find_blob(return_address);
   466    475     nmethod* nm = (blob != NULL) ? blob->as_nmethod_or_null() : NULL;
   467    476     if (nm != NULL) {
   468    477       // Set flag if return address is a method handle call site.
   524    533   
   525    534     // Look up the relocation information
   526    535     assert(((nmethod*)cb)->is_at_poll_or_poll_return(pc),
   527    536       "safepoint polling: type must be poll");
   528    537   
   529        -   assert(((NativeInstruction*)pc)->is_safepoint_poll(),
   530        -     "Only polling locations are used for safepoint");
          538 + #ifdef ASSERT
          539 +   if (!((NativeInstruction*)pc)->is_safepoint_poll()) {
          540 +     tty->print_cr("bad pc: " PTR_FORMAT, p2i(pc));
          541 +     Disassembler::decode(cb);
          542 +     fatal("Only polling locations are used for safepoint");
          543 +   }
          544 + #endif
   531    545   
   532    546     bool at_poll_return = ((nmethod*)cb)->is_at_poll_return(pc);
   533    547     bool has_wide_vectors = ((nmethod*)cb)->has_wide_vectors();
   534    548     if (at_poll_return) {
   535    549       assert(SharedRuntime::polling_page_return_handler_blob() != NULL,
   614    628   // for given exception
   615    629   address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
   616    630                                                       bool force_unwind, bool top_frame_only) {
   617    631     assert(nm != NULL, "must exist");
   618    632     ResourceMark rm;
          633 + 
          634 + #if INCLUDE_JVMCI
          635 +   if (nm->is_compiled_by_jvmci()) {
          636 +     // lookup exception handler for this pc
          637 +     int catch_pco = ret_pc - nm->code_begin();
          638 +     ExceptionHandlerTable table(nm);
          639 +     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
          640 +     if (t != NULL) {
          641 +       return nm->code_begin() + t->pco();
          642 +     } else {
          643 +       // there is no exception handler for this pc => deoptimize
          644 +       nm->make_not_entrant();
          645 + 
          646 +       // Use Deoptimization::deoptimize for all of its side-effects:
          647 +       // revoking biases of monitors, gathering traps statistics, logging...
          648 +       // it also patches the return pc but we do not care about that
          649 +       // since we return a continuation to the deopt_blob below.
          650 +       JavaThread* thread = JavaThread::current();
          651 +       RegisterMap reg_map(thread, UseBiasedLocking);
          652 +       frame runtime_frame = thread->last_frame();
          653 +       frame caller_frame = runtime_frame.sender(&reg_map);
          654 +       Deoptimization::deoptimize(thread, caller_frame, &reg_map, Deoptimization::Reason_not_compiled_exception_handler);
          655 + 
          656 +       return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
          657 +     }
          658 +   }
          659 + #endif // INCLUDE_JVMCI
   619    660   
   620    661     ScopeDesc* sd = nm->scope_desc_at(ret_pc);
   621    662     // determine handler bci, if any
   622    663     EXCEPTION_MARK;
   623    664   
   735    776     // Increment counter for hs_err file reporting
   736    777     Atomic::inc(&Exceptions::_stack_overflow_errors);
   737    778     throw_and_post_jvmti_exception(thread, exception);
   738    779   JRT_END
   739    780   
          781 + #if INCLUDE_JVMCI
          782 + address SharedRuntime::deoptimize_for_implicit_exception(JavaThread* thread, address pc, nmethod* nm, int deopt_reason) {
          783 +   assert(deopt_reason > Deoptimization::Reason_none && deopt_reason < Deoptimization::Reason_LIMIT, "invalid deopt reason");
          784 +   thread->set_jvmci_implicit_exception_pc(pc);
          785 +   thread->set_pending_deoptimization(Deoptimization::make_trap_request((Deoptimization::DeoptReason)deopt_reason, Deoptimization::Action_reinterpret));
          786 +   return (SharedRuntime::deopt_blob()->implicit_exception_uncommon_trap());
          787 + }
          788 + #endif // INCLUDE_JVMCI
          789 + 
   740    790   address SharedRuntime::continuation_for_implicit_exception(JavaThread* thread,
   741    791                                                              address pc,
   742    792                                                              SharedRuntime::ImplicitExceptionKind exception_kind)
   743    793   {
   744    794     address target_pc = NULL;
   804    854             // 3. Implicit null exception in nmethod
   805    855   
   806    856             if (!cb->is_nmethod()) {
   807    857               bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
   808    858               if (!is_in_blob) {
   809        -               cb->print();
   810        -               fatal(err_msg("exception happened outside interpreter, nmethods and vtable stubs at pc " INTPTR_FORMAT, pc));
          859 +               // Allow normal crash reporting to handle this
          860 +               return NULL;
   811    861               }
   812    862               Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, pc);
   813    863               // There is no handler here, so we will simply unwind.
   814    864               return StubRoutines::throw_NullPointerException_at_call_entry();
   815    865             }
   832    882           }
   833    883   
   834    884   #ifndef PRODUCT
   835    885             _implicit_null_throws++;
   836    886   #endif
          887 + #if INCLUDE_JVMCI
          888 +           if (nm->is_compiled_by_jvmci() && nm->pc_desc_at(pc) != NULL) {
          889 +             // If there's no PcDesc then we'll die way down inside of
          890 +             // deopt instead of just getting normal error reporting,
          891 +             // so only go there if it will succeed.
          892 +             return deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_null_check);
          893 +           } else {
          894 + #endif // INCLUDE_JVMCI
          895 +           assert (nm->is_nmethod(), "Expect nmethod");
   837    896             target_pc = nm->continuation_for_implicit_exception(pc);
          897 + #if INCLUDE_JVMCI
          898 +           }
          899 + #endif // INCLUDE_JVMCI
   838    900             // If there's an unexpected fault, target_pc might be NULL,
   839    901             // in which case we want to fall through into the normal
   840    902             // error handling code.
   841    903           }
   842    904   
   844    906         }
   845    907   
   846    908   
   847    909         case IMPLICIT_DIVIDE_BY_ZERO: {
   848    910           nmethod* nm = CodeCache::find_nmethod(pc);
   849        -         guarantee(nm != NULL, "must have containing nmethod for implicit division-by-zero exceptions");
          911 +         guarantee(nm != NULL, "must have containing compiled method for implicit division-by-zero exceptions");
   850    912   #ifndef PRODUCT
   851    913           _implicit_div0_throws++;
   852    914   #endif
          915 + #if INCLUDE_JVMCI
          916 +         if (nm->is_compiled_by_jvmci() && nm->pc_desc_at(pc) != NULL) {
          917 +           return deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_div0_check);
          918 +         } else {
          919 + #endif // INCLUDE_JVMCI
   853    920           target_pc = nm->continuation_for_implicit_exception(pc);
          921 + #if INCLUDE_JVMCI
          922 +         }
          923 + #endif // INCLUDE_JVMCI
   854    924           // If there's an unexpected fault, target_pc might be NULL,
   855    925           // in which case we want to fall through into the normal
   856    926           // error handling code.
   857    927           break; // fall through
   858    928         }
   860    930         default: ShouldNotReachHere();
   861    931       }
   862    932   
   863    933       assert(exception_kind == IMPLICIT_NULL || exception_kind == IMPLICIT_DIVIDE_BY_ZERO, "wrong implicit exception kind");
   864    934   
   865        -     // for AbortVMOnException flag
   866        -     NOT_PRODUCT(Exceptions::debug_check_abort("java.lang.NullPointerException"));
   867    935       if (exception_kind == IMPLICIT_NULL) {
          936 + #ifndef PRODUCT
          937 +       // for AbortVMOnException flag
          938 +       Exceptions::debug_check_abort("java.lang.NullPointerException");
          939 + #endif //PRODUCT
   868    940         Events::log_exception(thread, "Implicit null exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
   869    941       } else {
          942 + #ifndef PRODUCT
          943 +       // for AbortVMOnException flag
          944 +       Exceptions::debug_check_abort("java.lang.ArithmeticException");
          945 + #endif //PRODUCT
   870    946         Events::log_exception(thread, "Implicit division by zero exception at " INTPTR_FORMAT " to " INTPTR_FORMAT, pc, target_pc);
   871    947       }
   872    948       return target_pc;
   873    949     }
   874    950   
   914    990   JRT_END
   915    991   #endif // !PRODUCT
   916    992   
   917    993   JRT_ENTRY_NO_ASYNC(void, SharedRuntime::register_finalizer(JavaThread* thread, oopDesc* obj))
   918    994     assert(obj->is_oop(), "must be a valid oop");
          995 + #if INCLUDE_JVMCI
          996 +   // This removes the requirement for JVMCI compilers to emit code
          997 +   // performing a dynamic check that obj has a finalizer before
          998 +   // calling this routine. There should be no performance impact
          999 +   // for C1 since it emits a dynamic check. C2 and the interpreter
         1000 +   // uses other runtime routines for registering finalizers.
         1001 +   if (!obj->klass()->has_finalizer()) {
         1002 +     return;
         1003 +   }
         1004 + #endif // INCLUDE_JVMCI
   919   1005     assert(obj->klass()->has_finalizer(), "shouldn't be here otherwise");
   920   1006     InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
   921   1007   JRT_END
   922   1008   
   923   1009   
  1155   1241     Handle receiver = find_callee_info(thread, invoke_code,
  1156   1242                                        call_info, CHECK_(methodHandle()));
  1157   1243     methodHandle callee_method = call_info.selected_method();
  1158   1244   
  1159   1245     assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
         1246 +          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
  1160   1247            (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
  1161   1248            (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
  1162   1249            ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
  1163   1250   
  1164   1251     assert(caller_nm->is_alive(), "It should be alive");
  1365   1452     assert(callee_method->verified_code_entry() != NULL, " Jump to zero!");
  1366   1453     return callee_method->verified_code_entry();
  1367   1454   JRT_END
  1368   1455   
  1369   1456   
  1370        - 
  1371        - 
  1372        - 
  1373   1457   methodHandle SharedRuntime::handle_ic_miss_helper(JavaThread *thread, TRAPS) {
  1374   1458     ResourceMark rm(thread);
  1375   1459     CallInfo call_info;
  1376   1460     Bytecodes::Code bc;
  1377   1461   
  1491   1575             inline_cache->set_to_clean();
  1492   1576           }
  1493   1577         } else {
  1494   1578           // Either clean or megamorphic
  1495   1579         }
         1580 +     } else {
         1581 +       fatal("Unimplemented");
  1496   1582       }
  1497   1583     } // Release CompiledIC_lock
  1498   1584   
  1499   1585     return callee_method;
  1500   1586   }
  1517   1603     // so no update to the caller is needed.
  1518   1604   
  1519   1605     if (caller.is_compiled_frame() && !caller.is_deoptimized_frame()) {
  1520   1606   
  1521   1607       address pc = caller.pc();
         1608 + 
         1609 +     // Check for static or virtual call
         1610 +     bool is_static_call = false;
         1611 +     nmethod* caller_nm = CodeCache::find_nmethod(pc);
  1522   1612   
  1523   1613       // Default call_addr is the location of the "basic" call.
  1524   1614       // Determine the address of the call we a reresolving. With
  1525   1615       // Inline Caches we will always find a recognizable call.
  1526   1616       // With Inline Caches disabled we may or may not find a
  1547   1637       if (NativeCall::is_call_before(pc)) {
  1548   1638         NativeCall *ncall = nativeCall_before(pc);
  1549   1639         call_addr = ncall->instruction_address();
  1550   1640       }
  1551   1641     }
  1552        - 
  1553        -     // Check for static or virtual call
  1554        -     bool is_static_call = false;
  1555        -     nmethod* caller_nm = CodeCache::find_nmethod(pc);
  1556   1642       // Make sure nmethod doesn't get deoptimized and removed until
  1557   1643       // this is done with it.
  1558   1644       // CLEANUP - with lazy deopt shouldn't need this lock
  1559   1645       nmethodLocker nmlock(caller_nm);
  1560   1646   
  2565   2651   
  2566   2652     {
  2567   2653       // Perform the work while holding the lock, but perform any printing outside the lock
  2568   2654       MutexLocker mu(AdapterHandlerLibrary_lock);
  2569   2655       // See if somebody beat us to it
  2570        -     nm = method->code();
  2571        -     if (nm != NULL) {
         2656 +     if (method->code() != NULL) {
  2572   2657         return;
  2573   2658       }
  2574   2659   
  2575   2660       const int compile_id = CompileBroker::assign_compile_id(method, CompileBroker::standard_entry_bci);
  2576   2661       assert(compile_id > 0, "Must generate native wrapper");
  2808   2893   
  2809   2894   JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
  2810   2895     FREE_C_HEAP_ARRAY(intptr_t, buf);
  2811   2896   JRT_END
  2812   2897   
  2813        - bool AdapterHandlerLibrary::contains(CodeBlob* b) {
         2898 + bool AdapterHandlerLibrary::contains(const CodeBlob* b) {
  2814   2899     AdapterHandlerTableIterator iter(_adapters);
  2815   2900     while (iter.has_next()) {
  2816   2901       AdapterHandlerEntry* a = iter.next();
  2817   2902       if (b == CodeCache::find_blob(a->get_i2c_entry())) return true;
  2818   2903     }
  2819   2904     return false;
  2820   2905   }
  2821   2906   
  2822        - void AdapterHandlerLibrary::print_handler_on(outputStream* st, CodeBlob* b) {
         2907 + void AdapterHandlerLibrary::print_handler_on(outputStream* st, const CodeBlob* b) {
  2823   2908     AdapterHandlerTableIterator iter(_adapters);
  2824   2909     while (iter.has_next()) {
  2825   2910       AdapterHandlerEntry* a = iter.next();
  2826   2911       if (b == CodeCache::find_blob(a->get_i2c_entry())) {
  2827   2912         st->print("Adapter for signature: ");