hotspot/src/share/vm/runtime/sharedRuntime.cpp
changeset 38133 78b95467b9f1
parent 37439 e8970711113b
child 38174 f611c50b8703
--- a/hotspot/src/share/vm/runtime/sharedRuntime.cpp
+++ b/hotspot/src/share/vm/runtime/sharedRuntime.cpp
@@ -538,26 +538,26 @@
   address stub;
   // Look up the code blob
   CodeBlob *cb = CodeCache::find_blob(pc);
 
   // Should be an nmethod
-  assert(cb && cb->is_nmethod(), "safepoint polling: pc must refer to an nmethod");
+  assert(cb && cb->is_compiled(), "safepoint polling: pc must refer to an nmethod");
 
   // Look up the relocation information
-  assert(((nmethod*)cb)->is_at_poll_or_poll_return(pc),
+  assert(((CompiledMethod*)cb)->is_at_poll_or_poll_return(pc),
     "safepoint polling: type must be poll");
 
 #ifdef ASSERT
   if (!((NativeInstruction*)pc)->is_safepoint_poll()) {
     tty->print_cr("bad pc: " PTR_FORMAT, p2i(pc));
     Disassembler::decode(cb);
     fatal("Only polling locations are used for safepoint");
   }
 #endif
 
-  bool at_poll_return = ((nmethod*)cb)->is_at_poll_return(pc);
-  bool has_wide_vectors = ((nmethod*)cb)->has_wide_vectors();
+  bool at_poll_return = ((CompiledMethod*)cb)->is_at_poll_return(pc);
+  bool has_wide_vectors = ((CompiledMethod*)cb)->has_wide_vectors();
   if (at_poll_return) {
     assert(SharedRuntime::polling_page_return_handler_blob() != NULL,
            "polling page return stub not created yet");
     stub = SharedRuntime::polling_page_return_handler_blob()->entry_point();
   } else if (has_wide_vectors) {
@@ -628,26 +628,26 @@
   return 0;
 JRT_END
 
 // ret_pc points into caller; we are returning caller's exception handler
 // for given exception
-address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
+address SharedRuntime::compute_compiled_exc_handler(CompiledMethod* cm, address ret_pc, Handle& exception,
                                                     bool force_unwind, bool top_frame_only) {
-  assert(nm != NULL, "must exist");
+  assert(cm != NULL, "must exist");
   ResourceMark rm;
 
 #if INCLUDE_JVMCI
-  if (nm->is_compiled_by_jvmci()) {
+  if (cm->is_compiled_by_jvmci()) {
     // lookup exception handler for this pc
-    int catch_pco = ret_pc - nm->code_begin();
-    ExceptionHandlerTable table(nm);
+    int catch_pco = ret_pc - cm->code_begin();
+    ExceptionHandlerTable table(cm);
     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
     if (t != NULL) {
-      return nm->code_begin() + t->pco();
+      return cm->code_begin() + t->pco();
     } else {
       // there is no exception handler for this pc => deoptimize
-      nm->make_not_entrant();
+      cm->make_not_entrant();
 
       // Use Deoptimization::deoptimize for all of its side-effects:
       // revoking biases of monitors, gathering traps statistics, logging...
       // it also patches the return pc but we do not care about that
       // since we return a continuation to the deopt_blob below.
@@ -660,10 +660,11 @@
       return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
     }
   }
 #endif // INCLUDE_JVMCI
 
+  nmethod* nm = cm->as_nmethod();
   ScopeDesc* sd = nm->scope_desc_at(ret_pc);
   // determine handler bci, if any
   EXCEPTION_MARK;
 
   int handler_bci = -1;
@@ -795,11 +796,11 @@
   Atomic::inc(&Exceptions::_stack_overflow_errors);
   throw_and_post_jvmti_exception(thread, exception);
 }
 
 #if INCLUDE_JVMCI
-address SharedRuntime::deoptimize_for_implicit_exception(JavaThread* thread, address pc, nmethod* nm, int deopt_reason) {
+address SharedRuntime::deoptimize_for_implicit_exception(JavaThread* thread, address pc, CompiledMethod* nm, int deopt_reason) {
   assert(deopt_reason > Deoptimization::Reason_none && deopt_reason < Deoptimization::Reason_LIMIT, "invalid deopt reason");
   thread->set_jvmci_implicit_exception_pc(pc);
   thread->set_pending_deoptimization(Deoptimization::make_trap_request((Deoptimization::DeoptReason)deopt_reason, Deoptimization::Action_reinterpret));
   return (SharedRuntime::deopt_blob()->implicit_exception_uncommon_trap());
 }
@@ -869,51 +870,51 @@
           // Exception happened in CodeCache. Must be either:
           // 1. Inline-cache check in C2I handler blob,
           // 2. Inline-cache check in nmethod, or
           // 3. Implicit null exception in nmethod
 
-          if (!cb->is_nmethod()) {
+          if (!cb->is_compiled()) {
             bool is_in_blob = cb->is_adapter_blob() || cb->is_method_handles_adapter_blob();
             if (!is_in_blob) {
               // Allow normal crash reporting to handle this
               return NULL;
             }
             Events::log_exception(thread, "NullPointerException in code blob at " INTPTR_FORMAT, p2i(pc));
             // There is no handler here, so we will simply unwind.
             return StubRoutines::throw_NullPointerException_at_call_entry();
           }
 
-          // Otherwise, it's an nmethod.  Consult its exception handlers.
-          nmethod* nm = (nmethod*)cb;
-          if (nm->inlinecache_check_contains(pc)) {
+          // Otherwise, it's a compiled method.  Consult its exception handlers.
+          CompiledMethod* cm = (CompiledMethod*)cb;
+          if (cm->inlinecache_check_contains(pc)) {
             // exception happened inside inline-cache check code
             // => the nmethod is not yet active (i.e., the frame
             // is not set up yet) => use return address pushed by
             // caller => don't push another return address
             Events::log_exception(thread, "NullPointerException in IC check " INTPTR_FORMAT, p2i(pc));
             return StubRoutines::throw_NullPointerException_at_call_entry();
           }
 
-          if (nm->method()->is_method_handle_intrinsic()) {
+          if (cm->method()->is_method_handle_intrinsic()) {
             // exception happened inside MH dispatch code, similar to a vtable stub
             Events::log_exception(thread, "NullPointerException in MH adapter " INTPTR_FORMAT, p2i(pc));
             return StubRoutines::throw_NullPointerException_at_call_entry();
           }
 
 #ifndef PRODUCT
           _implicit_null_throws++;
 #endif
 #if INCLUDE_JVMCI
-          if (nm->is_compiled_by_jvmci() && nm->pc_desc_at(pc) != NULL) {
+          if (cm->is_compiled_by_jvmci() && cm->pc_desc_at(pc) != NULL) {
             // If there's no PcDesc then we'll die way down inside of
             // deopt instead of just getting normal error reporting,
             // so only go there if it will succeed.
-            return deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_null_check);
+            return deoptimize_for_implicit_exception(thread, pc, cm, Deoptimization::Reason_null_check);
           } else {
 #endif // INCLUDE_JVMCI
-          assert (nm->is_nmethod(), "Expect nmethod");
-          target_pc = nm->continuation_for_implicit_exception(pc);
+          assert (cm->is_nmethod(), "Expect nmethod");
+          target_pc = ((nmethod*)cm)->continuation_for_implicit_exception(pc);
 #if INCLUDE_JVMCI
           }
 #endif // INCLUDE_JVMCI
           // If there's an unexpected fault, target_pc might be NULL,
           // in which case we want to fall through into the normal
@@ -923,21 +924,21 @@
         break; // fall through
       }
 
 
       case IMPLICIT_DIVIDE_BY_ZERO: {
-        nmethod* nm = CodeCache::find_nmethod(pc);
-        guarantee(nm != NULL, "must have containing compiled method for implicit division-by-zero exceptions");
+        CompiledMethod* cm = CodeCache::find_compiled(pc);
+        guarantee(cm != NULL, "must have containing compiled method for implicit division-by-zero exceptions");
 #ifndef PRODUCT
         _implicit_div0_throws++;
 #endif
 #if INCLUDE_JVMCI
-        if (nm->is_compiled_by_jvmci() && nm->pc_desc_at(pc) != NULL) {
-          return deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_div0_check);
+        if (cm->is_compiled_by_jvmci() && cm->pc_desc_at(pc) != NULL) {
+          return deoptimize_for_implicit_exception(thread, pc, cm, Deoptimization::Reason_div0_check);
         } else {
 #endif // INCLUDE_JVMCI
-        target_pc = nm->continuation_for_implicit_exception(pc);
+        target_pc = cm->continuation_for_implicit_exception(pc);
 #if INCLUDE_JVMCI
         }
 #endif // INCLUDE_JVMCI
         // If there's an unexpected fault, target_pc might be NULL,
         // in which case we want to fall through into the normal
@@ -1082,18 +1083,18 @@
 
   return find_callee_info_helper(thread, vfst, bc, callinfo, THREAD);
 }
 
 methodHandle SharedRuntime::extract_attached_method(vframeStream& vfst) {
-  nmethod* caller_nm = vfst.nm();
+  CompiledMethod* caller = vfst.nm();
 
-  nmethodLocker caller_lock(caller_nm);
+  nmethodLocker caller_lock(caller);
 
   address pc = vfst.frame_pc();
   { // Get call instruction under lock because another thread may be busy patching it.
     MutexLockerEx ml_patch(Patching_lock, Mutex::_no_safepoint_check_flag);
-    return caller_nm->attached_method_before_pc(pc);
+    return caller->attached_method_before_pc(pc);
   }
   return NULL;
 }
 
 // Finds receiver, CallInfo (i.e. receiver method), and calling bytecode
@@ -1281,12 +1282,12 @@
   ResourceMark rm(thread);
   RegisterMap cbl_map(thread, false);
   frame caller_frame = thread->last_frame().sender(&cbl_map);
 
   CodeBlob* caller_cb = caller_frame.cb();
-  guarantee(caller_cb != NULL && caller_cb->is_nmethod(), "must be called from nmethod");
-  nmethod* caller_nm = caller_cb->as_nmethod_or_null();
+  guarantee(caller_cb != NULL && caller_cb->is_compiled(), "must be called from compiled method");
+  CompiledMethod* caller_nm = caller_cb->as_compiled_method_or_null();
 
   // make sure caller is not getting deoptimized
   // and removed before we are done with it.
   // CLEANUP - with lazy deopt shouldn't need this lock
   nmethodLocker caller_lock(caller_nm);
@@ -1345,18 +1346,23 @@
   StaticCallInfo static_call_info;
   CompiledICInfo virtual_call_info;
 
   // Make sure the callee nmethod does not get deoptimized and removed before
   // we are done patching the code.
-  nmethod* callee_nm = callee_method->code();
-  if (callee_nm != NULL && !callee_nm->is_in_use()) {
+  CompiledMethod* callee = callee_method->code();
+
+  if (callee != NULL) {
+    assert(callee->is_compiled(), "must be nmethod for patching");
+  }
+
+  if (callee != NULL && !callee->is_in_use()) {
     // Patch call site to C2I adapter if callee nmethod is deoptimized or unloaded.
-    callee_nm = NULL;
+    callee = NULL;
   }
-  nmethodLocker nl_callee(callee_nm);
+  nmethodLocker nl_callee(callee);
 #ifdef ASSERT
-  address dest_entry_point = callee_nm == NULL ? 0 : callee_nm->entry_point(); // used below
+  address dest_entry_point = callee == NULL ? 0 : callee->entry_point(); // used below
 #endif
 
   if (is_virtual) {
     assert(receiver.not_null() || invoke_code == Bytecodes::_invokehandle, "sanity check");
     bool static_bound = call_info.resolved_method()->can_be_statically_bound();
@@ -1380,16 +1386,16 @@
     // Don't update call site if callee nmethod was unloaded or deoptimized.
     // Don't update call site if callee nmethod was replaced by an other nmethod
     // which may happen when multiply alive nmethod (tiered compilation)
     // will be supported.
     if (!callee_method->is_old() &&
-        (callee_nm == NULL || callee_nm->is_in_use() && (callee_method->code() == callee_nm))) {
+        (callee == NULL || callee->is_in_use() && (callee_method->code() == callee))) {
 #ifdef ASSERT
       // We must not try to patch to jump to an already unloaded method.
       if (dest_entry_point != 0) {
         CodeBlob* cb = CodeCache::find_blob(dest_entry_point);
-        assert((cb != NULL) && cb->is_nmethod() && (((nmethod*)cb) == callee_nm),
+        assert((cb != NULL) && cb->is_compiled() && (((CompiledMethod*)cb) == callee),
                "should not call unloaded nmethod");
       }
 #endif
       if (is_virtual) {
         CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
@@ -1580,12 +1586,13 @@
   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
   { MutexLocker ml_patch (CompiledIC_lock);
     RegisterMap reg_map(thread, false);
     frame caller_frame = thread->last_frame().sender(&reg_map);
     CodeBlob* cb = caller_frame.cb();
-    if (cb->is_nmethod()) {
-      CompiledIC* inline_cache = CompiledIC_before(((nmethod*)cb), caller_frame.pc());
+    CompiledMethod* caller_nm = cb->as_compiled_method_or_null();
+    if (cb->is_compiled()) {
+      CompiledIC* inline_cache = CompiledIC_before(((CompiledMethod*)cb), caller_frame.pc());
       bool should_be_mono = false;
       if (inline_cache->is_optimized()) {
         if (TraceCallFixup) {
           ResourceMark rm(thread);
           tty->print("OPTIMIZED IC miss (%s) call to", Bytecodes::name(bc));
@@ -1665,11 +1672,11 @@
 
     address pc = caller.pc();
 
     // Check for static or virtual call
     bool is_static_call = false;
-    nmethod* caller_nm = CodeCache::find_nmethod(pc);
+    CompiledMethod* caller_nm = CodeCache::find_compiled(pc);
 
     // Default call_addr is the location of the "basic" call.
     // Determine the address of the call we a reresolving. With
     // Inline Caches we will always find a recognizable call.
    // With Inline Caches disabled we may or may not find a
@@ -1800,16 +1807,16 @@
   // we did we'd leap into space because the callsite needs to use
   // "to interpreter" stub in order to load up the Method*. Don't
   // ask me how I know this...
 
   CodeBlob* cb = CodeCache::find_blob(caller_pc);
-  if (!cb->is_nmethod() || entry_point == moop->get_c2i_entry()) {
+  if (!cb->is_compiled() || entry_point == moop->get_c2i_entry()) {
     return;
   }
 
   // The check above makes sure this is a nmethod.
-  nmethod* nm = cb->as_nmethod_or_null();
+  CompiledMethod* nm = cb->as_compiled_method_or_null();
   assert(nm, "must be");
 
   // Get the return PC for the passed caller PC.
   address return_pc = caller_pc + frame::pc_return_offset;
 