hotspot/src/share/vm/runtime/sharedRuntime.cpp
changeset 25345 783763c99421
parent 25064 244218e6ec0a
child 25365 6db782823853
--- a/hotspot/src/share/vm/runtime/sharedRuntime.cpp	(25344:c9c8045983ec)
+++ b/hotspot/src/share/vm/runtime/sharedRuntime.cpp	(25345:783763c99421)
@@ -1174,14 +1174,11 @@
   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
 
-  // We do not patch the call site if the caller nmethod has been made non-entrant.
-  if (!caller_nm->is_in_use()) {
-    return callee_method;
-  }
+  assert(caller_nm->is_alive(), "It should be alive");
 
 #ifndef PRODUCT
   // tracing/debugging/statistics
   int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
                 (is_virtual) ? (&_resolve_virtual_ctr) :
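Both the deleted early return and the new assert turn on the nmethod state machine: is_in_use() accepts only fully entrant code, while is_alive() also accepts not_entrant code that still has activations on the stack. The stand-alone sketch below paraphrases that distinction; the enum and helper names are illustrative stand-ins, not the real nmethod.hpp declarations.

// Illustrative model only -- not the real nmethod class.
enum nmethod_state { in_use, not_entrant, zombie, unloaded };

static bool is_in_use(nmethod_state s) { return s == in_use; }
static bool is_alive (nmethod_state s) { return s == in_use || s == not_entrant; }

// The removed check refused to patch whenever the caller was merely
// not_entrant; the new assert only requires that the caller has not become
// a zombie or been unloaded while one of its frames is still executing.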
@@ -1247,17 +1244,15 @@
 
     // Lock blocks for safepoint during which both nmethods can change state.
 
     // Now that we are ready to patch if the Method* was redefined then
     // don't update call site and let the caller retry.
-    // Don't update call site if caller nmethod has been made non-entrant
-    // as it is a waste of time.
     // Don't update call site if callee nmethod was unloaded or deoptimized.
     // Don't update call site if callee nmethod was replaced by an other nmethod
     // which may happen when multiply alive nmethod (tiered compilation)
     // will be supported.
-    if (!callee_method->is_old() && caller_nm->is_in_use() &&
+    if (!callee_method->is_old() &&
         (callee_nm == NULL || callee_nm->is_in_use() && (callee_method->code() == callee_nm))) {
 #ifdef ASSERT
       // We must not try to patch to jump to an already unloaded method.
       if (dest_entry_point != 0) {
         CodeBlob* cb = CodeCache::find_blob(dest_entry_point);
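One readability note on the retained guard: in C++, && binds tighter than ||, so the second line means "no callee nmethod at all, or the callee nmethod is still in use and is still callee_method's current code". A stand-alone reading aid, with hypothetical boolean parameters standing in for the real checks:

// Reading aid only; parameter names are stand-ins, not proposed code.
static bool ok_to_patch(bool callee_is_old, bool have_callee_nm,
                        bool callee_nm_in_use, bool code_still_current) {
  return !callee_is_old &&
         (!have_callee_nm || (callee_nm_in_use && code_still_current));
}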
@@ -1452,18 +1447,16 @@
   // event can't be posted when the stub is created as locks are held
   // - instead the event will be deferred until the event collector goes
   // out of scope.
   JvmtiDynamicCodeEventCollector event_collector;
 
-  // Update inline cache to megamorphic. Skip update if caller has been
-  // made non-entrant or we are called from interpreted.
+  // Update inline cache to megamorphic. Skip update if we are called from interpreted.
   { MutexLocker ml_patch (CompiledIC_lock);
     RegisterMap reg_map(thread, false);
     frame caller_frame = thread->last_frame().sender(&reg_map);
     CodeBlob* cb = caller_frame.cb();
-    if (cb->is_nmethod() && ((nmethod*)cb)->is_in_use()) {
-      // Not a non-entrant nmethod, so find inline_cache
+    if (cb->is_nmethod()) {
       CompiledIC* inline_cache = CompiledIC_before(((nmethod*)cb), caller_frame.pc());
       bool should_be_mono = false;
       if (inline_cache->is_optimized()) {
         if (TraceCallFixup) {
           ResourceMark rm(thread);
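For orientation, this hunk sits in the inline-cache miss handler, which promotes a call site from clean to monomorphic and, once a second receiver type shows up, to megamorphic. The toy model below illustrates those transitions under assumed semantics; it is not HotSpot's CompiledIC implementation.

#include <cstddef>

// Toy inline-cache model: clean -> monomorphic on the first miss,
// megamorphic once a second receiver klass is observed.
struct InlineCacheModel {
  enum State { clean, monomorphic, megamorphic };
  State state;
  const void* cached_klass;

  InlineCacheModel() : state(clean), cached_klass(NULL) {}

  State on_miss(const void* receiver_klass) {
    if (state == clean) {
      cached_klass = receiver_klass;
      state = monomorphic;      // patch the site to this receiver's target
    } else if (state == monomorphic && cached_klass != receiver_klass) {
      state = megamorphic;      // fall back to a vtable/itable dispatch stub
    }
    return state;
  }
};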
@@ -1602,23 +1595,17 @@
       // leads to very hard to track down bugs, if an inline cache gets updated
       // to a wrong method). It should not be performance critical, since the
       // resolve is only done once.
 
       MutexLocker ml(CompiledIC_lock);
-      //
-      // We do not patch the call site if the nmethod has been made non-entrant
-      // as it is a waste of time
-      //
-      if (caller_nm->is_in_use()) {
-        if (is_static_call) {
-          CompiledStaticCall* ssc= compiledStaticCall_at(call_addr);
-          ssc->set_to_clean();
-        } else {
-          // compiled, dispatched call (which used to call an interpreted method)
-          CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
-          inline_cache->set_to_clean();
-        }
+      if (is_static_call) {
+        CompiledStaticCall* ssc= compiledStaticCall_at(call_addr);
+        ssc->set_to_clean();
+      } else {
+        // compiled, dispatched call (which used to call an interpreted method)
+        CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
+        inline_cache->set_to_clean();
+      }
       }
     }
 
   }
 
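The comment retained at the top of this last hunk explains the design: rather than patching the site directly to a new (possibly wrong) destination, the call site is set back to the clean state, so the next execution goes through the normal resolve path again, and that resolve only happens once. A minimal sketch of that idea, with invented names (CallSiteModel, resolver_trampoline) purely for illustration:

#include <cstdio>

typedef void (*entry_t)();

static void resolver_trampoline() { std::puts("re-resolve the call, then patch the site"); }
static void stale_target()        { std::puts("old destination"); }

// Toy call site: set_to_clean() points the site back at the resolver, so the
// very next invocation re-resolves instead of jumping to a stale destination.
struct CallSiteModel {
  entry_t destination;
  void set_to_clean() { destination = resolver_trampoline; }
};

int main() {
  CallSiteModel site = { stale_target };
  site.set_to_clean();
  site.destination();
  return 0;
}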