hotspot/src/share/vm/interpreter/rewriter.cpp
changeset 9971 d496ecd7b9de
parent 9116 9bc44be338d6
child 13391 30245956af37
--- 9954:9a04e9b8518b
+++ 9971:d496ecd7b9de
@@ -61,10 +61,19 @@
             "all cp cache indexes fit in a u2");
 
   _have_invoke_dynamic = ((tag_mask & (1 << JVM_CONSTANT_InvokeDynamic)) != 0);
 }
 
+// Unrewrite the bytecodes if an error occurs.
+void Rewriter::restore_bytecodes() {
+  int len = _methods->length();
+
+  for (int i = len-1; i >= 0; i--) {
+    methodOop method = (methodOop)_methods->obj_at(i);
+    scan_method(method, true);
+  }
+}
 
 // Creates a constant pool cache given a CPC map
 void Rewriter::make_constant_pool_cache(TRAPS) {
   const int length = _cp_cache_map.length();
   constantPoolCacheOop cache =
@@ -131,61 +140,98 @@
   }
 }
 
 
 // Rewrite a classfile-order CP index into a native-order CPC index.
-void Rewriter::rewrite_member_reference(address bcp, int offset) {
+void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
   address p = bcp + offset;
-  int  cp_index    = Bytes::get_Java_u2(p);
-  int  cache_index = cp_entry_to_cp_cache(cp_index);
-  Bytes::put_native_u2(p, cache_index);
-}
-
-
-void Rewriter::rewrite_invokedynamic(address bcp, int offset) {
+  if (!reverse) {
+    int  cp_index    = Bytes::get_Java_u2(p);
+    int  cache_index = cp_entry_to_cp_cache(cp_index);
+    Bytes::put_native_u2(p, cache_index);
+  } else {
+    int cache_index = Bytes::get_native_u2(p);
+    int pool_index = cp_cache_entry_pool_index(cache_index);
+    Bytes::put_Java_u2(p, pool_index);
+  }
+}
+
+
+void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
   address p = bcp + offset;
-  assert(p[-1] == Bytecodes::_invokedynamic, "");
-  int cp_index = Bytes::get_Java_u2(p);
-  int cpc  = maybe_add_cp_cache_entry(cp_index);  // add lazily
-  int cpc2 = add_secondary_cp_cache_entry(cpc);
-
-  // Replace the trailing four bytes with a CPC index for the dynamic
-  // call site.  Unlike other CPC entries, there is one per bytecode,
-  // not just one per distinct CP entry.  In other words, the
-  // CPC-to-CP relation is many-to-one for invokedynamic entries.
-  // This means we must use a larger index size than u2 to address
-  // all these entries.  That is the main reason invokedynamic
-  // must have a five-byte instruction format.  (Of course, other JVM
-  // implementations can use the bytes for other purposes.)
-  Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2));
-  // Note: We use native_u4 format exclusively for 4-byte indexes.
+  assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
+  if (!reverse) {
+    int cp_index = Bytes::get_Java_u2(p);
+    int cpc  = maybe_add_cp_cache_entry(cp_index);  // add lazily
+    int cpc2 = add_secondary_cp_cache_entry(cpc);
+
+    // Replace the trailing four bytes with a CPC index for the dynamic
+    // call site.  Unlike other CPC entries, there is one per bytecode,
+    // not just one per distinct CP entry.  In other words, the
+    // CPC-to-CP relation is many-to-one for invokedynamic entries.
+    // This means we must use a larger index size than u2 to address
+    // all these entries.  That is the main reason invokedynamic
+    // must have a five-byte instruction format.  (Of course, other JVM
+    // implementations can use the bytes for other purposes.)
+    Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2));
+    // Note: We use native_u4 format exclusively for 4-byte indexes.
+  } else {
+    int cache_index = constantPoolCacheOopDesc::decode_secondary_index(
+                        Bytes::get_native_u4(p));
+    int secondary_index = cp_cache_secondary_entry_main_index(cache_index);
+    int pool_index = cp_cache_entry_pool_index(secondary_index);
+    assert(_pool->tag_at(pool_index).is_invoke_dynamic(), "wrong index");
+    // zero out 4 bytes
+    Bytes::put_Java_u4(p, 0);
+    Bytes::put_Java_u2(p, pool_index);
+  }
 }
 
 
 // Rewrite some ldc bytecodes to _fast_aldc
-void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide) {
-  assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "");
-  address p = bcp + offset;
-  int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
-  constantTag tag = _pool->tag_at(cp_index).value();
-  if (tag.is_method_handle() || tag.is_method_type()) {
-    int cache_index = cp_entry_to_cp_cache(cp_index);
-    if (is_wide) {
-      (*bcp) = Bytecodes::_fast_aldc_w;
-      assert(cache_index == (u2)cache_index, "");
-      Bytes::put_native_u2(p, cache_index);
-    } else {
-      (*bcp) = Bytecodes::_fast_aldc;
-      assert(cache_index == (u1)cache_index, "");
-      (*p) = (u1)cache_index;
+void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
+                                 bool reverse) {
+  if (!reverse) {
+    assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
+    address p = bcp + offset;
+    int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
+    constantTag tag = _pool->tag_at(cp_index).value();
+    if (tag.is_method_handle() || tag.is_method_type()) {
+      int cache_index = cp_entry_to_cp_cache(cp_index);
+      if (is_wide) {
+        (*bcp) = Bytecodes::_fast_aldc_w;
+        assert(cache_index == (u2)cache_index, "index overflow");
+        Bytes::put_native_u2(p, cache_index);
+      } else {
+        (*bcp) = Bytecodes::_fast_aldc;
+        assert(cache_index == (u1)cache_index, "index overflow");
+        (*p) = (u1)cache_index;
+      }
+    }
+  } else {
+    Bytecodes::Code rewritten_bc =
+              (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
+    if ((*bcp) == rewritten_bc) {
+      address p = bcp + offset;
+      int cache_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
+      int pool_index = cp_cache_entry_pool_index(cache_index);
+      if (is_wide) {
+        (*bcp) = Bytecodes::_ldc_w;
+        assert(pool_index == (u2)pool_index, "index overflow");
+        Bytes::put_Java_u2(p, pool_index);
+      } else {
+        (*bcp) = Bytecodes::_ldc;
+        assert(pool_index == (u1)pool_index, "index overflow");
+        (*p) = (u1)pool_index;
+      }
     }
   }
 }
 
 
 // Rewrites a method given the index_map information
-void Rewriter::scan_method(methodOop method) {
+void Rewriter::scan_method(methodOop method, bool reverse) {
 
   int nof_jsrs = 0;
   bool has_monitor_bytecodes = false;
 
   {
@@ -234,28 +280,37 @@
             : Bytecodes::_fast_binaryswitch
           );
 #endif
           break;
         }
+        case Bytecodes::_fast_linearswitch:
+        case Bytecodes::_fast_binaryswitch: {
+#ifndef CC_INTERP
+          (*bcp) = Bytecodes::_lookupswitch;
+#endif
+          break;
+        }
         case Bytecodes::_getstatic      : // fall through
         case Bytecodes::_putstatic      : // fall through
         case Bytecodes::_getfield       : // fall through
        case Bytecodes::_putfield       : // fall through
         case Bytecodes::_invokevirtual  : // fall through
         case Bytecodes::_invokespecial  : // fall through
         case Bytecodes::_invokestatic   :
         case Bytecodes::_invokeinterface:
-          rewrite_member_reference(bcp, prefix_length+1);
+          rewrite_member_reference(bcp, prefix_length+1, reverse);
           break;
         case Bytecodes::_invokedynamic:
-          rewrite_invokedynamic(bcp, prefix_length+1);
+          rewrite_invokedynamic(bcp, prefix_length+1, reverse);
          break;
         case Bytecodes::_ldc:
-          maybe_rewrite_ldc(bcp, prefix_length+1, false);
+        case Bytecodes::_fast_aldc:
+          maybe_rewrite_ldc(bcp, prefix_length+1, false, reverse);
           break;
         case Bytecodes::_ldc_w:
-          maybe_rewrite_ldc(bcp, prefix_length+1, true);
+        case Bytecodes::_fast_aldc_w:
+          maybe_rewrite_ldc(bcp, prefix_length+1, true, reverse);
           break;
         case Bytecodes::_jsr            : // fall through
         case Bytecodes::_jsr_w          : nof_jsrs++;                   break;
         case Bytecodes::_monitorenter   : // fall through
         case Bytecodes::_monitorexit    : has_monitor_bytecodes = true; break;
@@ -271,16 +326,17 @@
   // The present of a jsr bytecode implies that the method might potentially
   // have to be rewritten, so we run the oopMapGenerator on the method
   if (nof_jsrs > 0) {
     method->set_has_jsrs();
     // Second pass will revisit this method.
-    assert(method->has_jsrs(), "");
+    assert(method->has_jsrs(), "didn't we just set this?");
   }
 }
 
 // After constant pool is created, revisit methods containing jsrs.
 methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) {
+  ResourceMark rm(THREAD);
   ResolveOopMapConflicts romc(method);
   methodHandle original_method = method;
   method = romc.do_potential_rewrite(CHECK_(methodHandle()));
   if (method() != original_method()) {
     // Insert invalid bytecode into original methodOop and set
@@ -297,11 +353,10 @@
     method->set_guaranteed_monitor_matching();
   }
 
   return method;
 }
-
 
 void Rewriter::rewrite(instanceKlassHandle klass, TRAPS) {
   ResourceMark rm(THREAD);
   Rewriter     rw(klass, klass->constants(), klass->methods(), CHECK);
   // (That's all, folks.)
@@ -341,38 +396,61 @@
     }
     assert(did_rewrite, "must find Object::<init> to rewrite it");
   }
 
   // rewrite methods, in two passes
-  int i, len = _methods->length();
+  int len = _methods->length();
 
-  for (i = len; --i >= 0; ) {
+  for (int i = len-1; i >= 0; i--) {
     methodOop method = (methodOop)_methods->obj_at(i);
     scan_method(method);
   }
 
   // allocate constant pool cache, now that we've seen all the bytecodes
-  make_constant_pool_cache(CHECK);
+  make_constant_pool_cache(THREAD);
 
-  for (i = len; --i >= 0; ) {
-    methodHandle m(THREAD, (methodOop)_methods->obj_at(i));
+  // Restore bytecodes to their unrewritten state if there are exceptions
+  // rewriting bytecodes or allocating the cpCache
+  if (HAS_PENDING_EXCEPTION) {
+    restore_bytecodes();
+    return;
+  }
+}
+
+// Relocate jsr/rets in a method.  This can't be done with the rewriter
+// stage because it can throw other exceptions, leaving the bytecodes
+// pointing at constant pool cache entries.
+// Link and check jvmti dependencies while we're iterating over the methods.
+// JSR292 code calls with a different set of methods, so two entry points.
+void Rewriter::relocate_and_link(instanceKlassHandle this_oop, TRAPS) {
+  objArrayHandle methods(THREAD, this_oop->methods());
+  relocate_and_link(this_oop, methods, THREAD);
+}
+
+void Rewriter::relocate_and_link(instanceKlassHandle this_oop,
+                                 objArrayHandle methods, TRAPS) {
+  int len = methods->length();
+  for (int i = len-1; i >= 0; i--) {
+    methodHandle m(THREAD, (methodOop)methods->obj_at(i));
 
     if (m->has_jsrs()) {
       m = rewrite_jsrs(m, CHECK);
       // Method might have gotten rewritten.
-      _methods->obj_at_put(i, m());
+      methods->obj_at_put(i, m());
     }
 
-    // Set up method entry points for compiler and interpreter.
+    // Set up method entry points for compiler and interpreter    .
     m->link_method(m, CHECK);
 
+    // This is for JVMTI and unrelated to relocator but the last thing we do
 #ifdef ASSERT
     if (StressMethodComparator) {
       static int nmc = 0;
       for (int j = i; j >= 0 && j >= i-4; j--) {
         if ((++nmc % 1000) == 0)  tty->print_cr("Have run MethodComparator %d times...", nmc);
-        bool z = MethodComparator::methods_EMCP(m(), (methodOop)_methods->obj_at(j));
+        bool z = MethodComparator::methods_EMCP(m(),
+                   (methodOop)methods->obj_at(j));
         if (j == i && !z) {
           tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
           assert(z, "method must compare equal to itself");
         }
       }