6981788: GC map generator sometimes picks up the wrong kind of instruction operand
Summary: Distinguish pool indexes from cache indexes in recently changed code.
Reviewed-by: never
--- a/hotspot/src/share/vm/oops/constantPoolOop.cpp Thu Oct 28 00:48:18 2010 -0700
+++ b/hotspot/src/share/vm/oops/constantPoolOop.cpp Sat Oct 30 11:45:35 2010 -0700
@@ -265,8 +265,7 @@
int i = which;
if (!uncached && cache() != NULL) {
if (constantPoolCacheOopDesc::is_secondary_index(which)) {
- // Invokedynamic indexes are always processed in native order
- // so there is no question of reading a native u2 in Java order here.
+ // Invokedynamic index.
int pool_index = cache()->main_entry_at(which)->constant_pool_index();
if (tag_at(pool_index).is_invoke_dynamic())
pool_index = invoke_dynamic_name_and_type_ref_index_at(pool_index);
--- a/hotspot/src/share/vm/oops/constantPoolOop.hpp Thu Oct 28 00:48:18 2010 -0700
+++ b/hotspot/src/share/vm/oops/constantPoolOop.hpp Sat Oct 30 11:45:35 2010 -0700
@@ -414,14 +414,19 @@
// The following methods (name/signature/klass_ref_at, klass_ref_at_noresolve,
// name_and_type_ref_index_at) all expect to be passed indices obtained
- // directly from the bytecode, and extracted according to java byte order.
+ // directly from the bytecode.
// If the indices are meant to refer to fields or methods, they are
- // actually potentially byte-swapped, rewritten constant pool cache indices.
+ // actually rewritten constant pool cache indices.
// The routine remap_instruction_operand_from_cache manages the adjustment
// of these values back to constant pool indices.
// There are also "uncached" versions which do not adjust the operand index; see below.
+ // FIXME: Consider renaming these with a prefix "cached_" to make the distinction clear.
+ // In a few cases (the verifier) there are uses before a cpcache has been built,
+ // which are handled by a dynamic check in remap_instruction_operand_from_cache.
+ // FIXME: Remove the dynamic check, and adjust all callers to specify the correct mode.
+
// Lookup for entries consisting of (klass_index, name_and_type index)
klassOop klass_ref_at(int which, TRAPS);
symbolOop klass_ref_at_noresolve(int which);
@@ -484,7 +489,7 @@
static klassOop klass_ref_at_if_loaded_check(constantPoolHandle this_oop, int which, TRAPS);
// Routines currently used for annotations (only called by jvm.cpp) but which might be used in the
- // future by other Java code. These take constant pool indices rather than possibly-byte-swapped
+ // future by other Java code. These take constant pool indices rather than
// constant pool cache indices as do the peer methods above.
symbolOop uncached_klass_ref_at_noresolve(int which);
symbolOop uncached_name_ref_at(int which) { return impl_name_ref_at(which, true); }
--- a/hotspot/src/share/vm/oops/generateOopMap.cpp Thu Oct 28 00:48:18 2010 -0700
+++ b/hotspot/src/share/vm/oops/generateOopMap.cpp Sat Oct 30 11:45:35 2010 -0700
@@ -1254,7 +1254,7 @@
case Bytecodes::_invokestatic:
case Bytecodes::_invokedynamic:
case Bytecodes::_invokeinterface:
- int idx = currentBC->has_index_u4() ? currentBC->get_index_u4() : currentBC->get_index_u2();
+ int idx = currentBC->has_index_u4() ? currentBC->get_index_u4() : currentBC->get_index_u2_cpcache();
constantPoolOop cp = method()->constants();
int nameAndTypeIdx = cp->name_and_type_ref_index_at(idx);
int signatureIdx = cp->signature_ref_index_at(nameAndTypeIdx);
@@ -1286,7 +1286,7 @@
case Bytecodes::_invokestatic:
case Bytecodes::_invokedynamic:
case Bytecodes::_invokeinterface:
- int idx = currentBC->has_index_u4() ? currentBC->get_index_u4() : currentBC->get_index_u2();
+ int idx = currentBC->has_index_u4() ? currentBC->get_index_u4() : currentBC->get_index_u2_cpcache();
constantPoolOop cp = method()->constants();
int nameAndTypeIdx = cp->name_and_type_ref_index_at(idx);
int signatureIdx = cp->signature_ref_index_at(nameAndTypeIdx);
@@ -1356,8 +1356,8 @@
case Bytecodes::_ldc2_w: ppush(vvCTS); break;
- case Bytecodes::_ldc: do_ldc(itr->get_index(), itr->bci()); break;
- case Bytecodes::_ldc_w: do_ldc(itr->get_index_u2(), itr->bci()); break;
+ case Bytecodes::_ldc: // fall through
+ case Bytecodes::_ldc_w: do_ldc(itr->bci()); break;
case Bytecodes::_iload:
case Bytecodes::_fload: ppload(vCTS, itr->get_index()); break;
@@ -1829,9 +1829,16 @@
-void GenerateOopMap::do_ldc(int idx, int bci) {
+void GenerateOopMap::do_ldc(int bci) {
+ Bytecode_loadconstant* ldc = Bytecode_loadconstant_at(method(), bci);
constantPoolOop cp = method()->constants();
- CellTypeState cts = cp->is_pointer_entry(idx) ? CellTypeState::make_line_ref(bci) : valCTS;
+ BasicType bt = ldc->result_type();
+ CellTypeState cts = (bt == T_OBJECT) ? CellTypeState::make_line_ref(bci) : valCTS;
+ // Make sure bt == T_OBJECT agrees with the old code's is_pointer_entry test.
+ // Note that CONSTANT_MethodHandle entries are u2 index pairs, not pointer-entries,
+ // and they are processed by _fast_aldc and the CP cache.
+ assert((ldc->has_cache_index() || cp->is_pointer_entry(ldc->pool_index()))
+ ? (bt == T_OBJECT) : true, "expected object type");
ppush1(cts);
}
--- a/hotspot/src/share/vm/oops/generateOopMap.hpp Thu Oct 28 00:48:18 2010 -0700
+++ b/hotspot/src/share/vm/oops/generateOopMap.hpp Sat Oct 30 11:45:35 2010 -0700
@@ -389,7 +389,7 @@
void pp (CellTypeState *in, CellTypeState *out);
void pp_new_ref (CellTypeState *in, int bci);
void ppdupswap (int poplen, const char *out);
- void do_ldc (int idx, int bci);
+ void do_ldc (int bci);
void do_astore (int idx);
void do_jsr (int delta);
void do_field (int is_get, int is_static, int idx, int bci);