diff -r 6061df52d610 -r 53ccc37bda19 hotspot/src/share/vm/c1/c1_Runtime1.cpp
--- a/hotspot/src/share/vm/c1/c1_Runtime1.cpp	Wed Mar 15 11:44:46 2017 +0100
+++ b/hotspot/src/share/vm/c1/c1_Runtime1.cpp	Wed Mar 15 10:25:37 2017 -0400
@@ -331,7 +331,7 @@
 
   assert(klass->is_klass(), "not a class");
   Handle holder(THREAD, klass->klass_holder()); // keep the klass alive
-  instanceKlassHandle h(thread, klass);
+  InstanceKlass* h = InstanceKlass::cast(klass);
   h->check_valid_for_instantiation(true, CHECK);
   // make sure klass is initialized
   h->initialize(CHECK);
@@ -857,8 +857,8 @@
   bool deoptimize_for_volatile = false;
   bool deoptimize_for_atomic = false;
   int patch_field_offset = -1;
-  KlassHandle init_klass(THREAD, NULL); // klass needed by load_klass_patching code
-  KlassHandle load_klass(THREAD, NULL); // klass needed by load_klass_patching code
+  Klass* init_klass = NULL; // klass needed by load_klass_patching code
+  Klass* load_klass = NULL; // klass needed by load_klass_patching code
   Handle mirror(THREAD, NULL);                    // oop needed by load_mirror_patching code
   Handle appendix(THREAD, NULL);                  // oop needed by appendix_patching code
   bool load_klass_or_mirror_patch_id =
@@ -905,7 +905,7 @@
         case Bytecodes::_putstatic:
         case Bytecodes::_getstatic:
           { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
-            init_klass = KlassHandle(THREAD, klass);
+            init_klass = klass;
             mirror = Handle(THREAD, klass->java_mirror());
           }
           break;
@@ -945,8 +945,7 @@
           break;
         default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
       }
-      // convert to handle
-      load_klass = KlassHandle(THREAD, k);
+      load_klass = k;
     } else if (stub_id == load_appendix_patching_id) {
       Bytecode_invoke bytecode(caller_method, bci);
       Bytecodes::Code bc = bytecode.invoke_code();
@@ -1067,7 +1066,7 @@
           // initializing thread are forced to come into the VM and
           // block.
           do_patch = (code != Bytecodes::_getstatic && code != Bytecodes::_putstatic) ||
-                     InstanceKlass::cast(init_klass())->is_initialized();
+                     InstanceKlass::cast(init_klass)->is_initialized();
           NativeGeneralJump* jump = nativeGeneralJump_at(instr_pc);
           if (jump->jump_destination() == being_initialized_entry) {
             assert(do_patch == true, "initialization must be complete at this point");
@@ -1079,8 +1078,8 @@
                    n_copy->data() == (intptr_t)Universe::non_oop_word(),
                    "illegal init value");
             if (stub_id == Runtime1::load_klass_patching_id) {
-              assert(load_klass() != NULL, "klass not set");
-              n_copy->set_data((intx) (load_klass()));
+              assert(load_klass != NULL, "klass not set");
+              n_copy->set_data((intx) (load_klass));
             } else {
               assert(mirror() != NULL, "klass not set");
               // Don't need a G1 pre-barrier here since we assert above that data isn't an oop.
@@ -1131,7 +1130,7 @@
              assert(stub_id == Runtime1::load_klass_patching_id, "wrong stub id");
              metadata_Relocation* r = mds.metadata_reloc();
              Metadata** metadata_adr = r->metadata_addr();
-             *metadata_adr = load_klass();
+             *metadata_adr = load_klass;
              r->fix_metadata_relocation();
              found = true;
            }
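
For context, a minimal sketch (not part of the changeset) of how the patched Runtime1::new_instance entry reads once instanceKlassHandle is gone. Klass and InstanceKlass are metaspace Metadata, so GC never moves them and a raw pointer is safe to hold across a safepoint; only the class-loader holder oop and the java_mirror still need Handles. The allocate_instance and set_vm_result lines are assumed from the surrounding file and are not shown in the hunks above.

// Sketch only -- illustrates the idiom the diff applies, not a verbatim copy
// of the patched file. JRT_ENTRY/JRT_END/CHECK are HotSpot macros already used
// in c1_Runtime1.cpp; the last two statements are assumed from surrounding code.
JRT_ENTRY(void, Runtime1::new_instance(JavaThread* thread, Klass* klass))
  assert(klass->is_klass(), "not a class");
  Handle holder(THREAD, klass->klass_holder());   // oop: still needs a Handle
  InstanceKlass* h = InstanceKlass::cast(klass);  // Metadata: raw pointer is safe
  h->check_valid_for_instantiation(true, CHECK);
  h->initialize(CHECK);                           // make sure klass is initialized
  oop obj = h->allocate_instance(CHECK);          // assumed from surrounding code
  thread->set_vm_result(obj);
JRT_END

The same unwrapping explains why init_klass() and load_klass() lose their parentheses at the use sites in the later hunks: with a plain Klass* there is no longer a KlassHandle whose operator() returns the underlying pointer.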