hotspot/src/cpu/sparc/vm/methodHandles_sparc.cpp
changeset 13881 a326d528f3e1
parent 13743 154102966e74
child 13968 8c9029c01470
--- a/hotspot/src/cpu/sparc/vm/methodHandles_sparc.cpp  13751:7fd047780d47
+++ b/hotspot/src/cpu/sparc/vm/methodHandles_sparc.cpp  13881:a326d528f3e1
@@ -119,10 +119,11 @@
 #endif // ASSERT
 
 void MethodHandles::jump_from_method_handle(MacroAssembler* _masm, Register method, Register target, Register temp,
                                             bool for_compiler_entry) {
   assert(method == G5_method, "interpreter calling convention");
+  assert_different_registers(method, target, temp);
 
   if (!for_compiler_entry && JvmtiExport::can_post_interpreter_events()) {
     Label run_compiled_code;
     // JVMTI events, such as single-stepping, are implemented partly by avoiding running
     // compiled code in threads for which the event is enabled.  Check here for
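
Context for the hunk above: the stub emitted by jump_from_method_handle amounts to choosing one of the Method* entry points and jumping there with the Method* still in G5_method. A minimal C++ sketch of that decision, assuming the usual Method and JavaThread accessors (from_compiled_entry, from_interpreted_entry, interpreter_entry, is_interp_only_mode); it is illustrative pseudocode for the control flow, not the emitted SPARC assembly, and the interp-only test is only generated when JvmtiExport::can_post_interpreter_events() is true.

  // Sketch only: the decision the generated code encodes ('thread' stands in for G2_thread).
  address entry;
  if (!for_compiler_entry && thread->is_interp_only_mode()) {
    // JVMTI single-stepping etc.: this thread must stay in the interpreter.
    entry = method->interpreter_entry();
  } else {
    entry = for_compiler_entry ? method->from_compiled_entry()
                               : method->from_interpreted_entry();
  }
  // ...followed by an unconditional jump to 'entry', with the Method* left in G5_method.
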
@@ -151,23 +152,23 @@
                                         Register temp2, Register temp3,
                                         bool for_compiler_entry) {
   BLOCK_COMMENT("jump_to_lambda_form {");
   // This is the initial entry point of a lazy method handle.
   // After type checking, it picks up the invoker from the LambdaForm.
-  assert_different_registers(recv, method_temp, temp2, temp3);
+  assert_different_registers(recv, method_temp, temp2);  // temp3 is only passed on
   assert(method_temp == G5_method, "required register for loading method");
 
   //NOT_PRODUCT({ FlagSetting fs(TraceMethodHandles, true); trace_method_handle(_masm, "LZMH"); });
 
   // Load the invoker, as MH -> MH.form -> LF.vmentry
   __ verify_oop(recv);
-  __ load_heap_oop(Address(recv,        NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())),       method_temp);
+  __ load_heap_oop(Address(recv,        NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())),   method_temp);
   __ verify_oop(method_temp);
-  __ load_heap_oop(Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())), method_temp);
+  __ load_heap_oop(Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())),  method_temp);
   __ verify_oop(method_temp);
   // the following assumes that a Method* is normally compressed in the vmtarget field:
-  __ ld_ptr(Address(method_temp, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes())),     method_temp);
+  __ ld_ptr(       Address(method_temp, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes())), method_temp);
 
   if (VerifyMethodHandles && !for_compiler_entry) {
     // make sure recv is already on stack
     __ load_sized_value(Address(method_temp, Method::size_of_parameters_offset()),
                         temp2,
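
The three loads above implement the chain MH -> MH.form -> LF.vmentry -> vmtarget. A hedged sketch of the same pointer chase in ordinary VM C++, using the javaClasses accessors instead of raw field offsets (the accessor names are an assumption of this note, not part of the change):

  // Sketch only: what the generated loads compute, given a non-null receiver oop.
  oop lform      = java_lang_invoke_MethodHandle::form(recv_oop);            // MH.form (a LambdaForm)
  oop vmentry    = java_lang_invoke_LambdaForm::vmentry(lform);              // LF.vmentry (a MemberName)
  Method* target = (Method*)java_lang_invoke_MemberName::vmtarget(vmentry);  // MemberName.vmtarget
  // 'target' is what ends up in G5_method before jump_from_method_handle(...).
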
@@ -301,29 +302,29 @@
                                                     vmIntrinsics::ID iid,
                                                     Register receiver_reg,
                                                     Register member_reg,
                                                     bool for_compiler_entry) {
   assert(is_signature_polymorphic(iid), "expected invoke iid");
-  // temps used in this code are not used in *either* compiled or interpreted calling sequences
   Register temp1 = (for_compiler_entry ? G1_scratch : O1);
-  Register temp2 = (for_compiler_entry ? G4_scratch : O4);
-  Register temp3 = G3_scratch;
-  Register temp4 = (for_compiler_entry ? noreg      : O2);
+  Register temp2 = (for_compiler_entry ? G3_scratch : O2);
+  Register temp3 = (for_compiler_entry ? G4_scratch : O3);
+  Register temp4 = (for_compiler_entry ? noreg      : O4);
   if (for_compiler_entry) {
     assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic ? noreg : O0), "only valid assignment");
-    assert_different_registers(temp1,      O0, O1, O2, O3, O4, O5);
-    assert_different_registers(temp2,      O0, O1, O2, O3, O4, O5);
-    assert_different_registers(temp3,      O0, O1, O2, O3, O4, O5);
-    assert_different_registers(temp4,      O0, O1, O2, O3, O4, O5);
+    assert_different_registers(temp1, O0, O1, O2, O3, O4, O5);
+    assert_different_registers(temp2, O0, O1, O2, O3, O4, O5);
+    assert_different_registers(temp3, O0, O1, O2, O3, O4, O5);
+    assert_different_registers(temp4, O0, O1, O2, O3, O4, O5);
+  } else {
+    assert_different_registers(temp1, temp2, temp3, temp4, O5_savedSP);  // don't trash lastSP
   }
   if (receiver_reg != noreg)  assert_different_registers(temp1, temp2, temp3, temp4, receiver_reg);
   if (member_reg   != noreg)  assert_different_registers(temp1, temp2, temp3, temp4, member_reg);
-  if (!for_compiler_entry)    assert_different_registers(temp1, temp2, temp3, temp4, O5_savedSP);  // don't trash lastSP
 
   if (iid == vmIntrinsics::_invokeBasic) {
     // indirect through MH.form.vmentry.vmtarget
-    jump_to_lambda_form(_masm, receiver_reg, G5_method, temp2, temp3, for_compiler_entry);
+    jump_to_lambda_form(_masm, receiver_reg, G5_method, temp1, temp2, for_compiler_entry);
 
   } else {
     // The method is a member invoker used by direct method handles.
     if (VerifyMethodHandles) {
       // make sure the trailing argument really is a MemberName (caller responsibility)
@@ -376,37 +377,35 @@
 
     // Live registers at this point:
     //  member_reg - MemberName that was the trailing argument
     //  temp1_recv_klass - klass of stacked receiver, if needed
     //  O5_savedSP - interpreter linkage (if interpreted)
-    //  O0..O7,G1,G4 - compiler arguments (if compiled)
+    //  O0..O5 - compiler arguments (if compiled)
 
-    bool method_is_live = false;
+    Label L_incompatible_class_change_error;
     switch (iid) {
     case vmIntrinsics::_linkToSpecial:
       if (VerifyMethodHandles) {
-        verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp3);
+        verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp2);
       }
       __ ld_ptr(member_vmtarget, G5_method);
-      method_is_live = true;
       break;
 
     case vmIntrinsics::_linkToStatic:
       if (VerifyMethodHandles) {
-        verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp3);
+        verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp2);
      }
       __ ld_ptr(member_vmtarget, G5_method);
-      method_is_live = true;
       break;
 
     case vmIntrinsics::_linkToVirtual:
     {
       // same as TemplateTable::invokevirtual,
       // minus the CP setup and profiling:
 
       if (VerifyMethodHandles) {
-        verify_ref_kind(_masm, JVM_REF_invokeVirtual, member_reg, temp3);
+        verify_ref_kind(_masm, JVM_REF_invokeVirtual, member_reg, temp2);
       }
 
       // pick out the vtable index from the MemberName, and then we can discard it:
       Register temp2_index = temp2;
       __ ld_ptr(member_vmindex, temp2_index);
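
For _linkToSpecial and _linkToStatic the MemberName already names the exact callee, so those two cases only load MemberName.vmtarget into G5_method. As a one-line hedged sketch (accessor name assumed from javaClasses; the cast mirrors the "compressed Method*" comment earlier):

  Method* target = (Method*)java_lang_invoke_MemberName::vmtarget(member_name);  // sketch only
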
@@ -421,26 +420,25 @@
       // Note:  The verifier invariants allow us to ignore MemberName.clazz and vmtarget
       // at this point.  And VerifyMethodHandles has already checked clazz, if needed.
 
       // get target Method* & entry point
       __ lookup_virtual_method(temp1_recv_klass, temp2_index, G5_method);
-      method_is_live = true;
       break;
     }
 
     case vmIntrinsics::_linkToInterface:
     {
       // same as TemplateTable::invokeinterface
       // (minus the CP setup and profiling, with different argument motion)
       if (VerifyMethodHandles) {
-        verify_ref_kind(_masm, JVM_REF_invokeInterface, member_reg, temp3);
+        verify_ref_kind(_masm, JVM_REF_invokeInterface, member_reg, temp2);
       }
 
-      Register temp3_intf = temp3;
-      __ load_heap_oop(member_clazz, temp3_intf);
-      load_klass_from_Class(_masm, temp3_intf, temp2, temp4);
-      __ verify_klass_ptr(temp3_intf);
+      Register temp2_intf = temp2;
+      __ load_heap_oop(member_clazz, temp2_intf);
+      load_klass_from_Class(_masm, temp2_intf, temp3, temp4);
+      __ verify_klass_ptr(temp2_intf);
 
       Register G5_index = G5_method;
       __ ld_ptr(member_vmindex, G5_index);
       if (VerifyMethodHandles) {
         Label L;
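
For _linkToVirtual, the vmindex read above is a vtable index and the receiver klass selects the implementation. A hedged sketch of the equivalent runtime lookup (vmindex and method_at_vtable are the usual HotSpot accessors; the local names are illustrative):

  // Sketch only: runtime equivalent of lookup_virtual_method on a vtable index.
  intptr_t vtable_index = java_lang_invoke_MemberName::vmindex(member_name);   // >= 0 for virtual dispatch
  Klass*   recv_klass   = receiver->klass();
  Method*  selected     = InstanceKlass::cast(recv_klass)->method_at_vtable((int)vtable_index);
  // 'selected' is what the stub leaves in G5_method before jumping to it.
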
@@ -448,41 +446,38 @@
         __ STOP("invalid vtable index for MH.invokeInterface");
         __ bind(L);
       }
 
       // given intf, index, and recv klass, dispatch to the implementation method
-      Label L_no_such_interface;
-      Register no_sethi_temp = noreg;
-      __ lookup_interface_method(temp1_recv_klass, temp3_intf,
+      __ lookup_interface_method(temp1_recv_klass, temp2_intf,
                                  // note: next two args must be the same:
                                  G5_index, G5_method,
-                                 temp2, no_sethi_temp,
-                                 L_no_such_interface);
-
-      __ verify_method_ptr(G5_method);
-      jump_from_method_handle(_masm, G5_method, temp2, temp3, for_compiler_entry);
-
-      __ bind(L_no_such_interface);
-      AddressLiteral icce(StubRoutines::throw_IncompatibleClassChangeError_entry());
-      __ jump_to(icce, temp3);
-      __ delayed()->nop();
+                                 temp3, temp4,
+                                 L_incompatible_class_change_error);
       break;
     }
 
     default:
-      fatal(err_msg("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
+      fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
       break;
     }
 
-    if (method_is_live) {
-      // live at this point:  G5_method, O5_savedSP (if interpreted)
+    // Live at this point:
+    //   G5_method
+    //   O5_savedSP (if interpreted)
 
-      // After figuring out which concrete method to call, jump into it.
-      // Note that this works in the interpreter with no data motion.
-      // But the compiled version will require that rcx_recv be shifted out.
-      __ verify_method_ptr(G5_method);
-      jump_from_method_handle(_masm, G5_method, temp1, temp3, for_compiler_entry);
+    // After figuring out which concrete method to call, jump into it.
+    // Note that this works in the interpreter with no data motion.
+    // But the compiled version will require that rcx_recv be shifted out.
+    __ verify_method_ptr(G5_method);
+    jump_from_method_handle(_masm, G5_method, temp1, temp2, for_compiler_entry);
+
+    if (iid == vmIntrinsics::_linkToInterface) {
+      __ BIND(L_incompatible_class_change_error);
+      AddressLiteral icce(StubRoutines::throw_IncompatibleClassChangeError_entry());
+      __ jump_to(icce, temp1);
+      __ delayed()->nop();
     }
   }
 }
 
 #ifndef PRODUCT
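
For _linkToInterface, the stub walks the receiver klass's itable for the MemberName's interface and index, and the rewritten fall-through above routes a missing implementation to the shared L_incompatible_class_change_error stub. A hedged sketch of the same resolution in runtime C++ (clazz, vmindex and method_at_itable are the usual accessors; the function and variable names here are illustrative only):

  // Sketch only: runtime equivalent of the emitted itable dispatch.
  static Method* resolve_link_to_interface(oop receiver, oop member_name, TRAPS) {
    Klass* intf  = java_lang_Class::as_Klass(java_lang_invoke_MemberName::clazz(member_name));
    int    index = (int)java_lang_invoke_MemberName::vmindex(member_name);
    // method_at_itable raises IncompatibleClassChangeError when the receiver's class
    // does not implement 'intf'; the generated code instead jumps to
    // StubRoutines::throw_IncompatibleClassChangeError_entry().
    return InstanceKlass::cast(receiver->klass())->method_at_itable(intf, index, THREAD);
  }
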