@@ -2091,29 +2091,10 @@
                                         entry_oop_arraycopy,
                                         entry_jlong_arraycopy,
                                         entry_checkcast_arraycopy);
   }
 
-  void generate_math_stubs() {
-    {
-      StubCodeMark mark(this, "StubRoutines", "log10");
-      StubRoutines::_intrinsic_log10 = (double (*)(double)) __ pc();
-
-      __ fld_d(Address(rsp, 4));
-      __ flog10();
-      __ ret(0);
-    }
-    {
-      StubCodeMark mark(this, "StubRoutines", "tan");
-      StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc();
-
-      __ fld_d(Address(rsp, 4));
-      __ trigfunc('t');
-      __ ret(0);
-    }
-  }
-
   // AES intrinsic stubs
   enum {AESBlockSize = 16};
 
   address generate_key_shuffle_mask() {
     __ align(16);
@@ -3532,10 +3513,35 @@
 
     return start;
 
   }
 
+  address generate_libmLog10() {
+    address start = __ pc();
+
+    const XMMRegister x0 = xmm0;
+    const XMMRegister x1 = xmm1;
+    const XMMRegister x2 = xmm2;
+    const XMMRegister x3 = xmm3;
+
+    const XMMRegister x4 = xmm4;
+    const XMMRegister x5 = xmm5;
+    const XMMRegister x6 = xmm6;
+    const XMMRegister x7 = xmm7;
+
+    const Register tmp = rbx;
+
+    BLOCK_COMMENT("Entry:");
+    __ enter(); // required for proper stackwalking of RuntimeStub frame
+    __ fast_log10(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
+    __ leave(); // required for proper stackwalking of RuntimeStub frame
+    __ ret(0);
+
+    return start;
+
+  }
+
   address generate_libmPow() {
     address start = __ pc();
 
     const XMMRegister x0 = xmm0;
     const XMMRegister x1 = xmm1;
@@ -3621,10 +3627,48 @@
     const Register tmp = rbx;
 
     BLOCK_COMMENT("Entry:");
     __ enter(); // required for proper stackwalking of RuntimeStub frame
     __ fast_cos(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
+    __ leave(); // required for proper stackwalking of RuntimeStub frame
+    __ ret(0);
+
+    return start;
+
+  }
+
+  address generate_libm_tan_cot_huge() {
+    address start = __ pc();
+
+    const XMMRegister x0 = xmm0;
+    const XMMRegister x1 = xmm1;
+
+    BLOCK_COMMENT("Entry:");
+    __ libm_tancot_huge(x0, x1, rax, rcx, rdx, rbx, rsi, rdi, rbp, rsp);
+
+    return start;
+
+  }
+
+  address generate_libmTan() {
+    address start = __ pc();
+
+    const XMMRegister x0 = xmm0;
+    const XMMRegister x1 = xmm1;
+    const XMMRegister x2 = xmm2;
+    const XMMRegister x3 = xmm3;
+
+    const XMMRegister x4 = xmm4;
+    const XMMRegister x5 = xmm5;
+    const XMMRegister x6 = xmm6;
+    const XMMRegister x7 = xmm7;
+
+    const Register tmp = rbx;
+
+    BLOCK_COMMENT("Entry:");
+    __ enter(); // required for proper stackwalking of RuntimeStub frame
+    __ fast_tan(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     return start;
 
@@ -3851,27 +3895,28 @@
       bool supports_clmul = VM_Version::supports_clmul();
       StubRoutines::x86::generate_CRC32C_table(supports_clmul);
       StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
     }
-    if (VM_Version::supports_sse2()) {
+    if (VM_Version::supports_sse2() && UseLibmIntrinsic) {
+      StubRoutines::x86::_L_2il0floatpacket_0_adr = (address)StubRoutines::x86::_L_2il0floatpacket_0;
+      StubRoutines::x86::_Pi4Inv_adr = (address)StubRoutines::x86::_Pi4Inv;
+      StubRoutines::x86::_Pi4x3_adr = (address)StubRoutines::x86::_Pi4x3;
+      StubRoutines::x86::_Pi4x4_adr = (address)StubRoutines::x86::_Pi4x4;
+      StubRoutines::x86::_ones_adr = (address)StubRoutines::x86::_ones;
       StubRoutines::_dexp = generate_libmExp();
       StubRoutines::_dlog = generate_libmLog();
+      StubRoutines::_dlog10 = generate_libmLog10();
       StubRoutines::_dpow = generate_libmPow();
-      if (UseLibmSinIntrinsic || UseLibmCosIntrinsic) {
-        StubRoutines::_dlibm_reduce_pi04l = generate_libm_reduce_pi04l();
-        StubRoutines::_dlibm_sin_cos_huge = generate_libm_sin_cos_huge();
-      }
-      if (UseLibmSinIntrinsic) {
-        StubRoutines::_dsin = generate_libmSin();
-      }
-      if (UseLibmCosIntrinsic) {
-        StubRoutines::_dcos = generate_libmCos();
-      }
+      StubRoutines::_dlibm_reduce_pi04l = generate_libm_reduce_pi04l();
+      StubRoutines::_dlibm_sin_cos_huge = generate_libm_sin_cos_huge();
+      StubRoutines::_dsin = generate_libmSin();
+      StubRoutines::_dcos = generate_libmCos();
+      StubRoutines::_dlibm_tan_cot_huge = generate_libm_tan_cot_huge();
+      StubRoutines::_dtan = generate_libmTan();
     }
   }
 
-
   void generate_all() {
     // Generates all stubs and initializes the entry points
 
     // These entry points require SharedInfo::stack0 to be set up in non-core builds
@@ -3886,12 +3931,10 @@
     // support for verify_oop (must happen after universe_init)
     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
 
     // arraycopy stubs used by compilers
    generate_arraycopy_stubs();
-
-    generate_math_stubs();
 
     // don't bother generating these AES intrinsic stubs unless global flag is set
     if (UseAESIntrinsics) {
      StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask(); // might be needed by the others
 