--- a/hotspot/src/cpu/s390/vm/c1_LIRAssembler_s390.cpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/c1_LIRAssembler_s390.cpp Mon Nov 07 12:37:28 2016 +0100
@@ -1075,8 +1075,7 @@
{
if (UseCompressedOops && !wide) {
Register compressed_src = Z_R14;
- __ z_lgr(compressed_src, from->as_register());
- __ encode_heap_oop(compressed_src);
+ __ oop_encoder(compressed_src, from->as_register(), true, (disp_reg != Z_R1) ? Z_R1 : Z_R0, -1, true);
offset = code_offset();
if (short_disp) {
__ z_st(compressed_src, disp_value, disp_reg, dest);
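
The new oop_encoder call folds the register move and the heap-oop compression of the two replaced instructions into one sequence, passing a spare register (Z_R1 or Z_R0, whichever does not collide with disp_reg) for its internal use. The underlying transformation is the usual compressed-oop arithmetic; a minimal stand-alone sketch with illustrative constants (not the real runtime values or the HotSpot API):

    // Sketch of compressed-oop encoding; kHeapBase and kShift are assumptions.
    #include <cstdint>

    static const uint64_t kHeapBase = 0x0000000800000000ULL; // assumed narrow-oop base
    static const int      kShift    = 3;                     // assumed object alignment shift

    // Compress a 64-bit heap reference into 32 bits: subtract the base,
    // drop the alignment bits. NULL must stay NULL.
    inline uint32_t encode_heap_oop(uint64_t oop) {
      return (oop == 0) ? 0 : (uint32_t)((oop - kHeapBase) >> kShift);
    }

    // Decoding reverses the transformation.
    inline uint64_t decode_heap_oop(uint32_t narrow) {
      return (narrow == 0) ? 0 : (((uint64_t)narrow << kShift) + kHeapBase);
    }
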
--- a/hotspot/src/cpu/s390/vm/frame_s390.cpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/frame_s390.cpp Mon Nov 07 12:37:28 2016 +0100
@@ -156,7 +156,7 @@
}
own_abi()->return_pc = (uint64_t)pc;
_cb = CodeCache::find_blob(pc);
- address original_pc = nmethod::get_deopt_original_pc(this);
+ address original_pc = CompiledMethod::get_deopt_original_pc(this);
if (original_pc != NULL) {
assert(original_pc == _pc, "expected original to be stored before patching");
_deopt_state = is_deoptimized;
--- a/hotspot/src/cpu/s390/vm/frame_s390.inline.hpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/frame_s390.inline.hpp Mon Nov 07 12:37:28 2016 +0100
@@ -39,7 +39,7 @@
_fp = (intptr_t *) own_abi()->callers_sp;
- address original_pc = nmethod::get_deopt_original_pc(this);
+ address original_pc = CompiledMethod::get_deopt_original_pc(this);
if (original_pc != NULL) {
_pc = original_pc;
_deopt_state = is_deoptimized;
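
Both frame constructors use the same detection pattern: when a compiled frame has been deoptimized, its return pc was patched to point at the deoptimization handler, and the original pc was saved inside the frame so stack walking still sees the correct code location. A minimal sketch of that pattern, with illustrative stand-in types rather than the real CompiledMethod and frame classes:

    typedef unsigned char* address;

    struct CompiledCode {              // stand-in for a compiled method blob
      address deopt_handler_pc;        // pc the return address gets patched to
    };

    struct RawFrame {                  // stand-in for the frame being constructed
      address pc;                      // current (possibly patched) pc
      address saved_original_pc;       // stashed when the frame was deoptimized
    };

    // Returns the original pc if the frame currently points at the deopt handler,
    // NULL otherwise -- mirroring the NULL check in the constructors above.
    inline address get_deopt_original_pc(const CompiledCode* cm, const RawFrame* fr) {
      if (cm != nullptr && fr->pc == cm->deopt_handler_pc) {
        return fr->saved_original_pc;
      }
      return nullptr;
    }
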
--- a/hotspot/src/cpu/s390/vm/globals_s390.hpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/globals_s390.hpp Mon Nov 07 12:37:28 2016 +0100
@@ -92,9 +92,6 @@
product(bool, ReoptimizeCallSequences, true, \
"Reoptimize code-sequences of calls at runtime.") \
\
- product(bool, UseCountLeadingZerosInstruction, true, \
- "Use count leading zeros instruction.") \
- \
product(bool, UseByteReverseInstruction, true, \
"Use byte reverse instruction.") \
\
--- a/hotspot/src/cpu/s390/vm/macroAssembler_s390.hpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/macroAssembler_s390.hpp Mon Nov 07 12:37:28 2016 +0100
@@ -574,6 +574,7 @@
static int call_far_patchable_ret_addr_offset() { return call_far_patchable_size(); }
static bool call_far_patchable_requires_alignment_nop(address pc) {
+ if (!os::is_MP()) return false;
int size = call_far_patchable_size();
return ((intptr_t)(pc + size) & 0x03L) != 0;
}
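
The early return skips the alignment nop on uniprocessor systems, presumably because the call site can never be patched while another CPU is executing it there. The remaining check is plain address arithmetic; a small worked example under an assumed sequence size:

    // Worked example of the alignment test; call_far_patchable_size() is assumed
    // to be 12 bytes here, the real value depends on the emitted sequence.
    #include <cstdint>
    #include <cstdio>

    int main() {
      const intptr_t size = 12;       // assumed call_far_patchable_size()
      intptr_t pc = 0x1002;           // example emission address

      // A nop is needed when the end of the sequence is not 4-byte aligned.
      bool needs_nop = (((pc + size) & 0x03L) != 0);
      printf("pc=0x%lx end=0x%lx needs_nop=%d\n",
             (long)pc, (long)(pc + size), (int)needs_nop);  // 0x1002 + 12 = 0x100e -> true
      return 0;
    }
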
--- a/hotspot/src/cpu/s390/vm/nativeInst_s390.cpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/nativeInst_s390.cpp Mon Nov 07 12:37:28 2016 +0100
@@ -256,11 +256,7 @@
address NativeFarCall::destination() {
assert(MacroAssembler::is_call_far_patchable_at((address)this), "unexpected call type");
address ctable = NULL;
- if (MacroAssembler::call_far_patchable_requires_alignment_nop((address)this)) {
- return MacroAssembler::get_dest_of_call_far_patchable_at(((address)this)+MacroAssembler::nop_size(), ctable);
- } else {
- return MacroAssembler::get_dest_of_call_far_patchable_at((address)this, ctable);
- }
+ return MacroAssembler::get_dest_of_call_far_patchable_at((address)this, ctable);
}
@@ -610,20 +606,20 @@
unsigned long inst1;
Assembler::get_instruction(l2, &inst1);
- if (!Assembler::is_z_lb(inst1) &&
- !Assembler::is_z_llgh(inst1) &&
- !Assembler::is_z_lh(inst1) &&
- !Assembler::is_z_l(inst1) &&
- !Assembler::is_z_llgf(inst1) &&
- !Assembler::is_z_lg(inst1) &&
- !Assembler::is_z_le(inst1) &&
- !Assembler::is_z_ld(inst1) &&
- !Assembler::is_z_stc(inst1) &&
- !Assembler::is_z_sth(inst1) &&
- !Assembler::is_z_st(inst1) &&
- !(Assembler::is_z_lgr(inst1) && UseCompressedOops) &&
- !Assembler::is_z_stg(inst1) &&
- !Assembler::is_z_ste(inst1) &&
+ if (!Assembler::is_z_lb(inst1) &&
+ !Assembler::is_z_llgh(inst1) &&
+ !Assembler::is_z_lh(inst1) &&
+ !Assembler::is_z_l(inst1) &&
+ !Assembler::is_z_llgf(inst1) &&
+ !Assembler::is_z_lg(inst1) &&
+ !Assembler::is_z_le(inst1) &&
+ !Assembler::is_z_ld(inst1) &&
+ !Assembler::is_z_stc(inst1) &&
+ !Assembler::is_z_sth(inst1) &&
+ !Assembler::is_z_st(inst1) &&
+ !UseCompressedOops &&
+ !Assembler::is_z_stg(inst1) &&
+ !Assembler::is_z_ste(inst1) &&
!Assembler::is_z_std(inst1)) {
tty->cr();
tty->print_cr("NativeMovRegMem::verify(): verifying addr " PTR_FORMAT
--- a/hotspot/src/cpu/s390/vm/relocInfo_s390.cpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/relocInfo_s390.cpp Mon Nov 07 12:37:28 2016 +0100
@@ -102,11 +102,8 @@
if (orig_addr == NULL) {
call = nativeFarCall_at(inst_addr);
} else {
- if (MacroAssembler::is_call_far_patchable_pcrelative_at(inst_addr)) {
- call = nativeFarCall_at(orig_addr);
- } else {
- call = nativeFarCall_at(orig_addr); // must access location (in CP) where destination is stored in unmoved code, because load from CP is pc-relative
- }
+ // must access location (in CP) where destination is stored in unmoved code, because load from CP is pc-relative
+ call = nativeFarCall_at(orig_addr);
}
return call->destination();
}
--- a/hotspot/src/cpu/s390/vm/s390.ad Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/s390.ad Mon Nov 07 12:37:28 2016 +0100
@@ -1489,8 +1489,8 @@
case Op_CountLeadingZerosL:
case Op_CountTrailingZerosI:
case Op_CountTrailingZerosL:
- // Implementation requires FLOGR instruction.
- return UseCountLeadingZerosInstruction;
+ // Implementation requires FLOGR instruction, which is available since z9.
+ return true;
case Op_ReverseBytesI:
case Op_ReverseBytesL:
@@ -9897,7 +9897,6 @@
// String IndexOfChar
instruct indexOfChar_U(iRegP haystack, iRegI haycnt, iRegI ch, iRegI result, roddRegL oddReg, revenRegL evenReg, flagsReg cr) %{
- predicate(CompactStrings);
match(Set result (StrIndexOfChar (Binary haystack haycnt) ch));
effect(TEMP_DEF result, TEMP evenReg, TEMP oddReg, KILL cr); // R0, R1 are killed, too.
ins_cost(200);
@@ -10590,7 +10589,6 @@
instruct countLeadingZerosI(revenRegI dst, iRegI src, roddRegI tmp, flagsReg cr) %{
match(Set dst (CountLeadingZerosI src));
effect(KILL tmp, KILL cr);
- predicate(UseCountLeadingZerosInstruction); // See Matcher::match_rule_supported
ins_cost(3 * DEFAULT_COST);
size(14);
format %{ "SLLG $dst,$src,32\t# no need to always count 32 zeroes first\n\t"
@@ -10629,7 +10627,6 @@
instruct countLeadingZerosL(revenRegI dst, iRegL src, roddRegI tmp, flagsReg cr) %{
match(Set dst (CountLeadingZerosL src));
effect(KILL tmp, KILL cr);
- predicate(UseCountLeadingZerosInstruction); // See Matcher::match_rule_supported
ins_cost(DEFAULT_COST);
size(4);
format %{ "FLOGR $dst,$src \t# count leading zeros (long)\n\t" %}
@@ -10655,7 +10652,6 @@
instruct countTrailingZerosI(revenRegI dst, iRegI src, roddRegI tmp, flagsReg cr) %{
match(Set dst (CountTrailingZerosI src));
effect(TEMP_DEF dst, TEMP tmp, KILL cr);
- predicate(UseCountLeadingZerosInstruction); // See Matcher::match_rule_supported
ins_cost(8 * DEFAULT_COST);
// TODO: s390 port size(FIXED_SIZE); // Emitted code depends on PreferLAoverADD being on/off.
format %{ "LLGFR $dst,$src \t# clear upper 32 bits (we are dealing with int)\n\t"
@@ -10709,7 +10705,6 @@
instruct countTrailingZerosL(revenRegI dst, iRegL src, roddRegL tmp, flagsReg cr) %{
match(Set dst (CountTrailingZerosL src));
effect(TEMP_DEF dst, KILL tmp, KILL cr);
- predicate(UseCountLeadingZerosInstruction); // See Matcher::match_rule_supported
ins_cost(8 * DEFAULT_COST);
// TODO: s390 port size(FIXED_SIZE); // Emitted code depends on PreferLAoverADD being on/off.
format %{ "LCGR $dst,$src \t# preserve src\n\t"
--- a/hotspot/src/cpu/s390/vm/templateTable_s390.cpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/templateTable_s390.cpp Mon Nov 07 12:37:28 2016 +0100
@@ -3831,17 +3831,17 @@
// Call runtime.
__ z_llgc(Z_ARG2, at_bcp(1)); // type
- // size in Z_tos
+ __ z_lgfr(Z_ARG3, Z_tos); // size
call_VM(Z_RET,
CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
- Z_ARG2, Z_tos);
+ Z_ARG2, Z_ARG3);
}
void TemplateTable::anewarray() {
transition(itos, atos);
__ get_2_byte_integer_at_bcp(Z_ARG3, 1, InterpreterMacroAssembler::Unsigned);
__ get_constant_pool(Z_ARG2);
- __ z_llgfr(Z_ARG4, Z_tos);
+ __ z_lgfr(Z_ARG4, Z_tos);
call_VM(Z_tos, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
Z_ARG2, Z_ARG3, Z_ARG4);
}
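
The switch from LLGFR (zero-extend) to LGFR (sign-extend) matters for negative element counts: the runtime receives the count as a signed int carried in a 64-bit register, and only the sign-extended form still compares as negative there, presumably so that NegativeArraySizeException can be raised. A small illustration of the difference:

    #include <cstdint>
    #include <cstdio>

    int main() {
      int32_t size = -1;                        // e.g. newarray with a negative count

      uint64_t zero_extended = (uint32_t)size;  // LLGFR: 0x00000000FFFFFFFF
      int64_t  sign_extended = (int64_t)size;   // LGFR:  0xFFFFFFFFFFFFFFFF (-1)

      // Only the sign-extended form is still negative when read as a 64-bit value.
      printf("zero-extended: %lld, sign-extended: %lld\n",
             (long long)zero_extended, (long long)sign_extended);
      return 0;
    }
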
--- a/hotspot/src/cpu/s390/vm/vm_version_s390.cpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/cpu/s390/vm/vm_version_s390.cpp Mon Nov 07 12:37:28 2016 +0100
@@ -271,6 +271,31 @@
tty->print_cr(" oldest detected generation is %s", _features_string);
_features_string = "z/Architecture (ambiguous detection)";
}
+
+ if (has_Crypto_AES()) {
+ char buf[256];
+ assert(strlen(_features_string) + 4 + 3*4 + 1 < sizeof(buf), "increase buffer size");
+ jio_snprintf(buf, sizeof(buf), "%s aes%s%s%s", // String 'aes' must be surrounded by spaces so that jtreg tests recognize it.
+ _features_string,
+ has_Crypto_AES128() ? " 128" : "",
+ has_Crypto_AES192() ? " 192" : "",
+ has_Crypto_AES256() ? " 256" : "");
+ _features_string = os::strdup(buf);
+ }
+
+ if (has_Crypto_SHA()) {
+ char buf[256];
+ assert(strlen(_features_string) + 4 + 2 + 2*4 + 6 + 1 < sizeof(buf), "increase buffer size");
+ // String 'sha1' etc must be surrounded by spaces so that jtreg tests recognize it.
+ jio_snprintf(buf, sizeof(buf), "%s %s%s%s%s",
+ _features_string,
+ has_Crypto_SHA1() ? " sha1" : "",
+ has_Crypto_SHA256() ? " sha256" : "",
+ has_Crypto_SHA512() ? " sha512" : "",
+ has_Crypto_GHASH() ? " ghash" : "");
+ if (has_Crypto_AES()) { os::free((void *)_features_string); }
+ _features_string = os::strdup(buf);
+ }
}
// featureBuffer - bit array indicating availability of various features
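
The conditional os::free() in the SHA block exists because _features_string initially points at a string that was not obtained from os::strdup(); only if the AES block already replaced it is the previous value heap-allocated and safe to release. A stand-alone sketch of that append-and-replace pattern, using plain libc calls instead of the os:: and jio_ wrappers:

    #include <cstdio>
    #include <cstring>
    #include <cstdlib>

    int main() {
      const char* features = "z/Architecture";   // initially not heap-allocated
      bool heap_allocated = false;

      char buf[256];
      // First extension: the old value must NOT be freed.
      snprintf(buf, sizeof(buf), "%s aes 128 192 256", features);
      features = strdup(buf);
      heap_allocated = true;

      // Second extension: the old value came from strdup() and may be freed.
      snprintf(buf, sizeof(buf), "%s sha1 sha256 sha512 ghash", features);
      if (heap_allocated) free((void*)features);
      features = strdup(buf);

      printf("%s\n", features);
      free((void*)features);
      return 0;
    }
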
@@ -369,7 +394,7 @@
if (has_Crypto()) {
tty->cr();
- tty->print_cr("detailled availability of %s capabilities:", "CryptoFacility");
+ tty->print_cr("detailed availability of %s capabilities:", "CryptoFacility");
if (test_feature_bit(&_cipher_features[0], -1, 2*Cipher::_featureBits)) {
tty->cr();
tty->print_cr(" available: %s", "Message Cipher Functions");
@@ -479,7 +504,6 @@
}
}
-
void VM_Version::set_features_z900(bool reset) {
reset_features(reset);
--- a/hotspot/src/os_cpu/linux_s390/vm/os_linux_s390.cpp Thu Nov 10 23:26:56 2016 -0800
+++ b/hotspot/src/os_cpu/linux_s390/vm/os_linux_s390.cpp Mon Nov 07 12:37:28 2016 +0100
@@ -171,6 +171,8 @@
}
frame os::current_frame() {
+ // Expected to return the stack pointer of this method.
+ // But if inlined, returns the stack pointer of our caller!
intptr_t* csp = (intptr_t*) *((intptr_t*) os::current_stack_pointer());
assert (csp != NULL, "sp should not be NULL");
// Pass a dummy pc. This way we don't have to load it from the
@@ -184,8 +186,13 @@
assert(senderFrame.pc() != NULL, "Sender pc should not be NULL");
// Return sender of sender of current topframe which hopefully
// both have pc != NULL.
+#ifdef _NMT_NOINLINE_ // Is set in slowdebug builds.
+  // Current_stack_pointer is not inlined, so we must pop one more frame.
frame tmp = os::get_sender_for_C_frame(&topframe);
return os::get_sender_for_C_frame(&tmp);
+#else
+ return os::get_sender_for_C_frame(&topframe);
+#endif
}
}
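
The #ifdef compensates for an extra native frame: in slowdebug builds (_NMT_NOINLINE_) os::current_stack_pointer() is a real call rather than being inlined, so one more frame has to be skipped to reach the same caller. Walking a native frame here amounts to following the back chain stored in the first slot of each ABI frame; a minimal sketch with an illustrative stand-in type:

    #include <cstdint>

    // Illustrative stand-in for a native C frame; not the HotSpot frame class.
    struct CFrame {
      intptr_t* sp;
    };

    // Rough equivalent of one os::get_sender_for_C_frame() step: the first slot
    // of a frame holds the caller's stack pointer, so popping a frame is one load.
    inline CFrame sender_of(const CFrame& f) {
      CFrame s;
      s.sp = (intptr_t*)*f.sp;
      return s;
    }
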
@@ -374,7 +381,7 @@
// BugId 4454115: A read from a MappedByteBuffer can fault here if the
// underlying file has been truncated. Do not crash the VM in such a case.
CodeBlob* cb = CodeCache::find_blob_unsafe(pc);
- nmethod* nm = (cb != NULL && cb->is_nmethod()) ? (nmethod*)cb : NULL;
+ CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
if (nm != NULL && nm->has_unsafe_access()) {
// We don't really need a stub here! Just set the pending exeption and
// continue at the next instruction after the faulting read. Returning