8201447: C1 does backedge profiling incorrectly
Summary: Increment backedge counter only for backward arms of conditional branches.
Reviewed-by: never, kvn
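
For illustration only (not part of the changeset): a minimal, self-contained C++ sketch of the per-arm step selection that the new increment_backedge_counter_conditionally emits with cmp/cmove. The bci values, the loop, and the use of 1 as a stand-in for InvocationCounter::count_increment are invented for the example. Only the arm whose successor bci is smaller than the branch bci (a backward jump) contributes to the backedge counter; the old code incremented on every evaluation of the branch.

#include <cstdio>

struct IfProfile {
  int bci;       // bytecode index of the conditional branch itself
  int tsux_bci;  // bytecode index of the taken (true) successor
  int fsux_bci;  // bytecode index of the not-taken (false) successor
};

// Mirrors the cmove in LIRGenerator::increment_backedge_counter_conditionally:
// pick the increment per arm, and only a backward arm gets a non-zero step.
static int backedge_step(const IfProfile& p, bool taken) {
  const int count_increment = 1;  // stand-in for InvocationCounter::count_increment
  int taken_step     = (p.tsux_bci < p.bci) ? count_increment : 0;
  int not_taken_step = (p.fsux_bci < p.bci) ? count_increment : 0;
  return taken ? taken_step : not_taken_step;
}

int main() {
  // Loop bottom: "if (i < n) goto 5 else goto 42" sitting at bci 40.
  // Only the taken arm jumps backwards; the exit arm continues forwards.
  IfProfile p = { 40, 5, 42 };

  const int iterations = 10;
  int old_count = 0, new_count = 0;
  for (int i = 0; i <= iterations; i++) {
    bool taken = (i < iterations);          // the last evaluation leaves the loop
    old_count += 1;                         // old code: unconditional increment at the safepoint
    new_count += backedge_step(p, taken);   // new code: increment only for the backward arm
  }
  printf("old backedge count = %d\n", old_count);   // 11
  printf("new backedge count = %d\n", new_count);   // 10
  return 0;
}

For a ten-iteration loop the old model reports 11 backedges and the new one 10: the final, forward-exiting evaluation of the branch no longer bumps the counter.
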
--- a/src/hotspot/cpu/aarch64/c1_LIRGenerator_aarch64.cpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/cpu/aarch64/c1_LIRGenerator_aarch64.cpp Wed May 16 15:19:28 2018 -0700
@@ -1284,17 +1284,19 @@
yin->load_item();
}
- // add safepoint before generating condition code so it can be recomputed
- if (x->is_safepoint()) {
- // increment backedge counter if needed
- increment_backedge_counter(state_for(x, x->state_before()), x->profiled_bci());
- __ safepoint(LIR_OprFact::illegalOpr, state_for(x, x->state_before()));
- }
set_no_result(x);
LIR_Opr left = xin->result();
LIR_Opr right = yin->result();
+ // add safepoint before generating condition code so it can be recomputed
+ if (x->is_safepoint()) {
+ // increment backedge counter if needed
+ increment_backedge_counter_conditionally(lir_cond(cond), left, right, state_for(x, x->state_before()),
+ x->tsux()->bci(), x->fsux()->bci(), x->profiled_bci());
+ __ safepoint(LIR_OprFact::illegalOpr, state_for(x, x->state_before()));
+ }
+
__ cmp(lir_cond(cond), left, right);
// Generate branch profiling. Profiling code doesn't kill flags.
profile_branch(x, cond);
--- a/src/hotspot/cpu/arm/c1_LIRGenerator_arm.cpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/cpu/arm/c1_LIRGenerator_arm.cpp Wed May 16 15:19:28 2018 -0700
@@ -1522,7 +1522,8 @@
// add safepoint before generating condition code so it can be recomputed
if (x->is_safepoint()) {
- increment_backedge_counter(state_for(x, x->state_before()), x->profiled_bci());
+ increment_backedge_counter_conditionally(lir_cond(cond), left, right, state_for(x, x->state_before()),
+ x->tsux()->bci(), x->fsux()->bci(), x->profiled_bci());
__ safepoint(LIR_OprFact::illegalOpr, state_for(x, x->state_before()));
}
--- a/src/hotspot/cpu/ppc/c1_LIRGenerator_ppc.cpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/cpu/ppc/c1_LIRGenerator_ppc.cpp Wed May 16 15:19:28 2018 -0700
@@ -1157,7 +1157,8 @@
// Add safepoint before generating condition code so it can be recomputed.
if (x->is_safepoint()) {
// Increment backedge counter if needed.
- increment_backedge_counter(state_for(x, x->state_before()), x->profiled_bci());
+ increment_backedge_counter_conditionally(lir_cond(cond), left, right, state_for(x, x->state_before()),
+ x->tsux()->bci(), x->fsux()->bci(), x->profiled_bci());
__ safepoint(safepoint_poll_register(), state_for(x, x->state_before()));
}
--- a/src/hotspot/cpu/s390/c1_LIRGenerator_s390.cpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/cpu/s390/c1_LIRGenerator_s390.cpp Wed May 16 15:19:28 2018 -0700
@@ -963,17 +963,20 @@
yin->dont_load_item();
}
+ LIR_Opr left = xin->result();
+ LIR_Opr right = yin->result();
+
+ set_no_result(x);
+
// Add safepoint before generating condition code so it can be recomputed.
if (x->is_safepoint()) {
// Increment backedge counter if needed.
- increment_backedge_counter(state_for (x, x->state_before()), x->profiled_bci());
+ increment_backedge_counter_conditionally(lir_cond(cond), left, right, state_for(x, x->state_before()),
+ x->tsux()->bci(), x->fsux()->bci(), x->profiled_bci());
// Use safepoint_poll_register() instead of LIR_OprFact::illegalOpr.
__ safepoint(safepoint_poll_register(), state_for (x, x->state_before()));
}
- set_no_result(x);
- LIR_Opr left = xin->result();
- LIR_Opr right = yin->result();
__ cmp(lir_cond(cond), left, right);
// Generate branch profiling. Profiling code doesn't kill flags.
profile_branch(x, cond);
--- a/src/hotspot/cpu/sparc/c1_LIRGenerator_sparc.cpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/cpu/sparc/c1_LIRGenerator_sparc.cpp Wed May 16 15:19:28 2018 -0700
@@ -1219,7 +1219,8 @@
// add safepoint before generating condition code so it can be recomputed
if (x->is_safepoint()) {
// increment backedge counter if needed
- increment_backedge_counter(state_for(x, x->state_before()), x->profiled_bci());
+ increment_backedge_counter_conditionally(lir_cond(cond), left, right, state_for(x, x->state_before()),
+ x->tsux()->bci(), x->fsux()->bci(), x->profiled_bci());
__ safepoint(safepoint_poll_register(), state_for(x, x->state_before()));
}
--- a/src/hotspot/cpu/x86/c1_LIRGenerator_x86.cpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/cpu/x86/c1_LIRGenerator_x86.cpp Wed May 16 15:19:28 2018 -0700
@@ -1402,16 +1402,19 @@
yin->dont_load_item();
}
+ LIR_Opr left = xin->result();
+ LIR_Opr right = yin->result();
+
+ set_no_result(x);
+
// add safepoint before generating condition code so it can be recomputed
if (x->is_safepoint()) {
// increment backedge counter if needed
- increment_backedge_counter(state_for(x, x->state_before()), x->profiled_bci());
+ increment_backedge_counter_conditionally(lir_cond(cond), left, right, state_for(x, x->state_before()),
+ x->tsux()->bci(), x->fsux()->bci(), x->profiled_bci());
__ safepoint(safepoint_poll_register(), state_for(x, x->state_before()));
}
- set_no_result(x);
- LIR_Opr left = xin->result();
- LIR_Opr right = yin->result();
__ cmp(lir_cond(cond), left, right);
// Generate branch profiling. Profiling code doesn't kill flags.
profile_branch(x, cond);
--- a/src/hotspot/share/c1/c1_Canonicalizer.cpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/share/c1/c1_Canonicalizer.cpp Wed May 16 15:19:28 2018 -0700
@@ -779,7 +779,7 @@
if (cmp->x() == cmp->y()) {
do_If(canon);
} else {
- if (compilation()->profile_branches()) {
+ if (compilation()->profile_branches() || compilation()->count_backedges()) {
// TODO: If profiling, leave floating point comparisons unoptimized.
// We currently do not support profiling of the unordered case.
switch(cmp->op()) {
--- a/src/hotspot/share/c1/c1_LIRGenerator.cpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/share/c1/c1_LIRGenerator.cpp Wed May 16 15:19:28 2018 -0700
@@ -3247,11 +3247,26 @@
if (_method->has_option_value("CompileThresholdScaling", scale)) {
freq_log = Arguments::scaled_freq_log(freq_log, scale);
}
- increment_event_counter_impl(info, x->inlinee(), right_n_bits(freq_log), InvocationEntryBci, false, true);
+ increment_event_counter_impl(info, x->inlinee(), LIR_OprFact::intConst(InvocationCounter::count_increment), right_n_bits(freq_log), InvocationEntryBci, false, true);
}
}
-void LIRGenerator::increment_event_counter(CodeEmitInfo* info, int bci, bool backedge) {
+void LIRGenerator::increment_backedge_counter_conditionally(LIR_Condition cond, LIR_Opr left, LIR_Opr right, CodeEmitInfo* info, int left_bci, int right_bci, int bci) {
+ if (compilation()->count_backedges()) {
+ __ cmp(cond, left, right);
+ LIR_Opr step = new_register(T_INT);
+ LIR_Opr plus_one = LIR_OprFact::intConst(InvocationCounter::count_increment);
+ LIR_Opr zero = LIR_OprFact::intConst(0);
+ __ cmove(cond,
+ (left_bci < bci) ? plus_one : zero,
+ (right_bci < bci) ? plus_one : zero,
+ step, left->type());
+ increment_backedge_counter(info, step, bci);
+ }
+}
+
+
+void LIRGenerator::increment_event_counter(CodeEmitInfo* info, LIR_Opr step, int bci, bool backedge) {
int freq_log = 0;
int level = compilation()->env()->comp_level();
if (level == CompLevel_limited_profile) {
@@ -3266,7 +3281,7 @@
if (_method->has_option_value("CompileThresholdScaling", scale)) {
freq_log = Arguments::scaled_freq_log(freq_log, scale);
}
- increment_event_counter_impl(info, info->scope()->method(), right_n_bits(freq_log), bci, backedge, true);
+ increment_event_counter_impl(info, info->scope()->method(), step, right_n_bits(freq_log), bci, backedge, true);
}
void LIRGenerator::decrement_age(CodeEmitInfo* info) {
@@ -3291,7 +3306,7 @@
void LIRGenerator::increment_event_counter_impl(CodeEmitInfo* info,
- ciMethod *method, int frequency,
+ ciMethod *method, LIR_Opr step, int frequency,
int bci, bool backedge, bool notify) {
assert(frequency == 0 || is_power_of_2(frequency + 1), "Frequency must be x^2 - 1 or 0");
int level = _compilation->env()->comp_level();
@@ -3322,7 +3337,7 @@
LIR_Address* counter = new LIR_Address(counter_holder, offset, T_INT);
LIR_Opr result = new_register(T_INT);
__ load(counter, result);
- __ add(result, LIR_OprFact::intConst(InvocationCounter::count_increment), result);
+ __ add(result, step, result);
__ store(result, counter);
if (notify && (!backedge || UseOnStackReplacement)) {
LIR_Opr meth = LIR_OprFact::metadataConst(method->constant_encoding());
@@ -3330,9 +3345,19 @@
CodeStub* overflow = new CounterOverflowStub(info, bci, meth);
int freq = frequency << InvocationCounter::count_shift;
if (freq == 0) {
- __ branch(lir_cond_always, T_ILLEGAL, overflow);
+ if (!step->is_constant()) {
+ __ cmp(lir_cond_notEqual, step, LIR_OprFact::intConst(0));
+ __ branch(lir_cond_notEqual, T_ILLEGAL, overflow);
+ } else {
+ __ branch(lir_cond_always, T_ILLEGAL, overflow);
+ }
} else {
LIR_Opr mask = load_immediate(freq, T_INT);
+ if (!step->is_constant()) {
+ // If step is 0, make sure the overflow check below always fails
+ __ cmp(lir_cond_notEqual, step, LIR_OprFact::intConst(0));
+ __ cmove(lir_cond_notEqual, result, LIR_OprFact::intConst(InvocationCounter::count_increment), result, T_INT);
+ }
__ logical_and(result, mask, result);
__ cmp(lir_cond_equal, result, LIR_OprFact::intConst(0));
__ branch(lir_cond_equal, T_INT, overflow);
--- a/src/hotspot/share/c1/c1_LIRGenerator.hpp Tue May 15 14:49:10 2018 -0700
+++ b/src/hotspot/share/c1/c1_LIRGenerator.hpp Wed May 16 15:19:28 2018 -0700
@@ -391,17 +391,23 @@
void profile_branch(If* if_instr, If::Condition cond);
void increment_event_counter_impl(CodeEmitInfo* info,
- ciMethod *method, int frequency,
+ ciMethod *method, LIR_Opr step, int frequency,
int bci, bool backedge, bool notify);
- void increment_event_counter(CodeEmitInfo* info, int bci, bool backedge);
+ void increment_event_counter(CodeEmitInfo* info, LIR_Opr step, int bci, bool backedge);
void increment_invocation_counter(CodeEmitInfo *info) {
if (compilation()->count_invocations()) {
- increment_event_counter(info, InvocationEntryBci, false);
+ increment_event_counter(info, LIR_OprFact::intConst(InvocationCounter::count_increment), InvocationEntryBci, false);
}
}
void increment_backedge_counter(CodeEmitInfo* info, int bci) {
if (compilation()->count_backedges()) {
- increment_event_counter(info, bci, true);
+ increment_event_counter(info, LIR_OprFact::intConst(InvocationCounter::count_increment), bci, true);
+ }
+ }
+ void increment_backedge_counter_conditionally(LIR_Condition cond, LIR_Opr left, LIR_Opr right, CodeEmitInfo* info, int left_bci, int right_bci, int bci);
+ void increment_backedge_counter(CodeEmitInfo* info, LIR_Opr step, int bci) {
+ if (compilation()->count_backedges()) {
+ increment_event_counter(info, step, bci, true);
}
}
void decrement_age(CodeEmitInfo* info);
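
Also for illustration (not part of the changeset): a simplified, self-contained model of the guard that increment_event_counter_impl now applies when the step is not a compile-time constant. The constants below (count_shift = 2, freq_log = 10) are stand-ins, assuming count_increment = 1 << count_shift so it always overlaps the mask right_n_bits(freq_log) << count_shift. The point is that when the conditionally selected step is 0, the value fed to the frequency-mask test is rewritten to count_increment, so the overflow stub cannot be entered for an evaluation that took a forward arm.

#include <cstdio>

int main() {
  const int count_shift     = 2;
  const int count_increment = 1 << count_shift;               // stand-in for InvocationCounter::count_increment
  const int mask            = ((1 << 10) - 1) << count_shift; // right_n_bits(freq_log) << count_shift

  struct { int counter; int step; } cases[] = {
    { ((1 << 10) - 1) << count_shift, count_increment },  // backward arm taken at the threshold: stub should fire
    {  (1 << 10)      << count_shift, 0               },  // forward arm taken at a threshold multiple: stub must not fire
  };

  for (const auto& c : cases) {
    int result = c.counter + c.step;   // __ add(result, step, result); __ store(result, counter)
    int tested = result;
    if (c.step == 0) {
      tested = count_increment;        // zero-step arm of the cmove: substitute a value the mask test cannot zero out
    }
    bool without_guard = ((result & mask) == 0);
    bool with_guard    = ((tested & mask) == 0);
    printf("counter=%d step=%d: overflow without guard=%d, with guard=%d\n",
           c.counter, c.step, without_guard, with_guard);
  }
  return 0;
}

Without the guard, a zero-step evaluation whose stored counter happens to sit exactly on a threshold multiple would re-enter the CounterOverflowStub; with it, the AND can never be zero when no increment actually happened.
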