--- a/hotspot/src/share/vm/opto/graphKit.cpp Sat Mar 29 14:54:48 2014 +0400
+++ b/hotspot/src/share/vm/opto/graphKit.cpp Mon Mar 31 09:08:53 2014 +0200
@@ -612,10 +612,10 @@
// Usual case: Bail to interpreter.
// Reserve the right to recompile if we haven't seen anything yet.
- assert(!Deoptimization::reason_is_speculate(reason), "unsupported");
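+  // Speculative traps are recorded in the MDO per compiled method, so the query
+  // below needs the current compilation's method when the reason is speculative.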
+ ciMethod* m = Deoptimization::reason_is_speculate(reason) ? C->method() : NULL;
Deoptimization::DeoptAction action = Deoptimization::Action_maybe_recompile;
if (treat_throw_as_hot
- && (method()->method_data()->trap_recompiled_at(bci(), NULL)
+ && (method()->method_data()->trap_recompiled_at(bci(), m)
|| C->too_many_traps(reason))) {
// We cannot afford to take more traps here. Suffer in the interpreter.
if (C->log() != NULL)
@@ -1181,7 +1181,8 @@
Node* GraphKit::null_check_common(Node* value, BasicType type,
// optional arguments for variations:
bool assert_null,
- Node* *null_control) {
+ Node* *null_control,
+ bool speculative) {
assert(!assert_null || null_control == NULL, "not both at once");
if (stopped()) return top();
if (!GenerateCompilerNullChecks && !assert_null && null_control == NULL) {
@@ -1291,13 +1292,13 @@
// Branch to failure if null
float ok_prob = PROB_MAX; // a priori estimate: nulls never happen
Deoptimization::DeoptReason reason;
- if (assert_null)
+ if (assert_null) {
reason = Deoptimization::Reason_null_assert;
- else if (type == T_OBJECT)
- reason = Deoptimization::Reason_null_check;
- else
+ } else if (type == T_OBJECT) {
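+    // Use the speculative variant of the null-check reason when this check was
+    // emitted because of a speculative (profile-based) not-null type.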
+ reason = Deoptimization::reason_null_check(speculative);
+ } else {
reason = Deoptimization::Reason_div0_check;
-
+ }
// %%% Since Reason_unhandled is not recorded on a per-bytecode basis,
// ciMethodData::has_trap_at will return a conservative -1 if any
// must-be-null assertion has failed. This could cause performance
@@ -2120,21 +2121,36 @@
*
* @param n node that the type applies to
* @param exact_kls type from profiling
+ * @param maybe_null did profiling see null?
*
* @return node with improved type
*/
-Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls) {
+Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls, bool maybe_null) {
const Type* current_type = _gvn.type(n);
assert(UseTypeSpeculation, "type speculation must be on");
- const TypeOopPtr* speculative = current_type->speculative();
-
+ const TypePtr* speculative = current_type->speculative();
+
+ // Should the klass from the profile be recorded in the speculative type?
if (current_type->would_improve_type(exact_kls, jvms()->depth())) {
const TypeKlassPtr* tklass = TypeKlassPtr::make(exact_kls);
const TypeOopPtr* xtype = tklass->as_instance_type();
assert(xtype->klass_is_exact(), "Should be exact");
+ // Any reason to believe n is not null (from this profiling or a previous one)?
+ const TypePtr* ptr = (maybe_null && current_type->speculative_maybe_null()) ? TypePtr::BOTTOM : TypePtr::NOTNULL;
// record the new speculative type's depth
- speculative = xtype->with_inline_depth(jvms()->depth());
+ speculative = xtype->cast_to_ptr_type(ptr->ptr())->is_ptr();
+ speculative = speculative->with_inline_depth(jvms()->depth());
+ } else if (current_type->would_improve_ptr(maybe_null)) {
+    // Profiling reports that null was never seen, so we can change the
+    // speculative type to a non-null ptr.
+ assert(!maybe_null, "nothing to improve");
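+    // Strengthen any existing speculative type to not-null; otherwise record a
+    // plain not-null speculative pointer.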
+ if (speculative == NULL) {
+ speculative = TypePtr::NOTNULL;
+ } else {
+ const TypePtr* ptr = TypePtr::NOTNULL;
+ speculative = speculative->cast_to_ptr_type(ptr->ptr())->is_ptr();
+ }
}
if (speculative != current_type->speculative()) {
@@ -2167,7 +2183,15 @@
return n;
}
ciKlass* exact_kls = profile_has_unique_klass();
- return record_profile_for_speculation(n, exact_kls);
+ bool maybe_null = true;
+ if (java_bc() == Bytecodes::_checkcast ||
+ java_bc() == Bytecodes::_instanceof ||
+ java_bc() == Bytecodes::_aastore) {
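+    // For these bytecodes the MDO collects a null_seen bit; use it to refine
+    // the speculative nullness of the value being checked.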
+ ciProfileData* data = method()->method_data()->bci_to_data(bci());
+    maybe_null = data == NULL ? true : data->as_BitData()->null_seen();
+ }
+ return record_profile_for_speculation(n, exact_kls, maybe_null);
}
/**
@@ -2187,9 +2211,10 @@
for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
- ciKlass* better_type = method()->argument_profiled_type(bci(), i);
- if (better_type != NULL) {
- record_profile_for_speculation(argument(j), better_type);
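+      // The profile may provide a speculative klass and/or a null-seen bit for
+      // this argument.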
+ bool maybe_null = true;
+ ciKlass* better_type = NULL;
+ if (method()->argument_profiled_type(bci(), i, better_type, maybe_null)) {
+ record_profile_for_speculation(argument(j), better_type, maybe_null);
}
i++;
}
@@ -2206,15 +2231,34 @@
}
for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
if (_gvn.type(local(i))->isa_oopptr()) {
- ciKlass* better_type = method()->parameter_profiled_type(j);
- if (better_type != NULL) {
- record_profile_for_speculation(local(i), better_type);
+ bool maybe_null = true;
+ ciKlass* better_type = NULL;
+ if (method()->parameter_profiled_type(j, better_type, maybe_null)) {
+ record_profile_for_speculation(local(i), better_type, maybe_null);
}
j++;
}
}
}
+/**
+ * Record profiling data from return value profiling at an invoke with
+ * the type system so that it can propagate it (speculation)
+ */
+void GraphKit::record_profiled_return_for_speculation() {
+ if (!UseTypeSpeculation) {
+ return;
+ }
+ bool maybe_null = true;
+ ciKlass* better_type = NULL;
+ if (method()->return_profiled_type(bci(), better_type, maybe_null)) {
+ // If profiling reports a single type for the return value,
+ // feed it to the type system so it can propagate it as a
+ // speculative type
+ record_profile_for_speculation(stack(sp()-1), better_type, maybe_null);
+ }
+}
+
void GraphKit::round_double_result(ciMethod* dest_method) {
// A non-strict method may return a double value which has an extended
// exponent, but this must not be visible in a caller which is 'strict'
@@ -2294,10 +2338,12 @@
// Null check oop. Set null-path control into Region in slot 3.
// Make a cast-not-nullness use the other not-null control. Return cast.
Node* GraphKit::null_check_oop(Node* value, Node* *null_control,
- bool never_see_null, bool safe_for_replace) {
+ bool never_see_null,
+ bool safe_for_replace,
+ bool speculative) {
// Initial NULL check taken path
(*null_control) = top();
- Node* cast = null_check_common(value, T_OBJECT, false, null_control);
+ Node* cast = null_check_common(value, T_OBJECT, false, null_control, speculative);
// Generate uncommon_trap:
if (never_see_null && (*null_control) != top()) {
@@ -2308,7 +2354,8 @@
PreserveJVMState pjvms(this);
set_control(*null_control);
replace_in_map(value, null());
- uncommon_trap(Deoptimization::Reason_null_check,
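+    // A failed speculative null check is recorded against the speculative
+    // reason so that wrong speculation does not pessimize regular null checks.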
+ Deoptimization::DeoptReason reason = Deoptimization::reason_null_check(speculative);
+ uncommon_trap(reason,
Deoptimization::Action_make_not_entrant);
(*null_control) = top(); // NULL path is dead
}
@@ -2732,11 +2779,16 @@
// recompile; the offending check will be recompiled to handle NULLs.
// If we see several offending BCIs, then all checks in the
// method will be recompiled.
-bool GraphKit::seems_never_null(Node* obj, ciProfileData* data) {
+bool GraphKit::seems_never_null(Node* obj, ciProfileData* data, bool& speculating) {
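+  // Speculate only if a speculative type for obj claims null was never seen.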
+ speculating = !_gvn.type(obj)->speculative_maybe_null();
+ Deoptimization::DeoptReason reason = Deoptimization::reason_null_check(speculating);
if (UncommonNullCast // Cutout for this technique
&& obj != null() // And not the -Xcomp stupid case?
- && !too_many_traps(Deoptimization::Reason_null_check)
+ && !too_many_traps(reason)
) {
+ if (speculating) {
+ return true;
+ }
if (data == NULL)
// Edge case: no mature data. Be optimistic here.
return true;
@@ -2746,6 +2798,7 @@
java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
return !data->as_BitData()->null_seen();
}
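+  // Not relying on speculation here: make sure the caller uses the regular
+  // (non-speculative) null-check deopt reason.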
+ speculating = false;
return false;
}
@@ -2758,7 +2811,7 @@
bool safe_for_replace) {
if (!UseTypeProfile || !TypeProfileCasts) return NULL;
- Deoptimization::DeoptReason reason = spec_klass == NULL ? Deoptimization::Reason_class_check : Deoptimization::Reason_speculate_class_check;
+ Deoptimization::DeoptReason reason = Deoptimization::reason_class_check(spec_klass != NULL);
// Make sure we haven't already deoptimized from this tactic.
if (too_many_traps(reason))
@@ -2811,7 +2864,7 @@
// type == NULL if profiling tells us this object is always null
if (type != NULL) {
Deoptimization::DeoptReason class_reason = Deoptimization::Reason_speculate_class_check;
- Deoptimization::DeoptReason null_reason = Deoptimization::Reason_null_check;
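+    // The cast is driven by a speculative type, so a null seen here means the
+    // speculation was wrong: use the speculative null-check reason.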
+ Deoptimization::DeoptReason null_reason = Deoptimization::Reason_speculate_null_check;
if (!too_many_traps(null_reason) &&
!too_many_traps(class_reason)) {
Node* not_null_obj = NULL;
@@ -2819,7 +2872,7 @@
// there's no need for a null check
if (!not_null) {
Node* null_ctl = top();
- not_null_obj = null_check_oop(obj, &null_ctl, true, true);
+ not_null_obj = null_check_oop(obj, &null_ctl, true, true, true);
assert(null_ctl->is_top(), "no null control here");
} else {
not_null_obj = obj;
@@ -2867,12 +2920,13 @@
if (java_bc() == Bytecodes::_instanceof) { // Only for the bytecode
data = method()->method_data()->bci_to_data(bci());
}
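+  // Set by seems_never_null() when the never-null claim comes from a
+  // speculative type rather than the MDO's null_seen bit.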
+ bool speculative_not_null = false;
bool never_see_null = (ProfileDynamicTypes // aggressive use of profile
- && seems_never_null(obj, data));
+ && seems_never_null(obj, data, speculative_not_null));
// Null check; get casted pointer; set region slot 3
Node* null_ctl = top();
- Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
+ Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace, speculative_not_null);
// If not_null_obj is dead, only null-path is taken
if (stopped()) { // Doing instance-of on a NULL?
@@ -2995,12 +3049,13 @@
C->set_has_split_ifs(true); // Has chance for split-if optimization
// Use null-cast information if it is available
+ bool speculative_not_null = false;
bool never_see_null = ((failure_control == NULL) // regular case only
- && seems_never_null(obj, data));
+ && seems_never_null(obj, data, speculative_not_null));
// Null check; get casted pointer; set region slot 3
Node* null_ctl = top();
- Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
+ Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace, speculative_not_null);
// If not_null_obj is dead, only null-path is taken
if (stopped()) { // Doing instance-of on a NULL?