8181211: C2: Use profiling data to optimize on/off heap unsafe accesses
author roland
Fri, 09 Jun 2017 10:51:52 +0200
changeset 46542 73dd19b96b5d
parent 46541 d20828de9e39
child 46544 9157a537907e
8181211: C2: Use profiling data to optimize on/off heap unsafe accesses Reviewed-by: kvn
hotspot/src/share/vm/c1/c1_GraphBuilder.cpp
hotspot/src/share/vm/ci/ciMethod.cpp
hotspot/src/share/vm/ci/ciMethod.hpp
hotspot/src/share/vm/ci/ciMethodData.hpp
hotspot/src/share/vm/oops/methodData.cpp
hotspot/src/share/vm/oops/methodData.hpp
hotspot/src/share/vm/opto/doCall.cpp
hotspot/src/share/vm/opto/graphKit.cpp
hotspot/src/share/vm/opto/graphKit.hpp
hotspot/src/share/vm/opto/library_call.cpp
hotspot/src/share/vm/opto/parse2.cpp
hotspot/src/share/vm/opto/type.cpp
hotspot/src/share/vm/opto/type.hpp
hotspot/src/share/vm/runtime/deoptimization.cpp
hotspot/src/share/vm/runtime/deoptimization.hpp
hotspot/src/share/vm/runtime/vmStructs.cpp
--- a/hotspot/src/share/vm/c1/c1_GraphBuilder.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/c1/c1_GraphBuilder.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -1540,13 +1540,7 @@
         ciMethod* caller = state()->scope()->method();
         ciMethodData* md = caller->method_data_or_null();
         ciProfileData* data = md->bci_to_data(invoke_bci);
-        if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
-          bool has_return = data->is_CallTypeData() ? ((ciCallTypeData*)data)->has_return() : ((ciVirtualCallTypeData*)data)->has_return();
-          // May not be true in case of an inlined call through a method handle intrinsic.
-          if (has_return) {
-            profile_return_type(x, method(), caller, invoke_bci);
-          }
-        }
+        profile_return_type(x, method(), caller, invoke_bci);
       }
     }
     Goto* goto_callee = new Goto(continuation(), false);
@@ -4366,7 +4360,10 @@
   ciMethodData* md = m->method_data_or_null();
   ciProfileData* data = md->bci_to_data(invoke_bci);
   if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
-    append(new ProfileReturnType(m , invoke_bci, callee, ret));
+    bool has_return = data->is_CallTypeData() ? ((ciCallTypeData*)data)->has_return() : ((ciVirtualCallTypeData*)data)->has_return();
+    if (has_return) {
+      append(new ProfileReturnType(m , invoke_bci, callee, ret));
+    }
   }
 }
 
--- a/hotspot/src/share/vm/ci/ciMethod.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/ci/ciMethod.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -594,11 +594,11 @@
  * @param [in]bci         bci of the call
  * @param [in]i           argument number
  * @param [out]type       profiled type of argument, NULL if none
- * @param [out]maybe_null true if null was seen for argument
+ * @param [out]ptr_kind   whether always null, never null or maybe null
  * @return                true if profiling exists
  *
  */
-bool ciMethod::argument_profiled_type(int bci, int i, ciKlass*& type, bool& maybe_null) {
+bool ciMethod::argument_profiled_type(int bci, int i, ciKlass*& type, ProfilePtrKind& ptr_kind) {
   if (MethodData::profile_parameters() && method_data() != NULL && method_data()->is_mature()) {
     ciProfileData* data = method_data()->bci_to_data(bci);
     if (data != NULL) {
@@ -609,7 +609,7 @@
           return false;
         }
         type = call->valid_argument_type(i);
-        maybe_null = call->argument_maybe_null(i);
+        ptr_kind = call->argument_ptr_kind(i);
         return true;
       } else if (data->is_CallTypeData()) {
         assert_call_type_ok(bci);
@@ -618,7 +618,7 @@
           return false;
         }
         type = call->valid_argument_type(i);
-        maybe_null = call->argument_maybe_null(i);
+        ptr_kind = call->argument_ptr_kind(i);
         return true;
       }
     }
@@ -632,25 +632,29 @@
  *
  * @param [in]bci         bci of the call
  * @param [out]type       profiled type of argument, NULL if none
- * @param [out]maybe_null true if null was seen for argument
+ * @param [out]ptr_kind   whether always null, never null or maybe null
  * @return                true if profiling exists
  *
  */
-bool ciMethod::return_profiled_type(int bci, ciKlass*& type, bool& maybe_null) {
+bool ciMethod::return_profiled_type(int bci, ciKlass*& type, ProfilePtrKind& ptr_kind) {
   if (MethodData::profile_return() && method_data() != NULL && method_data()->is_mature()) {
     ciProfileData* data = method_data()->bci_to_data(bci);
     if (data != NULL) {
       if (data->is_VirtualCallTypeData()) {
         assert_virtual_call_type_ok(bci);
         ciVirtualCallTypeData* call = (ciVirtualCallTypeData*)data->as_VirtualCallTypeData();
-        type = call->valid_return_type();
-        maybe_null = call->return_maybe_null();
-        return true;
+        if (call->has_return()) {
+          type = call->valid_return_type();
+          ptr_kind = call->return_ptr_kind();
+          return true;
+        }
       } else if (data->is_CallTypeData()) {
         assert_call_type_ok(bci);
         ciCallTypeData* call = (ciCallTypeData*)data->as_CallTypeData();
-        type = call->valid_return_type();
-        maybe_null = call->return_maybe_null();
-        return true;
+        if (call->has_return()) {
+          type = call->valid_return_type();
+          ptr_kind = call->return_ptr_kind();
+          return true;
+        }
       }
     }
@@ -663,16 +667,16 @@
  *
  * @param [in]i           parameter number
  * @param [out]type       profiled type of parameter, NULL if none
- * @param [out]maybe_null true if null was seen for parameter
+ * @param [out]ptr_kind   whether always null, never null or maybe null
  * @return                true if profiling exists
  *
  */
-bool ciMethod::parameter_profiled_type(int i, ciKlass*& type, bool& maybe_null) {
+bool ciMethod::parameter_profiled_type(int i, ciKlass*& type, ProfilePtrKind& ptr_kind) {
   if (MethodData::profile_parameters() && method_data() != NULL && method_data()->is_mature()) {
     ciParametersTypeData* parameters = method_data()->parameters_type_data();
     if (parameters != NULL && i < parameters->number_of_parameters()) {
       type = parameters->valid_parameter_type(i);
-      maybe_null = parameters->parameter_maybe_null(i);
+      ptr_kind = parameters->parameter_ptr_kind(i);
       return true;
     }
   }
--- a/hotspot/src/share/vm/ci/ciMethod.hpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/ci/ciMethod.hpp	Fri Jun 09 10:51:52 2017 +0200
@@ -40,6 +40,14 @@
 class BCEscapeAnalyzer;
 class InlineTree;
 
+// Whether profiling found an oop to be always, never or sometimes
+// null
+enum ProfilePtrKind {
+  ProfileAlwaysNull,
+  ProfileNeverNull,
+  ProfileMaybeNull
+};
+
 // ciMethod
 //
 // This class represents a Method* in the HotSpot virtual
@@ -248,9 +256,9 @@
   int           interpreter_call_site_count(int bci);
 
   // Does type profiling provide any useful information at this point?
-  bool          argument_profiled_type(int bci, int i, ciKlass*& type, bool& maybe_null);
-  bool          parameter_profiled_type(int i, ciKlass*& type, bool& maybe_null);
-  bool          return_profiled_type(int bci, ciKlass*& type, bool& maybe_null);
+  bool          argument_profiled_type(int bci, int i, ciKlass*& type, ProfilePtrKind& ptr_kind);
+  bool          parameter_profiled_type(int i, ciKlass*& type, ProfilePtrKind& ptr_kind);
+  bool          return_profiled_type(int bci, ciKlass*& type, ProfilePtrKind& ptr_kind);
 
   ciField*      get_field_at_bci( int bci, bool &will_link);
   ciMethod*     get_method_at_bci(int bci, bool &will_link, ciSignature* *declared_signature);
--- a/hotspot/src/share/vm/ci/ciMethodData.hpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/ci/ciMethodData.hpp	Fri Jun 09 10:51:52 2017 +0200
@@ -88,6 +88,17 @@
     }
   }
 
+  static ProfilePtrKind ptr_kind(intptr_t v) {
+    bool maybe_null = TypeEntries::was_null_seen(v);
+    if (!maybe_null) {
+      return ProfileNeverNull;
+    } else if (TypeEntries::is_type_none(v)) {
+      return ProfileAlwaysNull;
+    } else {
+      return ProfileMaybeNull;
+    }
+  }
+
   static intptr_t with_status(ciKlass* k, intptr_t in) {
     return TypeEntries::with_status((intptr_t)k, in);
   }
@@ -105,8 +116,8 @@
     return valid_ciklass(type(i));
   }
 
-  bool maybe_null(int i) const {
-    return was_null_seen(type(i));
+  ProfilePtrKind ptr_kind(int i) const {
+    return ciTypeEntries::ptr_kind(type(i));
   }
 
 #ifndef PRODUCT
@@ -122,8 +133,8 @@
     return valid_ciklass(type());
   }
 
-  bool maybe_null() const {
-    return was_null_seen(type());
+  ProfilePtrKind ptr_kind() const {
+    return ciTypeEntries::ptr_kind(type());
   }
 
 #ifndef PRODUCT
@@ -167,12 +178,12 @@
     return ret()->valid_type();
   }
 
-  bool argument_maybe_null(int i) const {
-    return args()->maybe_null(i);
+  ProfilePtrKind argument_ptr_kind(int i) const {
+    return args()->ptr_kind(i);
   }
 
-  bool return_maybe_null() const {
-    return ret()->maybe_null();
+  ProfilePtrKind return_ptr_kind() const {
+    return ret()->ptr_kind();
   }
 
 #ifndef PRODUCT
@@ -281,12 +292,12 @@
     return ret()->valid_type();
   }
 
-  bool argument_maybe_null(int i) const {
-    return args()->maybe_null(i);
+  ProfilePtrKind argument_ptr_kind(int i) const {
+    return args()->ptr_kind(i);
   }
 
-  bool return_maybe_null() const {
-    return ret()->maybe_null();
+  ProfilePtrKind return_ptr_kind() const {
+    return ret()->ptr_kind();
   }
 
 #ifndef PRODUCT
@@ -334,8 +345,8 @@
     return parameters()->valid_type(i);
   }
 
-  bool parameter_maybe_null(int i) const {
-    return parameters()->maybe_null(i);
+  ProfilePtrKind parameter_ptr_kind(int i) const {
+    return parameters()->ptr_kind(i);
   }
 
 #ifndef PRODUCT
--- a/hotspot/src/share/vm/oops/methodData.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/oops/methodData.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -205,13 +205,15 @@
 int TypeEntriesAtCall::compute_cell_count(BytecodeStream* stream) {
   assert(Bytecodes::is_invoke(stream->code()), "should be invoke");
   assert(TypeStackSlotEntries::per_arg_count() > ReturnTypeEntry::static_cell_count(), "code to test for arguments/results broken");
-  Bytecode_invoke inv(stream->method(), stream->bci());
+  const methodHandle m = stream->method();
+  int bci = stream->bci();
+  Bytecode_invoke inv(m, bci);
   int args_cell = 0;
-  if (arguments_profiling_enabled()) {
+  if (MethodData::profile_arguments_for_invoke(m, bci)) {
     args_cell = TypeStackSlotEntries::compute_cell_count(inv.signature(), false, TypeProfileArgsLimit);
   }
   int ret_cell = 0;
-  if (return_profiling_enabled() && (inv.result_type() == T_OBJECT || inv.result_type() == T_ARRAY)) {
+  if (MethodData::profile_return_for_invoke(m, bci) && (inv.result_type() == T_OBJECT || inv.result_type() == T_ARRAY)) {
     ret_cell = ReturnTypeEntry::static_cell_count();
   }
   int header_cell = 0;
@@ -1525,6 +1527,18 @@
   return inv.is_invokedynamic() || inv.is_invokehandle();
 }
 
+bool MethodData::profile_unsafe(const methodHandle& m, int bci) {
+  Bytecode_invoke inv(m , bci);
+  if (inv.is_invokevirtual() && inv.klass() == vmSymbols::jdk_internal_misc_Unsafe()) {
+    ResourceMark rm;
+    char* name = inv.name()->as_C_string();
+    if (!strncmp(name, "get", 3) || !strncmp(name, "put", 3)) {
+      return true;
+    }
+  }
+  return false;
+}
+
 int MethodData::profile_arguments_flag() {
   return TypeProfileLevel % 10;
 }
@@ -1550,6 +1564,10 @@
     return true;
   }
 
+  if (profile_unsafe(m, bci)) {
+    return true;
+  }
+
   assert(profile_arguments_jsr292_only(), "inconsistent");
   return profile_jsr292(m, bci);
 }
--- a/hotspot/src/share/vm/oops/methodData.hpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/oops/methodData.hpp	Fri Jun 09 10:51:52 2017 +0200
@@ -2151,6 +2151,7 @@
   CC_INTERP_ONLY(friend class BytecodeInterpreter;)
 private:
   friend class ProfileData;
+  friend class TypeEntriesAtCall;
 
   // Back pointer to the Method*
   Method* _method;
@@ -2173,7 +2174,7 @@
 
   // Whole-method sticky bits and flags
   enum {
-    _trap_hist_limit    = 22 JVMCI_ONLY(+5),   // decoupled from Deoptimization::Reason_LIMIT
+    _trap_hist_limit    = 23 JVMCI_ONLY(+5),   // decoupled from Deoptimization::Reason_LIMIT
     _trap_hist_mask     = max_jubyte,
     _extra_data_count   = 4     // extra DataLayout headers, for trap history
   }; // Public flag values
@@ -2302,6 +2303,7 @@
   };
 
   static bool profile_jsr292(const methodHandle& m, int bci);
+  static bool profile_unsafe(const methodHandle& m, int bci);
   static int profile_arguments_flag();
   static bool profile_all_arguments();
   static bool profile_arguments_for_invoke(const methodHandle& m, int bci);
--- a/hotspot/src/share/vm/opto/doCall.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/opto/doCall.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -569,7 +569,7 @@
   // save across call, for a subsequent cast_not_null.
   Node* receiver = has_receiver ? argument(0) : NULL;
 
-  // The extra CheckCastPP for speculative types mess with PhaseStringOpts
+  // The extra CheckCastPPs for speculative types mess with PhaseStringOpts
   if (receiver != NULL && !call_does_dispatch && !cg->is_string_late_inline()) {
     // Feed profiling data for a single receiver to the type system so
     // it can propagate it as a speculative type
--- a/hotspot/src/share/vm/opto/graphKit.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/opto/graphKit.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -1294,7 +1294,7 @@
   float ok_prob = PROB_MAX;  // a priori estimate:  nulls never happen
   Deoptimization::DeoptReason reason;
   if (assert_null) {
-    reason = Deoptimization::Reason_null_assert;
+    reason = Deoptimization::reason_null_assert(speculative);
   } else if (type == T_OBJECT) {
     reason = Deoptimization::reason_null_check(speculative);
   } else {
@@ -2133,7 +2133,7 @@
  *
  * @return           node with improved type
  */
-Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls, bool maybe_null) {
+Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls, ProfilePtrKind ptr_kind) {
   const Type* current_type = _gvn.type(n);
   assert(UseTypeSpeculation, "type speculation must be on");
 
@@ -2145,19 +2145,24 @@
     const TypeOopPtr* xtype = tklass->as_instance_type();
     assert(xtype->klass_is_exact(), "Should be exact");
     // Any reason to believe n is not null (from this profiling or a previous one)?
-    const TypePtr* ptr = (maybe_null && current_type->speculative_maybe_null()) ? TypePtr::BOTTOM : TypePtr::NOTNULL;
+    assert(ptr_kind != ProfileAlwaysNull, "impossible here");
+    const TypePtr* ptr = (ptr_kind == ProfileMaybeNull && current_type->speculative_maybe_null()) ? TypePtr::BOTTOM : TypePtr::NOTNULL;
     // record the new speculative type's depth
     speculative = xtype->cast_to_ptr_type(ptr->ptr())->is_ptr();
     speculative = speculative->with_inline_depth(jvms()->depth());
-  } else if (current_type->would_improve_ptr(maybe_null)) {
+  } else if (current_type->would_improve_ptr(ptr_kind)) {
     // Profiling report that null was never seen so we can change the
     // speculative type to non null ptr.
-    assert(!maybe_null, "nothing to improve");
-    if (speculative == NULL) {
-      speculative = TypePtr::NOTNULL;
+    if (ptr_kind == ProfileAlwaysNull) {
+      speculative = TypePtr::NULL_PTR;
     } else {
+      assert(ptr_kind == ProfileNeverNull, "nothing else is an improvement");
       const TypePtr* ptr = TypePtr::NOTNULL;
-      speculative = speculative->cast_to_ptr_type(ptr->ptr())->is_ptr();
+      if (speculative != NULL) {
+        speculative = speculative->cast_to_ptr_type(ptr->ptr())->is_ptr();
+      } else {
+        speculative = ptr;
+      }
     }
   }
 
@@ -2191,14 +2196,30 @@
     return n;
   }
   ciKlass* exact_kls = profile_has_unique_klass();
-  bool maybe_null = true;
-  if (java_bc() == Bytecodes::_checkcast ||
-      java_bc() == Bytecodes::_instanceof ||
-      java_bc() == Bytecodes::_aastore) {
+  ProfilePtrKind ptr_kind = ProfileMaybeNull;
+  if ((java_bc() == Bytecodes::_checkcast ||
+       java_bc() == Bytecodes::_instanceof ||
+       java_bc() == Bytecodes::_aastore) &&
+      method()->method_data()->is_mature()) {
     ciProfileData* data = method()->method_data()->bci_to_data(bci());
-    maybe_null = data == NULL ? true : data->as_BitData()->null_seen();
+    if (data != NULL) {
+      if (!data->as_BitData()->null_seen()) {
+        ptr_kind = ProfileNeverNull;
+      } else {
+        assert(data->is_ReceiverTypeData(), "bad profile data type");
+        ciReceiverTypeData* call = (ciReceiverTypeData*)data->as_ReceiverTypeData();
+        uint i = 0;
+        for (; i < call->row_limit(); i++) {
+          ciKlass* receiver = call->receiver(i);
+          if (receiver != NULL) {
+            break;
+          }
+        }
+        ptr_kind = (i == call->row_limit()) ? ProfileAlwaysNull : ProfileMaybeNull;
+      }
+    }
   }
-  return record_profile_for_speculation(n, exact_kls, maybe_null);
+  return record_profile_for_speculation(n, exact_kls, ptr_kind);
 }
 
 /**
@@ -2218,10 +2239,10 @@
   for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
     const Type *targ = tf->domain()->field_at(j + TypeFunc::Parms);
     if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
-      bool maybe_null = true;
+      ProfilePtrKind ptr_kind = ProfileMaybeNull;
       ciKlass* better_type = NULL;
-      if (method()->argument_profiled_type(bci(), i, better_type, maybe_null)) {
-        record_profile_for_speculation(argument(j), better_type, maybe_null);
+      if (method()->argument_profiled_type(bci(), i, better_type, ptr_kind)) {
+        record_profile_for_speculation(argument(j), better_type, ptr_kind);
       }
       i++;
     }
@@ -2238,10 +2259,10 @@
   }
   for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
     if (_gvn.type(local(i))->isa_oopptr()) {
-      bool maybe_null = true;
+      ProfilePtrKind ptr_kind = ProfileMaybeNull;
       ciKlass* better_type = NULL;
-      if (method()->parameter_profiled_type(j, better_type, maybe_null)) {
-        record_profile_for_speculation(local(i), better_type, maybe_null);
+      if (method()->parameter_profiled_type(j, better_type, ptr_kind)) {
+        record_profile_for_speculation(local(i), better_type, ptr_kind);
       }
       j++;
     }
@@ -2256,13 +2277,13 @@
   if (!UseTypeSpeculation) {
     return;
   }
-  bool maybe_null = true;
+  ProfilePtrKind ptr_kind = ProfileMaybeNull;
   ciKlass* better_type = NULL;
-  if (method()->return_profiled_type(bci(), better_type, maybe_null)) {
+  if (method()->return_profiled_type(bci(), better_type, ptr_kind)) {
     // If profiling reports a single type for the return value,
     // feed it to the type system so it can propagate it as a
     // speculative type
-    record_profile_for_speculation(stack(sp()-1), better_type, maybe_null);
+    record_profile_for_speculation(stack(sp()-1), better_type, ptr_kind);
   }
 }
 
@@ -2938,12 +2959,7 @@
     }
   }
 
-  if (known_statically && UseTypeSpeculation) {
-    // If we know the type check always succeeds then we don't use the
-    // profiling data at this bytecode. Don't lose it, feed it to the
-    // type system as a speculative type.
-    not_null_obj = record_profiled_receiver_for_speculation(not_null_obj);
-  } else {
+  if (!known_statically) {
     const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
     // We may not have profiling here or it may not help us. If we
     // have a speculative type use it to perform an exact cast.
@@ -2977,6 +2993,15 @@
   // Return final merged results
   set_control( _gvn.transform(region) );
   record_for_igvn(region);
+
+  // If we know the type check always succeeds then we don't use the
+  // profiling data at this bytecode. Don't lose it, feed it to the
+  // type system as a speculative type.
+  if (safe_for_replace) {
+    Node* casted_obj = record_profiled_receiver_for_speculation(obj);
+    replace_in_map(obj, casted_obj);
+  }
+
   return _gvn.transform(phi);
 }
 
@@ -3117,7 +3142,8 @@
   // Return final merged results
   set_control( _gvn.transform(region) );
   record_for_igvn(region);
-  return res;
+
+  return record_profiled_receiver_for_speculation(res);
 }
 
 //------------------------------next_monitor-----------------------------------
--- a/hotspot/src/share/vm/opto/graphKit.hpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/opto/graphKit.hpp	Fri Jun 09 10:51:52 2017 +0200
@@ -365,7 +365,7 @@
   // Throw an uncommon trap if a given value is __not__ null.
   // Return the value cast to null, and be clever about dominating checks.
   Node* null_assert(Node* value, BasicType type = T_OBJECT) {
-    return null_check_common(value, type, true);
+    return null_check_common(value, type, true, NULL, _gvn.type(value)->speculative_always_null());
   }
 
   // Check if value is null and abort if it is
@@ -397,7 +397,7 @@
   }
 
   // record type from profiling with the type system
-  Node* record_profile_for_speculation(Node* n, ciKlass* exact_kls, bool maybe_null);
+  Node* record_profile_for_speculation(Node* n, ciKlass* exact_kls, ProfilePtrKind ptr_kind);
   void record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc);
   void record_profiled_parameters_for_speculation();
   void record_profiled_return_for_speculation();
--- a/hotspot/src/share/vm/opto/library_call.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/opto/library_call.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -240,7 +240,7 @@
   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
   // This returns Type::AnyPtr, RawPtr, or OopPtr.
   int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
-  Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL);
+  Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
   // Helper for inline_unsafe_access.
   // Generates the guards that check whether the result of
   // Unsafe.getObject should be recorded in an SATB log buffer.
@@ -2111,13 +2111,33 @@
   }
 }
 
-inline Node* LibraryCallKit::make_unsafe_address(Node*& base, Node* offset, BasicType type) {
+inline Node* LibraryCallKit::make_unsafe_address(Node*& base, Node* offset, BasicType type, bool can_cast) {
   Node* uncasted_base = base;
   int kind = classify_unsafe_addr(uncasted_base, offset, type);
   if (kind == Type::RawPtr) {
     return basic_plus_adr(top(), uncasted_base, offset);
   } else if (kind == Type::AnyPtr) {
     assert(base == uncasted_base, "unexpected base change");
+    if (can_cast) {
+      if (!_gvn.type(base)->speculative_maybe_null() &&
+          !too_many_traps(Deoptimization::Reason_speculate_null_check)) {
+        // According to profiling, this access is always on
+        // heap. Casting the base to not null and thus avoiding membars
+        // around the access should allow better optimizations
+        Node* null_ctl = top();
+        base = null_check_oop(base, &null_ctl, true, true, true);
+        assert(null_ctl->is_top(), "no null control here");
+        return basic_plus_adr(base, offset);
+      } else if (_gvn.type(base)->speculative_always_null() &&
+                 !too_many_traps(Deoptimization::Reason_speculate_null_assert)) {
+        // According to profiling, this access is always off
+        // heap.
+        base = null_assert(base);
+        Node* raw_base = _gvn.transform(new CastX2PNode(offset));
+        offset = MakeConX(0);
+        return basic_plus_adr(top(), raw_base, offset);
+      }
+    }
     // We don't know if it's an on heap or off heap access. Fall back
     // to raw memory access.
     Node* raw = _gvn.transform(new CheckCastPPNode(control(), base, TypeRawPtr::BOTTOM));
@@ -2359,7 +2379,8 @@
          "fieldOffset must be byte-scaled");
   // 32-bit machines ignore the high half!
   offset = ConvL2X(offset);
-  adr = make_unsafe_address(base, offset, type);
+  adr = make_unsafe_address(base, offset, type, kind == Relaxed);
+
   if (_gvn.type(base)->isa_ptr() != TypePtr::NULL_PTR) {
     heap_base_oop = base;
   } else if (type == T_OBJECT) {
@@ -2417,7 +2438,7 @@
   bool need_mem_bar = false;
   switch (kind) {
       case Relaxed:
-          need_mem_bar = mismatched && !adr_type->isa_aryptr();
+          need_mem_bar = (mismatched && !adr_type->isa_aryptr()) || can_access_non_heap;
           break;
       case Opaque:
           // Opaque uses CPUOrder membars for protection against code movement.
@@ -2521,7 +2542,22 @@
     if (p == NULL) {
       // To be valid, unsafe loads may depend on other conditions than
       // the one that guards them: pin the Load node
-      p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, requires_atomic_access, unaligned, mismatched);
+      LoadNode::ControlDependency dep = LoadNode::Pinned;
+      Node* ctrl = control();
+      if (adr_type->isa_instptr()) {
+        assert(adr_type->meet(TypePtr::NULL_PTR) != adr_type->remove_speculative(), "should be not null");
+        intptr_t offset = Type::OffsetBot;
+        AddPNode::Ideal_base_and_offset(adr, &_gvn, offset);
+        if (offset >= 0) {
+          int s = Klass::layout_helper_size_in_bytes(adr_type->isa_instptr()->klass()->layout_helper());
+          if (offset < s) {
+            // Guaranteed to be a valid access, no need to pin it
+            dep = LoadNode::DependsOnlyOnTest;
+            ctrl = NULL;
+          }
+        }
+      }
+      p = make_load(ctrl, adr, value_type, type, adr_type, mo, dep, requires_atomic_access, unaligned, mismatched);
       // load value
       switch (type) {
       case T_BOOLEAN:
@@ -2770,7 +2806,7 @@
   assert(Unsafe_field_offset_to_byte_offset(11) == 11, "fieldOffset must be byte-scaled");
   // 32-bit machines ignore the high half of long offsets
   offset = ConvL2X(offset);
-  Node* adr = make_unsafe_address(base, offset, type);
+  Node* adr = make_unsafe_address(base, offset, type, false);
   const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
 
   Compile::AliasType* alias_type = C->alias_type(adr_type);
--- a/hotspot/src/share/vm/opto/parse2.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/opto/parse2.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -2282,6 +2282,11 @@
       b = null_check_oop(b, &null_ctl, true, true, true);
       assert(null_ctl->is_top(), "no null control here");
       dec_sp(1);
+    } else if (_gvn.type(b)->speculative_always_null() &&
+               !too_many_traps(Deoptimization::Reason_speculate_null_assert)) {
+      inc_sp(1);
+      b = null_assert(b);
+      dec_sp(1);
     }
     c = _gvn.transform( new CmpPNode(b, a) );
     do_ifnull(btest, c);
--- a/hotspot/src/share/vm/opto/type.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/opto/type.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -2531,7 +2531,8 @@
   const TypeOopPtr* spec_oopptr = speculative()->isa_oopptr();
   // If the speculative may be null and is an inexact klass then it
   // doesn't help
-  if (speculative()->maybe_null() && (spec_oopptr == NULL || !spec_oopptr->klass_is_exact())) {
+  if (speculative() != TypePtr::NULL_PTR && speculative()->maybe_null() &&
+      (spec_oopptr == NULL || !spec_oopptr->klass_is_exact())) {
     return no_spec;
   }
   return this;
@@ -2660,6 +2661,14 @@
   return true;
 }
 
+bool TypePtr::speculative_always_null() const {
+  if (_speculative != NULL) {
+    const TypePtr* speculative = _speculative->join(this)->is_ptr();
+    return speculative == TypePtr::NULL_PTR;
+  }
+  return false;
+}
+
 /**
  * Same as TypePtr::speculative_type() but return the klass only if
  * the speculative tells us is not null
@@ -2684,6 +2693,9 @@
   if (exact_kls == NULL) {
     return false;
   }
+  if (speculative() == TypePtr::NULL_PTR) {
+    return false;
+  }
   // no speculative type or non exact speculative type?
   if (speculative_type() == NULL) {
     return true;
@@ -2703,16 +2715,16 @@
  * Check whether new profiling would improve ptr (= tells us it is non
  * null)
  *
- * @param   maybe_null true if profiling tells the ptr may be null
+ * @param   ptr_kind always null or not null?
  *
  * @return  true if ptr profile is valuable
  */
-bool TypePtr::would_improve_ptr(bool maybe_null) const {
+bool TypePtr::would_improve_ptr(ProfilePtrKind ptr_kind) const {
   // profiling doesn't tell us anything useful
-  if (maybe_null) {
+  if (ptr_kind != ProfileAlwaysNull && ptr_kind != ProfileNeverNull) {
     return false;
   }
-  // We already know this is not be null
+  // We already know this is not null
   if (!this->maybe_null()) {
     return false;
   }
@@ -2720,6 +2732,17 @@
   if (!speculative_maybe_null()) {
     return false;
   }
+  // We already know this is always null
+  if (this == TypePtr::NULL_PTR) {
+    return false;
+  }
+  // We already know the speculative type is always null
+  if (speculative_always_null()) {
+    return false;
+  }
+  if (ptr_kind == ProfileAlwaysNull && speculative() != NULL && speculative()->isa_oopptr()) {
+    return false;
+  }
   return true;
 }
 
--- a/hotspot/src/share/vm/opto/type.hpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/opto/type.hpp	Fri Jun 09 10:51:52 2017 +0200
@@ -445,10 +445,11 @@
   virtual ciKlass* speculative_type() const                                   { return NULL; }
   virtual ciKlass* speculative_type_not_null() const                          { return NULL; }
   virtual bool speculative_maybe_null() const                                 { return true; }
+  virtual bool speculative_always_null() const                                { return true; }
   virtual const Type* remove_speculative() const                              { return this; }
   virtual const Type* cleanup_speculative() const                             { return this; }
   virtual bool would_improve_type(ciKlass* exact_kls, int inline_depth) const { return exact_kls != NULL; }
-  virtual bool would_improve_ptr(bool maybe_null) const                       { return !maybe_null; }
+  virtual bool would_improve_ptr(ProfilePtrKind ptr_kind) const { return ptr_kind == ProfileAlwaysNull || ptr_kind == ProfileNeverNull; }
   const Type* maybe_remove_speculative(bool include_speculative) const;
 
   virtual bool maybe_null() const { return true; }
@@ -885,10 +886,11 @@
   virtual ciKlass* speculative_type() const;
   virtual ciKlass* speculative_type_not_null() const;
   virtual bool speculative_maybe_null() const;
+  virtual bool speculative_always_null() const;
   virtual const Type* remove_speculative() const;
   virtual const Type* cleanup_speculative() const;
   virtual bool would_improve_type(ciKlass* exact_kls, int inline_depth) const;
-  virtual bool would_improve_ptr(bool maybe_null) const;
+  virtual bool would_improve_ptr(ProfilePtrKind ptr_kind) const;
   virtual const TypePtr* with_inline_depth(int depth) const;
 
   virtual bool maybe_null() const { return meet_ptr(Null) == ptr(); }
--- a/hotspot/src/share/vm/runtime/deoptimization.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/runtime/deoptimization.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -2179,6 +2179,7 @@
   "loop_limit_check",
   "speculate_class_check",
   "speculate_null_check",
+  "speculate_null_assert",
   "rtm_state_change",
   "unstable_if",
   "unstable_fused_if",
--- a/hotspot/src/share/vm/runtime/deoptimization.hpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/runtime/deoptimization.hpp	Fri Jun 09 10:51:52 2017 +0200
@@ -74,6 +74,7 @@
     Reason_loop_limit_check,      // compiler generated loop limits check failed
     Reason_speculate_class_check, // saw unexpected object class from type speculation
     Reason_speculate_null_check,  // saw unexpected null from type speculation
+    Reason_speculate_null_assert, // saw unexpected null from type speculation
     Reason_rtm_state_change,      // rtm state change detected
     Reason_unstable_if,           // a branch predicted always false was taken
     Reason_unstable_fused_if,     // fused two ifs that had each one untaken branch. One is now taken.
@@ -365,6 +366,8 @@
       return Reason_class_check;
     else if (reason == Reason_speculate_null_check)
       return Reason_null_check;
+    else if (reason == Reason_speculate_null_assert)
+      return Reason_null_assert;
     else if (reason == Reason_unstable_if)
       return Reason_intrinsic;
     else if (reason == Reason_unstable_fused_if)
@@ -374,7 +377,9 @@
   }
 
   static bool reason_is_speculate(int reason) {
-    if (reason == Reason_speculate_class_check || reason == Reason_speculate_null_check) {
+    if (reason == Reason_speculate_class_check ||
+        reason == Reason_speculate_null_check ||
+        reason == Reason_speculate_null_assert) {
       return true;
     }
     return false;
@@ -388,6 +393,10 @@
     return speculative ? Deoptimization::Reason_speculate_class_check : Deoptimization::Reason_class_check;
   }
 
+  static DeoptReason reason_null_assert(bool speculative) {
+    return speculative ? Deoptimization::Reason_speculate_null_assert : Deoptimization::Reason_null_assert;
+  }
+
   static uint per_method_trap_limit(int reason) {
     return reason_is_speculate(reason) ? (uint)PerMethodSpecTrapLimit : (uint)PerMethodTrapLimit;
   }
--- a/hotspot/src/share/vm/runtime/vmStructs.cpp	Thu Jun 15 09:52:44 2017 +0200
+++ b/hotspot/src/share/vm/runtime/vmStructs.cpp	Fri Jun 09 10:51:52 2017 +0200
@@ -2656,6 +2656,7 @@
   declare_constant(Deoptimization::Reason_loop_limit_check)               \
   declare_constant(Deoptimization::Reason_speculate_class_check)          \
   declare_constant(Deoptimization::Reason_speculate_null_check)           \
+  declare_constant(Deoptimization::Reason_speculate_null_assert)          \
   declare_constant(Deoptimization::Reason_rtm_state_change)               \
   declare_constant(Deoptimization::Reason_unstable_if)                    \
   declare_constant(Deoptimization::Reason_unstable_fused_if)              \