Merge
author kvn
Tue, 29 Dec 2015 18:30:51 +0100
changeset 35158 23c52c9c06f5
parent 35156 a06b3d7455d6
parent 35157 1a5fa0acb08b
child 35159 3ee05e289424
Merge
--- a/hotspot/src/share/vm/opto/callnode.cpp	Tue Dec 29 11:54:21 2015 +0100
+++ b/hotspot/src/share/vm/opto/callnode.cpp	Tue Dec 29 18:30:51 2015 +0100
@@ -1333,6 +1333,7 @@
   init_flags(Flag_is_macro);
   _is_scalar_replaceable = false;
   _is_non_escaping = false;
+  _is_allocation_MemBar_redundant = false;
   Node *topnode = C->top();
 
   init_req( TypeFunc::Control  , ctrl );
@@ -1347,6 +1348,23 @@
   C->add_macro_node(this);
 }
 
+void AllocateNode::compute_MemBar_redundancy(ciMethod* initializer)
+{
+  assert(initializer != NULL &&
+         initializer->is_initializer() &&
+         !initializer->is_static(),
+             "unexpected initializer method");
+  BCEscapeAnalyzer* analyzer = initializer->get_bcea();
+  if (analyzer == NULL) {
+    return;
+  }
+
+  // The allocated object is the first parameter ('this') of its initializer
+  if (analyzer->is_arg_stack(0) || analyzer->is_arg_local(0)) {
+    _is_allocation_MemBar_redundant = true;
+  }
+}
+
 //=============================================================================
 Node* AllocateArrayNode::Ideal(PhaseGVN *phase, bool can_reshape) {
   if (remove_dead_region(phase, can_reshape))  return this;
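
For context, the BCEscapeAnalyzer check above asks whether the receiver (argument 0 of the initializer) stays arg-local or arg-stack, i.e. whether 'this' leaks out of the constructor. A minimal Java sketch of the two constructor shapes being distinguished; the class and field names are hypothetical, not part of this changeset:

// Illustrative only; names are hypothetical.
class Example {
    static Example published;    // escape route used by the second constructor
    final int x;

    Example(int x) {
        this.x = x;              // 'this' (argument 0) never leaves this frame, so
    }                            // the barrier at <init> exit covers the allocation

    Example(int x, boolean publish) {
        this.x = x;
        if (publish) {
            published = this;    // 'this' escapes to a static field: the
        }                        // allocation keeps its own MemBar
    }
}
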
--- a/hotspot/src/share/vm/opto/callnode.hpp	Tue Dec 29 11:54:21 2015 +0100
+++ b/hotspot/src/share/vm/opto/callnode.hpp	Tue Dec 29 18:30:51 2015 +0100
@@ -858,6 +858,8 @@
   // Result of Escape Analysis
   bool _is_scalar_replaceable;
   bool _is_non_escaping;
+  // True when the MemBar for new is redundant with the MemBar at the initializer exit
+  bool _is_allocation_MemBar_redundant;
 
   virtual uint size_of() const; // Size is bigger
   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
@@ -923,6 +925,13 @@
     InitializeNode* init = NULL;
     return _is_non_escaping || (((init = initialization()) != NULL) && init->does_not_escape());
   }
+
+  // If the object does not escape in its <init> method and a memory barrier
+  // is inserted at the exit of its <init>, the memory barrier for the new
+  // allocation is not necessary. Invoke this method when the MemBar at the
+  // exit of the initializer post-dominates the allocation node.
+  void compute_MemBar_redundancy(ciMethod* initializer);
+  bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
 };
 
 //------------------------------AllocateArray---------------------------------
--- a/hotspot/src/share/vm/opto/macro.cpp	Tue Dec 29 11:54:21 2015 +0100
+++ b/hotspot/src/share/vm/opto/macro.cpp	Tue Dec 29 18:30:51 2015 +0100
@@ -1522,11 +1522,20 @@
 
     // If initialization is performed by an array copy, any required
     // MemBarStoreStore was already added. If the object does not
-    // escape no need for a MemBarStoreStore. Otherwise we need a
-    // MemBarStoreStore so that stores that initialize this object
-    // can't be reordered with a subsequent store that makes this
-    // object accessible by other threads.
+    // escape, no MemBarStoreStore is needed. If the object does not
+    // escape in its initializer and a memory barrier (MemBarStoreStore
+    // or stronger) is already added at the exit of the initializer,
+    // a MemBarStoreStore is likewise unnecessary. Otherwise we need a
+    // MemBarStoreStore so that stores that initialize this object
+    // can't be reordered with a subsequent store that makes this
+    // object accessible by other threads.
+    // Other threads include Java threads and JVM internal threads
+    // (for example, concurrent GC threads). The current concurrent GC
+    // implementations, CMS and G1, do not scan a newly created object,
+    // so it is safe to skip the StoreStore barrier when the allocation
+    // does not escape.
     if (!alloc->does_not_escape_thread() &&
+        !alloc->is_allocation_MemBar_redundant() &&
         (init == NULL || !init->is_complete_with_arraycopy())) {
       if (init == NULL || init->req() < InitializeNode::RawStores) {
         // No InitializeNode or no stores captured by zeroing
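
The hazard the retained MemBarStoreStore guards against is publication of a partially initialized object. A rough Java-level sketch of the reordering the comment above describes, using made-up names:

// Illustrative only; names are hypothetical.
class Holder {
    int value;                    // not final, so no release barrier at <init> exit
    Holder(int v) { value = v; }
}

class Publisher {
    static Holder shared;         // racy publication point

    static void publish() {
        // Without a StoreStore barrier between the initializing stores done by
        // new Holder(42) and the store to 'shared', another thread could observe
        // 'shared' as non-null while 'value' still appears uninitialized. The
        // changeset elides the allocation-side barrier only when an equivalent
        // or stronger barrier already sits at the initializer's exit.
        shared = new Holder(42);
    }
}
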
--- a/hotspot/src/share/vm/opto/parse1.cpp	Tue Dec 29 11:54:21 2015 +0100
+++ b/hotspot/src/share/vm/opto/parse1.cpp	Tue Dec 29 18:30:51 2015 +0100
@@ -962,6 +962,14 @@
            PPC64_ONLY(wrote_volatile() ||)
            (AlwaysSafeConstructors && wrote_fields()))) {
     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
+
+    // If a memory barrier is created for final field writes
+    // and the allocated object does not escape its <init> method,
+    // then the barrier introduced by the allocation node can be removed.
+    if (DoEscapeAnalysis && alloc_with_final()) {
+      AllocateNode *alloc = AllocateNode::Ideal_allocation(alloc_with_final(), &_gvn);
+      alloc->compute_MemBar_redundancy(method());
+    }
     if (PrintOpto && (Verbose || WizardMode)) {
       method()->print_name();
       tty->print_cr(" writes finals and needs a memory barrier");