src/hotspot/cpu/x86/templateTable_x86.cpp
changeset 48481 b97818fba2b0
parent 47916 bdbef8638948
child 48608 1dab70e20292
child 56021 864ee22719af
--- a/src/hotspot/cpu/x86/templateTable_x86.cpp	Fri Dec 22 18:34:36 2017 +0100
+++ b/src/hotspot/cpu/x86/templateTable_x86.cpp	Mon Dec 18 15:38:20 2017 -0800
@@ -3851,7 +3851,6 @@
   Label done;
   Label initialize_header;
   Label initialize_object;  // including clearing the fields
-  Label allocate_shared;
 
   __ get_cpool_and_tags(rcx, rax);
 
@@ -3877,12 +3876,19 @@
   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
   __ jcc(Assembler::notZero, slow_case);
 
+  // Allocate the instance:
+  //  If TLAB is enabled:
+  //    Try to allocate in the TLAB.
+  //    If it fails, go to the slow path.
+  //  Else if inline contiguous allocations are enabled:
+  //    Try to allocate in eden.
+  //    If it fails due to reaching the heap end, go to the slow path.
   //
-  // Allocate the instance
-  // 1) Try to allocate in the TLAB
-  // 2) if fail and the object is large allocate in the shared Eden
-  // 3) if the above fails (or is not applicable), go to a slow case
-  // (creates a new TLAB, etc.)
+  //  If TLAB is enabled OR inline contiguous allocations are enabled:
+  //    Initialize the object (header and fields).
+  //    Exit.
+  //
+  //  Otherwise, go to the slow path.
 
   const bool allow_shared_alloc =
     Universe::heap()->supports_inline_contig_alloc();
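
For orientation, the policy described in the new comment can be sketched in plain C++. The helper names below (try_tlab_allocate, try_eden_allocate, slow_path_allocate, initialize_object) are hypothetical stand-ins for the emitted assembly paths, not HotSpot APIs. The removal of the allocate_shared label reflects the key behavioral change: a TLAB miss now goes straight to the slow path instead of retrying in the shared eden.

    #include <cstddef>

    // Hypothetical helpers standing in for the emitted code paths.
    void* try_tlab_allocate(size_t size_in_bytes);
    void* try_eden_allocate(size_t size_in_bytes);
    void* slow_path_allocate(size_t size_in_bytes);
    void  initialize_object(void* obj, size_t size_in_bytes);

    void* allocate_instance(size_t size_in_bytes,
                            bool use_tlab, bool allow_shared_alloc) {
      if (use_tlab) {
        if (void* obj = try_tlab_allocate(size_in_bytes)) {
          initialize_object(obj, size_in_bytes);   // header and fields
          return obj;
        }
        return slow_path_allocate(size_in_bytes);  // no eden fallback anymore
      }
      if (allow_shared_alloc) {
        if (void* obj = try_eden_allocate(size_in_bytes)) {
          initialize_object(obj, size_in_bytes);
          return obj;
        }
      }
      return slow_path_allocate(size_in_bytes);
    }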
@@ -3898,7 +3904,7 @@
     __ movptr(rax, Address(thread, in_bytes(JavaThread::tlab_top_offset())));
     __ lea(rbx, Address(rax, rdx, Address::times_1));
     __ cmpptr(rbx, Address(thread, in_bytes(JavaThread::tlab_end_offset())));
-    __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
+    __ jcc(Assembler::above, slow_case);
     __ movptr(Address(thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
     if (ZeroTLAB) {
       // the fields have been already cleared
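
The movptr/lea/cmpptr sequence in this hunk is a classic bump-pointer allocation. A minimal C++ sketch, assuming an illustrative Tlab struct with _top and _end fields (the real values live in JavaThread at tlab_top_offset()/tlab_end_offset()):

    #include <cstddef>

    struct Tlab { char* _top; char* _end; };    // illustrative stand-in

    static void* tlab_allocate(Tlab* tlab, size_t size_in_bytes) {
      char* obj     = tlab->_top;               // movptr rax, [thread + tlab_top]
      char* new_top = obj + size_in_bytes;      // lea rbx, [rax + rdx]
      if (new_top > tlab->_end) {               // cmpptr rbx, [thread + tlab_end]
        return nullptr;                         // jcc above: take the slow path
      }
      tlab->_top = new_top;                     // plain store: the TLAB is
      return obj;                               // thread-private, so no CAS
    }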
@@ -3907,40 +3913,40 @@
       // initialize both the header and fields
       __ jmp(initialize_object);
     }
+  } else {
+    // Allocation in the shared Eden, if allowed.
+    //
+    // rdx: instance size in bytes
+    if (allow_shared_alloc) {
+      ExternalAddress heap_top((address)Universe::heap()->top_addr());
+      ExternalAddress heap_end((address)Universe::heap()->end_addr());
+
+      Label retry;
+      __ bind(retry);
+      __ movptr(rax, heap_top);
+      __ lea(rbx, Address(rax, rdx, Address::times_1));
+      __ cmpptr(rbx, heap_end);
+      __ jcc(Assembler::above, slow_case);
+
+      // Compare rax with the heap top and, if they are still equal, store
+      // the new top (rbx) at the top address. Sets ZF if they were equal,
+      // and clears it otherwise. Use the lock prefix for atomicity on MPs.
+      //
+      // rax: object begin
+      // rbx: object end
+      // rdx: instance size in bytes
+      __ locked_cmpxchgptr(rbx, heap_top);
+
+      // If another thread beat us to the allocation, retry; otherwise continue.
+      __ jcc(Assembler::notEqual, retry);
+
+      __ incr_allocated_bytes(thread, rdx, 0);
+    }
   }
 
-  // Allocation in the shared Eden, if allowed.
-  //
-  // rdx: instance size in bytes
-  if (allow_shared_alloc) {
-    __ bind(allocate_shared);
-
-    ExternalAddress heap_top((address)Universe::heap()->top_addr());
-    ExternalAddress heap_end((address)Universe::heap()->end_addr());
-
-    Label retry;
-    __ bind(retry);
-    __ movptr(rax, heap_top);
-    __ lea(rbx, Address(rax, rdx, Address::times_1));
-    __ cmpptr(rbx, heap_end);
-    __ jcc(Assembler::above, slow_case);
-
-    // Compare rax, with the top addr, and if still equal, store the new
-    // top addr in rbx, at the address of the top addr pointer. Sets ZF if was
-    // equal, and clears it otherwise. Use lock prefix for atomicity on MPs.
-    //
-    // rax,: object begin
-    // rbx,: object end
-    // rdx: instance size in bytes
-    __ locked_cmpxchgptr(rbx, heap_top);
-
-    // if someone beat us on the allocation, try again, otherwise continue
-    __ jcc(Assembler::notEqual, retry);
-
-    __ incr_allocated_bytes(thread, rdx, 0);
-  }
-
-  if (UseTLAB || Universe::heap()->supports_inline_contig_alloc()) {
+  // If UseTLAB or allow_shared_alloc is true, the object was allocated above
+  // and needs to be initialized. Otherwise, skip to the slow path.
+  if (UseTLAB || allow_shared_alloc) {
     // The object is initialized before the header.  If the object size is
     // zero, go directly to the header initialization.
     __ bind(initialize_object);
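
The retry loop around locked_cmpxchgptr is the usual CAS-based bump allocation for a shared region. A sketch using std::atomic, where eden_top and eden_end are illustrative stand-ins for the values behind Universe::heap()->top_addr() and end_addr():

    #include <atomic>
    #include <cstddef>

    static void* eden_allocate(std::atomic<char*>& eden_top, char* eden_end,
                               size_t size_in_bytes) {
      char* obj = eden_top.load();                 // movptr rax, heap_top
      for (;;) {
        char* new_top = obj + size_in_bytes;       // lea rbx, [rax + rdx]
        if (new_top > eden_end) {                  // cmpptr rbx, heap_end
          return nullptr;                          // jcc above: slow path
        }
        // lock cmpxchg: install new_top only if eden_top still equals obj;
        // on failure obj is refreshed with the current top and we retry,
        // just as the jcc(notEqual, retry) above does.
        if (eden_top.compare_exchange_weak(obj, new_top)) {
          return obj;                              // we won the race
        }
      }
    }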