http-client-branch: Merge with default http-client-branch
author chegar
Wed, 02 May 2018 15:47:57 +0100
branch http-client-branch
changeset 56507 2294c51eae30
parent 56506 487a640f283c (current diff)
parent 49947 cece972575ac (diff)
child 56513 17cb1166de81
http-client-branch: Merge with default
src/java.net.http/share/classes/jdk/internal/net/http/AsyncSSLConnection.java
src/java.net.http/share/classes/jdk/internal/net/http/AsyncSSLTunnelConnection.java
src/java.net.http/share/classes/jdk/internal/net/http/Http1AsyncReceiver.java
src/java.net.http/share/classes/jdk/internal/net/http/Http1Exchange.java
src/java.net.http/share/classes/jdk/internal/net/http/Http1Request.java
src/java.net.http/share/classes/jdk/internal/net/http/Http1Response.java
src/java.net.http/share/classes/jdk/internal/net/http/Http2ClientImpl.java
src/java.net.http/share/classes/jdk/internal/net/http/Http2Connection.java
src/java.net.http/share/classes/jdk/internal/net/http/HttpClientImpl.java
src/java.net.http/share/classes/jdk/internal/net/http/PlainHttpConnection.java
src/java.net.http/share/classes/jdk/internal/net/http/SocketTube.java
src/java.net.http/share/classes/jdk/internal/net/http/Stream.java
src/java.net.http/share/classes/jdk/internal/net/http/common/DebugLogger.java
src/java.net.http/share/classes/jdk/internal/net/http/common/FlowTube.java
src/java.net.http/share/classes/jdk/internal/net/http/common/SSLFlowDelegate.java
src/java.net.http/share/classes/jdk/internal/net/http/common/SSLTube.java
src/java.net.http/share/classes/jdk/internal/net/http/common/SubscriberWrapper.java
src/java.net.http/share/classes/jdk/internal/net/http/common/Utils.java
src/java.net.http/share/classes/jdk/internal/net/http/hpack/Huffman.java
src/java.net.http/share/classes/jdk/internal/net/http/hpack/SimpleHeaderTable.java
src/java.net.http/share/classes/jdk/internal/net/http/hpack/StringReader.java
src/java.net.http/share/classes/jdk/internal/net/http/hpack/StringWriter.java
test/jdk/ProblemList.txt
test/jdk/java/net/httpclient/BodyProcessorInputStreamTest.java
test/jdk/java/net/httpclient/DependentPromiseActionsTest.java
test/jdk/java/net/httpclient/EscapedOctetsInURI.java
test/jdk/java/net/httpclient/HttpInputStreamTest.java
test/jdk/java/net/httpclient/ProxyServer.java
test/jdk/java/net/httpclient/RetryWithCookie.java
test/jdk/java/net/httpclient/SmallTimeout.java
test/jdk/java/net/httpclient/TimeoutOrdering.java
test/jdk/java/net/httpclient/http2/java.net.http/jdk/internal/net/http/hpack/CircularBufferTest.java
test/jdk/java/net/httpclient/http2/java.net.http/jdk/internal/net/http/hpack/DecoderTest.java
test/jdk/java/net/httpclient/http2/java.net.http/jdk/internal/net/http/hpack/HuffmanTest.java
test/jdk/java/net/httpclient/http2/server/Http2TestServer.java
test/jdk/java/net/httpclient/websocket/DummyWebSocketServer.java
test/jdk/java/net/httpclient/websocket/PendingBinaryPingClose.java
test/jdk/java/net/httpclient/websocket/PendingBinaryPongClose.java
test/jdk/java/net/httpclient/websocket/PendingPingBinaryClose.java
test/jdk/java/net/httpclient/websocket/PendingPingTextClose.java
test/jdk/java/net/httpclient/websocket/PendingPongBinaryClose.java
test/jdk/java/net/httpclient/websocket/PendingPongTextClose.java
test/jdk/java/net/httpclient/websocket/PendingTextPingClose.java
test/jdk/java/net/httpclient/websocket/PendingTextPongClose.java
test/jdk/java/net/httpclient/websocket/SendTest.java
test/jdk/java/net/httpclient/websocket/WSHandshakeExceptionTest.java
test/jdk/java/net/httpclient/websocket/WebSocketExtendedTest.java
test/jdk/java/net/httpclient/websocket/WebSocketTest.java
--- a/src/hotspot/cpu/arm/c1_LIRGenerator_arm.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/cpu/arm/c1_LIRGenerator_arm.cpp	Wed May 02 15:47:57 2018 +0100
@@ -375,32 +375,17 @@
 }
 
 
-LIR_Address* LIRGenerator::emit_array_address(LIR_Opr array_opr, LIR_Opr index_opr,
-                                              BasicType type, bool needs_card_mark) {
+LIR_Address* LIRGenerator::emit_array_address(LIR_Opr array_opr, LIR_Opr index_opr, BasicType type) {
   int base_offset = arrayOopDesc::base_offset_in_bytes(type);
   int elem_size = type2aelembytes(type);
 
   if (index_opr->is_constant()) {
     int offset = base_offset + index_opr->as_constant_ptr()->as_jint() * elem_size;
-    if (needs_card_mark) {
-      LIR_Opr base_opr = new_pointer_register();
-      add_large_constant(array_opr, offset, base_opr);
-      return new LIR_Address(base_opr, (intx)0, type);
-    } else {
-      return generate_address(array_opr, offset, type);
-    }
+    return generate_address(array_opr, offset, type);
   } else {
     assert(index_opr->is_register(), "must be");
     int scale = exact_log2(elem_size);
-    if (needs_card_mark) {
-      LIR_Opr base_opr = new_pointer_register();
-      LIR_Address* addr = make_address(base_opr, index_opr, (LIR_Address::Scale)scale, type);
-      __ add(array_opr, LIR_OprFact::intptrConst(base_offset), base_opr);
-      __ add(base_opr, LIR_OprFact::address(addr), base_opr); // add with shifted/extended register
-      return new LIR_Address(base_opr, type);
-    } else {
-      return generate_address(array_opr, index_opr, scale, base_offset, type);
-    }
+    return generate_address(array_opr, index_opr, scale, base_offset, type);
   }
 }
 
@@ -1024,7 +1009,7 @@
   value.load_item();
   assert(type == T_INT || is_oop LP64_ONLY( || type == T_LONG ), "unexpected type");
   LIR_Opr tmp = (UseCompressedOops && is_oop) ? new_pointer_register() : LIR_OprFact::illegalOpr;
-  __ xchg(addr_ptr, data, dst, tmp);
+  __ xchg(addr, value.result(), result, tmp);
   return result;
 }
 
--- a/src/hotspot/cpu/arm/c1_Runtime1_arm.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/cpu/arm/c1_Runtime1_arm.cpp	Wed May 02 15:47:57 2018 +0100
@@ -352,11 +352,11 @@
 }
 
 void StubAssembler::save_live_registers() {
-  save_live_registers(this);
+  ::save_live_registers(this);
 }
 
 void StubAssembler::restore_live_registers_without_return() {
-  restore_live_registers_without_return(this);
+  ::restore_live_registers_without_return(this);
 }
 
 void Runtime1::initialize_pd() {
--- a/src/hotspot/cpu/arm/gc/g1/g1BarrierSetAssembler_arm.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/cpu/arm/gc/g1/g1BarrierSetAssembler_arm.cpp	Wed May 02 15:47:57 2018 +0100
@@ -26,6 +26,7 @@
 #include "asm/macroAssembler.inline.hpp"
 #include "gc/g1/g1BarrierSet.hpp"
 #include "gc/g1/g1BarrierSetAssembler.hpp"
+#include "gc/g1/g1ThreadLocalData.hpp"
 #include "gc/g1/g1CardTable.hpp"
 #include "gc/g1/heapRegion.hpp"
 #include "interpreter/interp_masm.hpp"
@@ -175,15 +176,7 @@
   // Input:
   // - pre_val pushed on the stack
 
-  __ set_info("g1_pre_barrier_slow_id", dont_gc_arguments);
-
-  BarrierSet* bs = BarrierSet::barrier_set();
-  if (bs->kind() != BarrierSet::G1BarrierSet) {
-    __ mov(R0, (int)id);
-    __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), R0);
-    __ should_not_reach_here();
-    break;
-  }
+  __ set_info("g1_pre_barrier_slow_id", false);
 
   // save at least the registers that need saving if the runtime is called
 #ifdef AARCH64
@@ -251,15 +244,7 @@
   // Input:
   // - store_addr, pushed on the stack
 
-  __ set_info("g1_post_barrier_slow_id", dont_gc_arguments);
-
-  BarrierSet* bs = BarrierSet::barrier_set();
-  if (bs->kind() != BarrierSet::G1BarrierSet) {
-    __ mov(R0, (int)id);
-    __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), R0);
-    __ should_not_reach_here();
-    break;
-  }
+  __ set_info("g1_post_barrier_slow_id", false);
 
   Label done;
   Label recheck;
--- a/src/hotspot/cpu/arm/gc/g1/g1BarrierSetAssembler_arm.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/cpu/arm/gc/g1/g1BarrierSetAssembler_arm.hpp	Wed May 02 15:47:57 2018 +0100
@@ -42,6 +42,7 @@
                                         Register addr, Register count, Register tmp);
 
 #ifdef COMPILER1
+public:
   void gen_pre_barrier_stub(LIR_Assembler* ce, G1PreBarrierStub* stub);
   void gen_post_barrier_stub(LIR_Assembler* ce, G1PostBarrierStub* stub);
 
--- a/src/hotspot/cpu/s390/s390.ad	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/cpu/s390/s390.ad	Wed May 02 15:47:57 2018 +0100
@@ -8401,16 +8401,6 @@
   ins_pipe(pipe_class_dummy);
 %}
 
-instruct compU_reg_imm0(flagsReg cr, iRegI op1, immI_0 zero) %{
-  match(Set cr (CmpU op1 zero));
-  ins_cost(DEFAULT_COST_LOW);
-  size(2);
-  format %{ "LTR     $op1,$op1\t # unsigned" %}
-  opcode(LTR_ZOPC);
-  ins_encode(z_rrform(op1, op1));
-  ins_pipe(pipe_class_dummy);
-%}
-
 instruct compU_reg_mem(flagsReg cr, iRegI op1, memory op2)%{
   match(Set cr (CmpU op1 (LoadI op2)));
   ins_cost(MEMORY_REF_COST);
--- a/src/hotspot/share/c1/c1_LIRGenerator.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/c1/c1_LIRGenerator.hpp	Wed May 02 15:47:57 2018 +0100
@@ -302,6 +302,10 @@
   LIR_Opr atomic_xchg(BasicType type, LIR_Opr addr, LIRItem& new_value);
   LIR_Opr atomic_add(BasicType type, LIR_Opr addr, LIRItem& new_value);
 
+#ifdef CARDTABLEBARRIERSET_POST_BARRIER_HELPER
+  virtual void CardTableBarrierSet_post_barrier_helper(LIR_OprDesc* addr, LIR_Const* card_table_base);
+#endif
+
   // specific implementations
   void array_store_check(LIR_Opr value, LIR_Opr array, CodeEmitInfo* store_check_info, ciMethod* profiled_method, int profiled_bci);
 
--- a/src/hotspot/share/gc/g1/g1AllocRegion.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/g1/g1AllocRegion.cpp	Wed May 02 15:47:57 2018 +0100
@@ -95,33 +95,40 @@
   return result;
 }
 
+size_t G1AllocRegion::retire_internal(HeapRegion* alloc_region, bool fill_up) {
+  // We never have to check whether the active region is empty or not,
+  // and potentially free it if it is, given that it's guaranteed that
+  // it will never be empty.
+  size_t waste = 0;
+  assert_alloc_region(!alloc_region->is_empty(),
+      "the alloc region should never be empty");
+
+  if (fill_up) {
+    waste = fill_up_remaining_space(alloc_region);
+  }
+
+  assert_alloc_region(alloc_region->used() >= _used_bytes_before, "invariant");
+  size_t allocated_bytes = alloc_region->used() - _used_bytes_before;
+  retire_region(alloc_region, allocated_bytes);
+  _used_bytes_before = 0;
+
+  return waste;
+}
+
 size_t G1AllocRegion::retire(bool fill_up) {
   assert_alloc_region(_alloc_region != NULL, "not initialized properly");
 
-  size_t result = 0;
+  size_t waste = 0;
 
   trace("retiring");
   HeapRegion* alloc_region = _alloc_region;
   if (alloc_region != _dummy_region) {
-    // We never have to check whether the active region is empty or not,
-    // and potentially free it if it is, given that it's guaranteed that
-    // it will never be empty.
-    assert_alloc_region(!alloc_region->is_empty(),
-                           "the alloc region should never be empty");
-
-    if (fill_up) {
-      result = fill_up_remaining_space(alloc_region);
-    }
-
-    assert_alloc_region(alloc_region->used() >= _used_bytes_before, "invariant");
-    size_t allocated_bytes = alloc_region->used() - _used_bytes_before;
-    retire_region(alloc_region, allocated_bytes);
-    _used_bytes_before = 0;
-    _alloc_region = _dummy_region;
+    waste = retire_internal(alloc_region, fill_up);
+    reset_alloc_region();
   }
   trace("retired");
 
-  return result;
+  return waste;
 }
 
 HeapWord* G1AllocRegion::new_alloc_region_and_allocate(size_t word_size,
@@ -245,7 +252,8 @@
 G1AllocRegion::G1AllocRegion(const char* name,
                              bool bot_updates)
   : _name(name), _bot_updates(bot_updates),
-    _alloc_region(NULL), _count(0), _used_bytes_before(0) { }
+    _alloc_region(NULL), _count(0),
+    _used_bytes_before(0) { }
 
 
 HeapRegion* MutatorAllocRegion::allocate_new_region(size_t word_size,
@@ -258,6 +266,82 @@
   _g1h->retire_mutator_alloc_region(alloc_region, allocated_bytes);
 }
 
+void MutatorAllocRegion::init() {
+  assert(_retained_alloc_region == NULL, "Pre-condition");
+  G1AllocRegion::init();
+  _wasted_bytes = 0;
+}
+
+bool MutatorAllocRegion::should_retain(HeapRegion* region) {
+  size_t free_bytes = region->free();
+  if (free_bytes < MinTLABSize) {
+    return false;
+  }
+
+  if (_retained_alloc_region != NULL &&
+      free_bytes < _retained_alloc_region->free()) {
+    return false;
+  }
+
+  return true;
+}
+
+size_t MutatorAllocRegion::retire(bool fill_up) {
+  size_t waste = 0;
+  trace("retiring");
+  HeapRegion* current_region = get();
+  if (current_region != NULL) {
+    // Retain the current region if it fits a TLAB and has more
+    // free than the currently retained region.
+    if (should_retain(current_region)) {
+      trace("mutator retained");
+      if (_retained_alloc_region != NULL) {
+        waste = retire_internal(_retained_alloc_region, true);
+      }
+      _retained_alloc_region = current_region;
+    } else {
+      waste = retire_internal(current_region, fill_up);
+    }
+    reset_alloc_region();
+  }
+
+  _wasted_bytes += waste;
+  trace("retired");
+  return waste;
+}
+
+size_t MutatorAllocRegion::used_in_alloc_regions() {
+  size_t used = 0;
+  HeapRegion* hr = get();
+  if (hr != NULL) {
+    used += hr->used();
+  }
+
+  hr = _retained_alloc_region;
+  if (hr != NULL) {
+    used += hr->used();
+  }
+  return used;
+}
+
+HeapRegion* MutatorAllocRegion::release() {
+  HeapRegion* ret = G1AllocRegion::release();
+
+  // The retained alloc region must be retired and this must be
+  // done after the above call to release the mutator alloc region,
+  // since it might update the _retained_alloc_region member.
+  if (_retained_alloc_region != NULL) {
+    _wasted_bytes += retire_internal(_retained_alloc_region, false);
+    _retained_alloc_region = NULL;
+  }
+  log_debug(gc, alloc, region)("Mutator Allocation stats, regions: %u, wasted size: " SIZE_FORMAT "%s (%4.1f%%)",
+                               count(),
+                               byte_size_in_proper_unit(_wasted_bytes),
+                               proper_unit_for_byte_size(_wasted_bytes),
+                               percent_of(_wasted_bytes, count() * HeapRegion::GrainBytes));
+  return ret;
+}
+
 HeapRegion* G1GCAllocRegion::allocate_new_region(size_t word_size,
                                                  bool force) {
   assert(!force, "not supported for GC alloc regions");
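
The MutatorAllocRegion changes above introduce a retained region: a region about to be retired is kept for future TLAB refills if it can still fit a TLAB and has more free space than any region already retained. Below is a minimal, self-contained C++ sketch of that decision, with toy types standing in for HeapRegion and MinTLABSize (the real G1 classes are not used here).

#include <cstddef>

// Toy stand-ins; Region and kMinTLABBytes are assumptions, not HotSpot types.
struct Region {
  size_t free_bytes;
};

struct RetainedRegionPolicy {
  static const size_t kMinTLABBytes = 2048;   // stand-in for MinTLABSize
  Region* retained;                           // currently retained region, may be NULL

  RetainedRegionPolicy() : retained(NULL) {}

  // Mirrors the logic of MutatorAllocRegion::should_retain() in the hunk above.
  bool should_retain(const Region* candidate) const {
    if (candidate->free_bytes < kMinTLABBytes) {
      return false;   // too little free space left to be worth keeping for a TLAB
    }
    if (retained != NULL && candidate->free_bytes < retained->free_bytes) {
      return false;   // the region already retained is the better keeper
    }
    return true;
  }
};

A candidate that passes this check replaces the previously retained region, whose remaining space is then filled and counted as waste, matching the retire_internal(_retained_alloc_region, true) call in MutatorAllocRegion::retire() above.
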
--- a/src/hotspot/share/gc/g1/g1AllocRegion.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/g1/g1AllocRegion.hpp	Wed May 02 15:47:57 2018 +0100
@@ -80,6 +80,20 @@
   // whether the _alloc_region is NULL or not.
   static HeapRegion* _dummy_region;
 
+  // After a region is allocated by alloc_new_region, this
+  // method is used to set it as the active alloc_region
+  void update_alloc_region(HeapRegion* alloc_region);
+
+  // Allocate a new active region and use it to perform a word_size
+  // allocation. The force parameter will be passed on to
+  // G1CollectedHeap::allocate_new_alloc_region() and tells it to try
+  // to allocate a new region even if the max has been reached.
+  HeapWord* new_alloc_region_and_allocate(size_t word_size, bool force);
+
+protected:
+  // Reset the alloc region to point at the dummy region.
+  void reset_alloc_region();
+
   // Perform a non-MT-safe allocation out of the given region.
   inline HeapWord* allocate(HeapRegion* alloc_region,
                             size_t word_size);
@@ -102,23 +116,14 @@
   // the space.
   size_t fill_up_remaining_space(HeapRegion* alloc_region);
 
-  // After a region is allocated by alloc_new_region, this
-  // method is used to set it as the active alloc_region
-  void update_alloc_region(HeapRegion* alloc_region);
-
-  // Allocate a new active region and use it to perform a word_size
-  // allocation. The force parameter will be passed on to
-  // G1CollectedHeap::allocate_new_alloc_region() and tells it to try
-  // to allocate a new region even if the max has been reached.
-  HeapWord* new_alloc_region_and_allocate(size_t word_size, bool force);
-
-protected:
   // Retire the active allocating region. If fill_up is true then make
   // sure that the region is full before we retire it so that no one
   // else can allocate out of it.
   // Returns the number of bytes that have been filled up during retire.
   virtual size_t retire(bool fill_up);
 
+  size_t retire_internal(HeapRegion* alloc_region, bool fill_up);
+
   // For convenience as subclasses use it.
   static G1CollectedHeap* _g1h;
 
@@ -177,7 +182,7 @@
   inline HeapWord* attempt_allocation_force(size_t word_size);
 
   // Should be called before we start using this object.
-  void init();
+  virtual void init();
 
   // This can be used to set the active region to a specific
   // region. (Use Example: we try to retain the last old GC alloc
@@ -197,14 +202,49 @@
 };
 
 class MutatorAllocRegion : public G1AllocRegion {
+private:
+  // Keeps track of the total waste generated during the current
+  // mutator phase.
+  size_t _wasted_bytes;
+
+  // Retained allocation region. Used to lower the waste generated
+  // during mutation by having two active regions if the free space
+  // in a region about to be retired still could fit a TLAB.
+  HeapRegion* volatile _retained_alloc_region;
+
+  // Decide if the region should be retained, based on the free size
+  // in it and the free size in the currently retained region, if any.
+  bool should_retain(HeapRegion* region);
 protected:
   virtual HeapRegion* allocate_new_region(size_t word_size, bool force);
   virtual void retire_region(HeapRegion* alloc_region, size_t allocated_bytes);
+  virtual size_t retire(bool fill_up);
 public:
   MutatorAllocRegion()
-    : G1AllocRegion("Mutator Alloc Region", false /* bot_updates */) { }
+    : G1AllocRegion("Mutator Alloc Region", false /* bot_updates */),
+      _wasted_bytes(0),
+      _retained_alloc_region(NULL) { }
+
+  // Returns the combined used memory in the current alloc region and
+  // the retained alloc region.
+  size_t used_in_alloc_regions();
+
+  // Perform an allocation out of the retained allocation region, with the given
+  // minimum and desired size. Returns the actual size allocated (between
+  // minimum and desired size) in actual_word_size if the allocation has been
+  // successful.
+  // Should be called without holding a lock. It will try to allocate lock-free
+  // out of the retained region, or return NULL if it was unable to.
+  inline HeapWord* attempt_retained_allocation(size_t min_word_size,
+                                               size_t desired_word_size,
+                                               size_t* actual_word_size);
+
+  // This specialization of release() makes sure that the retained alloc
+  // region is retired and set to NULL.
+  virtual HeapRegion* release();
+
+  virtual void init();
 };
-
 // Common base class for allocation regions used during GC.
 class G1GCAllocRegion : public G1AllocRegion {
 protected:
--- a/src/hotspot/share/gc/g1/g1AllocRegion.inline.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/g1/g1AllocRegion.inline.hpp	Wed May 02 15:47:57 2018 +0100
@@ -36,6 +36,10 @@
   } while (0)
 
 
+inline void G1AllocRegion::reset_alloc_region() {
+  _alloc_region = _dummy_region;
+}
+
 inline HeapWord* G1AllocRegion::allocate(HeapRegion* alloc_region,
                                          size_t word_size) {
   assert(alloc_region != NULL, "pre-condition");
@@ -126,4 +130,17 @@
   return NULL;
 }
 
+inline HeapWord* MutatorAllocRegion::attempt_retained_allocation(size_t min_word_size,
+                                                                 size_t desired_word_size,
+                                                                 size_t* actual_word_size) {
+  if (_retained_alloc_region != NULL) {
+    HeapWord* result = par_allocate(_retained_alloc_region, min_word_size, desired_word_size, actual_word_size);
+    if (result != NULL) {
+      trace("alloc retained", min_word_size, desired_word_size, *actual_word_size, result);
+      return result;
+    }
+  }
+  return NULL;
+}
+
 #endif // SHARE_VM_GC_G1_G1ALLOCREGION_INLINE_HPP
--- a/src/hotspot/share/gc/g1/g1Allocator.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/g1/g1Allocator.cpp	Wed May 02 15:47:57 2018 +0100
@@ -157,14 +157,7 @@
 
 size_t G1Allocator::used_in_alloc_regions() {
   assert(Heap_lock->owner() != NULL, "Should be owned on this thread's behalf.");
-  size_t result = 0;
-
-  // Read only once in case it is set to NULL concurrently
-  HeapRegion* hr = mutator_alloc_region()->get();
-  if (hr != NULL) {
-    result += hr->used();
-  }
-  return result;
+  return mutator_alloc_region()->used_in_alloc_regions();
 }
 
 
--- a/src/hotspot/share/gc/g1/g1Allocator.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/g1/g1Allocator.hpp	Wed May 02 15:47:57 2018 +0100
@@ -99,7 +99,9 @@
 
   // Allocate blocks of memory during mutator time.
 
-  inline HeapWord* attempt_allocation(size_t word_size);
+  inline HeapWord* attempt_allocation(size_t min_word_size,
+                                      size_t desired_word_size,
+                                      size_t* actual_word_size);
   inline HeapWord* attempt_allocation_locked(size_t word_size);
   inline HeapWord* attempt_allocation_force(size_t word_size);
 
--- a/src/hotspot/share/gc/g1/g1Allocator.inline.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/g1/g1Allocator.inline.hpp	Wed May 02 15:47:57 2018 +0100
@@ -41,8 +41,14 @@
   return &_old_gc_alloc_region;
 }
 
-inline HeapWord* G1Allocator::attempt_allocation(size_t word_size) {
-  return mutator_alloc_region()->attempt_allocation(word_size);
+inline HeapWord* G1Allocator::attempt_allocation(size_t min_word_size,
+                                                 size_t desired_word_size,
+                                                 size_t* actual_word_size) {
+  HeapWord* result = mutator_alloc_region()->attempt_retained_allocation(min_word_size, desired_word_size, actual_word_size);
+  if (result != NULL) {
+    return result;
+  }
+  return mutator_alloc_region()->attempt_allocation(min_word_size, desired_word_size, actual_word_size);
 }
 
 inline HeapWord* G1Allocator::attempt_allocation_locked(size_t word_size) {
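
For orientation, a hedged sketch (toy types, not the G1 classes) of the allocation order that the new G1Allocator::attempt_allocation() above establishes: the retained region is tried first, and only if that fails does the request fall through to the active mutator alloc region.

#include <cstddef>

// ToyRegion/ToyMutatorAllocator are illustrative assumptions; only the
// try-retained-then-active ordering is taken from the patch.
struct ToyRegion {
  size_t free_words;

  // Grants any size in [min_words, desired_words] that still fits, else fails.
  bool try_allocate(size_t min_words, size_t desired_words, size_t* actual_words) {
    if (free_words < min_words) {
      return false;
    }
    size_t granted = free_words < desired_words ? free_words : desired_words;
    free_words -= granted;
    *actual_words = granted;
    return true;
  }
};

struct ToyMutatorAllocator {
  ToyRegion* retained;   // may be NULL
  ToyRegion* active;

  bool attempt_allocation(size_t min_words, size_t desired_words, size_t* actual_words) {
    if (retained != NULL && retained->try_allocate(min_words, desired_words, actual_words)) {
      return true;   // satisfied out of the retained region
    }
    return active->try_allocate(min_words, desired_words, actual_words);
  }
};
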
--- a/src/hotspot/share/gc/g1/g1CollectedHeap.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/g1/g1CollectedHeap.cpp	Wed May 02 15:47:57 2018 +0100
@@ -384,11 +384,13 @@
   return result;
 }
 
-HeapWord* G1CollectedHeap::allocate_new_tlab(size_t word_size) {
+HeapWord* G1CollectedHeap::allocate_new_tlab(size_t min_size,
+                                             size_t requested_size,
+                                             size_t* actual_size) {
   assert_heap_not_locked_and_not_at_safepoint();
-  assert(!is_humongous(word_size), "we do not allow humongous TLABs");
-
-  return attempt_allocation(word_size);
+  assert(!is_humongous(requested_size), "we do not allow humongous TLABs");
+
+  return attempt_allocation(min_size, requested_size, actual_size);
 }
 
 HeapWord*
@@ -399,7 +401,8 @@
   if (is_humongous(word_size)) {
     return attempt_allocation_humongous(word_size);
   }
-  return attempt_allocation(word_size);
+  size_t dummy = 0;
+  return attempt_allocation(word_size, word_size, &dummy);
 }
 
 HeapWord* G1CollectedHeap::attempt_allocation_slow(size_t word_size) {
@@ -492,8 +495,8 @@
     // first attempt (without holding the Heap_lock) here and the
     // follow-on attempt will be at the start of the next loop
     // iteration (after taking the Heap_lock).
-
-    result = _allocator->attempt_allocation(word_size);
+    size_t dummy = 0;
+    result = _allocator->attempt_allocation(word_size, word_size, &dummy);
     if (result != NULL) {
       return result;
     }
@@ -722,20 +725,28 @@
   }
 }
 
-inline HeapWord* G1CollectedHeap::attempt_allocation(size_t word_size) {
+inline HeapWord* G1CollectedHeap::attempt_allocation(size_t min_word_size,
+                                                     size_t desired_word_size,
+                                                     size_t* actual_word_size) {
   assert_heap_not_locked_and_not_at_safepoint();
-  assert(!is_humongous(word_size), "attempt_allocation() should not "
+  assert(!is_humongous(desired_word_size), "attempt_allocation() should not "
          "be called for humongous allocation requests");
 
-  HeapWord* result = _allocator->attempt_allocation(word_size);
+  HeapWord* result = _allocator->attempt_allocation(min_word_size, desired_word_size, actual_word_size);
 
   if (result == NULL) {
-    result = attempt_allocation_slow(word_size);
+    *actual_word_size = desired_word_size;
+    result = attempt_allocation_slow(desired_word_size);
   }
+
   assert_heap_not_locked();
   if (result != NULL) {
-    dirty_young_block(result, word_size);
+    assert(*actual_word_size != 0, "Actual size must have been set here");
+    dirty_young_block(result, *actual_word_size);
+  } else {
+    *actual_word_size = 0;
   }
+
   return result;
 }
 
--- a/src/hotspot/share/gc/g1/g1CollectedHeap.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/g1/g1CollectedHeap.hpp	Wed May 02 15:47:57 2018 +0100
@@ -420,7 +420,9 @@
   //   humongous allocation requests should go to mem_allocate() which
   //   will satisfy them with a special path.
 
-  virtual HeapWord* allocate_new_tlab(size_t word_size);
+  virtual HeapWord* allocate_new_tlab(size_t min_size,
+                                      size_t requested_size,
+                                      size_t* actual_size);
 
   virtual HeapWord* mem_allocate(size_t word_size,
                                  bool*  gc_overhead_limit_was_exceeded);
@@ -428,7 +430,9 @@
   // First-level mutator allocation attempt: try to allocate out of
   // the mutator alloc region without taking the Heap_lock. This
   // should only be used for non-humongous allocations.
-  inline HeapWord* attempt_allocation(size_t word_size);
+  inline HeapWord* attempt_allocation(size_t min_word_size,
+                                      size_t desired_word_size,
+                                      size_t* actual_word_size);
 
   // Second-level mutator allocation attempt: take the Heap_lock and
   // retry the allocation attempt, potentially scheduling a GC
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp	Wed May 02 15:47:57 2018 +0100
@@ -478,8 +478,13 @@
   return young_gen()->eden_space()->unsafe_max_tlab_alloc(thr);
 }
 
-HeapWord* ParallelScavengeHeap::allocate_new_tlab(size_t size) {
-  return young_gen()->allocate(size);
+HeapWord* ParallelScavengeHeap::allocate_new_tlab(size_t min_size, size_t requested_size, size_t* actual_size) {
+  HeapWord* result = young_gen()->allocate(requested_size);
+  if (result != NULL) {
+    *actual_size = requested_size;
+  }
+
+  return result;
 }
 
 void ParallelScavengeHeap::accumulate_statistics_all_tlabs() {
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp	Wed May 02 15:47:57 2018 +0100
@@ -85,7 +85,7 @@
 
  protected:
   static inline size_t total_invocations();
-  HeapWord* allocate_new_tlab(size_t size);
+  HeapWord* allocate_new_tlab(size_t min_size, size_t requested_size, size_t* actual_size);
 
   inline bool should_alloc_in_eden(size_t size) const;
   inline void death_march_check(HeapWord* const result, size_t size);
--- a/src/hotspot/share/gc/shared/collectedHeap.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/shared/collectedHeap.cpp	Wed May 02 15:47:57 2018 +0100
@@ -384,17 +384,24 @@
     return NULL;
   }
 
-  // Allocate a new TLAB...
-  HeapWord* obj = Universe::heap()->allocate_new_tlab(new_tlab_size);
+  // Allocate a new TLAB requesting new_tlab_size. Any size
+  // between minimal and new_tlab_size is accepted.
+  size_t actual_tlab_size = 0;
+  size_t min_tlab_size = ThreadLocalAllocBuffer::compute_min_size(size);
+  HeapWord* obj = Universe::heap()->allocate_new_tlab(min_tlab_size, new_tlab_size, &actual_tlab_size);
   if (obj == NULL) {
+    assert(actual_tlab_size == 0, "Allocation failed, but actual size was updated. min: " SIZE_FORMAT ", desired: " SIZE_FORMAT ", actual: " SIZE_FORMAT,
+           min_tlab_size, new_tlab_size, actual_tlab_size);
     return NULL;
   }
+  assert(actual_tlab_size != 0, "Allocation succeeded but actual size not updated. obj at: " PTR_FORMAT " min: " SIZE_FORMAT ", desired: " SIZE_FORMAT,
+         p2i(obj), min_tlab_size, new_tlab_size);
 
-  AllocTracer::send_allocation_in_new_tlab(klass, obj, new_tlab_size * HeapWordSize, size * HeapWordSize, thread);
+  AllocTracer::send_allocation_in_new_tlab(klass, obj, actual_tlab_size * HeapWordSize, size * HeapWordSize, thread);
 
   if (ZeroTLAB) {
     // ..and clear it.
-    Copy::zero_to_words(obj, new_tlab_size);
+    Copy::zero_to_words(obj, actual_tlab_size);
   } else {
     // ...and zap just allocated object.
 #ifdef ASSERT
@@ -402,10 +409,10 @@
     // ensure that the returned space is not considered parsable by
     // any concurrent GC thread.
     size_t hdr_size = oopDesc::header_size();
-    Copy::fill_to_words(obj + hdr_size, new_tlab_size - hdr_size, badHeapWordVal);
+    Copy::fill_to_words(obj + hdr_size, actual_tlab_size - hdr_size, badHeapWordVal);
 #endif // ASSERT
   }
-  thread->tlab().fill(obj, obj + size, new_tlab_size);
+  thread->tlab().fill(obj, obj + size, actual_tlab_size);
   return obj;
 }
 
@@ -506,7 +513,9 @@
   fill_with_object_impl(start, words, zap);
 }
 
-HeapWord* CollectedHeap::allocate_new_tlab(size_t size) {
+HeapWord* CollectedHeap::allocate_new_tlab(size_t min_size,
+                                           size_t requested_size,
+                                           size_t* actual_size) {
   guarantee(false, "thread-local allocation buffers not supported");
   return NULL;
 }
--- a/src/hotspot/share/gc/shared/collectedHeap.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/shared/collectedHeap.hpp	Wed May 02 15:47:57 2018 +0100
@@ -126,7 +126,13 @@
   CollectedHeap();
 
   // Create a new tlab. All TLAB allocations must go through this.
-  virtual HeapWord* allocate_new_tlab(size_t size);
+  // To allow more flexible TLAB allocations min_size specifies
+  // the minimum size needed, while requested_size is the requested
+  // size based on ergonomics. The actually allocated size will be
+  // returned in actual_size.
+  virtual HeapWord* allocate_new_tlab(size_t min_size,
+                                      size_t requested_size,
+                                      size_t* actual_size);
 
   // Accumulate statistics on all tlabs.
   virtual void accumulate_statistics_all_tlabs();
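
To make the widened contract concrete, here is a self-contained toy implementation (ToyHeap is an assumption, not a HotSpot class) honouring the new three-parameter signature: hand back a buffer whose size lies anywhere between min_size and requested_size, report that size through actual_size, and leave actual_size at 0 on failure, as the asserts in the collectedHeap.cpp hunk above expect.

#include <cstddef>
#include <cstdlib>

// Illustrative only: one way a heap could satisfy the new allocate_new_tlab()
// contract. Sizes are in words, as in the real interface.
struct ToyHeap {
  void* allocate_new_tlab(size_t min_size, size_t requested_size, size_t* actual_size) {
    // Try the requested size first, then progressively smaller sizes down to the minimum.
    for (size_t size = requested_size; size > 0 && size >= min_size; size /= 2) {
      void* buf = std::malloc(size * sizeof(void*));   // words -> bytes
      if (buf != NULL) {
        *actual_size = size;   // success: report what was actually granted
        return buf;
      }
    }
    *actual_size = 0;          // failure: callers assert actual_size == 0 in this case
    return NULL;
  }
};
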
--- a/src/hotspot/share/gc/shared/genCollectedHeap.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.cpp	Wed May 02 15:47:57 2018 +0100
@@ -1146,11 +1146,18 @@
   return 0;
 }
 
-HeapWord* GenCollectedHeap::allocate_new_tlab(size_t size) {
+HeapWord* GenCollectedHeap::allocate_new_tlab(size_t min_size,
+                                              size_t requested_size,
+                                              size_t* actual_size) {
   bool gc_overhead_limit_was_exceeded;
-  return mem_allocate_work(size /* size */,
-                           true /* is_tlab */,
-                           &gc_overhead_limit_was_exceeded);
+  HeapWord* result = mem_allocate_work(requested_size /* size */,
+                                       true /* is_tlab */,
+                                       &gc_overhead_limit_was_exceeded);
+  if (result != NULL) {
+    *actual_size = requested_size;
+  }
+
+  return result;
 }
 
 // Requires "*prev_ptr" to be non-NULL.  Deletes and a block of minimal size
--- a/src/hotspot/share/gc/shared/genCollectedHeap.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.hpp	Wed May 02 15:47:57 2018 +0100
@@ -297,7 +297,9 @@
   virtual size_t tlab_capacity(Thread* thr) const;
   virtual size_t tlab_used(Thread* thr) const;
   virtual size_t unsafe_max_tlab_alloc(Thread* thr) const;
-  virtual HeapWord* allocate_new_tlab(size_t size);
+  virtual HeapWord* allocate_new_tlab(size_t min_size,
+                                      size_t requested_size,
+                                      size_t* actual_size);
 
   // The "requestor" generation is performing some garbage collection
   // action for which it would be useful to have scratch space.  The
--- a/src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp	Wed May 02 15:47:57 2018 +0100
@@ -89,7 +89,7 @@
     }
     global_stats()->update_allocating_threads();
     global_stats()->update_number_of_refills(_number_of_refills);
-    global_stats()->update_allocation(_number_of_refills * desired_size());
+    global_stats()->update_allocation(_allocated_size);
     global_stats()->update_gc_waste(_gc_waste);
     global_stats()->update_slow_refill_waste(_slow_refill_waste);
     global_stats()->update_fast_refill_waste(_fast_refill_waste);
@@ -157,17 +157,19 @@
 }
 
 void ThreadLocalAllocBuffer::initialize_statistics() {
-    _number_of_refills = 0;
-    _fast_refill_waste = 0;
-    _slow_refill_waste = 0;
-    _gc_waste          = 0;
-    _slow_allocations  = 0;
+  _number_of_refills = 0;
+  _fast_refill_waste = 0;
+  _slow_refill_waste = 0;
+  _gc_waste          = 0;
+  _slow_allocations  = 0;
+  _allocated_size    = 0;
 }
 
 void ThreadLocalAllocBuffer::fill(HeapWord* start,
                                   HeapWord* top,
                                   size_t    new_size) {
   _number_of_refills++;
+  _allocated_size += new_size;
   print_stats("fill");
   assert(top <= start + new_size - alignment_reserve(), "size too small");
   initialize(start, top, start + new_size - alignment_reserve());
@@ -274,8 +276,7 @@
 
   Thread* thrd = myThread();
   size_t waste = _gc_waste + _slow_refill_waste + _fast_refill_waste;
-  size_t alloc = _number_of_refills * _desired_size;
-  double waste_percent = percent_of(waste, alloc);
+  double waste_percent = percent_of(waste, _allocated_size);
   size_t tlab_used  = Universe::heap()->tlab_used(thrd);
   log.trace("TLAB: %s thread: " INTPTR_FORMAT " [id: %2d]"
             " desired_size: " SIZE_FORMAT "KB"
--- a/src/hotspot/share/gc/shared/threadLocalAllocBuffer.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/shared/threadLocalAllocBuffer.hpp	Wed May 02 15:47:57 2018 +0100
@@ -58,6 +58,7 @@
   unsigned  _slow_refill_waste;
   unsigned  _gc_waste;
   unsigned  _slow_allocations;
+  size_t    _allocated_size;
 
   AdaptiveWeightedAverage _allocation_fraction;  // fraction of eden allocated in tlabs
 
@@ -141,6 +142,9 @@
   // Otherwise return 0;
   inline size_t compute_size(size_t obj_size);
 
+  // Compute the minimal needed tlab size for the given object size.
+  static inline size_t compute_min_size(size_t obj_size);
+
   // Record slow allocation
   inline void record_slow_allocation(size_t obj_size);
 
--- a/src/hotspot/share/gc/shared/threadLocalAllocBuffer.inline.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/gc/shared/threadLocalAllocBuffer.inline.hpp	Wed May 02 15:47:57 2018 +0100
@@ -54,18 +54,15 @@
 }
 
 inline size_t ThreadLocalAllocBuffer::compute_size(size_t obj_size) {
-  const size_t aligned_obj_size = align_object_size(obj_size);
-
   // Compute the size for the new TLAB.
   // The "last" tlab may be smaller to reduce fragmentation.
   // unsafe_max_tlab_alloc is just a hint.
   const size_t available_size = Universe::heap()->unsafe_max_tlab_alloc(myThread()) /
                                                   HeapWordSize;
-  size_t new_tlab_size = MIN3(available_size, desired_size() + aligned_obj_size, max_size());
+  size_t new_tlab_size = MIN3(available_size, desired_size() + align_object_size(obj_size), max_size());
 
   // Make sure there's enough room for object and filler int[].
-  const size_t obj_plus_filler_size = aligned_obj_size + alignment_reserve();
-  if (new_tlab_size < obj_plus_filler_size) {
+  if (new_tlab_size < compute_min_size(obj_size)) {
     // If there isn't enough room for the allocation, return failure.
     log_trace(gc, tlab)("ThreadLocalAllocBuffer::compute_size(" SIZE_FORMAT ") returns failure",
                         obj_size);
@@ -76,6 +73,11 @@
   return new_tlab_size;
 }
 
+inline size_t ThreadLocalAllocBuffer::compute_min_size(size_t obj_size) {
+  const size_t aligned_obj_size = align_object_size(obj_size);
+  const size_t size_with_reserve = aligned_obj_size + alignment_reserve();
+  return MAX2(size_with_reserve, MinTLABSize);
+}
 
 void ThreadLocalAllocBuffer::record_slow_allocation(size_t obj_size) {
   // Raise size required to bypass TLAB next time. Why? Else there's
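
The new compute_min_size() above reduces to max(aligned object size + alignment reserve, MinTLABSize). A self-contained C++ restatement follows; the concrete constants are assumptions standing in for HotSpot's object alignment, alignment_reserve() and MinTLABSize, and only the MAX2 structure is taken from the patch.

#include <algorithm>
#include <cstddef>

static size_t align_object_size_words(size_t words) {
  const size_t kObjectAlignmentWords = 8;   // illustrative alignment
  return (words + kObjectAlignmentWords - 1) & ~(kObjectAlignmentWords - 1);
}

static size_t compute_min_tlab_size(size_t obj_size_words) {
  const size_t kAlignmentReserveWords = 2;    // room kept for the filler int[]
  const size_t kMinTLABSizeWords      = 256;  // stand-in for -XX:MinTLABSize in words
  const size_t size_with_reserve = align_object_size_words(obj_size_words) + kAlignmentReserveWords;
  return std::max(size_with_reserve, kMinTLABSizeWords);
}
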
--- a/src/hotspot/share/services/memoryManager.hpp	Wed May 02 10:35:33 2018 +0100
+++ b/src/hotspot/share/services/memoryManager.hpp	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -137,7 +137,6 @@
   // TODO: We should unify the GCCounter and GCMemoryManager statistic
   size_t       _num_collections;
   elapsedTimer _accumulated_timer;
-  elapsedTimer _gc_timer;         // for measuring every GC duration
   GCStatInfo*  _last_gc_stat;
   Mutex*       _last_gc_lock;
   GCStatInfo*  _current_gc_stat;
--- a/src/java.base/share/classes/java/util/TimeZone.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.base/share/classes/java/util/TimeZone.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1996, 2015, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1996, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -784,9 +784,6 @@
     static final String         GMT_ID        = "GMT";
     private static final int    GMT_ID_LENGTH = 3;
 
-    // a static TimeZone we can reference if no AppContext is in place
-    private static volatile TimeZone mainAppContextDefault;
-
     /**
      * Parses a custom time zone identifier and returns a corresponding zone.
      * This method doesn't support the RFC 822 time zone format. (e.g., +hhmm)
--- a/src/java.base/share/classes/java/util/concurrent/ConcurrentHashMap.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.base/share/classes/java/util/concurrent/ConcurrentHashMap.java	Wed May 02 15:47:57 2018 +0100
@@ -6389,5 +6389,8 @@
         // Reduce the risk of rare disastrous classloading in first call to
         // LockSupport.park: https://bugs.openjdk.java.net/browse/JDK-8074773
         Class<?> ensureLoaded = LockSupport.class;
+
+        // Eager class load observed to help JIT during startup
+        ensureLoaded = ReservationNode.class;
     }
 }
--- a/src/java.compiler/share/classes/javax/lang/model/util/AbstractAnnotationValueVisitor9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/AbstractAnnotationValueVisitor9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -31,8 +31,8 @@
 
 /**
  * A skeletal visitor for annotation values with default behavior
- * appropriate for the {@link SourceVersion#RELEASE_9 RELEASE_9}
- * and {@link SourceVersion#RELEASE_10 RELEASE_10} source versions.
+ * appropriate for source versions {@link SourceVersion#RELEASE_9
+ * RELEASE_9} through {@link SourceVersion#RELEASE_11 RELEASE_11}.
  *
  * <p> <b>WARNING:</b> The {@code AnnotationValueVisitor} interface
  * implemented by this class may have methods added to it in the
@@ -59,7 +59,7 @@
  * @see AbstractAnnotationValueVisitor8
  * @since 9
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public abstract class AbstractAnnotationValueVisitor9<R, P> extends AbstractAnnotationValueVisitor8<R, P> {
 
     /**
--- a/src/java.compiler/share/classes/javax/lang/model/util/AbstractElementVisitor9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/AbstractElementVisitor9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -33,8 +33,8 @@
 
 /**
  * A skeletal visitor of program elements with default behavior
- * appropriate for the {@link SourceVersion#RELEASE_9 RELEASE_9}
- * and {@link SourceVersion#RELEASE_10 RELEASE_10} source versions.
+ * appropriate for source versions {@link SourceVersion#RELEASE_9
+ * RELEASE_9} through {@link SourceVersion#RELEASE_11 RELEASE_11}.
  *
  * <p> <b>WARNING:</b> The {@code ElementVisitor} interface
  * implemented by this class may have methods added to it in the
@@ -65,7 +65,7 @@
  * @since 9
  * @spec JPMS
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public abstract class AbstractElementVisitor9<R, P> extends AbstractElementVisitor8<R, P> {
     /**
      * Constructor for concrete subclasses to call.
--- a/src/java.compiler/share/classes/javax/lang/model/util/AbstractTypeVisitor9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/AbstractTypeVisitor9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -32,8 +32,8 @@
 
 /**
  * A skeletal visitor of types with default behavior appropriate for
- * the {@link SourceVersion#RELEASE_9 RELEASE_9}
- * and {@link SourceVersion#RELEASE_10 RELEASE_10} source versions.
+ * source versions {@link SourceVersion#RELEASE_9 RELEASE_9} through
+ * {@link SourceVersion#RELEASE_11 RELEASE_11}.
  *
  * <p> <b>WARNING:</b> The {@code TypeVisitor} interface implemented
  * by this class may have methods added to it in the future to
@@ -63,7 +63,7 @@
  * @see AbstractTypeVisitor8
  * @since 9
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public abstract class AbstractTypeVisitor9<R, P> extends AbstractTypeVisitor8<R, P> {
     /**
      * Constructor for concrete subclasses to call.
--- a/src/java.compiler/share/classes/javax/lang/model/util/ElementKindVisitor9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/ElementKindVisitor9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -32,9 +32,11 @@
 
 /**
  * A visitor of program elements based on their {@linkplain
- * ElementKind kind} with default behavior appropriate for the {@link
- * SourceVersion#RELEASE_9 RELEASE_9} and {@link
- * SourceVersion#RELEASE_10 RELEASE_10} source versions. For {@linkplain
+ * ElementKind kind} with default behavior appropriate for source
+ * versions {@link SourceVersion#RELEASE_9 RELEASE_9} through {@link
+ * SourceVersion#RELEASE_11 RELEASE_11}.
+ *
+ * For {@linkplain
  * Element elements} <code><i>Xyz</i></code> that may have more than one
  * kind, the <code>visit<i>Xyz</i></code> methods in this class delegate
  * to the <code>visit<i>Xyz</i>As<i>Kind</i></code> method corresponding to the
@@ -78,7 +80,7 @@
  * @since 9
  * @spec JPMS
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public class ElementKindVisitor9<R, P> extends ElementKindVisitor8<R, P> {
     /**
      * Constructor for concrete subclasses; uses {@code null} for the
--- a/src/java.compiler/share/classes/javax/lang/model/util/ElementScanner9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/ElementScanner9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -33,8 +33,9 @@
 
 /**
  * A scanning visitor of program elements with default behavior
- * appropriate for the {@link SourceVersion#RELEASE_9 RELEASE_9}
- * and {@link SourceVersion#RELEASE_10 RELEASE_10} source versions.
+ * appropriate for source versions {@link SourceVersion#RELEASE_9
+ * RELEASE_9} through {@link SourceVersion#RELEASE_11 RELEASE_11}.
+ *
  * The <code>visit<i>Xyz</i></code> methods in this
  * class scan their component elements by calling {@code scan} on
  * their {@linkplain Element#getEnclosedElements enclosed elements},
@@ -91,7 +92,7 @@
  * @since 9
  * @spec JPMS
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public class ElementScanner9<R, P> extends ElementScanner8<R, P> {
     /**
      * Constructor for concrete subclasses; uses {@code null} for the
--- a/src/java.compiler/share/classes/javax/lang/model/util/SimpleAnnotationValueVisitor9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/SimpleAnnotationValueVisitor9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -31,8 +31,9 @@
 
 /**
  * A simple visitor for annotation values with default behavior
- * appropriate for the {@link SourceVersion#RELEASE_9 RELEASE_9}
- * and {@link SourceVersion#RELEASE_10 RELEASE_10} source versions.
+ * appropriate for source versions {@link SourceVersion#RELEASE_9
+ * RELEASE_9} through {@link SourceVersion#RELEASE_11 RELEASE_11}.
+ *
  * Visit methods call {@link #defaultAction
  * defaultAction} passing their arguments to {@code defaultAction}'s
  * corresponding parameters.
@@ -67,7 +68,7 @@
  * @see SimpleAnnotationValueVisitor8
  * @since 9
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public class SimpleAnnotationValueVisitor9<R, P> extends SimpleAnnotationValueVisitor8<R, P> {
     /**
      * Constructor for concrete subclasses; uses {@code null} for the
--- a/src/java.compiler/share/classes/javax/lang/model/util/SimpleElementVisitor9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/SimpleElementVisitor9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -32,8 +32,8 @@
 
 /**
  * A simple visitor of program elements with default behavior
- * appropriate for the {@link SourceVersion#RELEASE_9 RELEASE_9}
- * and {@link SourceVersion#RELEASE_10 RELEASE_10} source versions.
+ * appropriate for source versions {@link SourceVersion#RELEASE_9
+ * RELEASE_9} through {@link SourceVersion#RELEASE_11 RELEASE_11}.
  *
  * Visit methods corresponding to {@code RELEASE_9} and earlier
  * language constructs call {@link #defaultAction defaultAction},
@@ -73,7 +73,7 @@
  * @since 9
  * @spec JPMS
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public class SimpleElementVisitor9<R, P> extends SimpleElementVisitor8<R, P> {
     /**
      * Constructor for concrete subclasses; uses {@code null} for the
--- a/src/java.compiler/share/classes/javax/lang/model/util/SimpleTypeVisitor9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/SimpleTypeVisitor9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -31,9 +31,9 @@
 import static javax.lang.model.SourceVersion.*;
 
 /**
- * A simple visitor of types with default behavior appropriate for the
- * {@link SourceVersion#RELEASE_9 RELEASE_9} and
- * {@link SourceVersion#RELEASE_10 RELEASE_10} source versions.
+ * A simple visitor of types with default behavior appropriate for
+ * source versions {@link SourceVersion#RELEASE_9 RELEASE_9} through
+ * {@link SourceVersion#RELEASE_11 RELEASE_11}.
  *
  * Visit methods corresponding to {@code RELEASE_9} and earlier
  * language constructs call {@link #defaultAction defaultAction},
@@ -70,9 +70,10 @@
  *
  * @see SimpleTypeVisitor6
  * @see SimpleTypeVisitor7
+ * @see SimpleTypeVisitor8
  * @since 9
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public class SimpleTypeVisitor9<R, P> extends SimpleTypeVisitor8<R, P> {
     /**
      * Constructor for concrete subclasses; uses {@code null} for the
--- a/src/java.compiler/share/classes/javax/lang/model/util/TypeKindVisitor9.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/java.compiler/share/classes/javax/lang/model/util/TypeKindVisitor9.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -32,9 +32,11 @@
 
 /**
  * A visitor of types based on their {@linkplain TypeKind kind} with
- * default behavior appropriate for the {@link SourceVersion#RELEASE_9
- * RELEASE_9} and {@link SourceVersion#RELEASE_10 RELEASE_10} source
- * versions. For {@linkplain
+ * default behavior appropriate for source versions {@link
+ * SourceVersion#RELEASE_9 RELEASE_9} through {@link
+ * SourceVersion#RELEASE_11 RELEASE_11}.
+ *
+ * For {@linkplain
  * TypeMirror types} <code><i>Xyz</i></code> that may have more than one
  * kind, the <code>visit<i>Xyz</i></code> methods in this class delegate
  * to the <code>visit<i>Xyz</i>As<i>Kind</i></code> method corresponding to the
@@ -75,7 +77,7 @@
  * @see TypeKindVisitor8
  * @since 9
  */
-@SupportedSourceVersion(RELEASE_10)
+@SupportedSourceVersion(RELEASE_11)
 public class TypeKindVisitor9<R, P> extends TypeKindVisitor8<R, P> {
     /**
      * Constructor for concrete subclasses to call; uses {@code null}
--- a/src/jdk.compiler/share/classes/com/sun/tools/javac/comp/DeferredAttr.java	Wed May 02 10:35:33 2018 +0100
+++ b/src/jdk.compiler/share/classes/com/sun/tools/javac/comp/DeferredAttr.java	Wed May 02 15:47:57 2018 +0100
@@ -441,7 +441,7 @@
         } else {
             stats.add((JCBlock)that.body);
         }
-        JCBlock lambdaBlock = make.Block(0, stats.toList());
+        JCBlock lambdaBlock = make.at(that.pos).Block(0, stats.toList());
         Env<AttrContext> localEnv = attr.lambdaEnv(that, env);
         try {
             localEnv.info.returnResult = resultInfo;
--- a/test/hotspot/jtreg/runtime/appcds/jigsaw/modulepath/AddOpens.java	Wed May 02 10:35:33 2018 +0100
+++ b/test/hotspot/jtreg/runtime/appcds/jigsaw/modulepath/AddOpens.java	Wed May 02 15:47:57 2018 +0100
@@ -87,7 +87,7 @@
         // the class in the modular jar in the -cp won't be archived.
         OutputAnalyzer output = TestCommon.createArchive(
                                         destJar.toString(), appClasses,
-                                        "-Xlog:class+load=trace", "-XX:+PrintSystemDictionaryAtExit",
+                                        "-Xlog:class+load=trace",
                                         "--module-path", moduleDir.toString(),
                                         "-m", TEST_MODULE1);
         TestCommon.checkDump(output);
--- a/test/hotspot/jtreg/runtime/appcds/jigsaw/modulepath/ExportModule.java	Wed May 02 10:35:33 2018 +0100
+++ b/test/hotspot/jtreg/runtime/appcds/jigsaw/modulepath/ExportModule.java	Wed May 02 15:47:57 2018 +0100
@@ -118,7 +118,7 @@
         // the module in the --module-path
         OutputAnalyzer output = TestCommon.createArchive(
                                         appJar.toString(), appClasses,
-                                        "-Xlog:class+load=trace", "-XX:+PrintSystemDictionaryAtExit",
+                                        "-Xlog:class+load=trace",
                                         "--module-path", moduleDir.toString(),
                                         "--add-modules", TEST_MODULE2, MAIN_CLASS);
         TestCommon.checkDump(output);
@@ -142,7 +142,7 @@
         // unnamed.
         output = TestCommon.createArchive(
                                         appJar2.toString(), appClasses2,
-                                        "-Xlog:class+load=trace", "-XX:+PrintSystemDictionaryAtExit",
+                                        "-Xlog:class+load=trace",
                                         "--module-path", moduleDir.toString(),
                                         "--add-modules", TEST_MODULE2,
                                         "--add-exports", "org.astro/org.astro=ALL-UNNAMED",
--- a/test/hotspot/jtreg/runtime/appcds/jigsaw/modulepath/MainModuleOnly.java	Wed May 02 10:35:33 2018 +0100
+++ b/test/hotspot/jtreg/runtime/appcds/jigsaw/modulepath/MainModuleOnly.java	Wed May 02 15:47:57 2018 +0100
@@ -88,7 +88,7 @@
         // the class in the modular jar in the -cp won't be archived.
         OutputAnalyzer output = TestCommon.createArchive(
                                         destJar.toString(), appClasses,
-                                        "-Xlog:class+load=trace", "-XX:+PrintSystemDictionaryAtExit",
+                                        "-Xlog:class+load=trace",
                                         "--module-path", moduleDir.toString(),
                                         "-m", TEST_MODULE1);
         TestCommon.checkDump(output);
--- a/test/hotspot/jtreg/runtime/appcds/jigsaw/modulepath/ModulePathAndCP.java	Wed May 02 10:35:33 2018 +0100
+++ b/test/hotspot/jtreg/runtime/appcds/jigsaw/modulepath/ModulePathAndCP.java	Wed May 02 15:47:57 2018 +0100
@@ -132,7 +132,7 @@
         String jars = subJar.toString() + System.getProperty("path.separator") +
                       mainJar.toString();
         output = TestCommon.createArchive( jars, appClasses,
-                                           "-Xlog:class+load=trace", "-XX:+PrintSystemDictionaryAtExit",
+                                           "-Xlog:class+load=trace",
                                            "--module-path", moduleDir.toString(),
                                            "-m", MAIN_MODULE);
         TestCommon.checkDump(output);
--- a/test/jdk/ProblemList.txt	Wed May 02 10:35:33 2018 +0100
+++ b/test/jdk/ProblemList.txt	Wed May 02 15:47:57 2018 +0100
@@ -510,7 +510,6 @@
 
 # jdk_io
 
-java/io/FileOutputStream/AtomicAppend.java                      8202062 macosx-all
 java/io/pathNames/GeneralWin32.java                             8180264 windows-all
 
 java/io/FileInputStream/UnreferencedFISClosesFd.java            8202292 linux-all
@@ -552,8 +551,6 @@
 
 java/nio/channels/DatagramChannel/ChangingAddress.java          7141822 macosx-all
 
-java/nio/channels/FileChannel/AtomicAppend.java                 8202062 macosx-all
-
 java/nio/channels/Selector/Wakeup.java                          6963118 windows-all
 
 java/nio/file/WatchService/Basic.java                           7158947 solaris-all Solaris 11
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/langtools/tools/javac/lambda/8202372/T8202372.java	Wed May 02 15:47:57 2018 +0100
@@ -0,0 +1,45 @@
+/*
+ * @test /nodynamiccopyright/
+ * @bug 8202372
+ * @summary Diagnostic with incorrect line info generated when compiling lambda expression
+ * @compile/fail/ref=T8202372.out -XDrawDiagnostics T8202372.java
+ */
+class T8202372 {
+
+    interface NonVoidFunc {
+        String m();
+    }
+
+    interface VoidFunc {
+        void m();
+    }
+
+    interface ParamFunc {
+        void m(String s);
+    }
+
+    public void addVoid(VoidFunc v) {}
+    public void addNonVoid(NonVoidFunc nv) {}
+    public void addParam(ParamFunc p) {}
+
+    void testVoid(T8202372 test) {
+        test.addVoid(() -> "");
+        test.addVoid(() -> { return ""; });
+        test.addVoid(() -> { });
+        test.addVoid(() -> { return; });
+    }
+
+    void testNonVoid(T8202372 test) {
+        test.addNonVoid(() -> "");
+        test.addNonVoid(() -> { return ""; });
+        test.addNonVoid(() -> { });
+        test.addNonVoid(() -> { return; });
+    }
+
+    void testParam(T8202372 test) {
+        test.addParam(() -> {});
+        test.addParam((String x) -> { });
+        test.addParam((String x1, String x2) -> { });
+        test.addParam((int x) -> { });
+    }
+}
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/langtools/tools/javac/lambda/8202372/T8202372.out	Wed May 02 15:47:57 2018 +0100
@@ -0,0 +1,8 @@
+T8202372.java:26:13: compiler.err.cant.apply.symbol: kindname.method, addVoid, T8202372.VoidFunc, @22, kindname.class, T8202372, (compiler.misc.no.conforming.assignment.exists: (compiler.misc.stat.expr.expected))
+T8202372.java:27:13: compiler.err.cant.apply.symbol: kindname.method, addVoid, T8202372.VoidFunc, @22, kindname.class, T8202372, (compiler.misc.no.conforming.assignment.exists: (compiler.misc.incompatible.ret.type.in.lambda: (compiler.misc.unexpected.ret.val)))
+T8202372.java:35:13: compiler.err.cant.apply.symbol: kindname.method, addNonVoid, T8202372.NonVoidFunc, @25, kindname.class, T8202372, (compiler.misc.no.conforming.assignment.exists: (compiler.misc.incompatible.ret.type.in.lambda: (compiler.misc.missing.ret.val: java.lang.String)))
+T8202372.java:36:13: compiler.err.cant.apply.symbol: kindname.method, addNonVoid, T8202372.NonVoidFunc, @25, kindname.class, T8202372, (compiler.misc.no.conforming.assignment.exists: (compiler.misc.incompatible.ret.type.in.lambda: (compiler.misc.missing.ret.val)))
+T8202372.java:40:13: compiler.err.cant.apply.symbol: kindname.method, addParam, T8202372.ParamFunc, @23, kindname.class, T8202372, (compiler.misc.no.conforming.assignment.exists: (compiler.misc.incompatible.arg.types.in.lambda))
+T8202372.java:42:13: compiler.err.cant.apply.symbol: kindname.method, addParam, T8202372.ParamFunc, @23, kindname.class, T8202372, (compiler.misc.no.conforming.assignment.exists: (compiler.misc.incompatible.arg.types.in.lambda))
+T8202372.java:43:13: compiler.err.cant.apply.symbol: kindname.method, addParam, T8202372.ParamFunc, @23, kindname.class, T8202372, (compiler.misc.no.conforming.assignment.exists: (compiler.misc.incompatible.arg.types.in.lambda))
+7 errors
--- a/test/langtools/tools/javac/lvti/T8200199.java	Wed May 02 10:35:33 2018 +0100
+++ b/test/langtools/tools/javac/lvti/T8200199.java	Wed May 02 15:47:57 2018 +0100
@@ -24,7 +24,7 @@
  */
 
 /*
- * @test /nodynamioccopyright/
+ * @test /nodynamiccopyright/
  * @bug 8200199
  * @summary javac suggests to use var even when var is used
  * @compile/fail/ref=T8200199.out -Werror -XDfind=local -XDrawDiagnostics T8200199.java
--- a/test/langtools/tools/javac/lvti/badTypeReference/BadTypeReference.java	Wed May 02 10:35:33 2018 +0100
+++ b/test/langtools/tools/javac/lvti/badTypeReference/BadTypeReference.java	Wed May 02 15:47:57 2018 +0100
@@ -24,7 +24,7 @@
  */
 
 /*
- * @test /nodynamioccopyright/
+ * @test /nodynamiccopyright/
  * @bug 8177466
  * @summary Add compiler support for local variable type-inference
  * @compile -source 8 pkg/var.java
--- a/test/langtools/tools/javac/preview/classReaderTest/Client.java	Wed May 02 10:35:33 2018 +0100
+++ b/test/langtools/tools/javac/preview/classReaderTest/Client.java	Wed May 02 15:47:57 2018 +0100
@@ -1,5 +1,5 @@
 /*
- * @test /nodynamioccopyright/
+ * @test /nodynamiccopyright/
  * @bug 8199194
  * @summary smoke test for --enabled-preview classreader support
  * @compile -XDforcePreview --enable-preview -source 11 Bar.java