Merge
author duke
date Wed, 05 Jul 2017 18:59:05 +0200
changeset 18014 3016ad09b477
parent 18013 f7887244ecd2 (current diff)
parent 17996 141c642bfd45 (diff)
child 18015 0d804e3b955d
Merge
jdk/src/solaris/classes/sun/awt/fontconfigs/linux.fontconfig.Fedora.properties
jdk/src/solaris/classes/sun/awt/fontconfigs/linux.fontconfig.SuSE.properties
jdk/src/solaris/classes/sun/awt/fontconfigs/linux.fontconfig.Ubuntu.properties
jdk/src/solaris/classes/sun/awt/fontconfigs/linux.fontconfig.properties
jdk/test/java/util/stream/test/org/openjdk/tests/java/util/stream/SpliteratorLateBindingFailFastTest.java
jdk/test/java/util/stream/test/org/openjdk/tests/java/util/stream/SpliteratorTraversingAndSplittingTest.java
nashorn/src/jdk/nashorn/internal/objects/DateParser.java
--- a/.hgtags-top-repo	Mon Jun 10 17:04:18 2013 -0700
+++ b/.hgtags-top-repo	Wed Jul 05 18:59:05 2017 +0200
@@ -214,3 +214,4 @@
 69b773a221b956a3386933ecdbfeccee0edeac47 jdk8-b90
 cb51fb4789ac0b8be4056482077ddfb8f3bd3805 jdk8-b91
 3a36c926a7aafa9d4a892a45ef3678e87ad8359b jdk8-b92
+27c51c6e31c1ef36afa0e6efb031f9b13f26c12b jdk8-b93
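
Note: much of the generated-configure.sh churn below comes from the script being regenerated with GNU Autoconf 2.67 instead of 2.68; 2.68's compact cache-variable guard is spelled out with test in 2.67's output. A minimal sketch of the two equivalent checks, using a hypothetical variable name for illustration:

  # Hypothetical cache variable, for illustration only.
  ac_cv_path_FOO=/usr/bin/foo

  # Autoconf 2.68 form (the "-" lines below): when the variable is set,
  # ${ac_cv_path_FOO+:} expands to ":", so the guard runs ": false" and succeeds;
  # when it is unset, only "false" runs and the branch is skipped.
  if ${ac_cv_path_FOO+:} false; then
    echo "cached (2.68 idiom)"
  fi

  # Autoconf 2.67 form (the "+" lines below): the same set-ness check written with test.
  if test "${ac_cv_path_FOO+set}" = set; then
    echo "cached (2.67 idiom)"
  fi
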
--- a/common/autoconf/generated-configure.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/common/autoconf/generated-configure.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,6 +1,6 @@
 #! /bin/sh
 # Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.68 for OpenJDK jdk8.
+# Generated by GNU Autoconf 2.67 for OpenJDK jdk8.
 #
 # Report bugs to <build-dev@openjdk.java.net>.
 #
@@ -91,7 +91,6 @@
 IFS=" ""	$as_nl"
 
 # Find who we are.  Look in the path if we contain no directory separator.
-as_myself=
 case $0 in #((
   *[\\/]* ) as_myself=$0 ;;
   *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
@@ -217,18 +216,11 @@
   # We cannot yet assume a decent shell, so we have to provide a
 	# neutralization value for shells without unset; and this also
 	# works around shells that cannot unset nonexistent variables.
-	# Preserve -v and -x to the replacement shell.
 	BASH_ENV=/dev/null
 	ENV=/dev/null
 	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
 	export CONFIG_SHELL
-	case $- in # ((((
-	  *v*x* | *x*v* ) as_opts=-vx ;;
-	  *v* ) as_opts=-v ;;
-	  *x* ) as_opts=-x ;;
-	  * ) as_opts= ;;
-	esac
-	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+	exec "$CONFIG_SHELL" "$as_myself" ${1+"$@"}
 fi
 
     if test x$as_have_required = xno; then :
@@ -1468,7 +1460,7 @@
     $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
     expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
       $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
-    : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
+    : ${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}
     ;;
 
   esac
@@ -1904,7 +1896,7 @@
 if $ac_init_version; then
   cat <<\_ACEOF
 OpenJDK configure jdk8
-generated by GNU Autoconf 2.68
+generated by GNU Autoconf 2.67
 
 Copyright (C) 2010 Free Software Foundation, Inc.
 This configure script is free software; the Free Software Foundation
@@ -1950,7 +1942,7 @@
 
 	ac_retval=1
 fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
   as_fn_set_status $ac_retval
 
 } # ac_fn_c_try_compile
@@ -1988,7 +1980,7 @@
 
 	ac_retval=1
 fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
   as_fn_set_status $ac_retval
 
 } # ac_fn_cxx_try_compile
@@ -2026,7 +2018,7 @@
 
 	ac_retval=1
 fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
   as_fn_set_status $ac_retval
 
 } # ac_fn_objc_try_compile
@@ -2063,7 +2055,7 @@
 
     ac_retval=1
 fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
   as_fn_set_status $ac_retval
 
 } # ac_fn_c_try_cpp
@@ -2100,7 +2092,7 @@
 
     ac_retval=1
 fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
   as_fn_set_status $ac_retval
 
 } # ac_fn_cxx_try_cpp
@@ -2113,10 +2105,10 @@
 ac_fn_cxx_check_header_mongrel ()
 {
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if eval \${$3+:} false; then :
+  if eval "test \"\${$3+set}\"" = set; then :
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 fi
 eval ac_res=\$$3
@@ -2183,7 +2175,7 @@
 esac
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=\$ac_header_compiler"
@@ -2192,7 +2184,7 @@
 	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
 $as_echo "$ac_res" >&6; }
 fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
 
 } # ac_fn_cxx_check_header_mongrel
 
@@ -2233,7 +2225,7 @@
        ac_retval=$ac_status
 fi
   rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
   as_fn_set_status $ac_retval
 
 } # ac_fn_cxx_try_run
@@ -2247,7 +2239,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2265,7 +2257,7 @@
 eval ac_res=\$$3
 	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
 $as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
 
 } # ac_fn_cxx_check_header_compile
 
@@ -2442,7 +2434,7 @@
 rm -f conftest.val
 
   fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
   as_fn_set_status $ac_retval
 
 } # ac_fn_cxx_compute_int
@@ -2488,7 +2480,7 @@
   # interfere with the next link command; also delete a directory that is
   # left behind by Apple's compiler.  We do this before executing the actions.
   rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
   as_fn_set_status $ac_retval
 
 } # ac_fn_cxx_try_link
@@ -2501,7 +2493,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2556,7 +2548,7 @@
 eval ac_res=\$$3
 	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
 $as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
 
 } # ac_fn_cxx_check_func
 
@@ -2569,7 +2561,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2587,7 +2579,7 @@
 eval ac_res=\$$3
 	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
 $as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
 
 } # ac_fn_c_check_header_compile
 cat >config.log <<_ACEOF
@@ -2595,7 +2587,7 @@
 running configure, to aid debugging if configure makes a mistake.
 
 It was created by OpenJDK $as_me jdk8, which was
-generated by GNU Autoconf 2.68.  Invocation command line was
+generated by GNU Autoconf 2.67.  Invocation command line was
 
   $ $0 $@
 
@@ -2853,7 +2845,7 @@
       || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error $? "failed to load site script $ac_site_file
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
   fi
 done
 
@@ -3790,7 +3782,7 @@
 #CUSTOM_AUTOCONF_INCLUDE
 
 # Do not change or remove the following line, it is needed for consistency checks:
-DATE_WHEN_GENERATED=1370470729
+DATE_WHEN_GENERATED=1370949244
 
 ###############################################################################
 #
@@ -3828,7 +3820,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_BASENAME+:} false; then :
+if test "${ac_cv_path_BASENAME+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $BASENAME in
@@ -3887,7 +3879,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_BASH+:} false; then :
+if test "${ac_cv_path_BASH+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $BASH in
@@ -3946,7 +3938,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CAT+:} false; then :
+if test "${ac_cv_path_CAT+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CAT in
@@ -4005,7 +3997,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CHMOD+:} false; then :
+if test "${ac_cv_path_CHMOD+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CHMOD in
@@ -4064,7 +4056,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CMP+:} false; then :
+if test "${ac_cv_path_CMP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CMP in
@@ -4123,7 +4115,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_COMM+:} false; then :
+if test "${ac_cv_path_COMM+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $COMM in
@@ -4182,7 +4174,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CP+:} false; then :
+if test "${ac_cv_path_CP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CP in
@@ -4241,7 +4233,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CPIO+:} false; then :
+if test "${ac_cv_path_CPIO+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CPIO in
@@ -4300,7 +4292,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CUT+:} false; then :
+if test "${ac_cv_path_CUT+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CUT in
@@ -4359,7 +4351,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_DATE+:} false; then :
+if test "${ac_cv_path_DATE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $DATE in
@@ -4418,7 +4410,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_DIFF+:} false; then :
+if test "${ac_cv_path_DIFF+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $DIFF in
@@ -4477,7 +4469,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_DIRNAME+:} false; then :
+if test "${ac_cv_path_DIRNAME+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $DIRNAME in
@@ -4536,7 +4528,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_ECHO+:} false; then :
+if test "${ac_cv_path_ECHO+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $ECHO in
@@ -4595,7 +4587,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_EXPR+:} false; then :
+if test "${ac_cv_path_EXPR+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $EXPR in
@@ -4654,7 +4646,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_FILE+:} false; then :
+if test "${ac_cv_path_FILE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $FILE in
@@ -4713,7 +4705,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_FIND+:} false; then :
+if test "${ac_cv_path_FIND+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $FIND in
@@ -4772,7 +4764,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_HEAD+:} false; then :
+if test "${ac_cv_path_HEAD+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $HEAD in
@@ -4831,7 +4823,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_LN+:} false; then :
+if test "${ac_cv_path_LN+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $LN in
@@ -4890,7 +4882,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_LS+:} false; then :
+if test "${ac_cv_path_LS+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $LS in
@@ -4949,7 +4941,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_MKDIR+:} false; then :
+if test "${ac_cv_path_MKDIR+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $MKDIR in
@@ -5008,7 +5000,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_MKTEMP+:} false; then :
+if test "${ac_cv_path_MKTEMP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $MKTEMP in
@@ -5067,7 +5059,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_MV+:} false; then :
+if test "${ac_cv_path_MV+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $MV in
@@ -5126,7 +5118,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_PRINTF+:} false; then :
+if test "${ac_cv_path_PRINTF+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $PRINTF in
@@ -5185,7 +5177,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_RM+:} false; then :
+if test "${ac_cv_path_RM+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $RM in
@@ -5244,7 +5236,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_SH+:} false; then :
+if test "${ac_cv_path_SH+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $SH in
@@ -5303,7 +5295,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_SORT+:} false; then :
+if test "${ac_cv_path_SORT+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $SORT in
@@ -5362,7 +5354,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_TAIL+:} false; then :
+if test "${ac_cv_path_TAIL+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $TAIL in
@@ -5421,7 +5413,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_TAR+:} false; then :
+if test "${ac_cv_path_TAR+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $TAR in
@@ -5480,7 +5472,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_TEE+:} false; then :
+if test "${ac_cv_path_TEE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $TEE in
@@ -5539,7 +5531,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_TOUCH+:} false; then :
+if test "${ac_cv_path_TOUCH+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $TOUCH in
@@ -5598,7 +5590,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_TR+:} false; then :
+if test "${ac_cv_path_TR+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $TR in
@@ -5657,7 +5649,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_UNAME+:} false; then :
+if test "${ac_cv_path_UNAME+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $UNAME in
@@ -5716,7 +5708,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_UNIQ+:} false; then :
+if test "${ac_cv_path_UNIQ+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $UNIQ in
@@ -5775,7 +5767,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_WC+:} false; then :
+if test "${ac_cv_path_WC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $WC in
@@ -5834,7 +5826,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_WHICH+:} false; then :
+if test "${ac_cv_path_WHICH+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $WHICH in
@@ -5893,7 +5885,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_XARGS+:} false; then :
+if test "${ac_cv_path_XARGS+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $XARGS in
@@ -5953,7 +5945,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_AWK+:} false; then :
+if test "${ac_cv_prog_AWK+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$AWK"; then
@@ -6003,7 +5995,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
 $as_echo_n "checking for grep that handles long lines and -e... " >&6; }
-if ${ac_cv_path_GREP+:} false; then :
+if test "${ac_cv_path_GREP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -z "$GREP"; then
@@ -6078,7 +6070,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5
 $as_echo_n "checking for egrep... " >&6; }
-if ${ac_cv_path_EGREP+:} false; then :
+if test "${ac_cv_path_EGREP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
@@ -6157,7 +6149,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5
 $as_echo_n "checking for fgrep... " >&6; }
-if ${ac_cv_path_FGREP+:} false; then :
+if test "${ac_cv_path_FGREP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1
@@ -6236,7 +6228,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5
 $as_echo_n "checking for a sed that does not truncate output... " >&6; }
-if ${ac_cv_path_SED+:} false; then :
+if test "${ac_cv_path_SED+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
             ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
@@ -6322,7 +6314,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_NAWK+:} false; then :
+if test "${ac_cv_path_NAWK+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $NAWK in
@@ -6386,7 +6378,7 @@
 set dummy cygpath; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CYGPATH+:} false; then :
+if test "${ac_cv_path_CYGPATH+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CYGPATH in
@@ -6426,7 +6418,7 @@
 set dummy readlink; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_READLINK+:} false; then :
+if test "${ac_cv_path_READLINK+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $READLINK in
@@ -6466,7 +6458,7 @@
 set dummy df; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_DF+:} false; then :
+if test "${ac_cv_path_DF+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $DF in
@@ -6506,7 +6498,7 @@
 set dummy SetFile; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_SETFILE+:} false; then :
+if test "${ac_cv_path_SETFILE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $SETFILE in
@@ -6552,7 +6544,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
 $as_echo_n "checking build system type... " >&6; }
-if ${ac_cv_build+:} false; then :
+if test "${ac_cv_build+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_build_alias=$build_alias
@@ -6568,7 +6560,7 @@
 $as_echo "$ac_cv_build" >&6; }
 case $ac_cv_build in
 *-*-*) ;;
-*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
+*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5 ;;
 esac
 build=$ac_cv_build
 ac_save_IFS=$IFS; IFS='-'
@@ -6586,7 +6578,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
 $as_echo_n "checking host system type... " >&6; }
-if ${ac_cv_host+:} false; then :
+if test "${ac_cv_host+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test "x$host_alias" = x; then
@@ -6601,7 +6593,7 @@
 $as_echo "$ac_cv_host" >&6; }
 case $ac_cv_host in
 *-*-*) ;;
-*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
+*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5 ;;
 esac
 host=$ac_cv_host
 ac_save_IFS=$IFS; IFS='-'
@@ -6619,7 +6611,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking target system type" >&5
 $as_echo_n "checking target system type... " >&6; }
-if ${ac_cv_target+:} false; then :
+if test "${ac_cv_target+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test "x$target_alias" = x; then
@@ -6634,7 +6626,7 @@
 $as_echo "$ac_cv_target" >&6; }
 case $ac_cv_target in
 *-*-*) ;;
-*) as_fn_error $? "invalid value of canonical target" "$LINENO" 5;;
+*) as_fn_error $? "invalid value of canonical target" "$LINENO" 5 ;;
 esac
 target=$ac_cv_target
 ac_save_IFS=$IFS; IFS='-'
@@ -8031,7 +8023,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_PKGHANDLER+:} false; then :
+if test "${ac_cv_prog_PKGHANDLER+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$PKGHANDLER"; then
@@ -8396,7 +8388,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CHECK_GMAKE+:} false; then :
+if test "${ac_cv_path_CHECK_GMAKE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CHECK_GMAKE in
@@ -8750,7 +8742,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CHECK_MAKE+:} false; then :
+if test "${ac_cv_path_CHECK_MAKE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CHECK_MAKE in
@@ -9109,7 +9101,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CHECK_TOOLSDIR_GMAKE+:} false; then :
+if test "${ac_cv_path_CHECK_TOOLSDIR_GMAKE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CHECK_TOOLSDIR_GMAKE in
@@ -9462,7 +9454,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CHECK_TOOLSDIR_MAKE+:} false; then :
+if test "${ac_cv_path_CHECK_TOOLSDIR_MAKE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CHECK_TOOLSDIR_MAKE in
@@ -9858,7 +9850,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_UNZIP+:} false; then :
+if test "${ac_cv_path_UNZIP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $UNZIP in
@@ -9917,7 +9909,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_ZIP+:} false; then :
+if test "${ac_cv_path_ZIP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $ZIP in
@@ -9976,7 +9968,7 @@
 set dummy ldd; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_LDD+:} false; then :
+if test "${ac_cv_path_LDD+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $LDD in
@@ -10022,7 +10014,7 @@
 set dummy otool; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_OTOOL+:} false; then :
+if test "${ac_cv_path_OTOOL+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $OTOOL in
@@ -10067,7 +10059,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_READELF+:} false; then :
+if test "${ac_cv_path_READELF+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $READELF in
@@ -10110,7 +10102,7 @@
 set dummy hg; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_HG+:} false; then :
+if test "${ac_cv_path_HG+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $HG in
@@ -10150,7 +10142,7 @@
 set dummy stat; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_STAT+:} false; then :
+if test "${ac_cv_path_STAT+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $STAT in
@@ -10190,7 +10182,7 @@
 set dummy time; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_TIME+:} false; then :
+if test "${ac_cv_path_TIME+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $TIME in
@@ -10235,7 +10227,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_COMM+:} false; then :
+if test "${ac_cv_path_COMM+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $COMM in
@@ -10297,7 +10289,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_XATTR+:} false; then :
+if test "${ac_cv_path_XATTR+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $XATTR in
@@ -10353,7 +10345,7 @@
 set dummy codesign; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CODESIGN+:} false; then :
+if test "${ac_cv_path_CODESIGN+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CODESIGN in
@@ -10417,7 +10409,7 @@
 set dummy ${ac_tool_prefix}pkg-config; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_PKG_CONFIG+:} false; then :
+if test "${ac_cv_path_PKG_CONFIG+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $PKG_CONFIG in
@@ -10460,7 +10452,7 @@
 set dummy pkg-config; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then :
+if test "${ac_cv_path_ac_pt_PKG_CONFIG+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $ac_pt_PKG_CONFIG in
@@ -10633,7 +10625,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_BDEPS_UNZIP+:} false; then :
+if test "${ac_cv_prog_BDEPS_UNZIP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$BDEPS_UNZIP"; then
@@ -10679,7 +10671,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_BDEPS_FTP+:} false; then :
+if test "${ac_cv_prog_BDEPS_FTP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$BDEPS_FTP"; then
@@ -10790,11 +10782,7 @@
 if test "x$with_cacerts_file" != x; then
     CACERTS_FILE=$with_cacerts_file
 else
-    if test "x$OPENJDK" = "xtrue"; then
-        CACERTS_FILE=${SRC_ROOT}/jdk/src/share/lib/security/cacerts
-    else
-        CACERTS_FILE=${SRC_ROOT}/jdk/src/closed/share/lib/security/cacerts.internal
-    fi
+    CACERTS_FILE=${SRC_ROOT}/jdk/src/share/lib/security/cacerts
 fi
 
 
@@ -11979,7 +11967,7 @@
 set dummy javac; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_JAVAC_CHECK+:} false; then :
+if test "${ac_cv_path_JAVAC_CHECK+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $JAVAC_CHECK in
@@ -12019,7 +12007,7 @@
 set dummy java; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_JAVA_CHECK+:} false; then :
+if test "${ac_cv_path_JAVA_CHECK+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $JAVA_CHECK in
@@ -16348,7 +16336,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_JTREGEXE+:} false; then :
+if test "${ac_cv_path_JTREGEXE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $JTREGEXE in
@@ -16416,7 +16404,7 @@
 set dummy link; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CYGWIN_LINK+:} false; then :
+if test "${ac_cv_path_CYGWIN_LINK+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CYGWIN_LINK in
@@ -16518,6 +16506,123 @@
     as_fn_error $? "Cannot locate a valid Visual Studio installation" "$LINENO" 5
   fi
 
+  if test "x$VS100COMNTOOLS" != x; then
+
+  if test "x$VS_ENV_CMD" = x; then
+    VS100BASE="$VS100COMNTOOLS/../.."
+    METHOD="VS100COMNTOOLS variable"
+
+  windows_path="$VS100BASE"
+  if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
+    unix_path=`$CYGPATH -u "$windows_path"`
+    VS100BASE="$unix_path"
+  elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
+    unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
+    VS100BASE="$unix_path"
+  fi
+
+    if test -d "$VS100BASE"; then
+      if test -f "$VS100BASE/$VCVARSFILE"; then
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
+$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
+        VS_ENV_CMD="$VS100BASE/$VCVARSFILE"
+      else
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
+$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&5
+$as_echo "$as_me: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&6;}
+      fi
+    fi
+  fi
+
+  fi
+  if test "x$PROGRAMFILES" != x; then
+
+  if test "x$VS_ENV_CMD" = x; then
+    VS100BASE="$PROGRAMFILES/Microsoft Visual Studio 10.0"
+    METHOD="well-known name"
+
+  windows_path="$VS100BASE"
+  if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
+    unix_path=`$CYGPATH -u "$windows_path"`
+    VS100BASE="$unix_path"
+  elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
+    unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
+    VS100BASE="$unix_path"
+  fi
+
+    if test -d "$VS100BASE"; then
+      if test -f "$VS100BASE/$VCVARSFILE"; then
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
+$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
+        VS_ENV_CMD="$VS100BASE/$VCVARSFILE"
+      else
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
+$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&5
+$as_echo "$as_me: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&6;}
+      fi
+    fi
+  fi
+
+  fi
+
+  if test "x$VS_ENV_CMD" = x; then
+    VS100BASE="C:/Program Files/Microsoft Visual Studio 10.0"
+    METHOD="well-known name"
+
+  windows_path="$VS100BASE"
+  if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
+    unix_path=`$CYGPATH -u "$windows_path"`
+    VS100BASE="$unix_path"
+  elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
+    unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
+    VS100BASE="$unix_path"
+  fi
+
+    if test -d "$VS100BASE"; then
+      if test -f "$VS100BASE/$VCVARSFILE"; then
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
+$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
+        VS_ENV_CMD="$VS100BASE/$VCVARSFILE"
+      else
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
+$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&5
+$as_echo "$as_me: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&6;}
+      fi
+    fi
+  fi
+
+
+  if test "x$VS_ENV_CMD" = x; then
+    VS100BASE="C:/Program Files (x86)/Microsoft Visual Studio 10.0"
+    METHOD="well-known name"
+
+  windows_path="$VS100BASE"
+  if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
+    unix_path=`$CYGPATH -u "$windows_path"`
+    VS100BASE="$unix_path"
+  elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
+    unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
+    VS100BASE="$unix_path"
+  fi
+
+    if test -d "$VS100BASE"; then
+      if test -f "$VS100BASE/$VCVARSFILE"; then
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
+$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
+        VS_ENV_CMD="$VS100BASE/$VCVARSFILE"
+      else
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
+$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
+        { $as_echo "$as_me:${as_lineno-$LINENO}: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&5
+$as_echo "$as_me: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&6;}
+      fi
+    fi
+  fi
+
+
   if test "x$ProgramW6432" != x; then
 
   if test "x$VS_ENV_CMD" = x; then
@@ -16725,123 +16830,6 @@
   fi
 
 
-  if test "x$VS100COMNTOOLS" != x; then
-
-  if test "x$VS_ENV_CMD" = x; then
-    VS100BASE="$VS100COMNTOOLS/../.."
-    METHOD="VS100COMNTOOLS variable"
-
-  windows_path="$VS100BASE"
-  if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
-    unix_path=`$CYGPATH -u "$windows_path"`
-    VS100BASE="$unix_path"
-  elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
-    unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
-    VS100BASE="$unix_path"
-  fi
-
-    if test -d "$VS100BASE"; then
-      if test -f "$VS100BASE/$VCVARSFILE"; then
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
-$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
-        VS_ENV_CMD="$VS100BASE/$VCVARSFILE"
-      else
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
-$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&5
-$as_echo "$as_me: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&6;}
-      fi
-    fi
-  fi
-
-  fi
-  if test "x$PROGRAMFILES" != x; then
-
-  if test "x$VS_ENV_CMD" = x; then
-    VS100BASE="$PROGRAMFILES/Microsoft Visual Studio 10.0"
-    METHOD="well-known name"
-
-  windows_path="$VS100BASE"
-  if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
-    unix_path=`$CYGPATH -u "$windows_path"`
-    VS100BASE="$unix_path"
-  elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
-    unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
-    VS100BASE="$unix_path"
-  fi
-
-    if test -d "$VS100BASE"; then
-      if test -f "$VS100BASE/$VCVARSFILE"; then
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
-$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
-        VS_ENV_CMD="$VS100BASE/$VCVARSFILE"
-      else
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
-$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&5
-$as_echo "$as_me: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&6;}
-      fi
-    fi
-  fi
-
-  fi
-
-  if test "x$VS_ENV_CMD" = x; then
-    VS100BASE="C:/Program Files/Microsoft Visual Studio 10.0"
-    METHOD="well-known name"
-
-  windows_path="$VS100BASE"
-  if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
-    unix_path=`$CYGPATH -u "$windows_path"`
-    VS100BASE="$unix_path"
-  elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
-    unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
-    VS100BASE="$unix_path"
-  fi
-
-    if test -d "$VS100BASE"; then
-      if test -f "$VS100BASE/$VCVARSFILE"; then
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
-$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
-        VS_ENV_CMD="$VS100BASE/$VCVARSFILE"
-      else
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
-$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&5
-$as_echo "$as_me: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&6;}
-      fi
-    fi
-  fi
-
-
-  if test "x$VS_ENV_CMD" = x; then
-    VS100BASE="C:/Program Files (x86)/Microsoft Visual Studio 10.0"
-    METHOD="well-known name"
-
-  windows_path="$VS100BASE"
-  if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
-    unix_path=`$CYGPATH -u "$windows_path"`
-    VS100BASE="$unix_path"
-  elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
-    unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
-    VS100BASE="$unix_path"
-  fi
-
-    if test -d "$VS100BASE"; then
-      if test -f "$VS100BASE/$VCVARSFILE"; then
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
-$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
-        VS_ENV_CMD="$VS100BASE/$VCVARSFILE"
-      else
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Found Visual Studio installation at $VS100BASE using $METHOD" >&5
-$as_echo "$as_me: Found Visual Studio installation at $VS100BASE using $METHOD" >&6;}
-        { $as_echo "$as_me:${as_lineno-$LINENO}: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&5
-$as_echo "$as_me: Warning: $VCVARSFILE is missing, this is probably Visual Studio Express. Ignoring" >&6;}
-      fi
-    fi
-  fi
-
-
   if test "x$VS_ENV_CMD" != x; then
     # We have found a Visual Studio environment on disk, let's extract variables from the vsvars bat file.
 
@@ -17243,11 +17231,25 @@
 $as_echo "$as_me: Warning: msvcr100.dll not found in VCINSTALLDIR: $VCINSTALLDIR" >&6;}
       fi
     fi
+    # Try some fallback alternatives
     if test "x$MSVCR_DLL" = x; then
-      if test -f "$SYSTEMROOT/system32/msvcr100.dll"; then
-        { $as_echo "$as_me:${as_lineno-$LINENO}: msvcr100.dll found in $SYSTEMROOT/system32" >&5
+      # If visual studio express is installed, there is usually one with the debugger
+      if test "x$VS100COMNTOOLS" != x; then
+        if test "x$OPENJDK_TARGET_CPU_BITS" = x64; then
+          MSVCR_DLL=`find "$VS100COMNTOOLS/.." -name msvcr100.dll | grep -i x64 | head --lines 1`
+          { $as_echo "$as_me:${as_lineno-$LINENO}: msvcr100.dll found in $VS100COMNTOOLS..: $VS100COMNTOOLS.." >&5
+$as_echo "$as_me: msvcr100.dll found in $VS100COMNTOOLS..: $VS100COMNTOOLS.." >&6;}
+        fi
+      fi
+    fi
+    if test "x$MSVCR_DLL" = x; then
+      if test "x$OPENJDK_TARGET_CPU_BITS" = x32; then
+        # Fallback for 32bit builds, look in the windows directory.
+        if test -f "$SYSTEMROOT/system32/msvcr100.dll"; then
+          { $as_echo "$as_me:${as_lineno-$LINENO}: msvcr100.dll found in $SYSTEMROOT/system32" >&5
 $as_echo "$as_me: msvcr100.dll found in $SYSTEMROOT/system32" >&6;}
-        MSVCR_DLL="$SYSTEMROOT/system32/msvcr100.dll"
+          MSVCR_DLL="$SYSTEMROOT/system32/msvcr100.dll"
+        fi
       fi
     fi
   fi
@@ -17843,7 +17845,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_BUILD_CC+:} false; then :
+if test "${ac_cv_path_BUILD_CC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $BUILD_CC in
@@ -18154,7 +18156,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_BUILD_CXX+:} false; then :
+if test "${ac_cv_path_BUILD_CXX+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $BUILD_CXX in
@@ -18463,7 +18465,7 @@
 set dummy ld; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_BUILD_LD+:} false; then :
+if test "${ac_cv_path_BUILD_LD+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $BUILD_LD in
@@ -18979,7 +18981,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_TOOLS_DIR_CC+:} false; then :
+if test "${ac_cv_path_TOOLS_DIR_CC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $TOOLS_DIR_CC in
@@ -19031,7 +19033,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_POTENTIAL_CC+:} false; then :
+if test "${ac_cv_path_POTENTIAL_CC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $POTENTIAL_CC in
@@ -19444,7 +19446,7 @@
 set dummy $ac_tool_prefix$ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_PROPER_COMPILER_CC+:} false; then :
+if test "${ac_cv_prog_PROPER_COMPILER_CC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$PROPER_COMPILER_CC"; then
@@ -19488,7 +19490,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_PROPER_COMPILER_CC+:} false; then :
+if test "${ac_cv_prog_ac_ct_PROPER_COMPILER_CC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_PROPER_COMPILER_CC"; then
@@ -19938,7 +19940,7 @@
 set dummy $ac_tool_prefix$ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
+if test "${ac_cv_prog_CC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$CC"; then
@@ -19982,7 +19984,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CC+:} false; then :
+if test "${ac_cv_prog_ac_ct_CC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_CC"; then
@@ -20035,7 +20037,7 @@
 test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error $? "no acceptable C compiler found in \$PATH
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
 
 # Provide some information about the compiler.
 $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
@@ -20150,7 +20152,7 @@
 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error 77 "C compiler cannot create executables
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
 $as_echo "yes" >&6; }
@@ -20193,7 +20195,7 @@
   { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error $? "cannot compute suffix of executables: cannot compile and link
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
 fi
 rm -f conftest conftest$ac_cv_exeext
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
@@ -20252,7 +20254,7 @@
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error $? "cannot run C compiled programs.
 If you meant to cross compile, use \`--host'.
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
     fi
   fi
 fi
@@ -20263,7 +20265,7 @@
 ac_clean_files=$ac_clean_files_save
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
 $as_echo_n "checking for suffix of object files... " >&6; }
-if ${ac_cv_objext+:} false; then :
+if test "${ac_cv_objext+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -20304,7 +20306,7 @@
 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error $? "cannot compute suffix of object files: cannot compile
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
 fi
 rm -f conftest.$ac_cv_objext conftest.$ac_ext
 fi
@@ -20314,7 +20316,7 @@
 ac_objext=$OBJEXT
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
-if ${ac_cv_c_compiler_gnu+:} false; then :
+if test "${ac_cv_c_compiler_gnu+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -20351,7 +20353,7 @@
 ac_save_CFLAGS=$CFLAGS
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
 $as_echo_n "checking whether $CC accepts -g... " >&6; }
-if ${ac_cv_prog_cc_g+:} false; then :
+if test "${ac_cv_prog_cc_g+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_save_c_werror_flag=$ac_c_werror_flag
@@ -20429,7 +20431,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
-if ${ac_cv_prog_cc_c89+:} false; then :
+if test "${ac_cv_prog_cc_c89+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_cv_prog_cc_c89=no
@@ -20552,7 +20554,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_TOOLS_DIR_CXX+:} false; then :
+if test "${ac_cv_path_TOOLS_DIR_CXX+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $TOOLS_DIR_CXX in
@@ -20604,7 +20606,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_POTENTIAL_CXX+:} false; then :
+if test "${ac_cv_path_POTENTIAL_CXX+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $POTENTIAL_CXX in
@@ -21017,7 +21019,7 @@
 set dummy $ac_tool_prefix$ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_PROPER_COMPILER_CXX+:} false; then :
+if test "${ac_cv_prog_PROPER_COMPILER_CXX+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$PROPER_COMPILER_CXX"; then
@@ -21061,7 +21063,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_PROPER_COMPILER_CXX+:} false; then :
+if test "${ac_cv_prog_ac_ct_PROPER_COMPILER_CXX+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_PROPER_COMPILER_CXX"; then
@@ -21515,7 +21517,7 @@
 set dummy $ac_tool_prefix$ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CXX+:} false; then :
+if test "${ac_cv_prog_CXX+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$CXX"; then
@@ -21559,7 +21561,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CXX+:} false; then :
+if test "${ac_cv_prog_ac_ct_CXX+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_CXX"; then
@@ -21637,7 +21639,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5
 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; }
-if ${ac_cv_cxx_compiler_gnu+:} false; then :
+if test "${ac_cv_cxx_compiler_gnu+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -21674,7 +21676,7 @@
 ac_save_CXXFLAGS=$CXXFLAGS
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5
 $as_echo_n "checking whether $CXX accepts -g... " >&6; }
-if ${ac_cv_prog_cxx_g+:} false; then :
+if test "${ac_cv_prog_cxx_g+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_save_cxx_werror_flag=$ac_cxx_werror_flag
@@ -21772,7 +21774,7 @@
 set dummy $ac_tool_prefix$ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OBJC+:} false; then :
+if test "${ac_cv_prog_OBJC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$OBJC"; then
@@ -21816,7 +21818,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OBJC+:} false; then :
+if test "${ac_cv_prog_ac_ct_OBJC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_OBJC"; then
@@ -21892,7 +21894,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Objective C compiler" >&5
 $as_echo_n "checking whether we are using the GNU Objective C compiler... " >&6; }
-if ${ac_cv_objc_compiler_gnu+:} false; then :
+if test "${ac_cv_objc_compiler_gnu+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -21929,7 +21931,7 @@
 ac_save_OBJCFLAGS=$OBJCFLAGS
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $OBJC accepts -g" >&5
 $as_echo_n "checking whether $OBJC accepts -g... " >&6; }
-if ${ac_cv_prog_objc_g+:} false; then :
+if test "${ac_cv_prog_objc_g+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_save_objc_werror_flag=$ac_objc_werror_flag
@@ -22305,7 +22307,7 @@
 set dummy ${ac_tool_prefix}ar; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_AR+:} false; then :
+if test "${ac_cv_prog_AR+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$AR"; then
@@ -22345,7 +22347,7 @@
 set dummy ar; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_AR+:} false; then :
+if test "${ac_cv_prog_ac_ct_AR+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_AR"; then
@@ -22687,7 +22689,7 @@
 set dummy link; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_WINLD+:} false; then :
+if test "${ac_cv_prog_WINLD+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$WINLD"; then
@@ -23026,7 +23028,7 @@
 set dummy mt; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_MT+:} false; then :
+if test "${ac_cv_prog_MT+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$MT"; then
@@ -23347,7 +23349,7 @@
 set dummy rc; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_RC+:} false; then :
+if test "${ac_cv_prog_RC+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$RC"; then
@@ -23738,7 +23740,7 @@
 set dummy lib; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_WINAR+:} false; then :
+if test "${ac_cv_prog_WINAR+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$WINAR"; then
@@ -24044,7 +24046,7 @@
 set dummy dumpbin; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DUMPBIN+:} false; then :
+if test "${ac_cv_prog_DUMPBIN+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$DUMPBIN"; then
@@ -24363,7 +24365,7 @@
   CPP=
 fi
 if test -z "$CPP"; then
-  if ${ac_cv_prog_CPP+:} false; then :
+  if test "${ac_cv_prog_CPP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
       # Double quotes because CPP needs to be expanded
@@ -24479,7 +24481,7 @@
   { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
 fi
 
 ac_ext=cpp
@@ -24763,7 +24765,7 @@
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5
 $as_echo_n "checking how to run the C++ preprocessor... " >&6; }
 if test -z "$CXXCPP"; then
-  if ${ac_cv_prog_CXXCPP+:} false; then :
+  if test "${ac_cv_prog_CXXCPP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
       # Double quotes because CXXCPP needs to be expanded
@@ -24879,7 +24881,7 @@
   { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error $? "C++ preprocessor \"$CXXCPP\" fails sanity check
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
 fi
 
 ac_ext=cpp
@@ -25181,7 +25183,7 @@
 set dummy as; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_AS+:} false; then :
+if test "${ac_cv_path_AS+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $AS in
@@ -25493,7 +25495,7 @@
 set dummy nm; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_NM+:} false; then :
+if test "${ac_cv_path_NM+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $NM in
@@ -25799,7 +25801,7 @@
 set dummy gnm; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_GNM+:} false; then :
+if test "${ac_cv_path_GNM+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $GNM in
@@ -26105,7 +26107,7 @@
 set dummy strip; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_STRIP+:} false; then :
+if test "${ac_cv_path_STRIP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $STRIP in
@@ -26411,7 +26413,7 @@
 set dummy mcs; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_MCS+:} false; then :
+if test "${ac_cv_path_MCS+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $MCS in
@@ -26719,7 +26721,7 @@
 set dummy ${ac_tool_prefix}nm; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_NM+:} false; then :
+if test "${ac_cv_prog_NM+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$NM"; then
@@ -26759,7 +26761,7 @@
 set dummy nm; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_NM+:} false; then :
+if test "${ac_cv_prog_ac_ct_NM+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_NM"; then
@@ -27079,7 +27081,7 @@
 set dummy ${ac_tool_prefix}strip; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_STRIP+:} false; then :
+if test "${ac_cv_prog_STRIP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$STRIP"; then
@@ -27119,7 +27121,7 @@
 set dummy strip; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
+if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_STRIP"; then
@@ -27444,7 +27446,7 @@
 set dummy $ac_tool_prefix$ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OBJCOPY+:} false; then :
+if test "${ac_cv_prog_OBJCOPY+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$OBJCOPY"; then
@@ -27488,7 +27490,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OBJCOPY+:} false; then :
+if test "${ac_cv_prog_ac_ct_OBJCOPY+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_OBJCOPY"; then
@@ -27815,7 +27817,7 @@
 set dummy $ac_tool_prefix$ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OBJDUMP+:} false; then :
+if test "${ac_cv_prog_OBJDUMP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$OBJDUMP"; then
@@ -27859,7 +27861,7 @@
 set dummy $ac_prog; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then :
+if test "${ac_cv_prog_ac_ct_OBJDUMP+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$ac_ct_OBJDUMP"; then
@@ -28183,7 +28185,7 @@
 set dummy lipo; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_LIPO+:} false; then :
+if test "${ac_cv_path_LIPO+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $LIPO in
@@ -28500,7 +28502,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
 $as_echo_n "checking for ANSI C header files... " >&6; }
-if ${ac_cv_header_stdc+:} false; then :
+if test "${ac_cv_header_stdc+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -28677,7 +28679,7 @@
 for ac_header in stdio.h
 do :
   ac_fn_cxx_check_header_mongrel "$LINENO" "stdio.h" "ac_cv_header_stdio_h" "$ac_includes_default"
-if test "x$ac_cv_header_stdio_h" = xyes; then :
+if test "x$ac_cv_header_stdio_h" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define HAVE_STDIO_H 1
 _ACEOF
@@ -28706,7 +28708,7 @@
 # This bug is HP SR number 8606223364.
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking size of int *" >&5
 $as_echo_n "checking size of int *... " >&6; }
-if ${ac_cv_sizeof_int_p+:} false; then :
+if test "${ac_cv_sizeof_int_p+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if ac_fn_cxx_compute_int "$LINENO" "(long int) (sizeof (int *))" "ac_cv_sizeof_int_p"        "$ac_includes_default"; then :
@@ -28716,7 +28718,7 @@
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
 as_fn_error 77 "cannot compute sizeof (int *)
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_int_p=0
    fi
@@ -28763,7 +28765,7 @@
 #
  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether byte ordering is bigendian" >&5
 $as_echo_n "checking whether byte ordering is bigendian... " >&6; }
-if ${ac_cv_c_bigendian+:} false; then :
+if test "${ac_cv_c_bigendian+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_cv_c_bigendian=unknown
@@ -29930,8 +29932,8 @@
   have_x=disabled
 else
   case $x_includes,$x_libraries in #(
-    *\'*) as_fn_error $? "cannot use X directory names containing '" "$LINENO" 5;; #(
-    *,NONE | NONE,*) if ${ac_cv_have_x+:} false; then :
+    *\'*) as_fn_error $? "cannot use X directory names containing '" "$LINENO" 5 ;; #(
+    *,NONE | NONE,*) if test "${ac_cv_have_x+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   # One or both of the vars are not set, and there is no cached value.
@@ -30208,7 +30210,7 @@
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dnet_ntoa in -ldnet" >&5
 $as_echo_n "checking for dnet_ntoa in -ldnet... " >&6; }
-if ${ac_cv_lib_dnet_dnet_ntoa+:} false; then :
+if test "${ac_cv_lib_dnet_dnet_ntoa+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -30242,14 +30244,14 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dnet_dnet_ntoa" >&5
 $as_echo "$ac_cv_lib_dnet_dnet_ntoa" >&6; }
-if test "x$ac_cv_lib_dnet_dnet_ntoa" = xyes; then :
+if test "x$ac_cv_lib_dnet_dnet_ntoa" = x""yes; then :
   X_EXTRA_LIBS="$X_EXTRA_LIBS -ldnet"
 fi
 
     if test $ac_cv_lib_dnet_dnet_ntoa = no; then
       { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dnet_ntoa in -ldnet_stub" >&5
 $as_echo_n "checking for dnet_ntoa in -ldnet_stub... " >&6; }
-if ${ac_cv_lib_dnet_stub_dnet_ntoa+:} false; then :
+if test "${ac_cv_lib_dnet_stub_dnet_ntoa+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -30283,7 +30285,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dnet_stub_dnet_ntoa" >&5
 $as_echo "$ac_cv_lib_dnet_stub_dnet_ntoa" >&6; }
-if test "x$ac_cv_lib_dnet_stub_dnet_ntoa" = xyes; then :
+if test "x$ac_cv_lib_dnet_stub_dnet_ntoa" = x""yes; then :
   X_EXTRA_LIBS="$X_EXTRA_LIBS -ldnet_stub"
 fi
 
@@ -30302,14 +30304,14 @@
     # The functions gethostbyname, getservbyname, and inet_addr are
     # in -lbsd on LynxOS 3.0.1/i386, according to Lars Hecking.
     ac_fn_cxx_check_func "$LINENO" "gethostbyname" "ac_cv_func_gethostbyname"
-if test "x$ac_cv_func_gethostbyname" = xyes; then :
+if test "x$ac_cv_func_gethostbyname" = x""yes; then :
 
 fi
 
     if test $ac_cv_func_gethostbyname = no; then
       { $as_echo "$as_me:${as_lineno-$LINENO}: checking for gethostbyname in -lnsl" >&5
 $as_echo_n "checking for gethostbyname in -lnsl... " >&6; }
-if ${ac_cv_lib_nsl_gethostbyname+:} false; then :
+if test "${ac_cv_lib_nsl_gethostbyname+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -30343,14 +30345,14 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_nsl_gethostbyname" >&5
 $as_echo "$ac_cv_lib_nsl_gethostbyname" >&6; }
-if test "x$ac_cv_lib_nsl_gethostbyname" = xyes; then :
+if test "x$ac_cv_lib_nsl_gethostbyname" = x""yes; then :
   X_EXTRA_LIBS="$X_EXTRA_LIBS -lnsl"
 fi
 
       if test $ac_cv_lib_nsl_gethostbyname = no; then
 	{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for gethostbyname in -lbsd" >&5
 $as_echo_n "checking for gethostbyname in -lbsd... " >&6; }
-if ${ac_cv_lib_bsd_gethostbyname+:} false; then :
+if test "${ac_cv_lib_bsd_gethostbyname+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -30384,7 +30386,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_bsd_gethostbyname" >&5
 $as_echo "$ac_cv_lib_bsd_gethostbyname" >&6; }
-if test "x$ac_cv_lib_bsd_gethostbyname" = xyes; then :
+if test "x$ac_cv_lib_bsd_gethostbyname" = x""yes; then :
   X_EXTRA_LIBS="$X_EXTRA_LIBS -lbsd"
 fi
 
@@ -30399,14 +30401,14 @@
     # must be given before -lnsl if both are needed.  We assume that
     # if connect needs -lnsl, so does gethostbyname.
     ac_fn_cxx_check_func "$LINENO" "connect" "ac_cv_func_connect"
-if test "x$ac_cv_func_connect" = xyes; then :
+if test "x$ac_cv_func_connect" = x""yes; then :
 
 fi
 
     if test $ac_cv_func_connect = no; then
       { $as_echo "$as_me:${as_lineno-$LINENO}: checking for connect in -lsocket" >&5
 $as_echo_n "checking for connect in -lsocket... " >&6; }
-if ${ac_cv_lib_socket_connect+:} false; then :
+if test "${ac_cv_lib_socket_connect+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -30440,7 +30442,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_socket_connect" >&5
 $as_echo "$ac_cv_lib_socket_connect" >&6; }
-if test "x$ac_cv_lib_socket_connect" = xyes; then :
+if test "x$ac_cv_lib_socket_connect" = x""yes; then :
   X_EXTRA_LIBS="-lsocket $X_EXTRA_LIBS"
 fi
 
@@ -30448,14 +30450,14 @@
 
     # Guillermo Gomez says -lposix is necessary on A/UX.
     ac_fn_cxx_check_func "$LINENO" "remove" "ac_cv_func_remove"
-if test "x$ac_cv_func_remove" = xyes; then :
+if test "x$ac_cv_func_remove" = x""yes; then :
 
 fi
 
     if test $ac_cv_func_remove = no; then
       { $as_echo "$as_me:${as_lineno-$LINENO}: checking for remove in -lposix" >&5
 $as_echo_n "checking for remove in -lposix... " >&6; }
-if ${ac_cv_lib_posix_remove+:} false; then :
+if test "${ac_cv_lib_posix_remove+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -30489,7 +30491,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_posix_remove" >&5
 $as_echo "$ac_cv_lib_posix_remove" >&6; }
-if test "x$ac_cv_lib_posix_remove" = xyes; then :
+if test "x$ac_cv_lib_posix_remove" = x""yes; then :
   X_EXTRA_LIBS="$X_EXTRA_LIBS -lposix"
 fi
 
@@ -30497,14 +30499,14 @@
 
     # BSDI BSD/OS 2.1 needs -lipc for XOpenDisplay.
     ac_fn_cxx_check_func "$LINENO" "shmat" "ac_cv_func_shmat"
-if test "x$ac_cv_func_shmat" = xyes; then :
+if test "x$ac_cv_func_shmat" = x""yes; then :
 
 fi
 
     if test $ac_cv_func_shmat = no; then
       { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shmat in -lipc" >&5
 $as_echo_n "checking for shmat in -lipc... " >&6; }
-if ${ac_cv_lib_ipc_shmat+:} false; then :
+if test "${ac_cv_lib_ipc_shmat+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -30538,7 +30540,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ipc_shmat" >&5
 $as_echo "$ac_cv_lib_ipc_shmat" >&6; }
-if test "x$ac_cv_lib_ipc_shmat" = xyes; then :
+if test "x$ac_cv_lib_ipc_shmat" = x""yes; then :
   X_EXTRA_LIBS="$X_EXTRA_LIBS -lipc"
 fi
 
@@ -30556,7 +30558,7 @@
   # John Interrante, Karl Berry
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for IceConnectionNumber in -lICE" >&5
 $as_echo_n "checking for IceConnectionNumber in -lICE... " >&6; }
-if ${ac_cv_lib_ICE_IceConnectionNumber+:} false; then :
+if test "${ac_cv_lib_ICE_IceConnectionNumber+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -30590,7 +30592,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ICE_IceConnectionNumber" >&5
 $as_echo "$ac_cv_lib_ICE_IceConnectionNumber" >&6; }
-if test "x$ac_cv_lib_ICE_IceConnectionNumber" = xyes; then :
+if test "x$ac_cv_lib_ICE_IceConnectionNumber" = x""yes; then :
   X_PRE_LIBS="$X_PRE_LIBS -lSM -lICE"
 fi
 
@@ -31609,7 +31611,7 @@
             LDFLAGS="$FREETYPE2_LIBS"
             { $as_echo "$as_me:${as_lineno-$LINENO}: checking for FT_Init_FreeType in -lfreetype" >&5
 $as_echo_n "checking for FT_Init_FreeType in -lfreetype... " >&6; }
-if ${ac_cv_lib_freetype_FT_Init_FreeType+:} false; then :
+if test "${ac_cv_lib_freetype_FT_Init_FreeType+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -31643,7 +31645,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_freetype_FT_Init_FreeType" >&5
 $as_echo "$ac_cv_lib_freetype_FT_Init_FreeType" >&6; }
-if test "x$ac_cv_lib_freetype_FT_Init_FreeType" = xyes; then :
+if test "x$ac_cv_lib_freetype_FT_Init_FreeType" = x""yes; then :
   FREETYPE2_FOUND=true
 else
   as_fn_error $? "Could not find freetype2! $HELP_MSG " "$LINENO" 5
@@ -31931,7 +31933,7 @@
 	    for ac_header in alsa/asoundlib.h
 do :
   ac_fn_cxx_check_header_mongrel "$LINENO" "alsa/asoundlib.h" "ac_cv_header_alsa_asoundlib_h" "$ac_includes_default"
-if test "x$ac_cv_header_alsa_asoundlib_h" = xyes; then :
+if test "x$ac_cv_header_alsa_asoundlib_h" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define HAVE_ALSA_ASOUNDLIB_H 1
 _ACEOF
@@ -31990,7 +31992,7 @@
 USE_EXTERNAL_LIBJPEG=true
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for main in -ljpeg" >&5
 $as_echo_n "checking for main in -ljpeg... " >&6; }
-if ${ac_cv_lib_jpeg_main+:} false; then :
+if test "${ac_cv_lib_jpeg_main+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -32018,7 +32020,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_jpeg_main" >&5
 $as_echo "$ac_cv_lib_jpeg_main" >&6; }
-if test "x$ac_cv_lib_jpeg_main" = xyes; then :
+if test "x$ac_cv_lib_jpeg_main" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define HAVE_LIBJPEG 1
 _ACEOF
@@ -32067,7 +32069,7 @@
     USE_EXTERNAL_LIBGIF=false
 elif test "x${with_giflib}" = "xsystem"; then
     ac_fn_cxx_check_header_mongrel "$LINENO" "gif_lib.h" "ac_cv_header_gif_lib_h" "$ac_includes_default"
-if test "x$ac_cv_header_gif_lib_h" = xyes; then :
+if test "x$ac_cv_header_gif_lib_h" = x""yes; then :
 
 else
    as_fn_error $? "--with-giflib=system specified, but gif_lib.h not found!" "$LINENO" 5
@@ -32076,7 +32078,7 @@
 
     { $as_echo "$as_me:${as_lineno-$LINENO}: checking for DGifGetCode in -lgif" >&5
 $as_echo_n "checking for DGifGetCode in -lgif... " >&6; }
-if ${ac_cv_lib_gif_DGifGetCode+:} false; then :
+if test "${ac_cv_lib_gif_DGifGetCode+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -32110,7 +32112,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_gif_DGifGetCode" >&5
 $as_echo "$ac_cv_lib_gif_DGifGetCode" >&6; }
-if test "x$ac_cv_lib_gif_DGifGetCode" = xyes; then :
+if test "x$ac_cv_lib_gif_DGifGetCode" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define HAVE_LIBGIF 1
 _ACEOF
@@ -32142,7 +32144,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for compress in -lz" >&5
 $as_echo_n "checking for compress in -lz... " >&6; }
-if ${ac_cv_lib_z_compress+:} false; then :
+if test "${ac_cv_lib_z_compress+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -32176,7 +32178,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_z_compress" >&5
 $as_echo "$ac_cv_lib_z_compress" >&6; }
-if test "x$ac_cv_lib_z_compress" = xyes; then :
+if test "x$ac_cv_lib_z_compress" = x""yes; then :
    ZLIB_FOUND=yes
 else
    ZLIB_FOUND=no
@@ -32269,7 +32271,7 @@
 
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for cos in -lm" >&5
 $as_echo_n "checking for cos in -lm... " >&6; }
-if ${ac_cv_lib_m_cos+:} false; then :
+if test "${ac_cv_lib_m_cos+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -32303,7 +32305,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_m_cos" >&5
 $as_echo "$ac_cv_lib_m_cos" >&6; }
-if test "x$ac_cv_lib_m_cos" = xyes; then :
+if test "x$ac_cv_lib_m_cos" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define HAVE_LIBM 1
 _ACEOF
@@ -32327,7 +32329,7 @@
 LIBS=""
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
 $as_echo_n "checking for dlopen in -ldl... " >&6; }
-if ${ac_cv_lib_dl_dlopen+:} false; then :
+if test "${ac_cv_lib_dl_dlopen+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   ac_check_lib_save_LIBS=$LIBS
@@ -32361,7 +32363,7 @@
 fi
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
 $as_echo "$ac_cv_lib_dl_dlopen" >&6; }
-if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
+if test "x$ac_cv_lib_dl_dlopen" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define HAVE_LIBDL 1
 _ACEOF
@@ -32591,7 +32593,7 @@
 See the pkg-config man page for more details.
 
 To get pkg-config, see <http://pkg-config.freedesktop.org/>.
-See \`config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
 else
 	LIBFFI_CFLAGS=$pkg_cv_LIBFFI_CFLAGS
 	LIBFFI_LIBS=$pkg_cv_LIBFFI_LIBS
@@ -32607,7 +32609,7 @@
 set dummy llvm-config; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_LLVM_CONFIG+:} false; then :
+if test "${ac_cv_prog_LLVM_CONFIG+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   if test -n "$LLVM_CONFIG"; then
@@ -33219,7 +33221,7 @@
 set dummy ccache; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_CCACHE+:} false; then :
+if test "${ac_cv_path_CCACHE+set}" = set; then :
   $as_echo_n "(cached) " >&6
 else
   case $CCACHE in
@@ -33480,21 +33482,10 @@
      :end' >>confcache
 if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
   if test -w "$cache_file"; then
-    if test "x$cache_file" != "x/dev/null"; then
+    test "x$cache_file" != "x/dev/null" &&
       { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
 $as_echo "$as_me: updating cache $cache_file" >&6;}
-      if test ! -f "$cache_file" || test -h "$cache_file"; then
-	cat confcache >"$cache_file"
-      else
-        case $cache_file in #(
-        */* | ?:*)
-	  mv -f confcache "$cache_file"$$ &&
-	  mv -f "$cache_file"$$ "$cache_file" ;; #(
-        *)
-	  mv -f confcache "$cache_file" ;;
-	esac
-      fi
-    fi
+    cat confcache >$cache_file
   else
     { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
 $as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
@@ -33526,7 +33517,7 @@
 
 
 
-: "${CONFIG_STATUS=./config.status}"
+: ${CONFIG_STATUS=./config.status}
 ac_write_fail=0
 ac_clean_files_save=$ac_clean_files
 ac_clean_files="$ac_clean_files $CONFIG_STATUS"
@@ -33627,7 +33618,6 @@
 IFS=" ""	$as_nl"
 
 # Find who we are.  Look in the path if we contain no directory separator.
-as_myself=
 case $0 in #((
   *[\\/]* ) as_myself=$0 ;;
   *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
@@ -33935,7 +33925,7 @@
 # values after options handling.
 ac_log="
 This file was extended by OpenJDK $as_me jdk8, which was
-generated by GNU Autoconf 2.68.  Invocation command line was
+generated by GNU Autoconf 2.67.  Invocation command line was
 
   CONFIG_FILES    = $CONFIG_FILES
   CONFIG_HEADERS  = $CONFIG_HEADERS
@@ -33998,7 +33988,7 @@
 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
 ac_cs_version="\\
 OpenJDK config.status jdk8
-configured by $0, generated by GNU Autoconf 2.68,
+configured by $0, generated by GNU Autoconf 2.67,
   with options \\"\$ac_cs_config\\"
 
 Copyright (C) 2010 Free Software Foundation, Inc.
@@ -34127,7 +34117,7 @@
     "$OUTPUT_ROOT/spec.sh") CONFIG_FILES="$CONFIG_FILES $OUTPUT_ROOT/spec.sh:$AUTOCONF_DIR/spec.sh.in" ;;
     "$OUTPUT_ROOT/Makefile") CONFIG_FILES="$CONFIG_FILES $OUTPUT_ROOT/Makefile:$AUTOCONF_DIR/Makefile.in" ;;
 
-  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
+  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5 ;;
   esac
 done
 
@@ -34149,10 +34139,9 @@
 # after its creation but before its name has been assigned to `$tmp'.
 $debug ||
 {
-  tmp= ac_tmp=
+  tmp=
   trap 'exit_status=$?
-  : "${ac_tmp:=$tmp}"
-  { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
+  { test -z "$tmp" || test ! -d "$tmp" || rm -fr "$tmp"; } && exit $exit_status
 ' 0
   trap 'as_fn_exit 1' 1 2 13 15
 }
@@ -34160,13 +34149,12 @@
 
 {
   tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
-  test -d "$tmp"
+  test -n "$tmp" && test -d "$tmp"
 }  ||
 {
   tmp=./conf$$-$RANDOM
   (umask 077 && mkdir "$tmp")
 } || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
-ac_tmp=$tmp
 
 # Set up the scripts for CONFIG_FILES section.
 # No need to generate them if there are no CONFIG_FILES.
@@ -34188,7 +34176,7 @@
   ac_cs_awk_cr=$ac_cr
 fi
 
-echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
+echo 'BEGIN {' >"$tmp/subs1.awk" &&
 _ACEOF
 
 
@@ -34216,7 +34204,7 @@
 rm -f conf$$subs.sh
 
 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
+cat >>"\$tmp/subs1.awk" <<\\_ACAWK &&
 _ACEOF
 sed -n '
 h
@@ -34264,7 +34252,7 @@
 rm -f conf$$subs.awk
 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
 _ACAWK
-cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
+cat >>"\$tmp/subs1.awk" <<_ACAWK &&
   for (key in S) S_is_set[key] = 1
   FS = ""
 
@@ -34296,7 +34284,7 @@
   sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
 else
   cat
-fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
+fi < "$tmp/subs1.awk" > "$tmp/subs.awk" \
   || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
 _ACEOF
 
@@ -34330,7 +34318,7 @@
 # No need to generate them if there are no CONFIG_HEADERS.
 # This happens for instance with `./config.status Makefile'.
 if test -n "$CONFIG_HEADERS"; then
-cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
+cat >"$tmp/defines.awk" <<\_ACAWK ||
 BEGIN {
 _ACEOF
 
@@ -34342,8 +34330,8 @@
 # handling of long lines.
 ac_delim='%!_!# '
 for ac_last_try in false false :; do
-  ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
-  if test -z "$ac_tt"; then
+  ac_t=`sed -n "/$ac_delim/p" confdefs.h`
+  if test -z "$ac_t"; then
     break
   elif $ac_last_try; then
     as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
@@ -34444,7 +34432,7 @@
   esac
   case $ac_mode$ac_tag in
   :[FHL]*:*);;
-  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
+  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5 ;;
   :[FH]-) ac_tag=-:-;;
   :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
   esac
@@ -34463,7 +34451,7 @@
     for ac_f
     do
       case $ac_f in
-      -) ac_f="$ac_tmp/stdin";;
+      -) ac_f="$tmp/stdin";;
       *) # Look for the file first in the build tree, then in the source tree
 	 # (if the path is not absolute).  The absolute path cannot be DOS-style,
 	 # because $ac_f cannot contain `:'.
@@ -34472,7 +34460,7 @@
 	   [\\/$]*) false;;
 	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
 	   esac ||
-	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
+	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5 ;;
       esac
       case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
       as_fn_append ac_file_inputs " '$ac_f'"
@@ -34498,8 +34486,8 @@
     esac
 
     case $ac_tag in
-    *:-:* | *:-) cat >"$ac_tmp/stdin" \
-      || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
+    *:-:* | *:-) cat >"$tmp/stdin" \
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5  ;;
     esac
     ;;
   esac
@@ -34624,22 +34612,21 @@
 s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
 $ac_datarootdir_hack
 "
-eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
-  >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$tmp/subs.awk" >$tmp/out \
+  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
 
 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
-  { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
-  { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' \
-      "$ac_tmp/out"`; test -z "$ac_out"; } &&
+  { ac_out=`sed -n '/\${datarootdir}/p' "$tmp/out"`; test -n "$ac_out"; } &&
+  { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' "$tmp/out"`; test -z "$ac_out"; } &&
   { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
 which seems to be undefined.  Please make sure it is defined" >&5
 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
 which seems to be undefined.  Please make sure it is defined" >&2;}
 
-  rm -f "$ac_tmp/stdin"
+  rm -f "$tmp/stdin"
   case $ac_file in
-  -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
-  *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
+  -) cat "$tmp/out" && rm -f "$tmp/out";;
+  *) rm -f "$ac_file" && mv "$tmp/out" "$ac_file";;
   esac \
   || as_fn_error $? "could not create $ac_file" "$LINENO" 5
  ;;
@@ -34650,20 +34637,20 @@
   if test x"$ac_file" != x-; then
     {
       $as_echo "/* $configure_input  */" \
-      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
-    } >"$ac_tmp/config.h" \
+      && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs"
+    } >"$tmp/config.h" \
       || as_fn_error $? "could not create $ac_file" "$LINENO" 5
-    if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
+    if diff "$ac_file" "$tmp/config.h" >/dev/null 2>&1; then
       { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
 $as_echo "$as_me: $ac_file is unchanged" >&6;}
     else
       rm -f "$ac_file"
-      mv "$ac_tmp/config.h" "$ac_file" \
+      mv "$tmp/config.h" "$ac_file" \
 	|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
     fi
   else
     $as_echo "/* $configure_input  */" \
-      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
+      && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" \
       || as_fn_error $? "could not create -" "$LINENO" 5
   fi
  ;;
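
(Aside, not part of the changeset: the bulk of the generated-configure.sh churn above comes from regenerating the script with a different Autoconf release, which emits a different — but equivalent — "is this cache variable already set?" test. A minimal sketch of the two idioms follows, using a hypothetical variable name my_cv_example; it is an illustration only, not configure output.)

    #!/bin/sh
    # Two equivalent cache-variable checks, as seen in the hunks above.
    my_cv_example=yes

    # Autoconf 2.68 style: ${var+:} expands to ':' (true) when var is set,
    # so the trailing 'false' is never the command that runs.
    if ${my_cv_example+:} false; then
      echo "2.68 idiom: cached"
    fi

    # Autoconf 2.67 style: ${var+set} expands to the literal word 'set'
    # when var is set (even to the empty string), and to nothing otherwise.
    if test "${my_cv_example+set}" = set; then
      echo "2.67 idiom: cached"
    fi
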
--- a/common/autoconf/jdk-options.m4	Mon Jun 10 17:04:18 2013 -0700
+++ b/common/autoconf/jdk-options.m4	Wed Jul 05 18:59:05 2017 +0200
@@ -351,11 +351,7 @@
 if test "x$with_cacerts_file" != x; then
     CACERTS_FILE=$with_cacerts_file
 else
-    if test "x$OPENJDK" = "xtrue"; then
-        CACERTS_FILE=${SRC_ROOT}/jdk/src/share/lib/security/cacerts
-    else
-        CACERTS_FILE=${SRC_ROOT}/jdk/src/closed/share/lib/security/cacerts.internal
-    fi
+    CACERTS_FILE=${SRC_ROOT}/jdk/src/share/lib/security/cacerts
 fi
 AC_SUBST(CACERTS_FILE)
 
--- a/common/autoconf/toolchain_windows.m4	Mon Jun 10 17:04:18 2013 -0700
+++ b/common/autoconf/toolchain_windows.m4	Wed Jul 05 18:59:05 2017 +0200
@@ -91,6 +91,15 @@
     AC_MSG_ERROR([Cannot locate a valid Visual Studio installation])
   fi  
 
+  if test "x$VS100COMNTOOLS" != x; then
+    TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$VS100COMNTOOLS/../..], [VS100COMNTOOLS variable])
+  fi
+  if test "x$PROGRAMFILES" != x; then
+    TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$PROGRAMFILES/Microsoft Visual Studio 10.0], [well-known name])
+  fi
+  TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([C:/Program Files/Microsoft Visual Studio 10.0], [well-known name])
+  TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([C:/Program Files (x86)/Microsoft Visual Studio 10.0], [well-known name])
+
   if test "x$ProgramW6432" != x; then
     TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([$ProgramW6432/Microsoft SDKs/Windows/v7.1/Bin], [well-known name])
   fi
@@ -102,15 +111,6 @@
   fi
   TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([C:/Program Files/Microsoft SDKs/Windows/v7.1/Bin], [well-known name])
   TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([C:/Program Files (x86)/Microsoft SDKs/Windows/v7.1/Bin], [well-known name])
-
-  if test "x$VS100COMNTOOLS" != x; then
-    TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$VS100COMNTOOLS/../..], [VS100COMNTOOLS variable])
-  fi
-  if test "x$PROGRAMFILES" != x; then
-    TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$PROGRAMFILES/Microsoft Visual Studio 10.0], [well-known name])
-  fi
-  TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([C:/Program Files/Microsoft Visual Studio 10.0], [well-known name])
-  TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([C:/Program Files (x86)/Microsoft Visual Studio 10.0], [well-known name])
 ])
 
 # Check if the VS env variables were setup prior to running configure.
@@ -248,10 +248,23 @@
         AC_MSG_NOTICE([Warning: msvcr100.dll not found in VCINSTALLDIR: $VCINSTALLDIR])
       fi
     fi
+    # Try some fallback alternatives
     if test "x$MSVCR_DLL" = x; then
-      if test -f "$SYSTEMROOT/system32/msvcr100.dll"; then
-        AC_MSG_NOTICE([msvcr100.dll found in $SYSTEMROOT/system32])
-        MSVCR_DLL="$SYSTEMROOT/system32/msvcr100.dll"
+      # If visual studio express is installed, there is usually one with the debugger
+      if test "x$VS100COMNTOOLS" != x; then
+        if test "x$OPENJDK_TARGET_CPU_BITS" = x64; then
+          MSVCR_DLL=`find "$VS100COMNTOOLS/.." -name msvcr100.dll | grep -i x64 | head --lines 1`
+          AC_MSG_NOTICE([msvcr100.dll found in $VS100COMNTOOLS..: $VS100COMNTOOLS..])
+        fi
+      fi
+    fi
+    if test "x$MSVCR_DLL" = x; then
+      if test "x$OPENJDK_TARGET_CPU_BITS" = x32; then
+        # Fallback for 32bit builds, look in the windows directory.
+        if test -f "$SYSTEMROOT/system32/msvcr100.dll"; then
+          AC_MSG_NOTICE([msvcr100.dll found in $SYSTEMROOT/system32])
+          MSVCR_DLL="$SYSTEMROOT/system32/msvcr100.dll"
+        fi
       fi
     fi
   fi
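
(Illustration only, not part of the changeset: the toolchain_windows.m4 hunk above adds fallback locations for msvcr100.dll. The sketch below restates that search order as a standalone shell fragment; OPENJDK_TARGET_CPU_BITS is supplied here as a plain variable, and VS100COMNTOOLS / SYSTEMROOT are the Windows environment variables the real check relies on.)

    #!/bin/sh
    # Fallback search for msvcr100.dll, mirroring the logic above.
    OPENJDK_TARGET_CPU_BITS=64
    MSVCR_DLL=

    if test "x$MSVCR_DLL" = x && test "x$VS100COMNTOOLS" != x; then
      if test "x$OPENJDK_TARGET_CPU_BITS" = x64; then
        # Visual Studio Express usually ships an x64 copy with the debugger.
        MSVCR_DLL=`find "$VS100COMNTOOLS/.." -name msvcr100.dll | grep -i x64 | head -n 1`
      fi
    fi

    if test "x$MSVCR_DLL" = x && test "x$OPENJDK_TARGET_CPU_BITS" = x32; then
      # 32-bit builds fall back to the copy in the Windows system directory.
      if test -f "$SYSTEMROOT/system32/msvcr100.dll"; then
        MSVCR_DLL="$SYSTEMROOT/system32/msvcr100.dll"
      fi
    fi

    echo "MSVCR_DLL=$MSVCR_DLL"
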
--- a/corba/.hgtags	Mon Jun 10 17:04:18 2013 -0700
+++ b/corba/.hgtags	Wed Jul 05 18:59:05 2017 +0200
@@ -214,3 +214,4 @@
 c8286839d0df04aba819ec4bef12b86babccf30e jdk8-b90
 8f7ffb296385f85a4a6d53f9f2d4a7b13a8fa1ff jdk8-b91
 717aa26f8e0a1c0e768aebb3a763aca56db0c83e jdk8-b92
+8dc9d7ccbb2d77fd89bc321bb02e67c152aca257 jdk8-b93
--- a/hotspot/.hgtags	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/.hgtags	Wed Jul 05 18:59:05 2017 +0200
@@ -346,3 +346,6 @@
 7cbdf0e3725c0c56a2ff7540fc70b6d4b5890d04 jdk8-b91
 38da9f4f67096745f851318d792d6468aa1f6cf8 hs25-b34
 092018493d3bbeb1c24278fd8c40ff3d76e1fed7 jdk8-b92
+573d86d412cd9d3df7912194c1a540be50e9544e jdk8-b93
+b786c04b7be15194febe88dc1f0c9443e737a84b hs25-b35
+3c78a14da19d26d6937af5f98b97e2a21c653b04 hs25-b36
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/memory/DictionaryEntry.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/memory/DictionaryEntry.java	Wed Jul 05 18:59:05 2017 +0200
@@ -96,9 +96,10 @@
 
   public boolean containsProtectionDomain(Oop protectionDomain) {
     InstanceKlass ik = (InstanceKlass) klass();
-    if (protectionDomain.equals(ik.getProtectionDomain())) {
-      return true; // Succeeds trivially
-    }
+    // Currently unimplemented and not used.
+    // if (protectionDomain.equals(ik.getJavaMirror().getProtectionDomain())) {
+    //   return true; // Succeeds trivially
+    // }
     for (ProtectionDomainEntry current = pdSet(); current != null;
                                        current = current.next()) {
       if (protectionDomain.equals(current.protectionDomain())) {
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/InstanceKlass.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/InstanceKlass.java	Wed Jul 05 18:59:05 2017 +0200
@@ -75,8 +75,6 @@
     javaFieldsCount      = new CIntField(type.getCIntegerField("_java_fields_count"), 0);
     constants            = new MetadataField(type.getAddressField("_constants"), 0);
     classLoaderData      = type.getAddressField("_class_loader_data");
-    protectionDomain     = new OopField(type.getOopField("_protection_domain"), 0);
-    signers              = new OopField(type.getOopField("_signers"), 0);
     sourceFileName       = type.getAddressField("_source_file_name");
     sourceDebugExtension = type.getAddressField("_source_debug_extension");
     innerClasses         = type.getAddressField("_inner_classes");
@@ -136,8 +134,6 @@
   private static CIntField javaFieldsCount;
   private static MetadataField constants;
   private static AddressField  classLoaderData;
-  private static OopField  protectionDomain;
-  private static OopField  signers;
   private static AddressField  sourceFileName;
   private static AddressField  sourceDebugExtension;
   private static AddressField  innerClasses;
@@ -350,8 +346,6 @@
   public ConstantPool getConstants()        { return (ConstantPool) constants.getValue(this); }
   public ClassLoaderData getClassLoaderData() { return                ClassLoaderData.instantiateWrapperFor(classLoaderData.getValue(getAddress())); }
   public Oop       getClassLoader()         { return                getClassLoaderData().getClassLoader(); }
-  public Oop       getProtectionDomain()    { return                protectionDomain.getValue(this); }
-  public ObjArray  getSigners()             { return (ObjArray)     signers.getValue(this); }
   public Symbol    getSourceFileName()      { return getSymbol(sourceFileName); }
   public String    getSourceDebugExtension(){ return                CStringUtilities.getString(sourceDebugExtension.getValue(getAddress())); }
   public long      getNonstaticFieldSize()  { return                nonstaticFieldSize.getValue(this); }
@@ -541,8 +535,6 @@
     // visitor.doOop(methods, true);
     // visitor.doOop(localInterfaces, true);
     // visitor.doOop(transitiveInterfaces, true);
-      visitor.doOop(protectionDomain, true);
-      visitor.doOop(signers, true);
       visitor.doCInt(nonstaticFieldSize, true);
       visitor.doCInt(staticFieldSize, true);
       visitor.doCInt(staticOopFieldCount, true);
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/utilities/HeapGXLWriter.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/utilities/HeapGXLWriter.java	Wed Jul 05 18:59:05 2017 +0200
@@ -204,13 +204,13 @@
                 Oop loader = ik.getClassLoader();
                 writeEdge(instance, loader, "loaded-by");
 
-                // write signers
-                Oop signers = ik.getSigners();
-                writeEdge(instance, signers, "signed-by");
+                // write signers NYI
+                // Oop signers = ik.getJavaMirror().getSigners();
+                writeEdge(instance, null, "signed-by");
 
-                // write protection domain
-                Oop protectionDomain = ik.getProtectionDomain();
-                writeEdge(instance, protectionDomain, "protection-domain");
+                // write protection domain NYI
+                // Oop protectionDomain = ik.getJavaMirror().getProtectionDomain();
+                writeEdge(instance, null, "protection-domain");
 
                 // write edges for static reference fields from this class
                 for (Iterator itr = refFields.iterator(); itr.hasNext();) {
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/utilities/HeapHprofBinWriter.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/utilities/HeapHprofBinWriter.java	Wed Jul 05 18:59:05 2017 +0200
@@ -477,8 +477,8 @@
         if (k instanceof InstanceKlass) {
             InstanceKlass ik = (InstanceKlass) k;
             writeObjectID(ik.getClassLoader());
-            writeObjectID(ik.getSigners());
-            writeObjectID(ik.getProtectionDomain());
+            writeObjectID(null);  // ik.getJavaMirror().getSigners());
+            writeObjectID(null);  // ik.getJavaMirror().getProtectionDomain());
             // two reserved id fields
             writeObjectID(null);
             writeObjectID(null);
@@ -516,8 +516,8 @@
                 if (bottomKlass instanceof InstanceKlass) {
                     InstanceKlass ik = (InstanceKlass) bottomKlass;
                     writeObjectID(ik.getClassLoader());
-                    writeObjectID(ik.getSigners());
-                    writeObjectID(ik.getProtectionDomain());
+                    writeObjectID(null); // ik.getJavaMirror().getSigners());
+                    writeObjectID(null); // ik.getJavaMirror().getProtectionDomain());
                 } else {
                     writeObjectID(null);
                     writeObjectID(null);
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/utilities/soql/JSJavaInstanceKlass.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/utilities/soql/JSJavaInstanceKlass.java	Wed Jul 05 18:59:05 2017 +0200
@@ -47,8 +47,6 @@
    private static final int FIELD_IS_SYNTHETIC       = 13;
    private static final int FIELD_IS_INTERFACE       = 14;
    private static final int FIELD_CLASS_LOADER       = 15;
-   private static final int FIELD_PROTECTION_DOMAIN  = 16;
-   private static final int FIELD_SIGNERS            = 17;
    private static final int FIELD_STATICS            = 18;
    private static final int FIELD_UNDEFINED          = -1;
 
@@ -100,10 +98,6 @@
          return Boolean.valueOf(ik.isInterface());
       case FIELD_CLASS_LOADER:
          return factory.newJSJavaObject(ik.getClassLoader());
-      case FIELD_PROTECTION_DOMAIN:
-         return factory.newJSJavaObject(ik.getProtectionDomain());
-      case FIELD_SIGNERS:
-         return factory.newJSJavaObject(ik.getSigners());
       case FIELD_STATICS:
          return getStatics();
       case FIELD_UNDEFINED:
@@ -246,8 +240,6 @@
       addField("isSynthetic", FIELD_IS_SYNTHETIC);
       addField("isInterface", FIELD_IS_INTERFACE);
       addField("classLoader", FIELD_CLASS_LOADER);
-      addField("protectionDomain", FIELD_PROTECTION_DOMAIN);
-      addField("signers", FIELD_SIGNERS);
       addField("statics", FIELD_STATICS);
    }
 
--- a/hotspot/make/bsd/makefiles/adlc.make	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/make/bsd/makefiles/adlc.make	Wed Jul 05 18:59:05 2017 +0200
@@ -69,7 +69,7 @@
 # CFLAGS_WARN holds compiler options to suppress/enable warnings.
 # Compiler warnings are treated as errors
 ifneq ($(COMPILER_WARNINGS_FATAL),false)
-  CFLAGS_WARN = -Werror
+  CFLAGS_WARN = $(WARNINGS_ARE_ERRORS)
 endif
 CFLAGS += $(CFLAGS_WARN)
 
--- a/hotspot/make/bsd/makefiles/gcc.make	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/make/bsd/makefiles/gcc.make	Wed Jul 05 18:59:05 2017 +0200
@@ -71,6 +71,11 @@
       CC  = $(CC32)
     endif
 
+    ifeq ($(USE_CLANG), true)
+      CXX = clang++
+      CC  = clang
+    endif
+
     HOSTCXX = $(CXX)
     HOSTCC  = $(CC)
   endif
@@ -79,21 +84,79 @@
 endif
 
 
-# -dumpversion in gcc-2.91 shows "egcs-2.91.66". In later version, it only
-# prints the numbers (e.g. "2.95", "3.2.1")
-CC_VER_MAJOR := $(shell $(CC) -dumpversion | sed 's/egcs-//' | cut -d'.' -f1)
-CC_VER_MINOR := $(shell $(CC) -dumpversion | sed 's/egcs-//' | cut -d'.' -f2)
-
-# check for precompiled headers support
-ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 3 \) \| \( \( $(CC_VER_MAJOR) = 3 \) \& \( $(CC_VER_MINOR) \>= 4 \) \))" "0"
-# Allow the user to turn off precompiled headers from the command line.
-ifneq ($(USE_PRECOMPILED_HEADER),0)
-PRECOMPILED_HEADER_DIR=.
-PRECOMPILED_HEADER_SRC=$(GAMMADIR)/src/share/vm/precompiled/precompiled.hpp
-PRECOMPILED_HEADER=$(PRECOMPILED_HEADER_DIR)/precompiled.hpp.gch
-endif
+ifeq ($(USE_CLANG), true)
+  CC_VER_MAJOR := $(shell $(CC) -v 2>&1 | grep version | sed "s/.*version \([0-9]*\.[0-9]*\).*/\1/" | cut -d'.' -f1)
+  CC_VER_MINOR := $(shell $(CC) -v 2>&1 | grep version | sed "s/.*version \([0-9]*\.[0-9]*\).*/\1/" | cut -d'.' -f2)
+else
+  # -dumpversion in gcc-2.91 shows "egcs-2.91.66". In later version, it only
+  # prints the numbers (e.g. "2.95", "3.2.1")
+  CC_VER_MAJOR := $(shell $(CC) -dumpversion | sed 's/egcs-//' | cut -d'.' -f1)
+  CC_VER_MINOR := $(shell $(CC) -dumpversion | sed 's/egcs-//' | cut -d'.' -f2)
 endif
 
+ifeq ($(USE_CLANG), true)
+  # clang has precompiled headers support by default, but the user can switch
+  # it off by using 'USE_PRECOMPILED_HEADER=0'.
+  ifdef LP64
+    ifeq ($(USE_PRECOMPILED_HEADER),)
+      USE_PRECOMPILED_HEADER=1
+    endif
+  else
+    # We don't support precompiled headers on 32-bit builds because there some files are
+    # compiled with -fPIC while others are compiled without (see 'NONPIC_OBJ_FILES' rules.make)
+    # Clang produces an error if the PCH file was compiled with other options than the actual compilation unit.
+    USE_PRECOMPILED_HEADER=0
+  endif
+  
+  ifeq ($(USE_PRECOMPILED_HEADER),1)
+  
+    ifndef LP64
+      $(error " Precompiled Headers only supported on 64-bit platforms!")
+    endif
+  
+    PRECOMPILED_HEADER_DIR=.
+    PRECOMPILED_HEADER_SRC=$(GAMMADIR)/src/share/vm/precompiled/precompiled.hpp
+    PRECOMPILED_HEADER=$(PRECOMPILED_HEADER_DIR)/precompiled.hpp.pch
+  
+    PCH_FLAG = -include precompiled.hpp
+    PCH_FLAG/DEFAULT = $(PCH_FLAG)
+    PCH_FLAG/NO_PCH = -DNO_PCH
+    PCH_FLAG/BY_FILE = $(PCH_FLAG/$@)$(PCH_FLAG/DEFAULT$(PCH_FLAG/$@))
+  
+    VM_PCH_FLAG/LIBJVM = $(PCH_FLAG/BY_FILE)
+    VM_PCH_FLAG/AOUT =
+    VM_PCH_FLAG = $(VM_PCH_FLAG/$(LINK_INTO))
+  
+    # We only use precompiled headers for the JVM build
+    CFLAGS += $(VM_PCH_FLAG)
+  
+    # There are some files which don't like precompiled headers
+    # The following files are build with 'OPT_CFLAGS/NOOPT' (-O0) in the opt build.
+    # But Clang doesn't support a precompiled header which was compiled with -O3
+    # to be used in a compilation unit which uses '-O0'. We could also prepare an
+    # extra '-O0' PCH file for the opt build and use it here, but it's probably
+    # not worth the effort as long as only two files need this special handling.
+    PCH_FLAG/loopTransform.o = $(PCH_FLAG/NO_PCH)
+    PCH_FLAG/sharedRuntimeTrig.o = $(PCH_FLAG/NO_PCH)
+    PCH_FLAG/sharedRuntimeTrans.o = $(PCH_FLAG/NO_PCH)
+  
+  endif
+else # ($(USE_CLANG), true)
+  # check for precompiled headers support
+  ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 3 \) \| \( \( $(CC_VER_MAJOR) = 3 \) \& \( $(CC_VER_MINOR) \>= 4 \) \))" "0"
+    # Allow the user to turn off precompiled headers from the command line.
+    ifneq ($(USE_PRECOMPILED_HEADER),0)
+      PRECOMPILED_HEADER_DIR=.
+      PRECOMPILED_HEADER_SRC=$(GAMMADIR)/src/share/vm/precompiled/precompiled.hpp
+      PRECOMPILED_HEADER=$(PRECOMPILED_HEADER_DIR)/precompiled.hpp.gch
+    endif
+  endif
+endif
+
+# -DDONT_USE_PRECOMPILED_HEADER will exclude all includes in precompiled.hpp.
+ifeq ($(USE_PRECOMPILED_HEADER),0)
+  CFLAGS += -DDONT_USE_PRECOMPILED_HEADER
+endif
 
 #------------------------------------------------------------------------
 # Compiler flags
@@ -115,17 +178,31 @@
 CFLAGS += $(VM_PICFLAG)
 CFLAGS += -fno-rtti
 CFLAGS += -fno-exceptions
-CFLAGS += -pthread
-CFLAGS += -fcheck-new
-# version 4 and above support fvisibility=hidden (matches jni_x86.h file)
-# except 4.1.2 gives pointless warnings that can't be disabled (afaik)
-ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
-CFLAGS += -fvisibility=hidden
+ifeq ($(USE_CLANG),)
+  CFLAGS += -pthread
+  CFLAGS += -fcheck-new
+  # version 4 and above support fvisibility=hidden (matches jni_x86.h file)
+  # except 4.1.2 gives pointless warnings that can't be disabled (afaik)
+  ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
+    CFLAGS += -fvisibility=hidden
+  endif
+else
+  CFLAGS += -fvisibility=hidden
+endif
+
+ifeq ($(USE_CLANG), true)
+  # Before Clang 3.1, we had to pass the stack alignment specification directly to llvm with the help of '-mllvm'
+  # Starting with version 3.1, Clang understands the '-mstack-alignment' (and rejects '-mllvm -stack-alignment')
+  ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 3 \) \| \( \( $(CC_VER_MAJOR) = 3 \) \& \( $(CC_VER_MINOR) \>= 1 \) \))" "0"
+    STACK_ALIGNMENT_OPT = -mno-omit-leaf-frame-pointer -mstack-alignment=16
+  else
+    STACK_ALIGNMENT_OPT = -mno-omit-leaf-frame-pointer -mllvm -stack-alignment=16
+  endif
 endif
 
 ARCHFLAG = $(ARCHFLAG/$(BUILDARCH))
 ARCHFLAG/i486    = -m32 -march=i586
-ARCHFLAG/amd64   = -m64
+ARCHFLAG/amd64   = -m64 $(STACK_ALIGNMENT_OPT)
 ARCHFLAG/ia64    =
 ARCHFLAG/sparc   = -m32 -mcpu=v9
 ARCHFLAG/sparcv9 = -m64 -mcpu=v9
@@ -163,14 +240,25 @@
   WARNINGS_ARE_ERRORS = -Werror
 endif
 
-# Except for a few acceptable ones
-# Since GCC 4.3, -Wconversion has changed its meanings to warn these implicit
-# conversions which might affect the values. To avoid that, we need to turn
-# it off explicitly. 
-ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
+ifeq ($(USE_CLANG), true)
+  # However we need to clean the code up before we can unrestrictedly enable this option with Clang
+  WARNINGS_ARE_ERRORS += -Wno-unused-value -Wno-logical-op-parentheses -Wno-parentheses-equality -Wno-parentheses
+  WARNINGS_ARE_ERRORS += -Wno-switch -Wno-tautological-compare
+# Not yet supported by clang in Xcode 4.6.2
+#  WARNINGS_ARE_ERRORS += -Wno-tautological-constant-out-of-range-compare
+  WARNINGS_ARE_ERRORS += -Wno-delete-non-virtual-dtor -Wno-deprecated -Wno-format -Wno-dynamic-class-memaccess
+  WARNINGS_ARE_ERRORS += -Wno-return-type -Wno-empty-body
+endif
+
 WARNING_FLAGS = -Wpointer-arith -Wsign-compare -Wundef
-else
-WARNING_FLAGS = -Wpointer-arith -Wconversion -Wsign-compare -Wundef
+
+ifeq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
+  # Since GCC 4.3, -Wconversion has changed its meanings to warn these implicit
+  # conversions which might affect the values. Only enable it in earlier versions.
+  WARNING_FLAGS = -Wunused-function
+  ifeq ($(USE_CLANG),)
+    WARNINGS_FLAGS += -Wconversion
+  endif
 endif
 
 CFLAGS_WARN/DEFAULT = $(WARNINGS_ARE_ERRORS) $(WARNING_FLAGS)
@@ -214,14 +302,24 @@
 
 OPT_CFLAGS/NOOPT=-O0
 
-# 6835796. Problem in GCC 4.3.0 with mulnode.o optimized compilation. 
-ifneq "$(shell expr \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) = 3 \) \))" "0"
-OPT_CFLAGS/mulnode.o += -O0
+# Work around some compiler bugs.
+ifeq ($(USE_CLANG), true)
+  ifeq ($(shell expr $(CC_VER_MAJOR) = 4 \& $(CC_VER_MINOR) = 2), 1)
+    OPT_CFLAGS/loopTransform.o += $(OPT_CFLAGS/NOOPT)
+  endif
+else
+  # 6835796. Problem in GCC 4.3.0 with mulnode.o optimized compilation.
+  ifeq ($(shell expr $(CC_VER_MAJOR) = 4 \& $(CC_VER_MINOR) = 3), 1)
+    OPT_CFLAGS/mulnode.o += $(OPT_CFLAGS/NOOPT)
+  endif
 endif
 
 # Flags for generating make dependency flags.
-ifneq ("${CC_VER_MAJOR}", "2")
-DEPFLAGS = -fpch-deps -MMD -MP -MF $(DEP_DIR)/$(@:%=%.d)
+DEPFLAGS = -MMD -MP -MF $(DEP_DIR)/$(@:%=%.d)
+ifeq ($(USE_CLANG),)
+  ifneq ($(CC_VER_MAJOR), 2)
+    DEPFLAGS += -fpch-deps
+  endif
 endif
 
 # -DDONT_USE_PRECOMPILED_HEADER will exclude all includes in precompiled.hpp.
@@ -249,13 +347,15 @@
 # statically link libstdc++.so, work with gcc but ignored by g++
 STATIC_STDCXX = -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic
 
-# statically link libgcc and/or libgcc_s, libgcc does not exist before gcc-3.x.
-ifneq ("${CC_VER_MAJOR}", "2")
-STATIC_LIBGCC += -static-libgcc
-endif
+ifeq ($(USE_CLANG),)
+  # statically link libgcc and/or libgcc_s, libgcc does not exist before gcc-3.x.
+  ifneq ("${CC_VER_MAJOR}", "2")
+    STATIC_LIBGCC += -static-libgcc
+  endif
 
-ifeq ($(BUILDARCH), ia64)
-LFLAGS += -Wl,-relax
+  ifeq ($(BUILDARCH), ia64)
+    LFLAGS += -Wl,-relax
+  endif
 endif
 
 # Use $(MAPFLAG:FILENAME=real_file_name) to specify a map file.
@@ -296,25 +396,31 @@
 #------------------------------------------------------------------------
 # Debug flags
 
-# Use the stabs format for debugging information (this is the default
-# on gcc-2.91). It's good enough, has all the information about line
-# numbers and local variables, and libjvm.so is only about 16M.
-# Change this back to "-g" if you want the most expressive format.
-# (warning: that could easily inflate libjvm.so to 150M!)
-# Note: The Itanium gcc compiler crashes when using -gstabs.
-DEBUG_CFLAGS/ia64  = -g
-DEBUG_CFLAGS/amd64 = -g
-DEBUG_CFLAGS/arm   = -g
-DEBUG_CFLAGS/ppc   = -g
-DEBUG_CFLAGS += $(DEBUG_CFLAGS/$(BUILDARCH))
-ifeq ($(DEBUG_CFLAGS/$(BUILDARCH)),)
-DEBUG_CFLAGS += -gstabs
+ifeq ($(USE_CLANG), true)
+  # Restrict the debug information created by Clang to avoid
+  # too big object files and speed the build up a little bit
+  # (see http://llvm.org/bugs/show_bug.cgi?id=7554)
+  CFLAGS += -flimit-debug-info
 endif
 
-# DEBUG_BINARIES overrides everything, use full -g debug information
+# DEBUG_BINARIES uses full -g debug information for all configs
 ifeq ($(DEBUG_BINARIES), true)
-  DEBUG_CFLAGS = -g
-  CFLAGS += $(DEBUG_CFLAGS)
+  CFLAGS += -g
+else
+  # Use the stabs format for debugging information (this is the default
+  # on gcc-2.91). It's good enough, has all the information about line
+  # numbers and local variables, and libjvm.so is only about 16M.
+  # Change this back to "-g" if you want the most expressive format.
+  # (warning: that could easily inflate libjvm.so to 150M!)
+  # Note: The Itanium gcc compiler crashes when using -gstabs.
+  DEBUG_CFLAGS/ia64  = -g
+  DEBUG_CFLAGS/amd64 = -g
+  DEBUG_CFLAGS/arm   = -g
+  DEBUG_CFLAGS/ppc   = -g
+  DEBUG_CFLAGS += $(DEBUG_CFLAGS/$(BUILDARCH))
+  ifeq ($(DEBUG_CFLAGS/$(BUILDARCH)),)
+  DEBUG_CFLAGS += -gstabs
+  endif
 endif
 
 # If we are building HEADLESS, pass on to VM
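
(Illustration only, not part of the changeset: the gcc.make hunks above derive CC_VER_MAJOR/CC_VER_MINOR differently for Clang, which has no usable -dumpversion banner, than for GCC. The standalone sketch below runs the same grep/sed/cut pipeline from an ordinary shell; CC is an assumption, and the makefile's USE_CLANG switch is replaced by a simple banner check for brevity.)

    #!/bin/sh
    # Compiler version probe, mirroring the makefile pipelines above.
    CC=clang

    if $CC -v 2>&1 | grep -q clang; then
      # Clang: parse "... version X.Y ..." out of the verbose banner.
      CC_VER_MAJOR=`$CC -v 2>&1 | grep version | sed "s/.*version \([0-9]*\.[0-9]*\).*/\1/" | cut -d'.' -f1`
      CC_VER_MINOR=`$CC -v 2>&1 | grep version | sed "s/.*version \([0-9]*\.[0-9]*\).*/\1/" | cut -d'.' -f2`
    else
      # GCC: -dumpversion prints e.g. "4.8" or, on very old gcc, "egcs-2.91.66".
      CC_VER_MAJOR=`$CC -dumpversion | sed 's/egcs-//' | cut -d'.' -f1`
      CC_VER_MINOR=`$CC -dumpversion | sed 's/egcs-//' | cut -d'.' -f2`
    fi

    echo "major=$CC_VER_MAJOR minor=$CC_VER_MINOR"
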
--- a/hotspot/make/bsd/makefiles/vm.make	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/make/bsd/makefiles/vm.make	Wed Jul 05 18:59:05 2017 +0200
@@ -126,7 +126,11 @@
 LFLAGS += -Xlinker -z -Xlinker noexecstack
 endif
 
-LIBS += -lm -pthread
+LIBS += -lm
+
+ifeq ($(USE_CLANG),)
+  LIBS += -pthread
+endif
 
 # By default, link the *.o into the library, not the executable.
 LINK_INTO$(LINK_INTO) = LIBJVM
--- a/hotspot/make/excludeSrc.make	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/make/excludeSrc.make	Wed Jul 05 18:59:05 2017 +0200
@@ -25,7 +25,7 @@
       CXXFLAGS += -DINCLUDE_JVMTI=0
       CFLAGS += -DINCLUDE_JVMTI=0
 
-      Src_Files_EXCLUDE += jvmtiGetLoadedClasses.cpp forte.cpp jvmtiThreadState.cpp jvmtiExtensions.cpp \
+      Src_Files_EXCLUDE += jvmtiGetLoadedClasses.cpp jvmtiThreadState.cpp jvmtiExtensions.cpp \
 	jvmtiImpl.cpp jvmtiManageCapabilities.cpp jvmtiRawMonitor.cpp jvmtiUtil.cpp jvmtiTrace.cpp \
 	jvmtiCodeBlobEvents.cpp jvmtiEnv.cpp jvmtiRedefineClasses.cpp jvmtiEnvBase.cpp jvmtiEnvThreadState.cpp \
 	jvmtiTagMap.cpp jvmtiEventController.cpp evmCompat.cpp jvmtiEnter.xsl jvmtiExport.cpp \
@@ -87,7 +87,7 @@
 	g1BlockOffsetTable.cpp g1CardCounts.cpp g1CollectedHeap.cpp g1CollectorPolicy.cpp \
 	g1ErgoVerbose.cpp g1GCPhaseTimes.cpp g1HRPrinter.cpp g1HotCardCache.cpp g1Log.cpp \
 	g1MMUTracker.cpp g1MarkSweep.cpp g1MemoryPool.cpp g1MonitoringSupport.cpp \
-	g1RemSet.cpp g1SATBCardTableModRefBS.cpp g1_globals.cpp heapRegion.cpp \
+	g1RemSet.cpp g1RemSetSummary.cpp g1SATBCardTableModRefBS.cpp g1_globals.cpp heapRegion.cpp \
 	heapRegionRemSet.cpp heapRegionSeq.cpp heapRegionSet.cpp heapRegionSets.cpp \
 	ptrQueue.cpp satbQueue.cpp sparsePRT.cpp survRateGroup.cpp vm_operations_g1.cpp \
 	adjoiningGenerations.cpp adjoiningVirtualSpaces.cpp asPSOldGen.cpp asPSYoungGen.cpp \
--- a/hotspot/make/hotspot_version	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/make/hotspot_version	Wed Jul 05 18:59:05 2017 +0200
@@ -35,7 +35,7 @@
 
 HS_MAJOR_VER=25
 HS_MINOR_VER=0
-HS_BUILD_NUMBER=34
+HS_BUILD_NUMBER=36
 
 JDK_MAJOR_VER=1
 JDK_MINOR_VER=8
--- a/hotspot/make/linux/makefiles/adlc.make	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/make/linux/makefiles/adlc.make	Wed Jul 05 18:59:05 2017 +0200
@@ -68,7 +68,7 @@
 
 # CFLAGS_WARN holds compiler options to suppress/enable warnings.
 # Compiler warnings are treated as errors
-CFLAGS_WARN = -Werror
+CFLAGS_WARN = $(WARNINGS_ARE_ERRORS)
 CFLAGS += $(CFLAGS_WARN)
 
 OBJECTNAMES = \
--- a/hotspot/make/linux/makefiles/gcc.make	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/make/linux/makefiles/gcc.make	Wed Jul 05 18:59:05 2017 +0200
@@ -36,8 +36,14 @@
     HOSTCC  = gcc
     STRIP = $(ALT_COMPILER_PATH)/strip
   else
-    CXX = g++
-    CC  = gcc
+    ifeq ($(USE_CLANG), true)
+      CXX = clang++
+      CC  = clang
+    else
+      CXX = g++
+      CC  = gcc
+    endif
+
     HOSTCXX = $(CXX)
     HOSTCC  = $(CC)
     STRIP = strip
@@ -46,19 +52,79 @@
 endif
 
 
-# -dumpversion in gcc-2.91 shows "egcs-2.91.66". In later version, it only
-# prints the numbers (e.g. "2.95", "3.2.1")
-CC_VER_MAJOR := $(shell $(CC) -dumpversion | sed 's/egcs-//' | cut -d'.' -f1)
-CC_VER_MINOR := $(shell $(CC) -dumpversion | sed 's/egcs-//' | cut -d'.' -f2)
+ifeq ($(USE_CLANG), true)
+  CC_VER_MAJOR := $(shell $(CC) -v 2>&1 | grep version | sed "s/.*version \([0-9]*\.[0-9]*\).*/\1/" | cut -d'.' -f1)
+  CC_VER_MINOR := $(shell $(CC) -v 2>&1 | grep version | sed "s/.*version \([0-9]*\.[0-9]*\).*/\1/" | cut -d'.' -f2)
+else
+  # -dumpversion in gcc-2.91 shows "egcs-2.91.66". In later version, it only
+  # prints the numbers (e.g. "2.95", "3.2.1")
+  CC_VER_MAJOR := $(shell $(CC) -dumpversion | sed 's/egcs-//' | cut -d'.' -f1)
+  CC_VER_MINOR := $(shell $(CC) -dumpversion | sed 's/egcs-//' | cut -d'.' -f2)
+endif
+
+
+ifeq ($(USE_CLANG), true)
+  # Clang has precompiled header support by default, but the user can switch
+  # it off by using 'USE_PRECOMPILED_HEADER=0'.
+  ifdef LP64
+    ifeq ($(USE_PRECOMPILED_HEADER),)
+      USE_PRECOMPILED_HEADER=1
+    endif
+  else
+    # We don't support precompiled headers on 32-bit builds because some files are
+    # compiled with -fPIC while others are compiled without (see 'NONPIC_OBJ_FILES' in rules.make).
+    # Clang produces an error if the PCH file was compiled with different options than the actual compilation unit.
+    USE_PRECOMPILED_HEADER=0
+  endif
+
+  ifeq ($(USE_PRECOMPILED_HEADER),1)
+
+    ifndef LP64
+      $(error " Precompiled Headers only supported on 64-bit platforms!")
+    endif
+
+    PRECOMPILED_HEADER_DIR=.
+    PRECOMPILED_HEADER_SRC=$(GAMMADIR)/src/share/vm/precompiled/precompiled.hpp
+    PRECOMPILED_HEADER=$(PRECOMPILED_HEADER_DIR)/precompiled.hpp.pch
 
-# check for precompiled headers support
-ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 3 \) \| \( \( $(CC_VER_MAJOR) = 3 \) \& \( $(CC_VER_MINOR) \>= 4 \) \))" "0"
-# Allow the user to turn off precompiled headers from the command line.
-ifneq ($(USE_PRECOMPILED_HEADER),0)
-PRECOMPILED_HEADER_DIR=.
-PRECOMPILED_HEADER_SRC=$(GAMMADIR)/src/share/vm/precompiled/precompiled.hpp
-PRECOMPILED_HEADER=$(PRECOMPILED_HEADER_DIR)/precompiled.hpp.gch
+    PCH_FLAG = -include precompiled.hpp
+    PCH_FLAG/DEFAULT = $(PCH_FLAG)
+    PCH_FLAG/NO_PCH = -DNO_PCH
+    PCH_FLAG/BY_FILE = $(PCH_FLAG/$@)$(PCH_FLAG/DEFAULT$(PCH_FLAG/$@))
+
+    VM_PCH_FLAG/LIBJVM = $(PCH_FLAG/BY_FILE)
+    VM_PCH_FLAG/AOUT =
+    VM_PCH_FLAG = $(VM_PCH_FLAG/$(LINK_INTO))
+
+    # We only use precompiled headers for the JVM build
+    CFLAGS += $(VM_PCH_FLAG)
+
+    # There are some files which don't like precompiled headers.
+    # The following files are built with 'OPT_CFLAGS/NOOPT' (-O0) in the opt build.
+    # But Clang doesn't support using a precompiled header which was compiled with -O3
+    # in a compilation unit which uses '-O0'. We could also prepare an
+    # extra '-O0' PCH file for the opt build and use it here, but it's probably
+    # not worth the effort as long as only a few files need this special handling.
+    PCH_FLAG/loopTransform.o = $(PCH_FLAG/NO_PCH)
+    PCH_FLAG/sharedRuntimeTrig.o = $(PCH_FLAG/NO_PCH)
+    PCH_FLAG/sharedRuntimeTrans.o = $(PCH_FLAG/NO_PCH)
+
+  endif
+else # ($(USE_CLANG), true)
+  # check for precompiled headers support
+  ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 3 \) \| \( \( $(CC_VER_MAJOR) = 3 \) \& \( $(CC_VER_MINOR) \>= 4 \) \))" "0"
+    # Allow the user to turn off precompiled headers from the command line.
+    ifneq ($(USE_PRECOMPILED_HEADER),0)
+      PRECOMPILED_HEADER_DIR=.
+      PRECOMPILED_HEADER_SRC=$(GAMMADIR)/src/share/vm/precompiled/precompiled.hpp
+      PRECOMPILED_HEADER=$(PRECOMPILED_HEADER_DIR)/precompiled.hpp.gch
+    endif
+  endif
 endif
+
+# -DDONT_USE_PRECOMPILED_HEADER will exclude all includes in precompiled.hpp.
+ifeq ($(USE_PRECOMPILED_HEADER),0)
+  CFLAGS += -DDONT_USE_PRECOMPILED_HEADER
 endif
 
 
@@ -83,16 +149,30 @@
 CFLAGS += -fno-rtti
 CFLAGS += -fno-exceptions
 CFLAGS += -D_REENTRANT
-CFLAGS += -fcheck-new
-# version 4 and above support fvisibility=hidden (matches jni_x86.h file)
-# except 4.1.2 gives pointless warnings that can't be disabled (afaik)
-ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
-CFLAGS += -fvisibility=hidden
+ifeq ($(USE_CLANG),)
+  CFLAGS += -fcheck-new
+  # version 4 and above support fvisibility=hidden (matches jni_x86.h file)
+  # except 4.1.2 gives pointless warnings that can't be disabled (afaik)
+  ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
+    CFLAGS += -fvisibility=hidden
+  endif
+else
+  CFLAGS += -fvisibility=hidden
+endif
+
+ifeq ($(USE_CLANG), true)
+  # Before Clang 3.1, we had to pass the stack alignment specification directly to llvm with the help of '-mllvm'
+  # Starting with version 3.1, Clang understands '-mstack-alignment' (and rejects '-mllvm -stack-alignment')
+  ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 3 \) \| \( \( $(CC_VER_MAJOR) = 3 \) \& \( $(CC_VER_MINOR) \>= 1 \) \))" "0"
+    STACK_ALIGNMENT_OPT = -mno-omit-leaf-frame-pointer -mstack-alignment=16
+  else
+    STACK_ALIGNMENT_OPT = -mno-omit-leaf-frame-pointer -mllvm -stack-alignment=16
+  endif
 endif
 
 ARCHFLAG = $(ARCHFLAG/$(BUILDARCH))
 ARCHFLAG/i486    = -m32 -march=i586
-ARCHFLAG/amd64   = -m64
+ARCHFLAG/amd64   = -m64 $(STACK_ALIGNMENT_OPT)
 ARCHFLAG/ia64    =
 ARCHFLAG/sparc   = -m32 -mcpu=v9
 ARCHFLAG/sparcv9 = -m64 -mcpu=v9
@@ -126,12 +206,22 @@
 # Compiler warnings are treated as errors
 WARNINGS_ARE_ERRORS = -Werror
 
+ifeq ($(USE_CLANG), true)
+  # However, we need to clean up the code before we can unrestrictedly enable this option with Clang
+  WARNINGS_ARE_ERRORS += -Wno-unused-value -Wno-logical-op-parentheses -Wno-parentheses-equality -Wno-parentheses
+  WARNINGS_ARE_ERRORS += -Wno-switch -Wno-tautological-constant-out-of-range-compare -Wno-tautological-compare
+  WARNINGS_ARE_ERRORS += -Wno-delete-non-virtual-dtor -Wno-deprecated -Wno-format -Wno-dynamic-class-memaccess
+  WARNINGS_ARE_ERRORS += -Wno-return-type -Wno-empty-body
+endif
+
 WARNING_FLAGS = -Wpointer-arith -Wsign-compare -Wundef -Wunused-function
 
-# Since GCC 4.3, -Wconversion has changed its meanings to warn these implicit
-# conversions which might affect the values. Only enable it in earlier versions.
-ifeq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
-WARNING_FLAGS += -Wconversion
+ifeq ($(USE_CLANG),)
+  # Since GCC 4.3, -Wconversion has changed its meaning to warn about implicit
+  # conversions which might affect the values. Only enable it in earlier versions.
+  ifeq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
+    WARNING_FLAGS += -Wconversion
+  endif
 endif
 
 CFLAGS_WARN/DEFAULT = $(WARNINGS_ARE_ERRORS) $(WARNING_FLAGS)
@@ -165,19 +255,24 @@
 
 OPT_CFLAGS/NOOPT=-O0
 
-# 6835796. Problem in GCC 4.3.0 with mulnode.o optimized compilation. 
-ifneq "$(shell expr \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) = 3 \) \))" "0"
-OPT_CFLAGS/mulnode.o += -O0
+# Work around some compiler bugs.
+ifeq ($(USE_CLANG), true)
+  ifeq ($(shell expr $(CC_VER_MAJOR) = 4 \& $(CC_VER_MINOR) = 2), 1)
+    OPT_CFLAGS/loopTransform.o += $(OPT_CFLAGS/NOOPT)
+  endif
+else
+  # 6835796. Problem in GCC 4.3.0 with mulnode.o optimized compilation.
+  ifeq ($(shell expr $(CC_VER_MAJOR) = 4 \& $(CC_VER_MINOR) = 3), 1)
+    OPT_CFLAGS/mulnode.o += $(OPT_CFLAGS/NOOPT)
+  endif
 endif
 
 # Flags for generating make dependency flags.
-ifneq ("${CC_VER_MAJOR}", "2")
-DEPFLAGS = -fpch-deps -MMD -MP -MF $(DEP_DIR)/$(@:%=%.d)
-endif
-
-# -DDONT_USE_PRECOMPILED_HEADER will exclude all includes in precompiled.hpp.
-ifeq ($(USE_PRECOMPILED_HEADER),0)
-CFLAGS += -DDONT_USE_PRECOMPILED_HEADER
+DEPFLAGS = -MMD -MP -MF $(DEP_DIR)/$(@:%=%.d)
+ifeq ($(USE_CLANG),)
+  ifneq ("${CC_VER_MAJOR}", "2")
+    DEPFLAGS += -fpch-deps
+  endif
 endif
 
 #------------------------------------------------------------------------
@@ -186,24 +281,33 @@
 # statically link libstdc++.so, work with gcc but ignored by g++
 STATIC_STDCXX = -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic
 
-# statically link libgcc and/or libgcc_s, libgcc does not exist before gcc-3.x.
-ifneq ("${CC_VER_MAJOR}", "2")
-STATIC_LIBGCC += -static-libgcc
-endif
+ifeq ($(USE_CLANG),)
+  # statically link libgcc and/or libgcc_s, libgcc does not exist before gcc-3.x.
+  ifneq ("${CC_VER_MAJOR}", "2")
+    STATIC_LIBGCC += -static-libgcc
+  endif
 
-ifeq ($(BUILDARCH), ia64)
-LFLAGS += -Wl,-relax
+  ifeq ($(BUILDARCH), ia64)
+    LFLAGS += -Wl,-relax
+  endif
 endif
 
 # Enable linker optimization
 LFLAGS += -Xlinker -O1
 
-# If this is a --hash-style=gnu system, use --hash-style=both
-#   The gnu .hash section won't work on some Linux systems like SuSE 10.
-_HAS_HASH_STYLE_GNU:=$(shell $(CC) -dumpspecs | grep -- '--hash-style=gnu')
-ifneq ($(_HAS_HASH_STYLE_GNU),)
+ifeq ($(USE_CLANG),)
+  # If this is a --hash-style=gnu system, use --hash-style=both
+  #   The gnu .hash section won't work on some Linux systems like SuSE 10.
+  _HAS_HASH_STYLE_GNU:=$(shell $(CC) -dumpspecs | grep -- '--hash-style=gnu')
+  ifneq ($(_HAS_HASH_STYLE_GNU),)
+    LDFLAGS_HASH_STYLE = -Wl,--hash-style=both
+  endif
+else
+  # We don't know how to find out the 'hash style' of a system, as '-dumpspecs'
+  # doesn't work for Clang. So for now we'll always use --hash-style=both
   LDFLAGS_HASH_STYLE = -Wl,--hash-style=both
 endif
+
 LFLAGS += $(LDFLAGS_HASH_STYLE)
 
 # Use $(MAPFLAG:FILENAME=real_file_name) to specify a map file.
@@ -221,6 +325,13 @@
 #------------------------------------------------------------------------
 # Debug flags
 
+ifeq ($(USE_CLANG), true)
+  # Restrict the debug information created by Clang to avoid
+  # overly large object files and to speed up the build a little bit
+  # (see http://llvm.org/bugs/show_bug.cgi?id=7554)
+  CFLAGS += -flimit-debug-info
+endif
+
 # DEBUG_BINARIES uses full -g debug information for all configs
 ifeq ($(DEBUG_BINARIES), true)
   CFLAGS += -g
@@ -237,7 +348,12 @@
   DEBUG_CFLAGS/ppc   = -g
   DEBUG_CFLAGS += $(DEBUG_CFLAGS/$(BUILDARCH))
   ifeq ($(DEBUG_CFLAGS/$(BUILDARCH)),)
-    DEBUG_CFLAGS += -gstabs
+      ifeq ($(USE_CLANG), true)
+        # Clang doesn't understand -gstabs
+        DEBUG_CFLAGS += -g
+      else
+        DEBUG_CFLAGS += -gstabs
+      endif
   endif
   
   ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
@@ -247,7 +363,12 @@
     FASTDEBUG_CFLAGS/ppc   = -g
     FASTDEBUG_CFLAGS += $(DEBUG_CFLAGS/$(BUILDARCH))
     ifeq ($(FASTDEBUG_CFLAGS/$(BUILDARCH)),)
-      FASTDEBUG_CFLAGS += -gstabs
+      ifeq ($(USE_CLANG), true)
+        # Clang doesn't understand -gstabs
+        FASTDEBUG_CFLAGS += -g
+      else
+        FASTDEBUG_CFLAGS += -gstabs
+      endif
     endif
   
     OPT_CFLAGS/ia64  = -g
@@ -256,7 +377,12 @@
     OPT_CFLAGS/ppc   = -g
     OPT_CFLAGS += $(OPT_CFLAGS/$(BUILDARCH))
     ifeq ($(OPT_CFLAGS/$(BUILDARCH)),)
-      OPT_CFLAGS += -gstabs
+      ifeq ($(USE_CLANG), true)
+        # Clang doesn't understand -gstabs
+        OPT_CFLAGS += -g
+      else
+        OPT_CFLAGS += -gstabs
+      endif
     endif
   endif
 endif
--- a/hotspot/src/cpu/sparc/vm/cppInterpreter_sparc.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/cpu/sparc/vm/cppInterpreter_sparc.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1065,7 +1065,7 @@
   const int slop_factor = 2*wordSize;
 
   const int fixed_size = ((sizeof(BytecodeInterpreter) + slop_factor) >> LogBytesPerWord) + // what is the slop factor?
-                         //6815692//Method::extra_stack_words() +  // extra push slots for MH adapters
+                         Method::extra_stack_entries() + // extra stack for jsr 292
                          frame::memory_parameter_word_sp_offset +  // register save area + param window
                          (native ?  frame::interpreter_frame_extra_outgoing_argument_words : 0); // JNI, class
 
@@ -1221,9 +1221,7 @@
   // Full size expression stack
   __ ld_ptr(constMethod, O3);
   __ lduh(O3, in_bytes(ConstMethod::max_stack_offset()), O3);
-  guarantee(!EnableInvokeDynamic, "no support yet for java.lang.invoke.MethodHandle"); //6815692
-  //6815692//if (EnableInvokeDynamic)
-  //6815692//  __ inc(O3, Method::extra_stack_entries());
+  __ inc(O3, Method::extra_stack_entries());
   __ sll(O3, LogBytesPerWord, O3);
   __ sub(O2, O3, O3);
 //  __ sub(O3, wordSize, O3);                    // so prepush doesn't look out of bounds
@@ -2084,9 +2082,7 @@
 
   const int fixed_size = sizeof(BytecodeInterpreter)/wordSize +           // interpreter state object
                          frame::memory_parameter_word_sp_offset;   // register save area + param window
-  const int extra_stack = 0; //6815692//Method::extra_stack_entries();
   return (round_to(max_stack +
-                   extra_stack +
                    slop_factor +
                    fixed_size +
                    monitor_size +
@@ -2173,8 +2169,7 @@
   // Need +1 here because stack_base points to the word just above the first expr stack entry
   // and stack_limit is supposed to point to the word just below the last expr stack entry.
   // See generate_compute_interpreter_state.
-  int extra_stack = 0; //6815692//Method::extra_stack_entries();
-  to_fill->_stack_limit = stack_base - (method->max_stack() + 1 + extra_stack);
+  to_fill->_stack_limit = stack_base - (method->max_stack() + 1);
   to_fill->_monitor_base = (BasicObjectLock*) monitor_base;
 
   // sparc specific
--- a/hotspot/src/cpu/sparc/vm/interp_masm_sparc.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/cpu/sparc/vm/interp_masm_sparc.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -521,7 +521,7 @@
   // Compute max expression stack+register save area
   ld_ptr(Lmethod, in_bytes(Method::const_offset()), Gframe_size);
   lduh(Gframe_size, in_bytes(ConstMethod::max_stack_offset()), Gframe_size);  // Load max stack.
-  add( Gframe_size, frame::memory_parameter_word_sp_offset, Gframe_size );
+  add(Gframe_size, frame::memory_parameter_word_sp_offset+Method::extra_stack_entries(), Gframe_size );
 
   //
   // now set up a stack frame with the size computed above
--- a/hotspot/src/cpu/sparc/vm/templateInterpreter_sparc.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/cpu/sparc/vm/templateInterpreter_sparc.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -507,7 +507,7 @@
 
   const int extra_space =
     rounded_vm_local_words +                   // frame local scratch space
-    //6815692//Method::extra_stack_words() +       // extra push slots for MH adapters
+    Method::extra_stack_entries() +            // extra stack for jsr 292
     frame::memory_parameter_word_sp_offset +   // register save area
     (native_call ? frame::interpreter_frame_extra_outgoing_argument_words : 0);
 
@@ -1558,7 +1558,6 @@
        round_to(callee_extra_locals * Interpreter::stackElementWords, WordsPerLong);
   const int max_stack_words = max_stack * Interpreter::stackElementWords;
   return (round_to((max_stack_words
-                   //6815692//+ Method::extra_stack_words()
                    + rounded_vm_local_words
                    + frame::memory_parameter_word_sp_offset), WordsPerLong)
                    // already rounded
--- a/hotspot/src/cpu/x86/vm/cppInterpreter_x86.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/cpu/x86/vm/cppInterpreter_x86.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -539,12 +539,11 @@
 
     // compute full expression stack limit
 
-    const int extra_stack = 0; //6815692//Method::extra_stack_words();
     __ movptr(rdx, Address(rbx, Method::const_offset()));
     __ load_unsigned_short(rdx, Address(rdx, ConstMethod::max_stack_offset())); // get size of expression stack in words
     __ negptr(rdx);                                                       // so we can subtract in next step
     // Allocate expression stack
-    __ lea(rsp, Address(rsp, rdx, Address::times_ptr, -extra_stack));
+    __ lea(rsp, Address(rsp, rdx, Address::times_ptr, -Method::extra_stack_words()));
     __ movptr(STATE(_stack_limit), rsp);
   }
 
@@ -692,10 +691,9 @@
   // Always give one monitor to allow us to start interp if sync method.
   // Any additional monitors need a check when moving the expression stack
   const int one_monitor = frame::interpreter_frame_monitor_size() * wordSize;
-  const int extra_stack = 0; //6815692//Method::extra_stack_entries();
   __ movptr(rax, Address(rbx, Method::const_offset()));
   __ load_unsigned_short(rax, Address(rax, ConstMethod::max_stack_offset())); // get size of expression stack in words
-  __ lea(rax, Address(noreg, rax, Interpreter::stackElementScale(), extra_stack + one_monitor));
+  __ lea(rax, Address(noreg, rax, Interpreter::stackElementScale(), one_monitor+Method::extra_stack_words()));
   __ lea(rax, Address(rax, rdx, Interpreter::stackElementScale(), overhead_size));
 
 #ifdef ASSERT
@@ -2265,8 +2263,7 @@
   const int overhead_size = sizeof(BytecodeInterpreter)/wordSize +
     ( frame::sender_sp_offset - frame::link_offset) + 2;
 
-  const int extra_stack = 0; //6815692//Method::extra_stack_entries();
-  const int method_stack = (method->max_locals() + method->max_stack() + extra_stack) *
+  const int method_stack = (method->max_locals() + method->max_stack()) *
                            Interpreter::stackElementWords;
   return overhead_size + method_stack + stub_code;
 }
@@ -2331,8 +2328,7 @@
   // Need +1 here because stack_base points to the word just above the first expr stack entry
   // and stack_limit is supposed to point to the word just below the last expr stack entry.
   // See generate_compute_interpreter_state.
-  int extra_stack = 0; //6815692//Method::extra_stack_entries();
-  to_fill->_stack_limit = stack_base - (method->max_stack() + extra_stack + 1);
+  to_fill->_stack_limit = stack_base - (method->max_stack() + 1);
   to_fill->_monitor_base = (BasicObjectLock*) monitor_base;
 
   to_fill->_self_link = to_fill;
@@ -2380,8 +2376,7 @@
                                                 monitor_size);
 
   // Now with full size expression stack
-  int extra_stack = 0; //6815692//Method::extra_stack_entries();
-  int full_frame_size = short_frame_size + (method->max_stack() + extra_stack) * BytesPerWord;
+  int full_frame_size = short_frame_size + method->max_stack() * BytesPerWord;
 
   // and now with only live portion of the expression stack
   short_frame_size = short_frame_size + tempcount * BytesPerWord;
--- a/hotspot/src/cpu/x86/vm/templateInterpreter_x86_32.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/cpu/x86/vm/templateInterpreter_x86_32.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1565,8 +1565,7 @@
   // be sure to change this if you add/subtract anything to/from the overhead area
   const int overhead_size = -frame::interpreter_frame_initial_sp_offset;
 
-  const int extra_stack = Method::extra_stack_entries();
-  const int method_stack = (method->max_locals() + method->max_stack() + extra_stack) *
+  const int method_stack = (method->max_locals() + method->max_stack()) *
                            Interpreter::stackElementWords;
   return overhead_size + method_stack + stub_code;
 }
--- a/hotspot/src/cpu/x86/vm/templateInterpreter_x86_64.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/cpu/x86/vm/templateInterpreter_x86_64.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1574,8 +1574,7 @@
     -(frame::interpreter_frame_initial_sp_offset) + entry_size;
 
   const int stub_code = frame::entry_frame_after_call_words;
-  const int extra_stack = Method::extra_stack_entries();
-  const int method_stack = (method->max_locals() + method->max_stack() + extra_stack) *
+  const int method_stack = (method->max_locals() + method->max_stack()) *
                            Interpreter::stackElementWords;
   return (overhead_size + method_stack + stub_code);
 }
--- a/hotspot/src/os/bsd/vm/os_bsd.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/os/bsd/vm/os_bsd.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -626,8 +626,6 @@
 //////////////////////////////////////////////////////////////////////////////
 // create new thread
 
-static address highest_vm_reserved_address();
-
 // check if it's safe to start a new thread
 static bool _thread_safety_check(Thread* thread) {
   return true;
@@ -935,10 +933,10 @@
   return (1000 * 1000);
 }
 
-// XXX: For now, code this as if BSD does not support vtime.
-bool os::supports_vtime() { return false; }
+bool os::supports_vtime() { return true; }
 bool os::enable_vtime()   { return false; }
 bool os::vtime_enabled()  { return false; }
+
 double os::elapsedVTime() {
   // better than nothing, but not much
   return elapsedTime();
@@ -2112,10 +2110,6 @@
   return anon_munmap(addr, size);
 }
 
-static address highest_vm_reserved_address() {
-  return _highest_vm_reserved_address;
-}
-
 static bool bsd_mprotect(char* addr, size_t size, int prot) {
   // Bsd wants the mprotect address argument to be page aligned.
   char* bottom = (char*)align_size_down((intptr_t)addr, os::Bsd::page_size());
@@ -2159,43 +2153,6 @@
   return false;
 }
 
-/*
-* Set the coredump_filter bits to include largepages in core dump (bit 6)
-*
-* From the coredump_filter documentation:
-*
-* - (bit 0) anonymous private memory
-* - (bit 1) anonymous shared memory
-* - (bit 2) file-backed private memory
-* - (bit 3) file-backed shared memory
-* - (bit 4) ELF header pages in file-backed private memory areas (it is
-*           effective only if the bit 2 is cleared)
-* - (bit 5) hugetlb private memory
-* - (bit 6) hugetlb shared memory
-*/
-static void set_coredump_filter(void) {
-  FILE *f;
-  long cdm;
-
-  if ((f = fopen("/proc/self/coredump_filter", "r+")) == NULL) {
-    return;
-  }
-
-  if (fscanf(f, "%lx", &cdm) != 1) {
-    fclose(f);
-    return;
-  }
-
-  rewind(f);
-
-  if ((cdm & LARGEPAGES_BIT) == 0) {
-    cdm |= LARGEPAGES_BIT;
-    fprintf(f, "%#lx", cdm);
-  }
-
-  fclose(f);
-}
-
 // Large page support
 
 static size_t _large_page_size = 0;
@@ -3030,6 +2987,19 @@
     sigAct.sa_sigaction = signalHandler;
     sigAct.sa_flags = SA_SIGINFO|SA_RESTART;
   }
+#if __APPLE__
+  // Needed for the main thread, as XNU (the Mac OS X kernel) will only deliver SIGSEGV
+  // (which starts as SIGBUS) on the main thread with a faulting address inside "stack+guard pages"
+  // if the signal handler declares it will handle it on an alternate stack.
+  // Notice we only declare that we will handle it on the alt stack, but we are not
+  // actually going to use a real alt stack - this is just a workaround.
+  // Please see ux_exception.c, method catch_mach_exception_raise, for details:
+  // http://www.opensource.apple.com/source/xnu/xnu-2050.18.24/bsd/uxkern/ux_exception.c
+  if (sig == SIGSEGV) {
+    sigAct.sa_flags |= SA_ONSTACK;
+  }
+#endif
+
   // Save flags, which are set by ours
   assert(sig > 0 && sig < MAXSIGNUM, "vm signal out of expected range");
   sigflags[sig] = sigAct.sa_flags;
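
The SA_ONSTACK addition above is the whole Mac OS X workaround: the handler never actually runs on an alternate stack, it merely claims it will so that XNU delivers the fault to the main thread. A minimal standalone sketch of installing a SIGSEGV handler with that flag, assuming nothing from HotSpot (the handler name segv_handler is made up for illustration):

    #include <signal.h>
    #include <string.h>
    #include <stdio.h>

    // Hypothetical handler; it only exists to have something to install.
    static void segv_handler(int sig, siginfo_t* info, void* ctx) {
      (void)sig; (void)info; (void)ctx;
      // A real handler would inspect info->si_addr and decide what to do.
    }

    int main() {
      struct sigaction sa;
      memset(&sa, 0, sizeof(sa));
      sigemptyset(&sa.sa_mask);
      sa.sa_sigaction = segv_handler;
      sa.sa_flags = SA_SIGINFO | SA_RESTART;
    #if defined(__APPLE__)
      // Declare (but never use) an alternate stack so the kernel delivers
      // SIGSEGV for faults in the stack guard pages to the main thread.
      sa.sa_flags |= SA_ONSTACK;
    #endif
      return sigaction(SIGSEGV, &sa, NULL);
    }
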
--- a/hotspot/src/os/linux/vm/os_linux.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/os/linux/vm/os_linux.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -101,6 +101,12 @@
 # include <inttypes.h>
 # include <sys/ioctl.h>
 
+// If RUSAGE_THREAD for getrusage() has not been defined, define it here. The code calling
+// getrusage() is prepared to handle the associated failure.
+#ifndef RUSAGE_THREAD
+#define RUSAGE_THREAD   (1)               /* only the calling thread */
+#endif
+
 #define MAX_PATH    (2 * K)
 
 // for timer info max values which include all bits
@@ -1336,15 +1342,19 @@
   return (1000 * 1000);
 }
 
-// For now, we say that linux does not support vtime.  I have no idea
-// whether it can actually be made to (DLD, 9/13/05).
-
-bool os::supports_vtime() { return false; }
+bool os::supports_vtime() { return true; }
 bool os::enable_vtime()   { return false; }
 bool os::vtime_enabled()  { return false; }
+
 double os::elapsedVTime() {
-  // better than nothing, but not much
-  return elapsedTime();
+  struct rusage usage;
+  int retval = getrusage(RUSAGE_THREAD, &usage);
+  if (retval == 0) {
+    return (double) (usage.ru_utime.tv_sec + usage.ru_stime.tv_sec) + (double) (usage.ru_utime.tv_usec + usage.ru_stime.tv_usec) / (1000 * 1000);
+  } else {
+    // better than nothing, but not much
+    return elapsedTime();
+  }
 }
 
 jlong os::javaTimeMillis() {
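
For reference, the new Linux os::elapsedVTime() boils down to one getrusage(RUSAGE_THREAD) call plus the same fallback #define as above. A self-contained sketch of that per-thread CPU time query (the helper name thread_cpu_seconds is ours, not HotSpot's):

    #include <sys/resource.h>
    #include <stdio.h>

    #ifndef RUSAGE_THREAD
    #define RUSAGE_THREAD (1)   /* only the calling thread; fallback for old headers */
    #endif

    // Returns user+system CPU time consumed by the calling thread, in seconds,
    // or -1.0 if getrusage() fails (e.g. on kernels without RUSAGE_THREAD).
    static double thread_cpu_seconds() {
      struct rusage usage;
      if (getrusage(RUSAGE_THREAD, &usage) != 0) {
        return -1.0;
      }
      return (double)(usage.ru_utime.tv_sec + usage.ru_stime.tv_sec) +
             (double)(usage.ru_utime.tv_usec + usage.ru_stime.tv_usec) / (1000 * 1000);
    }

    int main() {
      printf("thread cpu: %f s\n", thread_cpu_seconds());
      return 0;
    }
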
--- a/hotspot/src/os/windows/vm/os_windows.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/os/windows/vm/os_windows.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -813,15 +813,21 @@
   return result;
 }
 
-// For now, we say that Windows does not support vtime.  I have no idea
-// whether it can actually be made to (DLD, 9/13/05).
-
-bool os::supports_vtime() { return false; }
+bool os::supports_vtime() { return true; }
 bool os::enable_vtime() { return false; }
 bool os::vtime_enabled() { return false; }
+
 double os::elapsedVTime() {
-  // better than nothing, but not much
-  return elapsedTime();
+  FILETIME created;
+  FILETIME exited;
+  FILETIME kernel;
+  FILETIME user;
+  if (GetThreadTimes(GetCurrentThread(), &created, &exited, &kernel, &user) != 0) {
+    // the resolution of windows_to_java_time() should be sufficient (ms)
+    return (double) (windows_to_java_time(kernel) + windows_to_java_time(user)) / MILLIUNITS;
+  } else {
+    return elapsedTime();
+  }
 }
 
 jlong os::javaTimeMillis() {
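
The Windows version sums the kernel and user FILETIMEs returned by GetThreadTimes(). A standalone sketch of the same calculation; the conversion helper below is a plain stand-in for HotSpot's windows_to_java_time() and simply turns 100-nanosecond ticks into milliseconds:

    #include <windows.h>
    #include <stdio.h>

    // FILETIME counts 100-nanosecond ticks; convert to milliseconds.
    static double filetime_to_millis(const FILETIME& ft) {
      ULARGE_INTEGER t;
      t.LowPart  = ft.dwLowDateTime;
      t.HighPart = ft.dwHighDateTime;
      return (double)t.QuadPart / 10000.0;
    }

    int main() {
      FILETIME created, exited, kernel, user;
      if (GetThreadTimes(GetCurrentThread(), &created, &exited, &kernel, &user) != 0) {
        double cpu_ms = filetime_to_millis(kernel) + filetime_to_millis(user);
        printf("thread cpu: %.3f ms\n", cpu_ms);
      }
      return 0;
    }
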
@@ -944,6 +950,8 @@
   MINIDUMP_TYPE dumpType;
   static const char* cwd;
 
+// Default is to always create a dump for debug builds; on product builds, only dump on server versions of Windows.
+#ifndef ASSERT
   // If running on a client version of Windows and user has not explicitly enabled dumping
   if (!os::win32::is_windows_server() && !CreateMinidumpOnCrash) {
     VMError::report_coredump_status("Minidumps are not enabled by default on client versions of Windows", false);
@@ -953,6 +961,12 @@
     VMError::report_coredump_status("Minidump has been disabled from the command line", false);
     return;
   }
+#else
+  if (!FLAG_IS_DEFAULT(CreateMinidumpOnCrash) && !CreateMinidumpOnCrash) {
+    VMError::report_coredump_status("Minidump has been disabled from the command line", false);
+    return;
+  }
+#endif
 
   dbghelp = os::win32::load_Windows_dll("DBGHELP.DLL", NULL, 0);
 
@@ -1004,7 +1018,21 @@
   // the dump types we really want. If first call fails, lets fall back to just use MiniDumpWithFullMemory then.
   if (_MiniDumpWriteDump(hProcess, processId, dumpFile, dumpType, pmei, NULL, NULL) == false &&
       _MiniDumpWriteDump(hProcess, processId, dumpFile, (MINIDUMP_TYPE)MiniDumpWithFullMemory, pmei, NULL, NULL) == false) {
-    VMError::report_coredump_status("Call to MiniDumpWriteDump() failed", false);
+        DWORD error = GetLastError();
+        LPTSTR msgbuf = NULL;
+
+        if (FormatMessage(FORMAT_MESSAGE_ALLOCATE_BUFFER |
+                      FORMAT_MESSAGE_FROM_SYSTEM |
+                      FORMAT_MESSAGE_IGNORE_INSERTS,
+                      NULL, error, 0, (LPTSTR)&msgbuf, 0, NULL) != 0) {
+
+          jio_snprintf(buffer, bufferSize, "Call to MiniDumpWriteDump() failed (Error 0x%x: %s)", error, msgbuf);
+          LocalFree(msgbuf);
+        } else {
+          // Call to FormatMessage failed; just include the result from GetLastError
+          jio_snprintf(buffer, bufferSize, "Call to MiniDumpWriteDump() failed (Error 0x%x)", error);
+        }
+        VMError::report_coredump_status(buffer, false);
   } else {
     VMError::report_coredump_status(buffer, true);
   }
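
The improved error path above uses FormatMessage to turn the raw GetLastError() code into readable text before reporting the failed MiniDumpWriteDump call. A minimal sketch of that pattern on its own (print_last_error_text and the deliberately failing GetFileAttributesA call are illustrative, not HotSpot code):

    #include <windows.h>
    #include <stdio.h>

    // Prints the last Win32 error as "<what> failed (Error 0x<code>: <system message>)".
    static void print_last_error_text(const char* what) {
      DWORD error = GetLastError();
      LPSTR msgbuf = NULL;
      if (FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER |
                         FORMAT_MESSAGE_FROM_SYSTEM |
                         FORMAT_MESSAGE_IGNORE_INSERTS,
                         NULL, error, 0, (LPSTR)&msgbuf, 0, NULL) != 0) {
        printf("%s failed (Error 0x%lx: %s)\n", what, error, msgbuf);
        LocalFree(msgbuf);   // buffer was allocated by FormatMessage
      } else {
        // FormatMessage itself failed; report only the raw error code.
        printf("%s failed (Error 0x%lx)\n", what, error);
      }
    }

    int main() {
      if (GetFileAttributesA("Z:\\no\\such\\file") == INVALID_FILE_ATTRIBUTES) {
        print_last_error_text("GetFileAttributesA()");
      }
      return 0;
    }
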
--- a/hotspot/src/os_cpu/linux_x86/vm/linux_x86_32.s	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/os_cpu/linux_x86/vm/linux_x86_32.s	Wed Jul 05 18:59:05 2017 +0200
@@ -241,7 +241,7 @@
         jbe      2f                   # <= 32 dwords
         rep;     smovl
         jmp      4f
-	.=.+8
+	.space 8
 2:      subl     %esi,%edi
         .p2align 4,,15
 3:      movl     (%esi),%edx
@@ -378,7 +378,7 @@
         rep;     smovl
         jmp      4f 
         # copy aligned dwords
-        .=.+5
+        .space 5
 2:      subl     %esi,%edi 
         .p2align 4,,15
 3:      movl     (%esi),%edx
@@ -454,7 +454,7 @@
         popl     %edi
         popl     %esi
         ret
-        .=.+10
+        .space 10
 2:      subl     %esi,%edi
         jmp      4f
         .p2align 4,,15
--- a/hotspot/src/os_cpu/linux_x86/vm/os_linux_x86.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/os_cpu/linux_x86/vm/os_linux_x86.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -93,6 +93,10 @@
   register void *esp;
   __asm__("mov %%"SPELL_REG_SP", %0":"=r"(esp));
   return (address) ((char*)esp + sizeof(long)*2);
+#elif defined(__clang__)
+  intptr_t* esp;
+  __asm__ __volatile__ ("mov %%"SPELL_REG_SP", %0":"=r"(esp):);
+  return (address) esp;
 #else
   register void *esp __asm__ (SPELL_REG_SP);
   return (address) esp;
@@ -175,6 +179,9 @@
 #ifdef SPARC_WORKS
   register intptr_t **ebp;
   __asm__("mov %%"SPELL_REG_FP", %0":"=r"(ebp));
+#elif defined(__clang__)
+  intptr_t **ebp;
+  __asm__ __volatile__ ("mov %%"SPELL_REG_FP", %0":"=r"(ebp):);
 #else
   register intptr_t **ebp __asm__ (SPELL_REG_FP);
 #endif
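
The os_linux_x86.cpp change works around the fact that Clang ignores GCC's specific-register local variables (register void* esp __asm__(SPELL_REG_SP)); instead it reads the register with an explicit inline-asm move. A small x86_64-only sketch of the Clang-friendly form, hard-coding "rsp" where HotSpot's SPELL_REG_SP macro would pick the right register name:

    #include <stdint.h>
    #include <stdio.h>

    // Read the current stack pointer with a plain inline-asm move,
    // which both GCC and Clang accept (x86_64 only in this sketch).
    static intptr_t* current_stack_pointer() {
      intptr_t* sp;
      __asm__ __volatile__ ("mov %%rsp, %0" : "=r"(sp));
      return sp;
    }

    int main() {
      printf("approximate stack pointer: %p\n", (void*)current_stack_pointer());
      return 0;
    }
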
--- a/hotspot/src/share/vm/adlc/archDesc.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/adlc/archDesc.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -29,8 +29,8 @@
 static FILE *errfile = stderr;
 
 //--------------------------- utility functions -----------------------------
-inline char  toUpper(char lower) {
-  return (('a' <= lower && lower <= 'z') ? (lower + ('A'-'a')) : lower);
+inline char toUpper(char lower) {
+  return (('a' <= lower && lower <= 'z') ? ((char) (lower + ('A'-'a'))) : lower);
 }
 char *toUpper(const char *str) {
   char *upper  = new char[strlen(str)+1];
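
The one-character change to toUpper above adds an explicit (char) cast: `lower + ('A'-'a')` is an int expression, and returning it from a function declared to return char narrows implicitly, which stricter warning settings (such as those used for the Clang build) complain about. A tiny standalone illustration of the same shape (toUpperAscii is our name, not adlc's):

    #include <stdio.h>

    // The explicit (char) cast documents the intentional int-to-char narrowing
    // and keeps implicit-conversion warnings quiet.
    inline char toUpperAscii(char lower) {
      return ('a' <= lower && lower <= 'z') ? (char)(lower + ('A' - 'a')) : lower;
    }

    int main() {
      printf("%c%c\n", toUpperAscii('q'), toUpperAscii('Q'));  // prints "QQ"
      return 0;
    }
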
--- a/hotspot/src/share/vm/adlc/dict2.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/adlc/dict2.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -64,18 +64,18 @@
   int i;
 
   // Precompute table of null character hashes
-  if( !initflag ) {             // Not initializated yet?
-    xsum[0] = (1<<shft[0])+1;   // Initialize
+  if (!initflag) {              // Not initialized yet?
+    xsum[0] = (short) ((1 << shft[0]) + 1);  // Initialize
     for( i = 1; i < MAXID; i++) {
-      xsum[i] = (1<<shft[i])+1+xsum[i-1];
+      xsum[i] = (short) ((1 << shft[i]) + 1 + xsum[i-1]);
     }
     initflag = 1;               // Never again
   }
 
   _size = 16;                   // Size is a power of 2
   _cnt = 0;                     // Dictionary is empty
-  _bin = (bucket*)_arena->Amalloc_4(sizeof(bucket)*_size);
-  memset(_bin,0,sizeof(bucket)*_size);
+  _bin = (bucket*)_arena->Amalloc_4(sizeof(bucket) * _size);
+  memset(_bin, 0, sizeof(bucket) * _size);
 }
 
 //------------------------------~Dict------------------------------------------
@@ -287,11 +287,11 @@
   register int sum = 0;
   register const char *s = (const char *)t;
 
-  while( ((c = s[k]) != '\0') && (k < MAXID-1) ) { // Get characters till nul
-    c = (c<<1)+1;               // Characters are always odd!
-    sum += c + (c<<shft[k++]);  // Universal hash function
+  while (((c = s[k]) != '\0') && (k < MAXID-1)) { // Get characters till nul
+    c = (char) ((c << 1) + 1);    // Characters are always odd!
+    sum += c + (c << shft[k++]);  // Universal hash function
   }
-  assert( k < (MAXID), "Exceeded maximum name length");
+  assert(k < (MAXID), "Exceeded maximum name length");
   return (int)((sum+xsum[k]) >> 1); // Hash key, un-modulo'd table size
 }
 
--- a/hotspot/src/share/vm/adlc/formssel.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/adlc/formssel.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -796,11 +796,11 @@
   return num_opnds;
 }
 
-const char *InstructForm::opnd_ident(int idx) {
+const char* InstructForm::opnd_ident(int idx) {
   return _components.at(idx)->_name;
 }
 
-const char *InstructForm::unique_opnd_ident(int idx) {
+const char* InstructForm::unique_opnd_ident(uint idx) {
   uint i;
   for (i = 1; i < num_opnds(); ++i) {
     if (unique_opnds_idx(i) == idx) {
@@ -1315,36 +1315,36 @@
 // Search through operands to determine parameters' unique positions.
 void InstructForm::set_unique_opnds() {
   uint* uniq_idx = NULL;
-  int  nopnds = num_opnds();
+  uint  nopnds = num_opnds();
   uint  num_uniq = nopnds;
-  int i;
+  uint i;
   _uniq_idx_length = 0;
-  if ( nopnds > 0 ) {
+  if (nopnds > 0) {
     // Allocate index array.  Worst case we're mapping from each
     // component back to an index and any DEF always goes at 0 so the
     // length of the array has to be the number of components + 1.
     _uniq_idx_length = _components.count() + 1;
-    uniq_idx = (uint*) malloc(sizeof(uint)*(_uniq_idx_length));
-    for( i = 0; i < _uniq_idx_length; i++ ) {
+    uniq_idx = (uint*) malloc(sizeof(uint) * _uniq_idx_length);
+    for (i = 0; i < _uniq_idx_length; i++) {
       uniq_idx[i] = i;
     }
   }
   // Do it only if there is a match rule and no expand rule.  With an
   // expand rule it is done by creating new mach node in Expand()
   // method.
-  if ( nopnds > 0 && _matrule != NULL && _exprule == NULL ) {
+  if (nopnds > 0 && _matrule != NULL && _exprule == NULL) {
     const char *name;
     uint count;
     bool has_dupl_use = false;
 
     _parameters.reset();
-    while( (name = _parameters.iter()) != NULL ) {
+    while ((name = _parameters.iter()) != NULL) {
       count = 0;
-      int position = 0;
-      int uniq_position = 0;
+      uint position = 0;
+      uint uniq_position = 0;
       _components.reset();
       Component *comp = NULL;
-      if( sets_result() ) {
+      if (sets_result()) {
         comp = _components.iter();
         position++;
       }
@@ -1352,11 +1352,11 @@
       for (; (comp = _components.iter()) != NULL; ++position) {
         // When the first component is not a DEF,
         // leave space for the result operand!
-        if ( position==0 && (! comp->isa(Component::DEF)) ) {
+        if (position==0 && (!comp->isa(Component::DEF))) {
           ++position;
         }
-        if( strcmp(name, comp->_name)==0 ) {
-          if( ++count > 1 ) {
+        if (strcmp(name, comp->_name) == 0) {
+          if (++count > 1) {
             assert(position < _uniq_idx_length, "out of bounds");
             uniq_idx[position] = uniq_position;
             has_dupl_use = true;
@@ -1364,22 +1364,25 @@
             uniq_position = position;
           }
         }
-        if( comp->isa(Component::DEF)
-            && comp->isa(Component::USE) ) {
+        if (comp->isa(Component::DEF) && comp->isa(Component::USE)) {
           ++position;
-          if( position != 1 )
+          if (position != 1)
             --position;   // only use two slots for the 1st USE_DEF
         }
       }
     }
-    if( has_dupl_use ) {
-      for( i = 1; i < nopnds; i++ )
-        if( i != uniq_idx[i] )
+    if (has_dupl_use) {
+      for (i = 1; i < nopnds; i++) {
+        if (i != uniq_idx[i]) {
           break;
-      int  j = i;
-      for( ; i < nopnds; i++ )
-        if( i == uniq_idx[i] )
+        }
+      }
+      uint j = i;
+      for (; i < nopnds; i++) {
+        if (i == uniq_idx[i]) {
           uniq_idx[i] = j++;
+        }
+      }
       num_uniq = j;
     }
   }
@@ -2216,21 +2219,27 @@
 
 
 bool OperandForm::is_bound_register() const {
-  RegClass *reg_class  = get_RegClass();
-  if (reg_class == NULL) return false;
-
-  const char * name = ideal_type(globalAD->globalNames());
-  if (name == NULL) return false;
-
-  int size = 0;
-  if (strcmp(name,"RegFlags")==0) size =  1;
-  if (strcmp(name,"RegI")==0) size =  1;
-  if (strcmp(name,"RegF")==0) size =  1;
-  if (strcmp(name,"RegD")==0) size =  2;
-  if (strcmp(name,"RegL")==0) size =  2;
-  if (strcmp(name,"RegN")==0) size =  1;
-  if (strcmp(name,"RegP")==0) size =  globalAD->get_preproc_def("_LP64") ? 2 : 1;
-  if (size == 0) return false;
+  RegClass* reg_class = get_RegClass();
+  if (reg_class == NULL) {
+    return false;
+  }
+
+  const char* name = ideal_type(globalAD->globalNames());
+  if (name == NULL) {
+    return false;
+  }
+
+  uint size = 0;
+  if (strcmp(name, "RegFlags") == 0) size = 1;
+  if (strcmp(name, "RegI") == 0) size = 1;
+  if (strcmp(name, "RegF") == 0) size = 1;
+  if (strcmp(name, "RegD") == 0) size = 2;
+  if (strcmp(name, "RegL") == 0) size = 2;
+  if (strcmp(name, "RegN") == 0) size = 1;
+  if (strcmp(name, "RegP") == 0) size = globalAD->get_preproc_def("_LP64") ? 2 : 1;
+  if (size == 0) {
+    return false;
+  }
   return size == reg_class->size();
 }
 
--- a/hotspot/src/share/vm/adlc/formssel.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/adlc/formssel.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -106,7 +106,7 @@
   const char    *_ins_pipe;        // Instruction Scheduling description class
 
   uint          *_uniq_idx;        // Indexes of unique operands
-  int            _uniq_idx_length; // Length of _uniq_idx array
+  uint           _uniq_idx_length; // Length of _uniq_idx array
   uint           _num_uniq;        // Number  of unique operands
   ComponentList  _components;      // List of Components matches MachNode's
                                    // operand structure
@@ -272,14 +272,14 @@
   void                set_unique_opnds();
   uint                num_unique_opnds() { return _num_uniq; }
   uint                unique_opnds_idx(int idx) {
-                        if( _uniq_idx != NULL && idx > 0 ) {
-                          assert(idx < _uniq_idx_length, "out of bounds");
-                          return _uniq_idx[idx];
-                        } else {
-                          return idx;
-                        }
+    if (_uniq_idx != NULL && idx > 0) {
+      assert((uint)idx < _uniq_idx_length, "out of bounds");
+      return _uniq_idx[idx];
+    } else {
+      return idx;
+    }
   }
-  const char         *unique_opnd_ident(int idx);  // Name of operand at unique idx.
+  const char         *unique_opnd_ident(uint idx);  // Name of operand at unique idx.
 
   // Operands which are only KILLs aren't part of the input array and
   // require special handling in some cases.  Their position in this
--- a/hotspot/src/share/vm/adlc/output_c.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/adlc/output_c.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -463,8 +463,9 @@
   uint resources_used_exclusively = 0;
 
   for (pipeclass->_resUsage.reset();
-       (piperesource = (const PipeClassResourceForm *)pipeclass->_resUsage.iter()) != NULL; )
+       (piperesource = (const PipeClassResourceForm*)pipeclass->_resUsage.iter()) != NULL; ) {
     element_count++;
+  }
 
   // Pre-compute the string length
   int templen;
@@ -482,8 +483,8 @@
   for (i = rescount; i > 0; i /= 10)
     maskdigit++;
 
-  static const char * pipeline_use_cycle_mask = "Pipeline_Use_Cycle_Mask";
-  static const char * pipeline_use_element    = "Pipeline_Use_Element";
+  static const char* pipeline_use_cycle_mask = "Pipeline_Use_Cycle_Mask";
+  static const char* pipeline_use_element    = "Pipeline_Use_Element";
 
   templen = 1 +
     (int)(strlen(pipeline_use_cycle_mask) + (int)strlen(pipeline_use_element) +
@@ -496,11 +497,12 @@
   templen = 0;
 
   for (pipeclass->_resUsage.reset();
-       (piperesource = (const PipeClassResourceForm *)pipeclass->_resUsage.iter()) != NULL; ) {
+       (piperesource = (const PipeClassResourceForm*)pipeclass->_resUsage.iter()) != NULL; ) {
     int used_mask = pipeline->_resdict[piperesource->_resource]->is_resource()->mask();
 
-    if (!used_mask)
+    if (!used_mask) {
       fprintf(stderr, "*** used_mask is 0 ***\n");
+    }
 
     resources_used |= used_mask;
 
@@ -509,8 +511,9 @@
     for (lb =  0; (used_mask & (1 << lb)) == 0; lb++);
     for (ub = 31; (used_mask & (1 << ub)) == 0; ub--);
 
-    if (lb == ub)
+    if (lb == ub) {
       resources_used_exclusively |= used_mask;
+    }
 
     int formatlen =
       sprintf(&resource_mask[templen], "  %s(0x%0*x, %*d, %*d, %s %s(",
@@ -526,7 +529,7 @@
 
     int cycles = piperesource->_cycles;
     uint stage          = pipeline->_stages.index(piperesource->_stage);
-    if (NameList::Not_in_list == stage) {
+    if ((uint)NameList::Not_in_list == stage) {
       fprintf(stderr,
               "pipeline_res_mask_initializer: "
               "semantic error: "
@@ -534,8 +537,8 @@
               piperesource->_stage);
       exit(1);
     }
-    uint upper_limit    = stage+cycles-1;
-    uint lower_limit    = stage-1;
+    uint upper_limit    = stage + cycles - 1;
+    uint lower_limit    = stage - 1;
     uint upper_idx      = upper_limit >> 5;
     uint lower_idx      = lower_limit >> 5;
     uint upper_position = upper_limit & 0x1f;
@@ -543,7 +546,7 @@
 
     uint mask = (((uint)1) << upper_position) - 1;
 
-    while ( upper_idx > lower_idx ) {
+    while (upper_idx > lower_idx) {
       res_mask[upper_idx--] |= mask;
       mask = (uint)-1;
     }
@@ -565,8 +568,9 @@
   }
 
   resource_mask[templen] = 0;
-  if (last_comma)
+  if (last_comma) {
     last_comma[0] = ' ';
+  }
 
   // See if the same string is in the table
   int ndx = pipeline_res_mask.index(resource_mask);
@@ -580,7 +584,7 @@
       fprintf(fp_cpp, "static const Pipeline_Use_Element pipeline_res_mask_%03d[%d] = {\n%s};\n\n",
         ndx+1, element_count, resource_mask);
 
-    char * args = new char [9 + 2*masklen + maskdigit];
+    char* args = new char [9 + 2*masklen + maskdigit];
 
     sprintf(args, "0x%0*x, 0x%0*x, %*d",
       masklen, resources_used,
@@ -589,8 +593,9 @@
 
     pipeline_res_args.addName(args);
   }
-  else
+  else {
     delete [] resource_mask;
+  }
 
   delete [] res_mask;
 //delete [] res_masks;
@@ -1787,7 +1792,7 @@
       // Skip first unique operands.
       for( i = 1; i < cur_num_opnds; i++ ) {
         comp = node->_components.iter();
-        if( (int)i != node->unique_opnds_idx(i) ) {
+        if (i != node->unique_opnds_idx(i)) {
           break;
         }
         new_num_opnds++;
@@ -1795,7 +1800,7 @@
       // Replace not unique operands with next unique operands.
       for( ; i < cur_num_opnds; i++ ) {
         comp = node->_components.iter();
-        int j = node->unique_opnds_idx(i);
+        uint j = node->unique_opnds_idx(i);
         // unique_opnds_idx(i) is unique if unique_opnds_idx(j) is not unique.
         if( j != node->unique_opnds_idx(j) ) {
           fprintf(fp,"  set_opnd_array(%d, opnd_array(%d)->clone(C)); // %s\n",
--- a/hotspot/src/share/vm/c1/c1_LIRGenerator.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/c1/c1_LIRGenerator.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -2232,6 +2232,7 @@
       // We still need to continue with the checks.
       if (src.is_constant()) {
         ciObject* src_con = src.get_jobject_constant();
+        guarantee(src_con != NULL, "no source constant");
 
         if (src_con->is_null_object()) {
           // The constant src object is null - We can skip
--- a/hotspot/src/share/vm/classfile/classFileParser.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/classfile/classFileParser.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -444,8 +444,8 @@
             break;
           case JVM_REF_invokeStatic:
           case JVM_REF_invokeSpecial:
-            check_property(
-               tag.is_method() || tag.is_interface_method(),
+            check_property(tag.is_method() ||
+                           ((_major_version >= JAVA_8_VERSION) && tag.is_interface_method()),
                "Invalid constant pool index %u in class file %s (not a method)",
                ref_index, CHECK_(nullHandle));
              break;
@@ -3152,7 +3152,6 @@
       }
     }
   }
-  int contended_count = nonstatic_contended_count;
 
 
   // Calculate the starting byte offsets
@@ -3177,35 +3176,52 @@
 
   next_nonstatic_field_offset = nonstatic_fields_start;
 
+  bool is_contended_class     = parsed_annotations->is_contended();
+
   // Class is contended, pad before all the fields
-  if (parsed_annotations->is_contended()) {
+  if (is_contended_class) {
     next_nonstatic_field_offset += ContendedPaddingWidth;
   }
 
-  // Compute the non-contended fields count
+  // Compute the non-contended fields count.
+  // The packing code below relies on these counts to determine if some field
+  // can be squeezed into the alignment gap. Contended fields are obviously
+  // exempt from that.
   unsigned int nonstatic_double_count = fac->count[NONSTATIC_DOUBLE] - fac_contended.count[NONSTATIC_DOUBLE];
   unsigned int nonstatic_word_count   = fac->count[NONSTATIC_WORD]   - fac_contended.count[NONSTATIC_WORD];
   unsigned int nonstatic_short_count  = fac->count[NONSTATIC_SHORT]  - fac_contended.count[NONSTATIC_SHORT];
   unsigned int nonstatic_byte_count   = fac->count[NONSTATIC_BYTE]   - fac_contended.count[NONSTATIC_BYTE];
   unsigned int nonstatic_oop_count    = fac->count[NONSTATIC_OOP]    - fac_contended.count[NONSTATIC_OOP];
 
+  // Total non-static fields count, including every contended field
+  unsigned int nonstatic_fields_count = fac->count[NONSTATIC_DOUBLE] + fac->count[NONSTATIC_WORD] +
+                                        fac->count[NONSTATIC_SHORT] + fac->count[NONSTATIC_BYTE] +
+                                        fac->count[NONSTATIC_OOP];
+
   bool super_has_nonstatic_fields =
           (_super_klass() != NULL && _super_klass->has_nonstatic_fields());
-  bool has_nonstatic_fields = super_has_nonstatic_fields ||
-          ((nonstatic_double_count + nonstatic_word_count +
-            nonstatic_short_count + nonstatic_byte_count +
-            nonstatic_oop_count) != 0);
+  bool has_nonstatic_fields = super_has_nonstatic_fields || (nonstatic_fields_count != 0);
 
 
   // Prepare list of oops for oop map generation.
+  //
+  // "offset" and "count" lists are describing the set of contiguous oop
+  // regions. offset[i] is the start of the i-th region, which then has
+  // count[i] oops following. Before we know how many regions are required,
+  // we pessimistically allocate the maps to fit all the oops into the
+  // distinct regions.
+  //
+  // TODO: We add +1 to always allocate non-zero resource arrays; we need
+  // to figure out if we still need to do this.
   int* nonstatic_oop_offsets;
   unsigned int* nonstatic_oop_counts;
   unsigned int nonstatic_oop_map_count = 0;
+  unsigned int max_nonstatic_oop_maps  = fac->count[NONSTATIC_OOP] + 1;
 
   nonstatic_oop_offsets = NEW_RESOURCE_ARRAY_IN_THREAD(
-            THREAD, int, nonstatic_oop_count + 1);
+            THREAD, int, max_nonstatic_oop_maps);
   nonstatic_oop_counts  = NEW_RESOURCE_ARRAY_IN_THREAD(
-            THREAD, unsigned int, nonstatic_oop_count + 1);
+            THREAD, unsigned int, max_nonstatic_oop_maps);
 
   first_nonstatic_oop_offset = 0; // will be set for first oop field
 
@@ -3392,9 +3408,11 @@
             int(nonstatic_oop_counts[nonstatic_oop_map_count - 1]) *
             heapOopSize ) {
           // Extend current oop map
+          assert(nonstatic_oop_map_count - 1 < max_nonstatic_oop_maps, "range check");
           nonstatic_oop_counts[nonstatic_oop_map_count - 1] += 1;
         } else {
           // Create new oop map
+          assert(nonstatic_oop_map_count < max_nonstatic_oop_maps, "range check");
           nonstatic_oop_offsets[nonstatic_oop_map_count] = real_offset;
           nonstatic_oop_counts [nonstatic_oop_map_count] = 1;
           nonstatic_oop_map_count += 1;
@@ -3452,12 +3470,10 @@
   //
   // Additionally, this should not break alignment for the fields, so we round the alignment up
   // for each field.
-  if (contended_count > 0) {
+  if (nonstatic_contended_count > 0) {
 
     // if there is at least one contended field, we need to have pre-padding for them
-    if (nonstatic_contended_count > 0) {
-      next_nonstatic_padded_offset += ContendedPaddingWidth;
-    }
+    next_nonstatic_padded_offset += ContendedPaddingWidth;
 
     // collect all contended groups
     BitMap bm(_cp->size());
@@ -3518,6 +3534,7 @@
             next_nonstatic_padded_offset += heapOopSize;
 
             // Create new oop map
+            assert(nonstatic_oop_map_count < max_nonstatic_oop_maps, "range check");
             nonstatic_oop_offsets[nonstatic_oop_map_count] = real_offset;
             nonstatic_oop_counts [nonstatic_oop_map_count] = 1;
             nonstatic_oop_map_count += 1;
@@ -3554,18 +3571,17 @@
     // handle static fields
   }
 
-  // Size of instances
-  int notaligned_offset = next_nonstatic_padded_offset;
-
   // Entire class is contended, pad in the back.
   // This helps to alleviate memory contention effects for subclass fields
   // and/or adjacent object.
-  if (parsed_annotations->is_contended()) {
-    notaligned_offset += ContendedPaddingWidth;
+  if (is_contended_class) {
+    next_nonstatic_padded_offset += ContendedPaddingWidth;
   }
 
-  int nonstatic_fields_end      = align_size_up(notaligned_offset, heapOopSize);
-  int instance_end              = align_size_up(notaligned_offset, wordSize);
+  int notaligned_nonstatic_fields_end = next_nonstatic_padded_offset;
+
+  int nonstatic_fields_end      = align_size_up(notaligned_nonstatic_fields_end, heapOopSize);
+  int instance_end              = align_size_up(notaligned_nonstatic_fields_end, wordSize);
   int static_fields_end         = align_size_up(next_static_byte_offset, wordSize);
 
   int static_field_size         = (static_fields_end -
@@ -3579,6 +3595,14 @@
          (instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size*heapOopSize),
           wordSize) / wordSize), "consistent layout helper value");
 
+  // Invariant: nonstatic_field end/start should only change if there are
+  // nonstatic fields in the class, or if the class is contended. We compare
+  // against the non-aligned value, so that end alignment will not fail the
+  // assert without actually having the fields.
+  assert((notaligned_nonstatic_fields_end == nonstatic_fields_start) ||
+         is_contended_class ||
+         (nonstatic_fields_count > 0), "double-check nonstatic start/end");
+
   // Number of non-static oop map blocks allocated at end of klass.
   const unsigned int total_oop_map_count =
     compute_oop_map_count(_super_klass, nonstatic_oop_map_count,
@@ -4040,6 +4064,9 @@
       }
     }
 
+    // Allocate mirror and initialize static fields
+    java_lang_Class::create_mirror(this_klass, protection_domain, CHECK_(nullHandle));
+
 
 #ifdef ASSERT
     if (ParseAllGenericSignatures) {
@@ -4055,17 +4082,6 @@
           this_klass(), &all_mirandas, CHECK_(nullHandle));
     }
 
-    // Allocate mirror and initialize static fields
-    java_lang_Class::create_mirror(this_klass, CHECK_(nullHandle));
-
-    // Allocate a simple java object for locking during class initialization.
-    // This needs to be a java object because it can be held across a java call.
-    typeArrayOop r = oopFactory::new_typeArray(T_INT, 0, CHECK_NULL);
-    this_klass->set_init_lock(r);
-
-    // TODO: Move these oops to the mirror
-    this_klass->set_protection_domain(protection_domain());
-
     // Update the loader_data graph.
     record_defined_class_dependencies(this_klass, CHECK_NULL);
 
--- a/hotspot/src/share/vm/classfile/defaultMethods.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/classfile/defaultMethods.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1349,6 +1349,7 @@
 
   // Replace klass methods with new merged lists
   klass->set_methods(merged_methods);
+  klass->set_initial_method_idnum(new_size);
 
   ClassLoaderData* cld = klass->class_loader_data();
   MetadataFactory::free_array(cld, original_methods);
--- a/hotspot/src/share/vm/classfile/javaClasses.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/classfile/javaClasses.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -512,22 +512,22 @@
 
   // If the offset was read from the shared archive, it was fixed up already
   if (!k->is_shared()) {
-  if (k->oop_is_instance()) {
-    // During bootstrap, java.lang.Class wasn't loaded so static field
-    // offsets were computed without the size added it.  Go back and
-    // update all the static field offsets to included the size.
-      for (JavaFieldStream fs(InstanceKlass::cast(k())); !fs.done(); fs.next()) {
-      if (fs.access_flags().is_static()) {
-        int real_offset = fs.offset() + InstanceMirrorKlass::offset_of_static_fields();
-        fs.set_offset(real_offset);
+    if (k->oop_is_instance()) {
+      // During bootstrap, java.lang.Class wasn't loaded, so static field
+      // offsets were computed without the size added in.  Go back and
+      // update all the static field offsets to include the size.
+        for (JavaFieldStream fs(InstanceKlass::cast(k())); !fs.done(); fs.next()) {
+        if (fs.access_flags().is_static()) {
+          int real_offset = fs.offset() + InstanceMirrorKlass::offset_of_static_fields();
+          fs.set_offset(real_offset);
+        }
       }
     }
   }
-  }
-  create_mirror(k, CHECK);
+  create_mirror(k, Handle(NULL), CHECK);
 }
 
-oop java_lang_Class::create_mirror(KlassHandle k, TRAPS) {
+oop java_lang_Class::create_mirror(KlassHandle k, Handle protection_domain, TRAPS) {
   assert(k->java_mirror() == NULL, "should only assign mirror once");
   // Use this moment of initialization to cache modifier_flags also,
   // to support Class.getModifiers().  Instance classes recalculate
@@ -563,6 +563,16 @@
       set_array_klass(comp_mirror(), k());
     } else {
       assert(k->oop_is_instance(), "Must be");
+
+      // Allocate a simple java object for a lock.
+      // This needs to be a java object because during class initialization
+      // it can be held across a java call.
+      typeArrayOop r = oopFactory::new_typeArray(T_INT, 0, CHECK_NULL);
+      set_init_lock(mirror(), r);
+
+      // Set protection domain also
+      set_protection_domain(mirror(), protection_domain());
+
       // Initialize static fields
       InstanceKlass::cast(k())->do_local_static_fields(&initialize_static_field, CHECK_NULL);
     }
@@ -597,6 +607,34 @@
   java_class->int_field_put(_static_oop_field_count_offset, size);
 }
 
+oop java_lang_Class::protection_domain(oop java_class) {
+  assert(_protection_domain_offset != 0, "must be set");
+  return java_class->obj_field(_protection_domain_offset);
+}
+void java_lang_Class::set_protection_domain(oop java_class, oop pd) {
+  assert(_protection_domain_offset != 0, "must be set");
+  java_class->obj_field_put(_protection_domain_offset, pd);
+}
+
+oop java_lang_Class::init_lock(oop java_class) {
+  assert(_init_lock_offset != 0, "must be set");
+  return java_class->obj_field(_init_lock_offset);
+}
+void java_lang_Class::set_init_lock(oop java_class, oop init_lock) {
+  assert(_init_lock_offset != 0, "must be set");
+  java_class->obj_field_put(_init_lock_offset, init_lock);
+}
+
+objArrayOop java_lang_Class::signers(oop java_class) {
+  assert(_signers_offset != 0, "must be set");
+  return (objArrayOop)java_class->obj_field(_signers_offset);
+}
+void java_lang_Class::set_signers(oop java_class, objArrayOop signers) {
+  assert(_signers_offset != 0, "must be set");
+  java_class->obj_field_put(_signers_offset, (oop)signers);
+}
+
+
 oop java_lang_Class::create_basic_type_mirror(const char* basic_type_name, BasicType type, TRAPS) {
   // This should be improved by adding a field at the Java level or by
   // introducing a new VM klass (see comment in ClassFileParser)
@@ -2934,6 +2972,9 @@
 int java_lang_Class::_array_klass_offset;
 int java_lang_Class::_oop_size_offset;
 int java_lang_Class::_static_oop_field_count_offset;
+int java_lang_Class::_protection_domain_offset;
+int java_lang_Class::_init_lock_offset;
+int java_lang_Class::_signers_offset;
 GrowableArray<Klass*>* java_lang_Class::_fixup_mirror_list = NULL;
 int java_lang_Throwable::backtrace_offset;
 int java_lang_Throwable::detailMessage_offset;
--- a/hotspot/src/share/vm/classfile/javaClasses.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/classfile/javaClasses.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -208,7 +208,10 @@
   macro(java_lang_Class, klass,                  intptr_signature,  false) \
   macro(java_lang_Class, array_klass,            intptr_signature,  false) \
   macro(java_lang_Class, oop_size,               int_signature,     false) \
-  macro(java_lang_Class, static_oop_field_count, int_signature,     false)
+  macro(java_lang_Class, static_oop_field_count, int_signature,     false) \
+  macro(java_lang_Class, protection_domain,      object_signature,  false) \
+  macro(java_lang_Class, init_lock,              object_signature,  false) \
+  macro(java_lang_Class, signers,                object_signature,  false)
 
 class java_lang_Class : AllStatic {
   friend class VMStructs;
@@ -222,15 +225,20 @@
   static int _oop_size_offset;
   static int _static_oop_field_count_offset;
 
+  static int _protection_domain_offset;
+  static int _init_lock_offset;
+  static int _signers_offset;
+
   static bool offsets_computed;
   static int classRedefinedCount_offset;
   static GrowableArray<Klass*>* _fixup_mirror_list;
 
+  static void set_init_lock(oop java_class, oop init_lock);
  public:
   static void compute_offsets();
 
   // Instance creation
-  static oop  create_mirror(KlassHandle k, TRAPS);
+  static oop  create_mirror(KlassHandle k, Handle protection_domain, TRAPS);
   static void fixup_mirror(KlassHandle k, TRAPS);
   static oop  create_basic_type_mirror(const char* basic_type_name, BasicType type, TRAPS);
   // Conversion
@@ -262,6 +270,13 @@
   static int classRedefinedCount(oop the_class_mirror);
   static void set_classRedefinedCount(oop the_class_mirror, int value);
 
+  // Support for embedded per-class oops
+  static oop  protection_domain(oop java_class);
+  static void set_protection_domain(oop java_class, oop protection_domain);
+  static oop  init_lock(oop java_class);
+  static objArrayOop  signers(oop java_class);
+  static void set_signers(oop java_class, objArrayOop signers);
+
   static int oop_size(oop java_class);
   static void set_oop_size(oop java_class, int size);
   static int static_oop_field_count(oop java_class);
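
For readers skimming the javaClasses changes above: the patch stores three new per-class references (protection_domain, init_lock, signers) directly in the java.lang.Class mirror and reads them back through cached field offsets. The standalone C++ sketch below only models that offset-based accessor pattern; Mirror, field_at and the offset constants are invented stand-ins, not HotSpot types, and the real code goes through obj_field/obj_field_put with offsets filled in by compute_offsets().

#include <cassert>
#include <cstddef>
#include <iostream>

// Toy stand-in for a mirror object carrying VM-injected reference slots.
struct Mirror {
  void* slots[3];
};

// In HotSpot these offsets are computed once in compute_offsets() and cached in statics.
static const std::size_t protection_domain_offset = 0;
static const std::size_t init_lock_offset         = 1;
static const std::size_t signers_offset           = 2;

static void* field_at(const Mirror* m, std::size_t off)        { return m->slots[off]; }
static void  set_field_at(Mirror* m, std::size_t off, void* v) { m->slots[off] = v; }

int main() {
  Mirror m = {};                 // all slots start out null
  int lock_stub = 0;             // stands in for the zero-length int array used as init lock
  set_field_at(&m, init_lock_offset, &lock_stub);
  set_field_at(&m, protection_domain_offset, nullptr);   // protection domain may be null
  set_field_at(&m, signers_offset, nullptr);             // signers array is set lazily
  assert(field_at(&m, init_lock_offset) == &lock_stub);
  std::cout << "init lock present: "
            << (field_at(&m, init_lock_offset) != nullptr) << "\n";
  return 0;
}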
--- a/hotspot/src/share/vm/classfile/symbolTable.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/classfile/symbolTable.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -737,7 +737,7 @@
   return result;
 }
 
-void StringTable::unlink(BoolObjectClosure* is_alive) {
+void StringTable::unlink_or_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
   // Readers of the table are unlocked, so we should only be removing
   // entries at a safepoint.
   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
@@ -745,41 +745,31 @@
     HashtableEntry<oop, mtSymbol>** p = the_table()->bucket_addr(i);
     HashtableEntry<oop, mtSymbol>* entry = the_table()->bucket(i);
     while (entry != NULL) {
-      // Shared entries are normally at the end of the bucket and if we run into
-      // a shared entry, then there is nothing more to remove. However, if we
-      // have rehashed the table, then the shared entries are no longer at the
-      // end of the bucket.
-      if (entry->is_shared() && !use_alternate_hashcode()) {
-        break;
-      }
-      assert(entry->literal() != NULL, "just checking");
-      if (entry->is_shared() || is_alive->do_object_b(entry->literal())) {
+      assert(!entry->is_shared(), "CDS not used for the StringTable");
+
+      if (is_alive->do_object_b(entry->literal())) {
+        if (f != NULL) {
+          f->do_oop((oop*)entry->literal_addr());
+        }
         p = entry->next_addr();
       } else {
         *p = entry->next();
         the_table()->free_entry(entry);
       }
-      entry = (HashtableEntry<oop, mtSymbol>*)HashtableEntry<oop, mtSymbol>::make_ptr(*p);
+      entry = *p;
     }
   }
 }
 
 void StringTable::oops_do(OopClosure* f) {
   for (int i = 0; i < the_table()->table_size(); ++i) {
-    HashtableEntry<oop, mtSymbol>** p = the_table()->bucket_addr(i);
     HashtableEntry<oop, mtSymbol>* entry = the_table()->bucket(i);
     while (entry != NULL) {
+      assert(!entry->is_shared(), "CDS not used for the StringTable");
+
       f->do_oop((oop*)entry->literal_addr());
 
-      // Did the closure remove the literal from the table?
-      if (entry->literal() == NULL) {
-        assert(!entry->is_shared(), "immutable hashtable entry?");
-        *p = entry->next();
-        the_table()->free_entry(entry);
-      } else {
-        p = entry->next_addr();
-      }
-      entry = (HashtableEntry<oop, mtSymbol>*)HashtableEntry<oop, mtSymbol>::make_ptr(*p);
+      entry = entry->next();
     }
   }
 }
--- a/hotspot/src/share/vm/classfile/symbolTable.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/classfile/symbolTable.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -272,7 +272,10 @@
 
   // GC support
   //   Delete pointers to otherwise-unreachable objects.
-  static void unlink(BoolObjectClosure* cl);
+  static void unlink_or_oops_do(BoolObjectClosure* cl, OopClosure* f);
+  static void unlink(BoolObjectClosure* cl) {
+    unlink_or_oops_do(cl, NULL);
+  }
 
   // Invoke "f->do_oop" on the locations of all oops in the table.
   static void oops_do(OopClosure* f);
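
The StringTable change above folds liveness filtering and oop visiting into a single bucket walk. Below is a minimal, self-contained C++ sketch of that keep-or-unlink pattern, assuming a toy Entry list and std::function closures in place of HashtableEntry, BoolObjectClosure and OopClosure; it illustrates the control flow only, not the HotSpot API.

#include <functional>
#include <iostream>

struct Entry {
  int literal;
  Entry* next;
};

using IsAlive = std::function<bool(int)>;
using Visitor = std::function<void(int*)>;

// Keep-or-unlink in one pass, mirroring the shape of unlink_or_oops_do above.
void unlink_or_visit(Entry** bucket, const IsAlive& is_alive, const Visitor* visit) {
  Entry** p = bucket;
  Entry* e = *bucket;
  while (e != nullptr) {
    if (is_alive(e->literal)) {
      if (visit != nullptr) (*visit)(&e->literal);  // visit live entries while walking
      p = &e->next;
    } else {
      *p = e->next;                                 // unlink and free dead entries
      delete e;
    }
    e = *p;
  }
}

int main() {
  Entry* bucket = new Entry{1, new Entry{2, new Entry{3, nullptr}}};
  IsAlive odd_alive = [](int v) { return v % 2 != 0; };
  Visitor print    = [](int* v) { std::cout << "kept " << *v << "\n"; };
  // The plain unlink(cl) of the patch is just unlink_or_visit(cl, nullptr).
  unlink_or_visit(&bucket, odd_alive, &print);
  while (bucket != nullptr) { Entry* next = bucket->next; delete bucket; bucket = next; }
  return 0;
}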
--- a/hotspot/src/share/vm/classfile/verifier.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/classfile/verifier.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -36,8 +36,10 @@
 class Verifier : AllStatic {
  public:
   enum {
+    STRICTER_ACCESS_CTRL_CHECK_VERSION  = 49,
     STACKMAP_ATTRIBUTE_MAJOR_VERSION    = 50,
-    INVOKEDYNAMIC_MAJOR_VERSION         = 51
+    INVOKEDYNAMIC_MAJOR_VERSION         = 51,
+    NO_RELAX_ACCESS_CTRL_CHECK_VERSION  = 52
   };
   typedef enum { ThrowException, NoException } Mode;
 
--- a/hotspot/src/share/vm/classfile/vmSymbols.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/classfile/vmSymbols.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -392,6 +392,9 @@
   template(array_klass_name,                          "array_klass")                              \
   template(oop_size_name,                             "oop_size")                                 \
   template(static_oop_field_count_name,               "static_oop_field_count")                   \
+  template(protection_domain_name,                    "protection_domain")                        \
+  template(init_lock_name,                            "init_lock")                                \
+  template(signers_name,                              "signers_name")                             \
   template(loader_data_name,                          "loader_data")                              \
   template(dependencies_name,                         "dependencies")                             \
                                                                                                   \
--- a/hotspot/src/share/vm/code/nmethod.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/code/nmethod.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1976,11 +1976,10 @@
   if (!method()->is_native()) {
     SimpleScopeDesc ssd(this, fr.pc());
     Bytecode_invoke call(ssd.method(), ssd.bci());
-    // compiled invokedynamic call sites have an implicit receiver at
-    // resolution time, so make sure it gets GC'ed.
-    bool has_receiver = !call.is_invokestatic();
+    bool has_receiver = call.has_receiver();
+    bool has_appendix = call.has_appendix();
     Symbol* signature = call.signature();
-    fr.oops_compiled_arguments_do(signature, has_receiver, reg_map, f);
+    fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);
   }
 #endif // !SHARK
 }
--- a/hotspot/src/share/vm/compiler/compileBroker.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/compiler/compileBroker.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1642,42 +1642,37 @@
 // Set up state required by +LogCompilation.
 void CompileBroker::init_compiler_thread_log() {
     CompilerThread* thread = CompilerThread::current();
-    char  fileBuf[4*K];
+    char  file_name[4*K];
     FILE* fp = NULL;
-    char* file = NULL;
     intx thread_id = os::current_thread_id();
     for (int try_temp_dir = 1; try_temp_dir >= 0; try_temp_dir--) {
       const char* dir = (try_temp_dir ? os::get_temp_directory() : NULL);
       if (dir == NULL) {
-        jio_snprintf(fileBuf, sizeof(fileBuf), "hs_c" UINTX_FORMAT "_pid%u.log",
+        jio_snprintf(file_name, sizeof(file_name), "hs_c" UINTX_FORMAT "_pid%u.log",
                      thread_id, os::current_process_id());
       } else {
-        jio_snprintf(fileBuf, sizeof(fileBuf),
+        jio_snprintf(file_name, sizeof(file_name),
                      "%s%shs_c" UINTX_FORMAT "_pid%u.log", dir,
                      os::file_separator(), thread_id, os::current_process_id());
       }
-      fp = fopen(fileBuf, "at");
+
+      fp = fopen(file_name, "at");
       if (fp != NULL) {
-        file = NEW_C_HEAP_ARRAY(char, strlen(fileBuf)+1, mtCompiler);
-        strcpy(file, fileBuf);
-        break;
+        if (LogCompilation && Verbose) {
+          tty->print_cr("Opening compilation log %s", file_name);
+        }
+        CompileLog* log = new(ResourceObj::C_HEAP, mtCompiler) CompileLog(file_name, fp, thread_id);
+        thread->init_log(log);
+
+        if (xtty != NULL) {
+          ttyLocker ttyl;
+          // Record any per thread log files
+          xtty->elem("thread_logfile thread='%d' filename='%s'", thread_id, file_name);
+        }
+        return;
       }
     }
-    if (fp == NULL) {
-      warning("Cannot open log file: %s", fileBuf);
-    } else {
-      if (LogCompilation && Verbose)
-        tty->print_cr("Opening compilation log %s", file);
-      CompileLog* log = new(ResourceObj::C_HEAP, mtCompiler) CompileLog(file, fp, thread_id);
-      thread->init_log(log);
-
-      if (xtty != NULL) {
-        ttyLocker ttyl;
-
-        // Record any per thread log files
-        xtty->elem("thread_logfile thread='%d' filename='%s'", thread_id, file);
-      }
-    }
+    warning("Cannot open log file: %s", file_name);
 }
 
 // ------------------------------------------------------------------
--- a/hotspot/src/share/vm/compiler/compileLog.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/compiler/compileLog.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -34,17 +34,18 @@
 
 // ------------------------------------------------------------------
 // CompileLog::CompileLog
-CompileLog::CompileLog(const char* file, FILE* fp, intx thread_id)
+CompileLog::CompileLog(const char* file_name, FILE* fp, intx thread_id)
   : _context(_context_buffer, sizeof(_context_buffer))
 {
-  initialize(new(ResourceObj::C_HEAP, mtCompiler) fileStream(fp));
-  _file = file;
+  initialize(new(ResourceObj::C_HEAP, mtCompiler) fileStream(fp, true));
   _file_end = 0;
   _thread_id = thread_id;
 
   _identities_limit = 0;
   _identities_capacity = 400;
   _identities = NEW_C_HEAP_ARRAY(char, _identities_capacity, mtCompiler);
+  _file = NEW_C_HEAP_ARRAY(char, strlen(file_name)+1, mtCompiler);
+  strcpy((char*)_file, file_name);
 
   // link into the global list
   { MutexLocker locker(CompileTaskAlloc_lock);
@@ -57,6 +58,7 @@
   delete _out;
   _out = NULL;
   FREE_C_HEAP_ARRAY(char, _identities, mtCompiler);
+  FREE_C_HEAP_ARRAY(char, _file, mtCompiler);
 }
 
 
@@ -188,7 +190,8 @@
   if (called_exit)  return;
   called_exit = true;
 
-  for (CompileLog* log = _first; log != NULL; log = log->_next) {
+  CompileLog* log = _first;
+  while (log != NULL) {
     log->flush();
     const char* partial_file = log->file();
     int partial_fd = open(partial_file, O_RDONLY);
@@ -267,7 +270,11 @@
       close(partial_fd);
       unlink(partial_file);
     }
+    CompileLog* next_log = log->_next;
+    delete log;
+    log = next_log;
   }
+  _first = NULL;
 }
 
 // ------------------------------------------------------------------
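
The reworked finish_log_on_error() above replaces the for loop with an explicit while loop so each CompileLog can be deleted after its partial file is processed. The sketch below is a standalone model of that delete-while-iterating pattern, using a hypothetical Node type rather than CompileLog; the essential points are saving the next pointer before freeing the current node and clearing the list head once everything is gone.

#include <iostream>

struct Node {
  int id;
  Node* next;
};

static Node* g_first = nullptr;

void teardown() {
  Node* n = g_first;
  while (n != nullptr) {
    std::cout << "flushing log " << n->id << "\n";  // stands in for log->flush() and file cleanup
    Node* next = n->next;   // grab the link before the node is destroyed
    delete n;
    n = next;
  }
  g_first = nullptr;        // no dangling head once every log is gone
}

int main() {
  g_first = new Node{1, new Node{2, nullptr}};
  teardown();
  return 0;
}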
--- a/hotspot/src/share/vm/compiler/compileLog.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/compiler/compileLog.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -57,7 +57,7 @@
   void va_tag(bool push, const char* format, va_list ap);
 
  public:
-  CompileLog(const char* file, FILE* fp, intx thread_id);
+  CompileLog(const char* file_name, FILE* fp, intx thread_id);
   ~CompileLog();
 
   intx          thread_id()                      { return _thread_id; }
--- a/hotspot/src/share/vm/gc_implementation/concurrentMarkSweep/cmsAdaptiveSizePolicy.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/concurrentMarkSweep/cmsAdaptiveSizePolicy.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2004, 2010, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2004, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -969,8 +969,8 @@
 }
 
 
-void CMSAdaptiveSizePolicy::compute_young_generation_free_space(size_t cur_eden,
-                                          size_t max_eden_size)
+void CMSAdaptiveSizePolicy::compute_eden_space_size(size_t cur_eden,
+                                                    size_t max_eden_size)
 {
   size_t desired_eden_size = cur_eden;
   size_t eden_limit = max_eden_size;
@@ -978,7 +978,7 @@
   // Printout input
   if (PrintGC && PrintAdaptiveSizePolicy) {
     gclog_or_tty->print_cr(
-      "CMSAdaptiveSizePolicy::compute_young_generation_free_space: "
+      "CMSAdaptiveSizePolicy::compute_eden_space_size: "
       "cur_eden " SIZE_FORMAT,
       cur_eden);
   }
@@ -1024,7 +1024,7 @@
 
   if (PrintGC && PrintAdaptiveSizePolicy) {
     gclog_or_tty->print_cr(
-      "CMSAdaptiveSizePolicy::compute_young_generation_free_space limits:"
+      "CMSAdaptiveSizePolicy::compute_eden_space_size limits:"
       " desired_eden_size: " SIZE_FORMAT
       " old_eden_size: " SIZE_FORMAT,
       desired_eden_size, cur_eden);
--- a/hotspot/src/share/vm/gc_implementation/concurrentMarkSweep/cmsAdaptiveSizePolicy.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/concurrentMarkSweep/cmsAdaptiveSizePolicy.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2004, 2010, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2004, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -436,8 +436,8 @@
 
   size_t generation_alignment() { return _generation_alignment; }
 
-  virtual void compute_young_generation_free_space(size_t cur_eden,
-                                                   size_t max_eden_size);
+  virtual void compute_eden_space_size(size_t cur_eden,
+                                       size_t max_eden_size);
   // Calculates new survivor space size;  returns a new tenuring threshold
   // value. Stores new survivor size in _survivor_size.
   virtual uint compute_survivor_space_size_and_threshold(
--- a/hotspot/src/share/vm/gc_implementation/g1/concurrentG1Refine.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/g1/concurrentG1Refine.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -114,6 +114,14 @@
   }
 }
 
+void ConcurrentG1Refine::worker_threads_do(ThreadClosure * tc) {
+  if (_threads != NULL) {
+    for (int i = 0; i < worker_thread_num(); i++) {
+      tc->do_thread(_threads[i]);
+    }
+  }
+}
+
 int ConcurrentG1Refine::thread_num() {
   int n_threads = (G1ConcRefinementThreads > 0) ? G1ConcRefinementThreads
                                                 : ParallelGCThreads;
@@ -126,3 +134,7 @@
     st->cr();
   }
 }
+
+ConcurrentG1RefineThread * ConcurrentG1Refine::sampling_thread() const {
+  return _threads[worker_thread_num()];
+}
--- a/hotspot/src/share/vm/gc_implementation/g1/concurrentG1Refine.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/g1/concurrentG1Refine.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -35,6 +35,7 @@
 class G1CollectedHeap;
 class G1HotCardCache;
 class G1RemSet;
+class DirtyCardQueue;
 
 class ConcurrentG1Refine: public CHeapObj<mtGC> {
   ConcurrentG1RefineThread** _threads;
@@ -78,9 +79,15 @@
 
   void reinitialize_threads();
 
-  // Iterate over the conc refine threads
+  // Iterate over all concurrent refinement threads
   void threads_do(ThreadClosure *tc);
 
+  // Iterate over all worker refinement threads
+  void worker_threads_do(ThreadClosure * tc);
+
+  // The RS sampling thread
+  ConcurrentG1RefineThread * sampling_thread() const;
+
   static int thread_num();
 
   void print_worker_threads_on(outputStream* st) const;
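
worker_threads_do() and sampling_thread() above rely on the layout of the _threads array: the worker refinement threads occupy indices [0, worker_thread_num()), and the RS sampling thread sits one slot past the last worker. The standalone sketch below models just that layout with made-up Thread/Refine types; it is not the ConcurrentG1Refine API.

#include <iostream>
#include <vector>

struct Thread { int id; };

struct Refine {
  std::vector<Thread> threads;   // workers at [0, worker_num), sampler at index worker_num
  int worker_num;

  template <typename Closure>
  void worker_threads_do(Closure tc) {
    for (int i = 0; i < worker_num; i++) tc(threads[i]);   // workers only
  }
  Thread& sampling_thread() { return threads[worker_num]; } // the extra, non-worker thread
};

int main() {
  Refine r;
  r.worker_num = 3;
  for (int i = 0; i <= r.worker_num; i++) r.threads.push_back(Thread{i});
  r.worker_threads_do([](const Thread& t) { std::cout << "worker " << t.id << "\n"; });
  std::cout << "sampler " << r.sampling_thread().id << "\n";   // prints 3
  return 0;
}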
--- a/hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1417,8 +1417,6 @@
 
       MemoryService::track_memory_usage();
 
-      verify_after_gc();
-
       assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
       ref_processor_stw()->verify_no_references_recorded();
 
@@ -1521,6 +1519,8 @@
       _hrs.verify_optional();
       verify_region_sets_optional();
 
+      verify_after_gc();
+
       // Start a new incremental collection set for the next pause
       assert(g1_policy()->collection_set() == NULL, "must be");
       g1_policy()->start_incremental_cset_building();
@@ -3539,6 +3539,14 @@
 }
 
 void G1CollectedHeap::gc_epilogue(bool full /* Ignored */) {
+
+  if (G1SummarizeRSetStats &&
+      (G1SummarizeRSetStatsPeriod > 0) &&
+      // We are at the end of the GC, so the total collections counter has already been incremented.
+      ((total_collections() - 1) % G1SummarizeRSetStatsPeriod == 0)) {
+    g1_rem_set()->print_periodic_summary_info();
+  }
+
   // FIXME: what is this about?
   // I'm ignoring the "fill_newgen()" call if "alloc_event_enabled"
   // is set.
@@ -4093,12 +4101,6 @@
     g1mm()->update_sizes();
   }
 
-  if (G1SummarizeRSetStats &&
-      (G1SummarizeRSetStatsPeriod > 0) &&
-      (total_collections() % G1SummarizeRSetStatsPeriod == 0)) {
-    g1_rem_set()->print_summary_info();
-  }
-
   // It should now be safe to tell the concurrent mark thread to start
   // without its logging output interfering with the logging output
   // that came from the pause.
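
The epilogue check above tests (total_collections() - 1) % G1SummarizeRSetStatsPeriod because the collection counter has already been incremented by the time gc_epilogue() runs. A tiny standalone example of that off-by-one reasoning, using an arbitrary period of 3 (the numbers are illustrative only):

#include <iostream>

int main() {
  const unsigned period = 3;
  for (unsigned total_collections = 1; total_collections <= 7; ++total_collections) {
    // At the start of the pause the counter was (total_collections - 1), so this keeps
    // the same cadence as the old check made before the counter was bumped.
    bool print_now = (period > 0) && ((total_collections - 1) % period == 0);
    std::cout << "GC #" << total_collections
              << (print_now ? ": print periodic RSet summary\n" : ": skip\n");
  }
  return 0;
}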
--- a/hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -593,11 +593,6 @@
   // may not be a humongous - it must fit into a single heap region.
   HeapWord* par_allocate_during_gc(GCAllocPurpose purpose, size_t word_size);
 
-  HeapWord* allocate_during_gc_slow(GCAllocPurpose purpose,
-                                    HeapRegion*    alloc_region,
-                                    bool           par,
-                                    size_t         word_size);
-
   // Ensure that no further allocations can happen in "r", bearing in mind
   // that parallel threads might be attempting allocations.
   void par_allocate_remaining_space(HeapRegion* r);
@@ -1733,6 +1728,95 @@
     ParGCAllocBuffer::retire(end_of_gc, retain);
     _retired = true;
   }
+
+  bool is_retired() {
+    return _retired;
+  }
+};
+
+class G1ParGCAllocBufferContainer {
+protected:
+  static int const _priority_max = 2;
+  G1ParGCAllocBuffer* _priority_buffer[_priority_max];
+
+public:
+  G1ParGCAllocBufferContainer(size_t gclab_word_size) {
+    for (int pr = 0; pr < _priority_max; ++pr) {
+      _priority_buffer[pr] = new G1ParGCAllocBuffer(gclab_word_size);
+    }
+  }
+
+  ~G1ParGCAllocBufferContainer() {
+    for (int pr = 0; pr < _priority_max; ++pr) {
+      assert(_priority_buffer[pr]->is_retired(), "alloc buffers should all retire at this point.");
+      delete _priority_buffer[pr];
+    }
+  }
+
+  HeapWord* allocate(size_t word_sz) {
+    HeapWord* obj;
+    for (int pr = 0; pr < _priority_max; ++pr) {
+      obj = _priority_buffer[pr]->allocate(word_sz);
+      if (obj != NULL) return obj;
+    }
+    return obj;
+  }
+
+  bool contains(void* addr) {
+    for (int pr = 0; pr < _priority_max; ++pr) {
+      if (_priority_buffer[pr]->contains(addr)) return true;
+    }
+    return false;
+  }
+
+  void undo_allocation(HeapWord* obj, size_t word_sz) {
+    bool finish_undo = false;
+    for (int pr = 0; pr < _priority_max; ++pr) {
+      if (_priority_buffer[pr]->contains(obj)) {
+        _priority_buffer[pr]->undo_allocation(obj, word_sz);
+        finish_undo = true;
+      }
+    }
+    if (!finish_undo) ShouldNotReachHere();
+  }
+
+  size_t words_remaining() {
+    size_t result = 0;
+    for (int pr = 0; pr < _priority_max; ++pr) {
+      result += _priority_buffer[pr]->words_remaining();
+    }
+    return result;
+  }
+
+  size_t words_remaining_in_retired_buffer() {
+    G1ParGCAllocBuffer* retired = _priority_buffer[0];
+    return retired->words_remaining();
+  }
+
+  void flush_stats_and_retire(PLABStats* stats, bool end_of_gc, bool retain) {
+    for (int pr = 0; pr < _priority_max; ++pr) {
+      _priority_buffer[pr]->flush_stats_and_retire(stats, end_of_gc, retain);
+    }
+  }
+
+  void update(bool end_of_gc, bool retain, HeapWord* buf, size_t word_sz) {
+    G1ParGCAllocBuffer* retired_and_set = _priority_buffer[0];
+    retired_and_set->retire(end_of_gc, retain);
+    retired_and_set->set_buf(buf);
+    retired_and_set->set_word_size(word_sz);
+    adjust_priority_order();
+  }
+
+private:
+  void adjust_priority_order() {
+    G1ParGCAllocBuffer* retired_and_set = _priority_buffer[0];
+
+    int last = _priority_max - 1;
+    for (int pr = 0; pr < last; ++pr) {
+      _priority_buffer[pr] = _priority_buffer[pr + 1];
+    }
+    _priority_buffer[last] = retired_and_set;
+  }
 };
 
 class G1ParScanThreadState : public StackObj {
@@ -1743,9 +1827,9 @@
   CardTableModRefBS* _ct_bs;
   G1RemSet* _g1_rem;
 
-  G1ParGCAllocBuffer  _surviving_alloc_buffer;
-  G1ParGCAllocBuffer  _tenured_alloc_buffer;
-  G1ParGCAllocBuffer* _alloc_buffers[GCAllocPurposeCount];
+  G1ParGCAllocBufferContainer  _surviving_alloc_buffer;
+  G1ParGCAllocBufferContainer  _tenured_alloc_buffer;
+  G1ParGCAllocBufferContainer* _alloc_buffers[GCAllocPurposeCount];
   ageTable            _age_table;
 
   size_t           _alloc_buffer_waste;
@@ -1809,7 +1893,7 @@
   RefToScanQueue*   refs()            { return _refs;             }
   ageTable*         age_table()       { return &_age_table;       }
 
-  G1ParGCAllocBuffer* alloc_buffer(GCAllocPurpose purpose) {
+  G1ParGCAllocBufferContainer* alloc_buffer(GCAllocPurpose purpose) {
     return _alloc_buffers[purpose];
   }
 
@@ -1839,15 +1923,13 @@
     HeapWord* obj = NULL;
     size_t gclab_word_size = _g1h->desired_plab_sz(purpose);
     if (word_sz * 100 < gclab_word_size * ParallelGCBufferWastePct) {
-      G1ParGCAllocBuffer* alloc_buf = alloc_buffer(purpose);
-      add_to_alloc_buffer_waste(alloc_buf->words_remaining());
-      alloc_buf->retire(false /* end_of_gc */, false /* retain */);
+      G1ParGCAllocBufferContainer* alloc_buf = alloc_buffer(purpose);
 
       HeapWord* buf = _g1h->par_allocate_during_gc(purpose, gclab_word_size);
       if (buf == NULL) return NULL; // Let caller handle allocation failure.
-      // Otherwise.
-      alloc_buf->set_word_size(gclab_word_size);
-      alloc_buf->set_buf(buf);
+
+      add_to_alloc_buffer_waste(alloc_buf->words_remaining_in_retired_buffer());
+      alloc_buf->update(false /* end_of_gc */, false /* retain */, buf, gclab_word_size);
 
       obj = alloc_buf->allocate(word_sz);
       assert(obj != NULL, "buffer was definitely big enough...");
@@ -1959,7 +2041,6 @@
     }
   }
 
-public:
   void trim_queue();
 };
 
--- a/hotspot/src/share/vm/gc_implementation/g1/g1RemSet.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/g1/g1RemSet.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -34,6 +34,7 @@
 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
 #include "gc_implementation/g1/g1RemSet.inline.hpp"
 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
+#include "gc_implementation/g1/heapRegionRemSet.hpp"
 #include "memory/iterator.hpp"
 #include "oops/oop.inline.hpp"
 #include "utilities/intHisto.hpp"
@@ -73,7 +74,8 @@
     _ct_bs(ct_bs), _g1p(_g1->g1_policy()),
     _cg1r(g1->concurrent_g1_refine()),
     _cset_rs_update_cl(NULL),
-    _cards_scanned(NULL), _total_cards_scanned(0)
+    _cards_scanned(NULL), _total_cards_scanned(0),
+    _prev_period_summary()
 {
   _seq_task = new SubTasksDone(NumSeqTasks);
   guarantee(n_workers() > 0, "There should be some workers");
@@ -81,6 +83,7 @@
   for (uint i = 0; i < n_workers(); i++) {
     _cset_rs_update_cl[i] = NULL;
   }
+  _prev_period_summary.initialize(this, n_workers());
 }
 
 G1RemSet::~G1RemSet() {
@@ -697,47 +700,29 @@
   return has_refs_into_cset;
 }
 
-class HRRSStatsIter: public HeapRegionClosure {
-  size_t _occupied;
-  size_t _total_mem_sz;
-  size_t _max_mem_sz;
-  HeapRegion* _max_mem_sz_region;
-public:
-  HRRSStatsIter() :
-    _occupied(0),
-    _total_mem_sz(0),
-    _max_mem_sz(0),
-    _max_mem_sz_region(NULL)
-  {}
+void G1RemSet::print_periodic_summary_info() {
+  G1RemSetSummary current;
+  current.initialize(this, n_workers());
 
-  bool doHeapRegion(HeapRegion* r) {
-    if (r->continuesHumongous()) return false;
-    size_t mem_sz = r->rem_set()->mem_size();
-    if (mem_sz > _max_mem_sz) {
-      _max_mem_sz = mem_sz;
-      _max_mem_sz_region = r;
-    }
-    _total_mem_sz += mem_sz;
-    size_t occ = r->rem_set()->occupied();
-    _occupied += occ;
-    return false;
-  }
-  size_t total_mem_sz() { return _total_mem_sz; }
-  size_t max_mem_sz() { return _max_mem_sz; }
-  size_t occupied() { return _occupied; }
-  HeapRegion* max_mem_sz_region() { return _max_mem_sz_region; }
-};
+  _prev_period_summary.subtract_from(&current);
+  print_summary_info(&_prev_period_summary);
 
-class PrintRSThreadVTimeClosure : public ThreadClosure {
-public:
-  virtual void do_thread(Thread *t) {
-    ConcurrentG1RefineThread* crt = (ConcurrentG1RefineThread*) t;
-    gclog_or_tty->print("    %5.2f", crt->vtime_accum());
-  }
-};
+  _prev_period_summary.set(&current);
+}
 
 void G1RemSet::print_summary_info() {
-  G1CollectedHeap* g1 = G1CollectedHeap::heap();
+  G1RemSetSummary current;
+  current.initialize(this, n_workers());
+
+  print_summary_info(&current, " Cumulative RS summary");
+}
+
+void G1RemSet::print_summary_info(G1RemSetSummary * summary, const char * header) {
+  assert(summary != NULL, "just checking");
+
+  if (header != NULL) {
+    gclog_or_tty->print_cr("%s", header);
+  }
 
 #if CARD_REPEAT_HISTO
   gclog_or_tty->print_cr("\nG1 card_repeat count histogram: ");
@@ -745,52 +730,13 @@
   card_repeat_count.print_on(gclog_or_tty);
 #endif
 
-  gclog_or_tty->print_cr("\n Concurrent RS processed %d cards",
-                         _conc_refine_cards);
-  DirtyCardQueueSet& dcqs = JavaThread::dirty_card_queue_set();
-  jint tot_processed_buffers =
-    dcqs.processed_buffers_mut() + dcqs.processed_buffers_rs_thread();
-  gclog_or_tty->print_cr("  Of %d completed buffers:", tot_processed_buffers);
-  gclog_or_tty->print_cr("     %8d (%5.1f%%) by conc RS threads.",
-                dcqs.processed_buffers_rs_thread(),
-                100.0*(float)dcqs.processed_buffers_rs_thread()/
-                (float)tot_processed_buffers);
-  gclog_or_tty->print_cr("     %8d (%5.1f%%) by mutator threads.",
-                dcqs.processed_buffers_mut(),
-                100.0*(float)dcqs.processed_buffers_mut()/
-                (float)tot_processed_buffers);
-  gclog_or_tty->print_cr("  Conc RS threads times(s)");
-  PrintRSThreadVTimeClosure p;
-  gclog_or_tty->print("     ");
-  g1->concurrent_g1_refine()->threads_do(&p);
-  gclog_or_tty->print_cr("");
-
-  HRRSStatsIter blk;
-  g1->heap_region_iterate(&blk);
-  gclog_or_tty->print_cr("  Total heap region rem set sizes = "SIZE_FORMAT"K."
-                         "  Max = "SIZE_FORMAT"K.",
-                         blk.total_mem_sz()/K, blk.max_mem_sz()/K);
-  gclog_or_tty->print_cr("  Static structures = "SIZE_FORMAT"K,"
-                         " free_lists = "SIZE_FORMAT"K.",
-                         HeapRegionRemSet::static_mem_size() / K,
-                         HeapRegionRemSet::fl_mem_size() / K);
-  gclog_or_tty->print_cr("    "SIZE_FORMAT" occupied cards represented.",
-                         blk.occupied());
-  HeapRegion* max_mem_sz_region = blk.max_mem_sz_region();
-  HeapRegionRemSet* rem_set = max_mem_sz_region->rem_set();
-  gclog_or_tty->print_cr("    Max size region = "HR_FORMAT", "
-                         "size = "SIZE_FORMAT "K, occupied = "SIZE_FORMAT"K.",
-                         HR_FORMAT_PARAMS(max_mem_sz_region),
-                         (rem_set->mem_size() + K - 1)/K,
-                         (rem_set->occupied() + K - 1)/K);
-  gclog_or_tty->print_cr("    Did %d coarsenings.",
-                         HeapRegionRemSet::n_coarsenings());
+  summary->print_on(gclog_or_tty);
 }
 
 void G1RemSet::prepare_for_verify() {
   if (G1HRRSFlushLogBuffersOnVerify &&
       (VerifyBeforeGC || VerifyAfterGC)
-      &&  !_g1->full_collection()) {
+      &&  (!_g1->full_collection() || G1VerifyRSetsDuringFullGC)) {
     cleanupHRRS();
     _g1->set_refine_cte_cl_concurrency(false);
     if (SafepointSynchronize::is_at_safepoint()) {
--- a/hotspot/src/share/vm/gc_implementation/g1/g1RemSet.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/g1/g1RemSet.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -25,6 +25,8 @@
 #ifndef SHARE_VM_GC_IMPLEMENTATION_G1_G1REMSET_HPP
 #define SHARE_VM_GC_IMPLEMENTATION_G1_G1REMSET_HPP
 
+#include "gc_implementation/g1/g1RemSetSummary.hpp"
+
 // A G1RemSet provides ways of iterating over pointers into a selected
 // collection set.
 
@@ -37,9 +39,11 @@
 // so that they can be used to update the individual region remsets.
 
 class G1RemSet: public CHeapObj<mtGC> {
+private:
+  G1RemSetSummary _prev_period_summary;
 protected:
   G1CollectedHeap* _g1;
-  unsigned _conc_refine_cards;
+  size_t _conc_refine_cards;
   uint n_workers();
 
 protected:
@@ -66,6 +70,8 @@
   // references into the collection set.
   OopsInHeapRegionClosure** _cset_rs_update_cl;
 
+  // Print the given summary info
+  virtual void print_summary_info(G1RemSetSummary * summary, const char * header = NULL);
 public:
   // This is called to reset dual hash tables after the gc pause
   // is finished and the initial hash table is no longer being
@@ -123,11 +129,18 @@
                            int worker_i,
                            bool check_for_refs_into_cset);
 
-  // Print any relevant summary info.
+  // Print accumulated summary info from the start of the VM.
   virtual void print_summary_info();
 
+  // Print accumulated summary info from the last time called.
+  virtual void print_periodic_summary_info();
+
   // Prepare remembered set for verification.
   virtual void prepare_for_verify();
+
+  size_t conc_refine_cards() const {
+    return _conc_refine_cards;
+  }
 };
 
 class CountNonCleanMemRegionClosure: public MemRegionClosure {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/gc_implementation/g1/g1RemSetSummary.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,205 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#include "precompiled.hpp"
+#include "gc_implementation/g1/concurrentG1Refine.hpp"
+#include "gc_implementation/g1/concurrentG1RefineThread.hpp"
+#include "gc_implementation/g1/heapRegion.hpp"
+#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
+#include "gc_implementation/g1/g1RemSet.inline.hpp"
+#include "gc_implementation/g1/g1RemSetSummary.hpp"
+#include "gc_implementation/g1/heapRegionRemSet.hpp"
+#include "runtime/thread.inline.hpp"
+
+class GetRSThreadVTimeClosure : public ThreadClosure {
+private:
+  G1RemSetSummary* _summary;
+  uint _counter;
+
+public:
+  GetRSThreadVTimeClosure(G1RemSetSummary * summary) : ThreadClosure(), _summary(summary), _counter(0) {
+    assert(_summary != NULL, "just checking");
+  }
+
+  virtual void do_thread(Thread* t) {
+    ConcurrentG1RefineThread* crt = (ConcurrentG1RefineThread*) t;
+    _summary->set_rs_thread_vtime(_counter, crt->vtime_accum());
+    _counter++;
+  }
+};
+
+void G1RemSetSummary::update() {
+  _num_refined_cards = remset()->conc_refine_cards();
+  DirtyCardQueueSet& dcqs = JavaThread::dirty_card_queue_set();
+  _num_processed_buf_mutator = dcqs.processed_buffers_mut();
+  _num_processed_buf_rs_threads = dcqs.processed_buffers_rs_thread();
+
+  _num_coarsenings = HeapRegionRemSet::n_coarsenings();
+
+  ConcurrentG1Refine * cg1r = G1CollectedHeap::heap()->concurrent_g1_refine();
+  if (_rs_threads_vtimes != NULL) {
+    GetRSThreadVTimeClosure p(this);
+    cg1r->worker_threads_do(&p);
+  }
+  set_sampling_thread_vtime(cg1r->sampling_thread()->vtime_accum());
+}
+
+void G1RemSetSummary::set_rs_thread_vtime(uint thread, double value) {
+  assert(_rs_threads_vtimes != NULL, "just checking");
+  assert(thread < _num_vtimes, "just checking");
+  _rs_threads_vtimes[thread] = value;
+}
+
+double G1RemSetSummary::rs_thread_vtime(uint thread) const {
+  assert(_rs_threads_vtimes != NULL, "just checking");
+  assert(thread < _num_vtimes, "just checking");
+  return _rs_threads_vtimes[thread];
+}
+
+void G1RemSetSummary::initialize(G1RemSet* remset, uint num_workers) {
+  assert(_rs_threads_vtimes == NULL, "just checking");
+  assert(remset != NULL, "just checking");
+
+  _remset = remset;
+  _num_vtimes = num_workers;
+  _rs_threads_vtimes = NEW_C_HEAP_ARRAY(double, _num_vtimes, mtGC);
+  memset(_rs_threads_vtimes, 0, sizeof(double) * _num_vtimes);
+
+  update();
+}
+
+void G1RemSetSummary::set(G1RemSetSummary* other) {
+  assert(other != NULL, "just checking");
+  assert(remset() == other->remset(), "just checking");
+  assert(_num_vtimes == other->_num_vtimes, "just checking");
+
+  _num_refined_cards = other->num_concurrent_refined_cards();
+
+  _num_processed_buf_mutator = other->num_processed_buf_mutator();
+  _num_processed_buf_rs_threads = other->num_processed_buf_rs_threads();
+
+  _num_coarsenings = other->_num_coarsenings;
+
+  memcpy(_rs_threads_vtimes, other->_rs_threads_vtimes, sizeof(double) * _num_vtimes);
+
+  set_sampling_thread_vtime(other->sampling_thread_vtime());
+}
+
+void G1RemSetSummary::subtract_from(G1RemSetSummary* other) {
+  assert(other != NULL, "just checking");
+  assert(remset() == other->remset(), "just checking");
+  assert(_num_vtimes == other->_num_vtimes, "just checking");
+
+  _num_refined_cards = other->num_concurrent_refined_cards() - _num_refined_cards;
+
+  _num_processed_buf_mutator = other->num_processed_buf_mutator() - _num_processed_buf_mutator;
+  _num_processed_buf_rs_threads = other->num_processed_buf_rs_threads() - _num_processed_buf_rs_threads;
+
+  _num_coarsenings = other->num_coarsenings() - _num_coarsenings;
+
+  for (uint i = 0; i < _num_vtimes; i++) {
+    set_rs_thread_vtime(i, other->rs_thread_vtime(i) - rs_thread_vtime(i));
+  }
+
+  _sampling_thread_vtime = other->sampling_thread_vtime() - _sampling_thread_vtime;
+}
+
+class HRRSStatsIter: public HeapRegionClosure {
+  size_t _occupied;
+  size_t _total_mem_sz;
+  size_t _max_mem_sz;
+  HeapRegion* _max_mem_sz_region;
+public:
+  HRRSStatsIter() :
+    _occupied(0),
+    _total_mem_sz(0),
+    _max_mem_sz(0),
+    _max_mem_sz_region(NULL)
+  {}
+
+  bool doHeapRegion(HeapRegion* r) {
+    size_t mem_sz = r->rem_set()->mem_size();
+    if (mem_sz > _max_mem_sz) {
+      _max_mem_sz = mem_sz;
+      _max_mem_sz_region = r;
+    }
+    _total_mem_sz += mem_sz;
+    size_t occ = r->rem_set()->occupied();
+    _occupied += occ;
+    return false;
+  }
+  size_t total_mem_sz() { return _total_mem_sz; }
+  size_t max_mem_sz() { return _max_mem_sz; }
+  size_t occupied() { return _occupied; }
+  HeapRegion* max_mem_sz_region() { return _max_mem_sz_region; }
+};
+
+double calc_percentage(size_t numerator, size_t denominator) {
+  if (denominator != 0) {
+    return (double)numerator / denominator * 100.0;
+  } else {
+    return 0.0f;
+  }
+}
+
+void G1RemSetSummary::print_on(outputStream* out) {
+  out->print_cr("\n Concurrent RS processed "SIZE_FORMAT" cards",
+                num_concurrent_refined_cards());
+  out->print_cr("  Of %d completed buffers:", num_processed_buf_total());
+  out->print_cr("     %8d (%5.1f%%) by concurrent RS threads.",
+                num_processed_buf_rs_threads(),
+                calc_percentage(num_processed_buf_rs_threads(), num_processed_buf_total()));
+  out->print_cr("     %8d (%5.1f%%) by mutator threads.",
+                num_processed_buf_mutator(),
+                calc_percentage(num_processed_buf_mutator(), num_processed_buf_total()));
+  out->print_cr("  Concurrent RS threads times (s)");
+  out->print("     ");
+  for (uint i = 0; i < _num_vtimes; i++) {
+    out->print("    %5.2f", rs_thread_vtime(i));
+  }
+  out->cr();
+  out->print_cr("  Concurrent sampling threads times (s)");
+  out->print_cr("         %5.2f", sampling_thread_vtime());
+
+  HRRSStatsIter blk;
+  G1CollectedHeap::heap()->heap_region_iterate(&blk);
+  out->print_cr("  Total heap region rem set sizes = "SIZE_FORMAT"K."
+                "  Max = "SIZE_FORMAT"K.",
+                blk.total_mem_sz()/K, blk.max_mem_sz()/K);
+  out->print_cr("  Static structures = "SIZE_FORMAT"K,"
+                " free_lists = "SIZE_FORMAT"K.",
+                HeapRegionRemSet::static_mem_size() / K,
+                HeapRegionRemSet::fl_mem_size() / K);
+  out->print_cr("    "SIZE_FORMAT" occupied cards represented.",
+                blk.occupied());
+  HeapRegion* max_mem_sz_region = blk.max_mem_sz_region();
+  HeapRegionRemSet* rem_set = max_mem_sz_region->rem_set();
+  out->print_cr("    Max size region = "HR_FORMAT", "
+                "size = "SIZE_FORMAT "K, occupied = "SIZE_FORMAT"K.",
+                HR_FORMAT_PARAMS(max_mem_sz_region),
+                (rem_set->mem_size() + K - 1)/K,
+                (rem_set->occupied() + K - 1)/K);
+
+  out->print_cr("    Did %d coarsenings.", num_coarsenings());
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/gc_implementation/g1/g1RemSetSummary.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_GC_IMPLEMENTATION_G1_G1REMSETSUMMARY_HPP
+#define SHARE_VM_GC_IMPLEMENTATION_G1_G1REMSETSUMMARY_HPP
+
+#include "utilities/ostream.hpp"
+
+class G1RemSet;
+
+// A G1RemSetSummary manages statistical information about the G1RemSet
+
+class G1RemSetSummary VALUE_OBJ_CLASS_SPEC {
+private:
+  friend class GetRSThreadVTimeClosure;
+
+  G1RemSet* _remset;
+
+  G1RemSet* remset() const {
+    return _remset;
+  }
+
+  size_t _num_refined_cards;
+  size_t _num_processed_buf_mutator;
+  size_t _num_processed_buf_rs_threads;
+
+  size_t _num_coarsenings;
+
+  double* _rs_threads_vtimes;
+  size_t _num_vtimes;
+
+  double _sampling_thread_vtime;
+
+  void set_rs_thread_vtime(uint thread, double value);
+  void set_sampling_thread_vtime(double value) {
+    _sampling_thread_vtime = value;
+  }
+
+  void free_and_null() {
+    if (_rs_threads_vtimes) {
+      FREE_C_HEAP_ARRAY(double, _rs_threads_vtimes, mtGC);
+      _rs_threads_vtimes = NULL;
+      _num_vtimes = 0;
+    }
+  }
+
+  // update this summary with current data from various places
+  void update();
+
+public:
+  G1RemSetSummary() : _remset(NULL), _num_refined_cards(0),
+    _num_processed_buf_mutator(0), _num_processed_buf_rs_threads(0), _num_coarsenings(0),
+    _rs_threads_vtimes(NULL), _num_vtimes(0), _sampling_thread_vtime(0.0f) {
+  }
+
+  ~G1RemSetSummary() {
+    free_and_null();
+  }
+
+  // set the counters in this summary to the values of the others
+  void set(G1RemSetSummary* other);
+  // set this summary to the difference between the other summary and this one (this = other - this)
+  void subtract_from(G1RemSetSummary* other);
+
+  // initialize and get the first sampling
+  void initialize(G1RemSet* remset, uint num_workers);
+
+  void print_on(outputStream* out);
+
+  double rs_thread_vtime(uint thread) const;
+
+  double sampling_thread_vtime() const {
+    return _sampling_thread_vtime;
+  }
+
+  size_t num_concurrent_refined_cards() const {
+    return _num_refined_cards;
+  }
+
+  size_t num_processed_buf_mutator() const {
+    return _num_processed_buf_mutator;
+  }
+
+  size_t num_processed_buf_rs_threads() const {
+    return _num_processed_buf_rs_threads;
+  }
+
+  size_t num_processed_buf_total() const {
+    return num_processed_buf_mutator() + num_processed_buf_rs_threads();
+  }
+
+  size_t num_coarsenings() const {
+    return _num_coarsenings;
+  }
+};
+
+#endif // SHARE_VM_GC_IMPLEMENTATION_G1_G1REMSETSUMMARY_HPP
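
print_periodic_summary_info() above samples the cumulative counters, turns the previous snapshot into a per-period delta via subtract_from(), prints it, and then stores the current totals with set() as the baseline for the next period. Below is a minimal standalone model of that snapshot/delta cycle with a single made-up counter; the real G1RemSetSummary tracks several counters plus per-thread vtimes.

#include <iostream>

struct Summary {
  long refined_cards = 0;

  void subtract_from(const Summary& other) {   // this becomes other - this
    refined_cards = other.refined_cards - refined_cards;
  }
  void set(const Summary& other) { *this = other; }
};

static long g_total_refined = 0;               // stands in for the cumulative VM counter

Summary sample_current() {
  Summary s;
  s.refined_cards = g_total_refined;
  return s;
}

void print_periodic(Summary& prev) {
  Summary current = sample_current();
  prev.subtract_from(current);                 // prev now holds the per-period delta
  std::cout << "cards refined this period: " << prev.refined_cards << "\n";
  prev.set(current);                           // baseline for the next period
}

int main() {
  Summary prev = sample_current();
  g_total_refined += 120; print_periodic(prev); // prints 120
  g_total_refined += 30;  print_periodic(prev); // prints 30
  return 0;
}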
--- a/hotspot/src/share/vm/gc_implementation/g1/g1_globals.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/g1/g1_globals.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -329,7 +329,11 @@
                                                                             \
   develop(bool, G1EvacuationFailureALotDuringMixedGC, true,                 \
           "Force use of evacuation failure handling during mixed "          \
-          "evacuation pauses")
+          "evacuation pauses")                                              \
+                                                                            \
+  diagnostic(bool, G1VerifyRSetsDuringFullGC, false,                        \
+             "If true, perform verification of each heap region's "         \
+             "remembered set when verifying the heap during a full GC.")
 
 G1_FLAGS(DECLARE_DEVELOPER_FLAG, DECLARE_PD_DEVELOPER_FLAG, DECLARE_PRODUCT_FLAG, DECLARE_PD_PRODUCT_FLAG, DECLARE_DIAGNOSTIC_FLAG, DECLARE_EXPERIMENTAL_FLAG, DECLARE_NOTPRODUCT_FLAG, DECLARE_MANAGEABLE_FLAG, DECLARE_PRODUCT_RW_FLAG)
 
--- a/hotspot/src/share/vm/gc_implementation/g1/heapRegion.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/g1/heapRegion.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -139,7 +139,7 @@
         _n_failures++;
       }
 
-      if (!_g1h->full_collection()) {
+      if (!_g1h->full_collection() || G1VerifyRSetsDuringFullGC) {
         HeapRegion* from = _g1h->heap_region_containing((HeapWord*)p);
         HeapRegion* to   = _g1h->heap_region_containing(obj);
         if (from != NULL && to != NULL &&
--- a/hotspot/src/share/vm/gc_implementation/parNew/asParNewGeneration.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parNew/asParNewGeneration.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -585,8 +585,7 @@
   size_policy->avg_young_live()->sample(used());
   size_policy->avg_eden_live()->sample(eden()->used());
 
-  size_policy->compute_young_generation_free_space(eden()->capacity(),
-                                                   max_gen_size());
+  size_policy->compute_eden_space_size(eden()->capacity(), max_gen_size());
 
   resize(size_policy->calculated_eden_size_in_bytes(),
          size_policy->calculated_survivor_size_in_bytes());
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/parallelScavengeHeap.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/parallelScavengeHeap.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -116,7 +116,7 @@
 
   // The alignment used for eden and survivors within the young gen
   // and for boundary between young gen and old gen.
-  size_t intra_heap_alignment() const { return 64 * K; }
+  size_t intra_heap_alignment() const { return 64 * K * HeapWordSize; }
 
   size_t capacity() const;
   size_t used() const;
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/pcTasks.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/pcTasks.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -120,6 +120,9 @@
 
     case system_dictionary:
       SystemDictionary::always_strong_oops_do(&mark_and_push_closure);
+      break;
+
+    case class_loader_data:
       ClassLoaderDataGraph::always_strong_oops_do(&mark_and_push_closure, &follow_klass_closure, true);
       break;
 
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/pcTasks.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/pcTasks.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -98,7 +98,8 @@
     management            = 6,
     jvmti                 = 7,
     system_dictionary     = 8,
-    code_cache            = 9
+    class_loader_data     = 9,
+    code_cache            = 10
   };
  private:
   RootType _root_type;
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psAdaptiveSizePolicy.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psAdaptiveSizePolicy.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -194,7 +194,7 @@
 
 // If this is not a full GC, only test and modify the young generation.
 
-void PSAdaptiveSizePolicy::compute_generation_free_space(
+void PSAdaptiveSizePolicy::compute_generations_free_space(
                                            size_t young_live,
                                            size_t eden_live,
                                            size_t old_live,
@@ -729,7 +729,7 @@
 
   if (PrintAdaptiveSizePolicy && Verbose) {
     gclog_or_tty->print_cr(
-      "PSAdaptiveSizePolicy::compute_old_gen_free_space "
+      "PSAdaptiveSizePolicy::adjust_promo_for_pause_time "
       "adjusting gen sizes for major pause (avg %f goal %f). "
       "desired_promo_size " SIZE_FORMAT " promo delta " SIZE_FORMAT,
       _avg_major_pause->average(), gc_pause_goal_sec(),
@@ -786,7 +786,7 @@
 
   if (PrintAdaptiveSizePolicy && Verbose) {
     gclog_or_tty->print_cr(
-      "PSAdaptiveSizePolicy::compute_eden_space_size "
+      "PSAdaptiveSizePolicy::adjust_eden_for_pause_time "
       "adjusting gen sizes for major pause (avg %f goal %f). "
       "desired_eden_size " SIZE_FORMAT " eden delta " SIZE_FORMAT,
       _avg_major_pause->average(), gc_pause_goal_sec(),
@@ -1001,7 +1001,7 @@
 
   if (PrintAdaptiveSizePolicy && Verbose) {
     gclog_or_tty->print_cr(
-      "AdaptiveSizePolicy::compute_generation_free_space "
+      "AdaptiveSizePolicy::adjust_promo_for_footprint "
       "adjusting tenured gen for footprint. "
       "starting promo size " SIZE_FORMAT
       " reduced promo size " SIZE_FORMAT,
@@ -1025,7 +1025,7 @@
 
   if (PrintAdaptiveSizePolicy && Verbose) {
     gclog_or_tty->print_cr(
-      "AdaptiveSizePolicy::compute_generation_free_space "
+      "AdaptiveSizePolicy::adjust_eden_for_footprint "
       "adjusting eden for footprint. "
       " starting eden size " SIZE_FORMAT
       " reduced eden size " SIZE_FORMAT
@@ -1280,7 +1280,7 @@
 
   if (PrintAdaptiveSizePolicy) {
     gclog_or_tty->print(
-                  "AdaptiveSizePolicy::compute_survivor_space_size_and_thresh:"
+                  "AdaptiveSizePolicy::update_averages:"
                   "  survived: "  SIZE_FORMAT
                   "  promoted: "  SIZE_FORMAT
                   "  overflow: %s",
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psAdaptiveSizePolicy.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psAdaptiveSizePolicy.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -344,13 +344,13 @@
   // Takes current used space in all generations as input, as well
   // as an indication if a full gc has just been performed, for use
   // in deciding if an OOM error should be thrown.
-  void compute_generation_free_space(size_t young_live,
-                                     size_t eden_live,
-                                     size_t old_live,
-                                     size_t cur_eden,  // current eden in bytes
-                                     size_t max_old_gen_size,
-                                     size_t max_eden_size,
-                                     bool   is_full_gc);
+  void compute_generations_free_space(size_t young_live,
+                                      size_t eden_live,
+                                      size_t old_live,
+                                      size_t cur_eden,  // current eden in bytes
+                                      size_t max_old_gen_size,
+                                      size_t max_eden_size,
+                                      bool   is_full_gc);
 
   void compute_eden_space_size(size_t young_live,
                                size_t eden_live,
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psGCAdaptivePolicyCounters.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psGCAdaptivePolicyCounters.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -119,7 +119,7 @@
       ps_size_policy()->change_old_gen_for_min_pauses());
   }
 
-  // compute_generation_free_space() statistics
+  // compute_generations_free_space() statistics
 
   inline void update_avg_major_pause() {
     _avg_major_pause->set_value(
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psMarkSweep.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psMarkSweep.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -290,13 +290,13 @@
         // Used for diagnostics
         size_policy->clear_generation_free_space_flags();
 
-        size_policy->compute_generation_free_space(young_live,
-                                                   eden_live,
-                                                   old_live,
-                                                   cur_eden,
-                                                   max_old_gen_size,
-                                                   max_eden_size,
-                                                   true /* full gc*/);
+        size_policy->compute_generations_free_space(young_live,
+                                                    eden_live,
+                                                    old_live,
+                                                    cur_eden,
+                                                    max_old_gen_size,
+                                                    max_eden_size,
+                                                    true /* full gc*/);
 
         size_policy->check_gc_overhead_limit(young_live,
                                              eden_live,
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -59,13 +59,25 @@
 #include <math.h>
 
 // All sizes are in HeapWords.
-const size_t ParallelCompactData::Log2RegionSize  = 9; // 512 words
+const size_t ParallelCompactData::Log2RegionSize  = 16; // 64K words
 const size_t ParallelCompactData::RegionSize      = (size_t)1 << Log2RegionSize;
 const size_t ParallelCompactData::RegionSizeBytes =
   RegionSize << LogHeapWordSize;
 const size_t ParallelCompactData::RegionSizeOffsetMask = RegionSize - 1;
 const size_t ParallelCompactData::RegionAddrOffsetMask = RegionSizeBytes - 1;
-const size_t ParallelCompactData::RegionAddrMask  = ~RegionAddrOffsetMask;
+const size_t ParallelCompactData::RegionAddrMask       = ~RegionAddrOffsetMask;
+
+const size_t ParallelCompactData::Log2BlockSize   = 7; // 128 words
+const size_t ParallelCompactData::BlockSize       = (size_t)1 << Log2BlockSize;
+const size_t ParallelCompactData::BlockSizeBytes  =
+  BlockSize << LogHeapWordSize;
+const size_t ParallelCompactData::BlockSizeOffsetMask = BlockSize - 1;
+const size_t ParallelCompactData::BlockAddrOffsetMask = BlockSizeBytes - 1;
+const size_t ParallelCompactData::BlockAddrMask       = ~BlockAddrOffsetMask;
+
+const size_t ParallelCompactData::BlocksPerRegion = RegionSize / BlockSize;
+const size_t ParallelCompactData::Log2BlocksPerRegion =
+  Log2RegionSize - Log2BlockSize;
 
 const ParallelCompactData::RegionData::region_sz_t
 ParallelCompactData::RegionData::dc_shift = 27;
@@ -359,6 +371,10 @@
   _reserved_byte_size = 0;
   _region_data = 0;
   _region_count = 0;
+
+  _block_vspace = 0;
+  _block_data = 0;
+  _block_count = 0;
 }
 
 bool ParallelCompactData::initialize(MemRegion covered_region)
@@ -372,8 +388,7 @@
   assert((region_size & RegionSizeOffsetMask) == 0,
          "region size not a multiple of RegionSize");
 
-  bool result = initialize_region_data(region_size);
-
+  bool result = initialize_region_data(region_size) && initialize_block_data();
   return result;
 }
 
@@ -418,17 +433,36 @@
   return false;
 }
 
+bool ParallelCompactData::initialize_block_data()
+{
+  assert(_region_count != 0, "region data must be initialized first");
+  const size_t count = _region_count << Log2BlocksPerRegion;
+  _block_vspace = create_vspace(count, sizeof(BlockData));
+  if (_block_vspace != 0) {
+    _block_data = (BlockData*)_block_vspace->reserved_low_addr();
+    _block_count = count;
+    return true;
+  }
+  return false;
+}
+
 void ParallelCompactData::clear()
 {
   memset(_region_data, 0, _region_vspace->committed_size());
+  memset(_block_data, 0, _block_vspace->committed_size());
 }
 
 void ParallelCompactData::clear_range(size_t beg_region, size_t end_region) {
   assert(beg_region <= _region_count, "beg_region out of range");
   assert(end_region <= _region_count, "end_region out of range");
+  assert(RegionSize % BlockSize == 0, "RegionSize not a multiple of BlockSize");
 
   const size_t region_cnt = end_region - beg_region;
   memset(_region_data + beg_region, 0, region_cnt * sizeof(RegionData));
+
+  const size_t beg_block = beg_region * BlocksPerRegion;
+  const size_t block_cnt = region_cnt * BlocksPerRegion;
+  memset(_block_data + beg_block, 0, block_cnt * sizeof(BlockData));
 }
 
 HeapWord* ParallelCompactData::partial_obj_end(size_t region_idx) const
@@ -707,49 +741,48 @@
 
 HeapWord* ParallelCompactData::calc_new_pointer(HeapWord* addr) {
   assert(addr != NULL, "Should detect NULL oop earlier");
-  assert(PSParallelCompact::gc_heap()->is_in(addr), "addr not in heap");
-#ifdef ASSERT
-  if (PSParallelCompact::mark_bitmap()->is_unmarked(addr)) {
-    gclog_or_tty->print_cr("calc_new_pointer:: addr " PTR_FORMAT, addr);
-  }
-#endif
-  assert(PSParallelCompact::mark_bitmap()->is_marked(addr), "obj not marked");
+  assert(PSParallelCompact::gc_heap()->is_in(addr), "not in heap");
+  assert(PSParallelCompact::mark_bitmap()->is_marked(addr), "not marked");
 
   // Region covering the object.
-  size_t region_index = addr_to_region_idx(addr);
-  const RegionData* const region_ptr = region(region_index);
-  HeapWord* const region_addr = region_align_down(addr);
-
-  assert(addr < region_addr + RegionSize, "Region does not cover object");
-  assert(addr_to_region_ptr(region_addr) == region_ptr, "sanity check");
-
+  RegionData* const region_ptr = addr_to_region_ptr(addr);
   HeapWord* result = region_ptr->destination();
 
-  // If all the data in the region is live, then the new location of the object
-  // can be calculated from the destination of the region plus the offset of the
-  // object in the region.
+  // If the entire Region is live, the new location is region->destination + the
+  // offset of the object within the Region.
+
+  // Run some performance tests to determine if this special case pays off.  It
+  // is worth it for pointers into the dense prefix.  If the optimization to
+  // avoid pointer updates in regions that only point to the dense prefix is
+  // ever implemented, this should be revisited.
   if (region_ptr->data_size() == RegionSize) {
-    result += pointer_delta(addr, region_addr);
-    DEBUG_ONLY(PSParallelCompact::check_new_location(addr, result);)
+    result += region_offset(addr);
     return result;
   }
 
-  // The new location of the object is
-  //    region destination +
-  //    size of the partial object extending onto the region +
-  //    sizes of the live objects in the Region that are to the left of addr
-  const size_t partial_obj_size = region_ptr->partial_obj_size();
-  HeapWord* const search_start = region_addr + partial_obj_size;
+  // Otherwise, the new location is region->destination + block offset + the
+  // number of live words in the Block that are (a) to the left of addr and (b)
+  // due to objects that start in the Block.
+
+  // Fill in the block table if necessary.  This is unsynchronized, so multiple
+  // threads may fill the block table for a region (harmless, since it is
+  // idempotent).
+  if (!region_ptr->blocks_filled()) {
+    PSParallelCompact::fill_blocks(addr_to_region_idx(addr));
+    region_ptr->set_blocks_filled();
+  }
+
+  HeapWord* const search_start = block_align_down(addr);
+  const size_t block_offset = addr_to_block_ptr(addr)->offset();
 
   const ParMarkBitMap* bitmap = PSParallelCompact::mark_bitmap();
-  size_t live_to_left = bitmap->live_words_in_range(search_start, oop(addr));
-
-  result += partial_obj_size + live_to_left;
-  DEBUG_ONLY(PSParallelCompact::check_new_location(addr, result);)
+  const size_t live = bitmap->live_words_in_range(search_start, oop(addr));
+  result += block_offset + live;
+  DEBUG_ONLY(PSParallelCompact::check_new_location(addr, result));
   return result;
 }
 
-#ifdef  ASSERT
+#ifdef ASSERT
 void ParallelCompactData::verify_clear(const PSVirtualSpace* vspace)
 {
   const size_t* const beg = (const size_t*)vspace->committed_low_addr();
@@ -762,16 +795,10 @@
 void ParallelCompactData::verify_clear()
 {
   verify_clear(_region_vspace);
+  verify_clear(_block_vspace);
 }
 #endif  // #ifdef ASSERT
 
-#ifdef NOT_PRODUCT
-ParallelCompactData::RegionData* debug_region(size_t region_index) {
-  ParallelCompactData& sd = PSParallelCompact::summary_data();
-  return sd.region(region_index);
-}
-#endif
-
 elapsedTimer        PSParallelCompact::_accumulated_time;
 unsigned int        PSParallelCompact::_total_invocations = 0;
 unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
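
The rewritten calc_new_pointer() above computes the forwarded address as the region's destination plus the block offset plus the live words inside the block that precede the object. A minimal sketch of that sum, with plain integers standing in for HeapWord* and placeholder names (destination, block_offset, live_words_left) rather than the HotSpot API:

    #include <cstddef>
    #include <cassert>

    static size_t forwarded(size_t destination,       // region_ptr->destination()
                            size_t block_offset,      // addr_to_block_ptr(addr)->offset()
                            size_t live_words_left)   // live words in the block left of addr
    {
      // new location = destination of the region
      //              + live words in the region before the block (the block offset)
      //              + live words inside the block that precede addr
      return destination + block_offset + live_words_left;
    }

    int main() {
      // Hypothetical numbers: a region forwarded to word 10000, an object whose
      // block records 300 live words before it, plus 12 live words in the block.
      assert(forwarded(10000, 300, 12) == 10312);
      return 0;
    }
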
@@ -1961,11 +1988,6 @@
                                       maximum_heap_compaction);
 }
 
-bool ParallelCompactData::region_contains(size_t region_index, HeapWord* addr) {
-  size_t addr_region_index = addr_to_region_idx(addr);
-  return region_index == addr_region_index;
-}
-
 // This method contains no policy. You should probably
 // be calling invoke() instead.
 bool PSParallelCompact::invoke_no_policy(bool maximum_heap_compaction) {
@@ -2101,13 +2123,13 @@
         // Used for diagnostics
         size_policy->clear_generation_free_space_flags();
 
-        size_policy->compute_generation_free_space(young_live,
-                                                   eden_live,
-                                                   old_live,
-                                                   cur_eden,
-                                                   max_old_gen_size,
-                                                   max_eden_size,
-                                                   true /* full gc*/);
+        size_policy->compute_generations_free_space(young_live,
+                                                    eden_live,
+                                                    old_live,
+                                                    cur_eden,
+                                                    max_old_gen_size,
+                                                    max_eden_size,
+                                                    true /* full gc*/);
 
         size_policy->check_gc_overhead_limit(young_live,
                                              eden_live,
@@ -2338,6 +2360,7 @@
     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::flat_profiler));
     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::management));
     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::system_dictionary));
+    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::class_loader_data));
     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::jvmti));
     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::code_cache));
 
@@ -2626,6 +2649,41 @@
   }
 }
 
+#ifdef ASSERT
+// Write a histogram of the number of times the block table was filled for a
+// region.
+void PSParallelCompact::write_block_fill_histogram(outputStream* const out)
+{
+  if (!TraceParallelOldGCCompactionPhase) return;
+
+  typedef ParallelCompactData::RegionData rd_t;
+  ParallelCompactData& sd = summary_data();
+
+  for (unsigned int id = old_space_id; id < last_space_id; ++id) {
+    MutableSpace* const spc = _space_info[id].space();
+    if (spc->bottom() != spc->top()) {
+      const rd_t* const beg = sd.addr_to_region_ptr(spc->bottom());
+      HeapWord* const top_aligned_up = sd.region_align_up(spc->top());
+      const rd_t* const end = sd.addr_to_region_ptr(top_aligned_up);
+
+      size_t histo[5] = { 0, 0, 0, 0, 0 };
+      const size_t histo_len = sizeof(histo) / sizeof(size_t);
+      const size_t region_cnt = pointer_delta(end, beg, sizeof(rd_t));
+
+      for (const rd_t* cur = beg; cur < end; ++cur) {
+        ++histo[MIN2(cur->blocks_filled_count(), histo_len - 1)];
+      }
+      out->print("%u %-4s" SIZE_FORMAT_W(5), id, space_names[id], region_cnt);
+      for (size_t i = 0; i < histo_len; ++i) {
+        out->print(" " SIZE_FORMAT_W(5) " %5.1f%%",
+                   histo[i], 100.0 * histo[i] / region_cnt);
+      }
+      out->cr();
+    }
+  }
+}
+#endif // #ifdef ASSERT
+
 void PSParallelCompact::compact() {
   // trace("5");
   TraceTime tm("compaction phase", print_phases(), true, gclog_or_tty);
@@ -2665,6 +2723,8 @@
       update_deferred_objects(cm, SpaceId(id));
     }
   }
+
+  DEBUG_ONLY(write_block_fill_histogram(gclog_or_tty));
 }
 
 #ifdef  ASSERT
@@ -3129,6 +3189,57 @@
   } while (true);
 }
 
+void PSParallelCompact::fill_blocks(size_t region_idx)
+{
+  // Fill in the block table elements for the specified region.  Each block
+  // table element holds the number of live words in the region that are to the
+  // left of the first object that starts in the block.  Thus only blocks in
+  // which an object starts need to be filled.
+  //
+  // The algorithm scans the section of the bitmap that corresponds to the
+  // region, keeping a running total of the live words.  When an object start is
+  // found, if it's the first to start in the block that contains it, the
+  // current total is written to the block table element.
+  const size_t Log2BlockSize = ParallelCompactData::Log2BlockSize;
+  const size_t Log2RegionSize = ParallelCompactData::Log2RegionSize;
+  const size_t RegionSize = ParallelCompactData::RegionSize;
+
+  ParallelCompactData& sd = summary_data();
+  const size_t partial_obj_size = sd.region(region_idx)->partial_obj_size();
+  if (partial_obj_size >= RegionSize) {
+    return; // No objects start in this region.
+  }
+
+  // Ensure the first loop iteration decides that the block has changed.
+  size_t cur_block = sd.block_count();
+
+  const ParMarkBitMap* const bitmap = mark_bitmap();
+
+  const size_t Log2BitsPerBlock = Log2BlockSize - LogMinObjAlignment;
+  assert((size_t)1 << Log2BitsPerBlock ==
+         bitmap->words_to_bits(ParallelCompactData::BlockSize), "sanity");
+
+  size_t beg_bit = bitmap->words_to_bits(region_idx << Log2RegionSize);
+  const size_t range_end = beg_bit + bitmap->words_to_bits(RegionSize);
+  size_t live_bits = bitmap->words_to_bits(partial_obj_size);
+  beg_bit = bitmap->find_obj_beg(beg_bit + live_bits, range_end);
+  while (beg_bit < range_end) {
+    const size_t new_block = beg_bit >> Log2BitsPerBlock;
+    if (new_block != cur_block) {
+      cur_block = new_block;
+      sd.block(cur_block)->set_offset(bitmap->bits_to_words(live_bits));
+    }
+
+    const size_t end_bit = bitmap->find_obj_end(beg_bit, range_end);
+    if (end_bit < range_end - 1) {
+      live_bits += end_bit - beg_bit + 1;
+      beg_bit = bitmap->find_obj_beg(end_bit + 1, range_end);
+    } else {
+      return;
+    }
+  }
+}
+
 void
 PSParallelCompact::move_and_update(ParCompactionManager* cm, SpaceId space_id) {
   const MutableSpace* sp = space(space_id);
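
To illustrate the fill_blocks() scan added above, here is a toy version that replaces the mark bitmap with a simple list of (start, size) pairs within one region. It records, for each block, the running live-word total seen before the first object that starts in that block; partial-object handling is omitted. Sketch only, not the HotSpot code path.

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    int main() {
      const size_t Log2BlockSize = 7;                              // 128-word blocks

      struct Obj { size_t start_word; size_t size_words; };        // offsets within the region
      std::vector<Obj> objs = { {5, 10}, {130, 4}, {140, 2}, {300, 50} };  // hypothetical

      std::vector<long> block_offset(4, -1);   // -1 == no object starts in this block
      size_t live_words = 0;
      size_t cur_block  = (size_t)-1;

      for (const Obj& o : objs) {
        size_t blk = o.start_word >> Log2BlockSize;  // block containing the object start
        if (blk != cur_block) {
          cur_block = blk;
          block_offset[blk] = (long)live_words;      // first start in this block
        }
        live_words += o.size_words;                  // running total of live words
      }

      for (size_t b = 0; b < block_offset.size(); ++b)
        std::printf("block %zu offset %ld\n", b, block_offset[b]);
      return 0;
    }
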
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -220,6 +220,17 @@
   // Mask for the bits in a pointer to get the address of the start of a region.
   static const size_t RegionAddrMask;
 
+  static const size_t Log2BlockSize;
+  static const size_t BlockSize;
+  static const size_t BlockSizeBytes;
+
+  static const size_t BlockSizeOffsetMask;
+  static const size_t BlockAddrOffsetMask;
+  static const size_t BlockAddrMask;
+
+  static const size_t BlocksPerRegion;
+  static const size_t Log2BlocksPerRegion;
+
   class RegionData
   {
   public:
@@ -272,6 +283,12 @@
     inline uint destination_count() const;
     inline uint destination_count_raw() const;
 
+    // Whether the block table for this region has been filled.
+    inline bool blocks_filled() const;
+
+    // Number of times the block table was filled.
+    DEBUG_ONLY(inline size_t blocks_filled_count() const;)
+
     // The location of the java heap data that corresponds to this region.
     inline HeapWord* data_location() const;
 
@@ -296,6 +313,7 @@
     void set_partial_obj_size(size_t words)    {
       _partial_obj_size = (region_sz_t) words;
     }
+    inline void set_blocks_filled();
 
     inline void set_destination_count(uint count);
     inline void set_live_obj_size(size_t words);
@@ -328,7 +346,11 @@
     HeapWord*            _partial_obj_addr;
     region_sz_t          _partial_obj_size;
     region_sz_t volatile _dc_and_los;
+    bool                 _blocks_filled;
+
 #ifdef ASSERT
+    size_t               _blocks_filled_count;   // Number of block table fills.
+
     // These enable optimizations that are only partially implemented.  Use
     // debug builds to prevent the code fragments from breaking.
     HeapWord*            _data_location;
@@ -337,11 +359,26 @@
 
 #ifdef ASSERT
    public:
-    uint            _pushed;   // 0 until region is pushed onto a worker's stack
+    uint                 _pushed;   // 0 until region is pushed onto a stack
    private:
 #endif
   };
 
+  // "Blocks" allow shorter sections of the bitmap to be searched.  Each Block
+  // holds an offset, which is the amount of live data in the Region to the left
+  // of the first live object that starts in the Block.
+  class BlockData
+  {
+  public:
+    typedef unsigned short int blk_ofs_t;
+
+    blk_ofs_t offset() const    { return _offset; }
+    void set_offset(size_t val) { _offset = (blk_ofs_t)val; }
+
+  private:
+    blk_ofs_t _offset;
+  };
+
 public:
   ParallelCompactData();
   bool initialize(MemRegion covered_region);
@@ -353,8 +390,9 @@
   inline RegionData* region(size_t region_idx) const;
   inline size_t     region(const RegionData* const region_ptr) const;
 
-  // Returns true if the given address is contained within the region
-  bool region_contains(size_t region_index, HeapWord* addr);
+  size_t block_count() const { return _block_count; }
+  inline BlockData* block(size_t block_idx) const;
+  inline size_t     block(const BlockData* block_ptr) const;
 
   void add_obj(HeapWord* addr, size_t len);
   void add_obj(oop p, size_t len) { add_obj((HeapWord*)p, len); }
@@ -394,11 +432,24 @@
   inline HeapWord*  region_align_up(HeapWord* addr) const;
   inline bool       is_region_aligned(HeapWord* addr) const;
 
+  // Analogous to region_offset() for blocks.
+  size_t     block_offset(const HeapWord* addr) const;
+  size_t     addr_to_block_idx(const HeapWord* addr) const;
+  size_t     addr_to_block_idx(const oop obj) const {
+    return addr_to_block_idx((HeapWord*) obj);
+  }
+  inline BlockData* addr_to_block_ptr(const HeapWord* addr) const;
+  inline HeapWord*  block_to_addr(size_t block) const;
+  inline size_t     region_to_block_idx(size_t region) const;
+
+  inline HeapWord*  block_align_down(HeapWord* addr) const;
+  inline HeapWord*  block_align_up(HeapWord* addr) const;
+  inline bool       is_block_aligned(HeapWord* addr) const;
+
   // Return the address one past the end of the partial object.
   HeapWord* partial_obj_end(size_t region_idx) const;
 
-  // Return the new location of the object p after the
-  // the compaction.
+  // Return the location of the object after compaction.
   HeapWord* calc_new_pointer(HeapWord* addr);
 
   HeapWord* calc_new_pointer(oop p) {
@@ -411,6 +462,7 @@
 #endif  // #ifdef ASSERT
 
 private:
+  bool initialize_block_data();
   bool initialize_region_data(size_t region_size);
   PSVirtualSpace* create_vspace(size_t count, size_t element_size);
 
@@ -424,6 +476,10 @@
   size_t          _reserved_byte_size;
   RegionData*     _region_data;
   size_t          _region_count;
+
+  PSVirtualSpace* _block_vspace;
+  BlockData*      _block_data;
+  size_t          _block_count;
 };
 
 inline uint
@@ -438,6 +494,28 @@
   return destination_count_raw() >> dc_shift;
 }
 
+inline bool
+ParallelCompactData::RegionData::blocks_filled() const
+{
+  return _blocks_filled;
+}
+
+#ifdef ASSERT
+inline size_t
+ParallelCompactData::RegionData::blocks_filled_count() const
+{
+  return _blocks_filled_count;
+}
+#endif // #ifdef ASSERT
+
+inline void
+ParallelCompactData::RegionData::set_blocks_filled()
+{
+  _blocks_filled = true;
+  // Debug builds count the number of times the table was filled.
+  DEBUG_ONLY(Atomic::inc_ptr(&_blocks_filled_count));
+}
+
 inline void
 ParallelCompactData::RegionData::set_destination_count(uint count)
 {
@@ -532,6 +610,12 @@
   return pointer_delta(region_ptr, _region_data, sizeof(RegionData));
 }
 
+inline ParallelCompactData::BlockData*
+ParallelCompactData::block(size_t n) const {
+  assert(n < block_count(), "bad arg");
+  return _block_data + n;
+}
+
 inline size_t
 ParallelCompactData::region_offset(const HeapWord* addr) const
 {
@@ -598,6 +682,63 @@
   return region_offset(addr) == 0;
 }
 
+inline size_t
+ParallelCompactData::block_offset(const HeapWord* addr) const
+{
+  assert(addr >= _region_start, "bad addr");
+  assert(addr <= _region_end, "bad addr");
+  return (size_t(addr) & BlockAddrOffsetMask) >> LogHeapWordSize;
+}
+
+inline size_t
+ParallelCompactData::addr_to_block_idx(const HeapWord* addr) const
+{
+  assert(addr >= _region_start, "bad addr");
+  assert(addr <= _region_end, "bad addr");
+  return pointer_delta(addr, _region_start) >> Log2BlockSize;
+}
+
+inline ParallelCompactData::BlockData*
+ParallelCompactData::addr_to_block_ptr(const HeapWord* addr) const
+{
+  return block(addr_to_block_idx(addr));
+}
+
+inline HeapWord*
+ParallelCompactData::block_to_addr(size_t block) const
+{
+  assert(block < _block_count, "block out of range");
+  return _region_start + (block << Log2BlockSize);
+}
+
+inline size_t
+ParallelCompactData::region_to_block_idx(size_t region) const
+{
+  return region << Log2BlocksPerRegion;
+}
+
+inline HeapWord*
+ParallelCompactData::block_align_down(HeapWord* addr) const
+{
+  assert(addr >= _region_start, "bad addr");
+  assert(addr < _region_end + RegionSize, "bad addr");
+  return (HeapWord*)(size_t(addr) & BlockAddrMask);
+}
+
+inline HeapWord*
+ParallelCompactData::block_align_up(HeapWord* addr) const
+{
+  assert(addr >= _region_start, "bad addr");
+  assert(addr <= _region_end, "bad addr");
+  return block_align_down(addr + BlockSizeOffsetMask);
+}
+
+inline bool
+ParallelCompactData::is_block_aligned(HeapWord* addr) const
+{
+  return block_offset(addr) == 0;
+}
+
 // Abstract closure for use with ParMarkBitMap::iterate(), which will invoke the
 // do_addr() method.
 //
@@ -775,6 +916,7 @@
   // Convenient access to type names.
   typedef ParMarkBitMap::idx_t idx_t;
   typedef ParallelCompactData::RegionData RegionData;
+  typedef ParallelCompactData::BlockData BlockData;
 
   typedef enum {
     old_space_id, eden_space_id,
@@ -962,6 +1104,8 @@
   // Adjust addresses in roots.  Does not adjust addresses in heap.
   static void adjust_roots();
 
+  DEBUG_ONLY(static void write_block_fill_histogram(outputStream* const out);)
+
   // Move objects to new locations.
   static void compact_perm(ParCompactionManager* cm);
   static void compact();
@@ -1128,6 +1272,9 @@
     fill_region(cm, region);
   }
 
+  // Fill in the block table for the specified region.
+  static void fill_blocks(size_t region_idx);
+
   // Update the deferred objects in the space.
   static void update_deferred_objects(ParCompactionManager* cm, SpaceId id);
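
A note on BlockData::_offset being an unsigned short: the stored value counts live words within a single region, so with Log2RegionSize == 16 it is always below 2^16 and fits in 16 bits. A trivial sketch of that bound check:

    #include <climits>

    typedef unsigned short int blk_ofs_t;

    static_assert(((1u << 16) - 1) <= USHRT_MAX,
                  "a block offset (< RegionSize == 2^16 words) fits in blk_ofs_t");

    int main() { return 0; }
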
 
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psPromotionManager.inline.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psPromotionManager.inline.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -42,7 +42,7 @@
     if (o->is_forwarded()) {
       o = o->forwardee();
       // Card mark
-      if (PSScavenge::is_obj_in_young((HeapWord*) o)) {
+      if (PSScavenge::is_obj_in_young(o)) {
         PSScavenge::card_table()->inline_write_ref_field_gc(p, o);
       }
       oopDesc::encode_store_heap_oop_not_null(p, o);
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -61,6 +61,7 @@
 bool                       PSScavenge::_survivor_overflow = false;
 uint                       PSScavenge::_tenuring_threshold = 0;
 HeapWord*                  PSScavenge::_young_generation_boundary = NULL;
+uintptr_t                  PSScavenge::_young_generation_boundary_compressed = 0;
 elapsedTimer               PSScavenge::_accumulated_time;
 Stack<markOop, mtGC>       PSScavenge::_preserved_mark_stack;
 Stack<oop, mtGC>           PSScavenge::_preserved_oop_stack;
@@ -71,7 +72,7 @@
 class PSIsAliveClosure: public BoolObjectClosure {
 public:
   bool do_object_b(oop p) {
-    return (!PSScavenge::is_obj_in_young((HeapWord*) p)) || p->is_forwarded();
+    return (!PSScavenge::is_obj_in_young(p)) || p->is_forwarded();
   }
 };
 
@@ -408,6 +409,7 @@
       q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::flat_profiler));
       q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::management));
       q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::system_dictionary));
+      q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::class_loader_data));
       q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::jvmti));
       q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::code_cache));
 
@@ -449,11 +451,9 @@
       reference_processor()->enqueue_discovered_references(NULL);
     }
 
-      // Unlink any dead interned Strings
-      StringTable::unlink(&_is_alive_closure);
-      // Process the remaining live ones
-      PSScavengeRootsClosure root_closure(promotion_manager);
-      StringTable::oops_do(&root_closure);
+    // Unlink any dead interned Strings and process the remaining live ones.
+    PSScavengeRootsClosure root_closure(promotion_manager);
+    StringTable::unlink_or_oops_do(&_is_alive_closure, &root_closure);
 
     // Finally, flush the promotion_manager's labs, and deallocate its stacks.
     PSPromotionManager::post_scavenge();
@@ -816,7 +816,7 @@
   // Set boundary between young_gen and old_gen
   assert(old_gen->reserved().end() <= young_gen->eden_space()->bottom(),
          "old above young");
-  _young_generation_boundary = young_gen->eden_space()->bottom();
+  set_young_generation_boundary(young_gen->eden_space()->bottom());
 
   // Initialize ref handling object for scavenging.
   MemRegion mr = young_gen->reserved();
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -62,19 +62,22 @@
 
  protected:
   // Flags/counters
-  static ReferenceProcessor* _ref_processor;        // Reference processor for scavenging.
-  static PSIsAliveClosure    _is_alive_closure;     // Closure used for reference processing
-  static CardTableExtension* _card_table;           // We cache the card table for fast access.
-  static bool                _survivor_overflow;    // Overflow this collection
-  static uint                _tenuring_threshold;   // tenuring threshold for next scavenge
-  static elapsedTimer        _accumulated_time;     // total time spent on scavenge
-  static HeapWord*           _young_generation_boundary; // The lowest address possible for the young_gen.
-                                                         // This is used to decide if an oop should be scavenged,
-                                                         // cards should be marked, etc.
+  static ReferenceProcessor*  _ref_processor;        // Reference processor for scavenging.
+  static PSIsAliveClosure     _is_alive_closure;     // Closure used for reference processing
+  static CardTableExtension*  _card_table;           // We cache the card table for fast access.
+  static bool                 _survivor_overflow;    // Overflow this collection
+  static uint                 _tenuring_threshold;   // tenuring threshold for next scavenge
+  static elapsedTimer         _accumulated_time;     // total time spent on scavenge
+  // The lowest address possible for the young_gen.
+  // This is used to decide if an oop should be scavenged,
+  // cards should be marked, etc.
+  static HeapWord*            _young_generation_boundary;
+  // Used to optimize compressed oops young gen boundary checking.
+  static uintptr_t            _young_generation_boundary_compressed;
   static Stack<markOop, mtGC> _preserved_mark_stack; // List of marks to be restored after failed promotion
   static Stack<oop, mtGC>     _preserved_oop_stack;  // List of oops that need their mark restored.
-  static CollectorCounters*      _counters;          // collector performance counters
-  static bool                    _promotion_failed;
+  static CollectorCounters*   _counters;             // collector performance counters
+  static bool                 _promotion_failed;
 
   static void clean_up_failed_promotion();
 
@@ -112,6 +115,9 @@
   // boundary moves, _young_generation_boundary must be reset
   static void set_young_generation_boundary(HeapWord* v) {
     _young_generation_boundary = v;
+    if (UseCompressedOops) {
+      _young_generation_boundary_compressed = (uintptr_t)oopDesc::encode_heap_oop((oop)v);
+    }
   }
 
   // Called by parallelScavengeHeap to init the tenuring threshold
@@ -140,11 +146,19 @@
   static void copy_and_push_safe_barrier_from_klass(PSPromotionManager* pm, oop* p);
 
   // Is an object in the young generation
-  // This assumes that the HeapWord argument is in the heap,
+  // This assumes that 'o' is in the heap,
   // so it only checks one side of the complete predicate.
+
+  inline static bool is_obj_in_young(oop o) {
+    return (HeapWord*)o >= _young_generation_boundary;
+  }
+
+  inline static bool is_obj_in_young(narrowOop o) {
+    return (uintptr_t)o >= _young_generation_boundary_compressed;
+  }
+
   inline static bool is_obj_in_young(HeapWord* o) {
-    const bool result = (o >= _young_generation_boundary);
-    return result;
+    return o >= _young_generation_boundary;
   }
 };
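
The new narrowOop overload of is_obj_in_young() compares raw compressed values against a precomputed encoded boundary. This works because the compressed encoding ((addr - base) >> shift) is monotonically non-decreasing in the address, so comparing encoded values preserves the boundary test without decoding. A sketch under assumed base and shift values (stand-ins for the real heap base and compressed-oop shift):

    #include <cstdint>
    #include <cassert>

    static uint32_t encode(uintptr_t addr, uintptr_t base, unsigned shift) {
      return (uint32_t)((addr - base) >> shift);
    }

    int main() {
      const uintptr_t base  = 0x100000000ULL;  // hypothetical heap base
      const unsigned  shift = 3;               // hypothetical shift (8-byte alignment)

      uintptr_t boundary = base + 0x4000000;   // young-gen boundary (hypothetical)
      uintptr_t young    = boundary + 0x1000;  // an address at or above the boundary
      uintptr_t old      = boundary - 0x1000;  // an address below the boundary

      uint32_t boundary_c = encode(boundary, base, shift);
      assert(encode(young, base, shift) >= boundary_c);  // classified as young
      assert(encode(old,   base, shift) <  boundary_c);  // classified as not young
      return 0;
    }
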
 
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.inline.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.inline.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -39,9 +39,7 @@
 
 template <class T> inline bool PSScavenge::should_scavenge(T* p) {
   T heap_oop = oopDesc::load_heap_oop(p);
-  if (oopDesc::is_null(heap_oop)) return false;
-  oop obj = oopDesc::decode_heap_oop_not_null(heap_oop);
-  return PSScavenge::is_obj_in_young((HeapWord*)obj);
+  return PSScavenge::is_obj_in_young(heap_oop);
 }
 
 template <class T>
@@ -94,7 +92,7 @@
   // or from metadata.
   if ((!PSScavenge::is_obj_in_young((HeapWord*)p)) &&
       Universe::heap()->is_in_reserved(p)) {
-    if (PSScavenge::is_obj_in_young((HeapWord*)new_obj)) {
+    if (PSScavenge::is_obj_in_young(new_obj)) {
       card_table()->inline_write_ref_field_gc(p, new_obj);
     }
   }
@@ -147,7 +145,7 @@
       }
       oopDesc::encode_store_heap_oop_not_null(p, new_obj);
 
-      if (PSScavenge::is_obj_in_young((HeapWord*)new_obj)) {
+      if (PSScavenge::is_obj_in_young(new_obj)) {
         do_klass_barrier();
       }
     }
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psTasks.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psTasks.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -79,14 +79,15 @@
       break;
 
     case system_dictionary:
-      {
       SystemDictionary::oops_do(&roots_closure);
+      break;
 
-        // Move this to another root_type?
-        PSScavengeKlassClosure klass_closure(pm);
-        ClassLoaderDataGraph::oops_do(&roots_closure, &klass_closure, false);
-      }
-      break;
+    case class_loader_data:
+    {
+      PSScavengeKlassClosure klass_closure(pm);
+      ClassLoaderDataGraph::oops_do(&roots_closure, &klass_closure, false);
+    }
+    break;
 
     case management:
       Management::oops_do(&roots_closure);
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psTasks.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psTasks.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -59,9 +59,10 @@
     object_synchronizer   = 4,
     flat_profiler         = 5,
     system_dictionary     = 6,
-    management            = 7,
-    jvmti                 = 8,
-    code_cache            = 9
+    class_loader_data     = 7,
+    management            = 8,
+    jvmti                 = 9,
+    code_cache            = 10
   };
  private:
   RootType _root_type;
--- a/hotspot/src/share/vm/gc_implementation/shared/adaptiveSizePolicy.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/shared/adaptiveSizePolicy.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2004, 2010, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2004, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -467,7 +467,7 @@
       (free_in_old_gen < (size_t) mem_free_old_limit &&
        free_in_eden < (size_t) mem_free_eden_limit))) {
     gclog_or_tty->print_cr(
-          "PSAdaptiveSizePolicy::compute_generation_free_space limits:"
+          "PSAdaptiveSizePolicy::check_gc_overhead_limit:"
           " promo_limit: " SIZE_FORMAT
           " max_eden_size: " SIZE_FORMAT
           " total_free_limit: " SIZE_FORMAT
--- a/hotspot/src/share/vm/gc_implementation/shared/parGCAllocBuffer.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/gc_implementation/shared/parGCAllocBuffer.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -158,7 +158,7 @@
   // Fills in the unallocated portion of the buffer with a garbage object.
   // If "end_of_gc" is TRUE, is after the last use in the GC.  IF "retain"
   // is true, attempt to re-use the unused portion in the next GC.
-  void retire(bool end_of_gc, bool retain);
+  virtual void retire(bool end_of_gc, bool retain);
 
   void print() PRODUCT_RETURN;
 };
--- a/hotspot/src/share/vm/interpreter/bytecodeInterpreter.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/interpreter/bytecodeInterpreter.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -468,7 +468,25 @@
 
 #ifdef ASSERT
   if (istate->_msg != initialize) {
-    assert(abs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + 1), "bad stack limit");
+    // We have a problem here if we are running with a pre-hsx24 JDK (for example during bootstrap)
+    // because in that case, EnableInvokeDynamic is true by default but will be later switched off
+    // if java_lang_invoke_MethodHandle::compute_offsets() detects that the JDK only has the classes
+    // for the old JSR292 implementation.
+    // This leads to a situation where 'istate->_stack_limit' always accounts for
+    // methodOopDesc::extra_stack_entries() because it is computed in
+    // CppInterpreterGenerator::generate_compute_interpreter_state() which was generated while
+    // EnableInvokeDynamic was still true. On the other hand, istate->_method->max_stack() doesn't
+    // account for extra_stack_entries() anymore because at the time when it is called
+    // EnableInvokeDynamic was already set to false.
+    // So we have a second version of the assertion which handles the case where EnableInvokeDynamic was
+    // switched off because of the wrong classes.
+    if (EnableInvokeDynamic || FLAG_IS_CMDLINE(EnableInvokeDynamic)) {
+      assert(abs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + 1), "bad stack limit");
+    } else {
+      const int extra_stack_entries = Method::extra_stack_entries_for_indy;
+      assert(labs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + extra_stack_entries
+                                                                                               + 1), "bad stack limit");
+    }
 #ifndef SHARK
     IA32_ONLY(assert(istate->_stack_limit == istate->_thread->last_Java_sp() + 1, "wrong"));
 #endif // !SHARK
--- a/hotspot/src/share/vm/memory/allocation.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/memory/allocation.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -60,10 +60,11 @@
 void  _ValueObj::operator delete [](void* p)    { ShouldNotCallThis(); }
 
 void* MetaspaceObj::operator new(size_t size, ClassLoaderData* loader_data,
-                                size_t word_size, bool read_only, TRAPS) {
+                                 size_t word_size, bool read_only,
+                                 MetaspaceObj::Type type, TRAPS) {
   // Klass has it's own operator new
   return Metaspace::allocate(loader_data, word_size, read_only,
-                             Metaspace::NonClassType, CHECK_NULL);
+                             type, CHECK_NULL);
 }
 
 bool MetaspaceObj::is_shared() const {
--- a/hotspot/src/share/vm/memory/allocation.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/memory/allocation.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -268,8 +268,55 @@
   bool is_shared() const;
   void print_address_on(outputStream* st) const;  // nonvirtual address printing
 
+#define METASPACE_OBJ_TYPES_DO(f) \
+  f(Unknown) \
+  f(Class) \
+  f(Symbol) \
+  f(TypeArrayU1) \
+  f(TypeArrayU2) \
+  f(TypeArrayU4) \
+  f(TypeArrayU8) \
+  f(TypeArrayOther) \
+  f(Method) \
+  f(ConstMethod) \
+  f(MethodData) \
+  f(ConstantPool) \
+  f(ConstantPoolCache) \
+  f(Annotation) \
+  f(MethodCounters)
+
+#define METASPACE_OBJ_TYPE_DECLARE(name) name ## Type,
+#define METASPACE_OBJ_TYPE_NAME_CASE(name) case name ## Type: return #name;
+
+  enum Type {
+    // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
+    METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE)
+    _number_of_types
+  };
+
+  static const char * type_name(Type type) {
+    switch(type) {
+    METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE)
+    default:
+      ShouldNotReachHere();
+      return NULL;
+    }
+  }
+
+  static MetaspaceObj::Type array_type(size_t elem_size) {
+    switch (elem_size) {
+    case 1: return TypeArrayU1Type;
+    case 2: return TypeArrayU2Type;
+    case 4: return TypeArrayU4Type;
+    case 8: return TypeArrayU8Type;
+    default:
+      return TypeArrayOtherType;
+    }
+  }
+
   void* operator new(size_t size, ClassLoaderData* loader_data,
-                     size_t word_size, bool read_only, Thread* thread);
+                     size_t word_size, bool read_only,
+                     Type type, Thread* thread);
                      // can't use TRAPS from this header file.
   void operator delete(void* p) { ShouldNotCallThis(); }
 };
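
The METASPACE_OBJ_TYPES_DO list above is an X-macro: one list drives both the enum declaration and the name lookup, so the two cannot drift apart. A reduced, self-contained sketch of the same pattern (the three demo types are just examples):

    #include <cstdio>

    #define DEMO_TYPES_DO(f) \
      f(Unknown)             \
      f(Class)               \
      f(Symbol)

    #define DEMO_TYPE_DECLARE(name) name ## Type,
    #define DEMO_TYPE_NAME_CASE(name) case name ## Type: return #name;

    enum Type {
      DEMO_TYPES_DO(DEMO_TYPE_DECLARE)   // expands to UnknownType, ClassType, SymbolType,
      _number_of_types
    };

    static const char* type_name(Type type) {
      switch (type) {
      DEMO_TYPES_DO(DEMO_TYPE_NAME_CASE) // expands to: case ClassType: return "Class"; ...
      default: return "?";
      }
    }

    int main() {
      std::printf("%d types, e.g. %s\n", (int)_number_of_types, type_name(ClassType));
      return 0;
    }
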
--- a/hotspot/src/share/vm/memory/metaspace.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/memory/metaspace.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -713,6 +713,23 @@
 #ifdef ASSERT
   void verify_allocated_blocks_words();
 #endif
+
+  size_t get_raw_word_size(size_t word_size) {
+    // If only the dictionary is going to be used (i.e., no
+    // indexed free list), then there is a minimum size requirement.
+    // MinChunkSize is a placeholder for the real minimum size JJJ
+    size_t byte_size = word_size * BytesPerWord;
+
+    size_t byte_size_with_overhead = byte_size + Metablock::overhead();
+
+    size_t raw_bytes_size = MAX2(byte_size_with_overhead,
+                                 Metablock::min_block_byte_size());
+    raw_bytes_size = ARENA_ALIGN(raw_bytes_size);
+    size_t raw_word_size = raw_bytes_size / BytesPerWord;
+    assert(raw_word_size * BytesPerWord == raw_bytes_size, "Size problem");
+
+    return raw_word_size;
+  }
 };
 
 uint const SpaceManager::_small_chunk_limit = 4;
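
The get_raw_word_size() helper factored out above rounds a requested word count up to the raw size actually consumed: bytes plus block overhead, clamped to a minimum block size, then arena-aligned. A worked sketch with made-up values for the overhead, minimum block size and alignment (the real ones come from Metablock and ARENA_ALIGN):

    #include <cstddef>
    #include <cassert>

    int main() {
      const size_t BytesPerWord    = 8;    // assumption: 64-bit
      const size_t overhead_bytes  = 8;    // stand-in for Metablock::overhead()
      const size_t min_block_bytes = 16;   // stand-in for Metablock::min_block_byte_size()
      const size_t arena_align     = 16;   // stand-in for the ARENA_ALIGN granularity

      size_t word_size     = 3;                               // requested words
      size_t byte_size     = word_size * BytesPerWord;        // 24 bytes
      size_t with_overhead = byte_size + overhead_bytes;      // 32 bytes
      size_t raw_bytes = with_overhead < min_block_bytes ? min_block_bytes : with_overhead;
      raw_bytes = (raw_bytes + arena_align - 1) & ~(arena_align - 1);  // align up
      size_t raw_words = raw_bytes / BytesPerWord;

      assert(raw_words * BytesPerWord == raw_bytes);          // same check as the source
      assert(raw_words == 4);                                 // 3 words -> 4 raw words here
      return 0;
    }
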
@@ -2320,19 +2337,7 @@
 MetaWord* SpaceManager::allocate(size_t word_size) {
   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
 
-  // If only the dictionary is going to be used (i.e., no
-  // indexed free list), then there is a minimum size requirement.
-  // MinChunkSize is a placeholder for the real minimum size JJJ
-  size_t byte_size = word_size * BytesPerWord;
-
-  size_t byte_size_with_overhead = byte_size + Metablock::overhead();
-
-  size_t raw_bytes_size = MAX2(byte_size_with_overhead,
-                               Metablock::min_block_byte_size());
-  raw_bytes_size = ARENA_ALIGN(raw_bytes_size);
-  size_t raw_word_size = raw_bytes_size / BytesPerWord;
-  assert(raw_word_size * BytesPerWord == raw_bytes_size, "Size problem");
-
+  size_t raw_word_size = get_raw_word_size(word_size);
   BlockFreelist* fl =  block_freelists();
   MetaWord* p = NULL;
   // Allocation from the dictionary is expensive in the sense that
@@ -2896,6 +2901,9 @@
   if (class_chunk != NULL) {
     class_vsm()->add_chunk(class_chunk, true);
   }
+
+  _alloc_record_head = NULL;
+  _alloc_record_tail = NULL;
 }
 
 size_t Metaspace::align_word_size_up(size_t word_size) {
@@ -3000,12 +3008,14 @@
 }
 
 Metablock* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
-                              bool read_only, MetadataType mdtype, TRAPS) {
+                              bool read_only, MetaspaceObj::Type type, TRAPS) {
   if (HAS_PENDING_EXCEPTION) {
     assert(false, "Should not allocate with exception pending");
     return NULL;  // caller does a CHECK_NULL too
   }
 
+  MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
+
   // SSS: Should we align the allocations and make sure the sizes are aligned.
   MetaWord* result = NULL;
 
@@ -3015,13 +3025,13 @@
   // with the SymbolTable_lock.  Dumping is single threaded for now.  We'll have
   // to revisit this for application class data sharing.
   if (DumpSharedSpaces) {
-    if (read_only) {
-      result = loader_data->ro_metaspace()->allocate(word_size, NonClassType);
-    } else {
-      result = loader_data->rw_metaspace()->allocate(word_size, NonClassType);
-    }
+    assert(type > MetaspaceObj::UnknownType && type < MetaspaceObj::_number_of_types, "sanity");
+    Metaspace* space = read_only ? loader_data->ro_metaspace() : loader_data->rw_metaspace();
+    result = space->allocate(word_size, NonClassType);
     if (result == NULL) {
       report_out_of_shared_space(read_only ? SharedReadOnly : SharedReadWrite);
+    } else {
+      space->record_allocation(result, type, space->vsm()->get_raw_word_size(word_size));
     }
     return Metablock::initialize(result, word_size);
   }
@@ -3056,6 +3066,38 @@
   return Metablock::initialize(result, word_size);
 }
 
+void Metaspace::record_allocation(void* ptr, MetaspaceObj::Type type, size_t word_size) {
+  assert(DumpSharedSpaces, "sanity");
+
+  AllocRecord *rec = new AllocRecord((address)ptr, type, (int)word_size * HeapWordSize);
+  if (_alloc_record_head == NULL) {
+    _alloc_record_head = _alloc_record_tail = rec;
+  } else {
+    _alloc_record_tail->_next = rec;
+    _alloc_record_tail = rec;
+  }
+}
+
+void Metaspace::iterate(Metaspace::AllocRecordClosure *closure) {
+  assert(DumpSharedSpaces, "unimplemented for !DumpSharedSpaces");
+
+  address last_addr = (address)bottom();
+
+  for (AllocRecord *rec = _alloc_record_head; rec; rec = rec->_next) {
+    address ptr = rec->_ptr;
+    if (last_addr < ptr) {
+      closure->doit(last_addr, MetaspaceObj::UnknownType, ptr - last_addr);
+    }
+    closure->doit(ptr, rec->_type, rec->_byte_size);
+    last_addr = ptr + rec->_byte_size;
+  }
+
+  address top = ((address)bottom()) + used_bytes_slow(Metaspace::NonClassType);
+  if (last_addr < top) {
+    closure->doit(last_addr, MetaspaceObj::UnknownType, top - last_addr);
+  }
+}
+
 void Metaspace::purge() {
   MutexLockerEx cl(SpaceManager::expand_lock(),
                    Mutex::_no_safepoint_check_flag);
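
Metaspace::iterate() above walks the allocation records in address order and reports any unrecorded bytes between them as UnknownType. A sketch of that gap-filling walk, using plain offsets and a free function in place of AllocRecord / AllocRecordClosure:

    #include <cstdio>
    #include <vector>

    struct Rec { long ptr; const char* type; long byte_size; };   // like AllocRecord

    static void doit(long ptr, const char* type, long byte_size) {  // like AllocRecordClosure::doit
      std::printf("%-8s at %4ld, %4ld bytes\n", type, ptr, byte_size);
    }

    int main() {
      long bottom = 0, top = 1000;                        // hypothetical space bounds
      std::vector<Rec> recs = { {0, "Class", 400}, {500, "Symbol", 100} };

      long last = bottom;
      for (const Rec& r : recs) {
        if (last < r.ptr) doit(last, "Unknown", r.ptr - last);   // unrecorded gap
        doit(r.ptr, r.type, r.byte_size);
        last = r.ptr + r.byte_size;
      }
      if (last < top) doit(last, "Unknown", top - last);         // tail gap up to 'top'
      return 0;
    }
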
--- a/hotspot/src/share/vm/memory/metaspace.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/memory/metaspace.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -127,6 +127,23 @@
   static VirtualSpaceList* space_list()       { return _space_list; }
   static VirtualSpaceList* class_space_list() { return _class_space_list; }
 
+  // This is used by DumpSharedSpaces only, where only _vsm is used. So we will
+  // maintain a single list for now.
+  void record_allocation(void* ptr, MetaspaceObj::Type type, size_t word_size);
+
+  class AllocRecord : public CHeapObj<mtClass> {
+  public:
+    AllocRecord(address ptr, MetaspaceObj::Type type, int byte_size)
+      : _next(NULL), _ptr(ptr), _type(type), _byte_size(byte_size) {}
+    AllocRecord *_next;
+    address _ptr;
+    MetaspaceObj::Type _type;
+    int _byte_size;
+  };
+
+  AllocRecord * _alloc_record_head;
+  AllocRecord * _alloc_record_tail;
+
  public:
 
   Metaspace(Mutex* lock, MetaspaceType type);
@@ -148,8 +165,8 @@
   size_t used_bytes_slow(MetadataType mdtype) const;
   size_t capacity_bytes_slow(MetadataType mdtype) const;
 
-  static Metablock* allocate(ClassLoaderData* loader_data, size_t size,
-                            bool read_only, MetadataType mdtype, TRAPS);
+  static Metablock* allocate(ClassLoaderData* loader_data, size_t word_size,
+                             bool read_only, MetaspaceObj::Type type, TRAPS);
   void deallocate(MetaWord* ptr, size_t byte_size, bool is_class);
 
   MetaWord* expand_and_allocate(size_t size,
@@ -166,6 +183,13 @@
   void print_on(outputStream* st) const;
   // Debugging support
   void verify();
+
+  class AllocRecordClosure :  public StackObj {
+  public:
+    virtual void doit(address ptr, MetaspaceObj::Type type, int byte_size) = 0;
+  };
+
+  void iterate(AllocRecordClosure *closure);
 };
 
 class MetaspaceAux : AllStatic {
--- a/hotspot/src/share/vm/memory/metaspaceShared.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/memory/metaspaceShared.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -243,6 +243,147 @@
   bool reading() const { return false; }
 };
 
+// This is for dumping detailed statistics for the allocations
+// in the shared spaces.
+class DumpAllocClosure : public Metaspace::AllocRecordClosure {
+public:
+
+  // Here's poor man's enum inheritance
+#define SHAREDSPACE_OBJ_TYPES_DO(f) \
+  METASPACE_OBJ_TYPES_DO(f) \
+  f(SymbolHashentry) \
+  f(SymbolBuckets) \
+  f(Other)
+
+#define SHAREDSPACE_OBJ_TYPE_DECLARE(name) name ## Type,
+#define SHAREDSPACE_OBJ_TYPE_NAME_CASE(name) case name ## Type: return #name;
+
+  enum Type {
+    // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
+    SHAREDSPACE_OBJ_TYPES_DO(SHAREDSPACE_OBJ_TYPE_DECLARE)
+    _number_of_types
+  };
+
+  static const char * type_name(Type type) {
+    switch(type) {
+    SHAREDSPACE_OBJ_TYPES_DO(SHAREDSPACE_OBJ_TYPE_NAME_CASE)
+    default:
+      ShouldNotReachHere();
+      return NULL;
+    }
+  }
+
+public:
+  enum {
+    RO = 0,
+    RW = 1
+  };
+
+  int _counts[2][_number_of_types];
+  int _bytes [2][_number_of_types];
+  int _which;
+
+  DumpAllocClosure() {
+    memset(_counts, 0, sizeof(_counts));
+    memset(_bytes,  0, sizeof(_bytes));
+  };
+
+  void iterate_metaspace(Metaspace* space, int which) {
+    assert(which == RO || which == RW, "sanity");
+    _which = which;
+    space->iterate(this);
+  }
+
+  virtual void doit(address ptr, MetaspaceObj::Type type, int byte_size) {
+    assert(int(type) >= 0 && type < MetaspaceObj::_number_of_types, "sanity");
+    _counts[_which][type] ++;
+    _bytes [_which][type] += byte_size;
+  }
+
+  void dump_stats(int ro_all, int rw_all, int md_all, int mc_all);
+};
+
+void DumpAllocClosure::dump_stats(int ro_all, int rw_all, int md_all, int mc_all) {
+  rw_all += (md_all + mc_all); // md and mc are all mapped Read/Write
+  int other_bytes = md_all + mc_all;
+
+  // Calculate size of data that was not allocated by Metaspace::allocate()
+  int symbol_count = _counts[RO][MetaspaceObj::SymbolType];
+  int symhash_bytes = symbol_count * sizeof (HashtableEntry<Symbol*, mtSymbol>);
+  int symbuck_count = SymbolTable::the_table()->table_size();
+  int symbuck_bytes = symbuck_count * sizeof(HashtableBucket<mtSymbol>);
+
+  _counts[RW][SymbolHashentryType] = symbol_count;
+  _bytes [RW][SymbolHashentryType] = symhash_bytes;
+  other_bytes -= symhash_bytes;
+
+  _counts[RW][SymbolBucketsType] = symbuck_count;
+  _bytes [RW][SymbolBucketsType] = symbuck_bytes;
+  other_bytes -= symbuck_bytes;
+
+  // TODO: count things like dictionary, vtable, etc
+  _bytes[RW][OtherType] =  other_bytes;
+
+  // prevent divide-by-zero
+  if (ro_all < 1) {
+    ro_all = 1;
+  }
+  if (rw_all < 1) {
+    rw_all = 1;
+  }
+
+  int all_ro_count = 0;
+  int all_ro_bytes = 0;
+  int all_rw_count = 0;
+  int all_rw_bytes = 0;
+
+  const char *fmt = "%-20s: %8d %10d %5.1f | %8d %10d %5.1f | %8d %10d %5.1f";
+  const char *sep = "--------------------+---------------------------+---------------------------+--------------------------";
+  const char *hdr = "                        ro_cnt   ro_bytes     % |   rw_cnt   rw_bytes     % |  all_cnt  all_bytes     %";
+
+  tty->print_cr("Detailed metadata info (rw includes md and mc):");
+  tty->print_cr(hdr);
+  tty->print_cr(sep);
+  for (int type = 0; type < int(_number_of_types); type ++) {
+    const char *name = type_name((Type)type);
+    int ro_count = _counts[RO][type];
+    int ro_bytes = _bytes [RO][type];
+    int rw_count = _counts[RW][type];
+    int rw_bytes = _bytes [RW][type];
+    int count = ro_count + rw_count;
+    int bytes = ro_bytes + rw_bytes;
+
+    double ro_perc = 100.0 * double(ro_bytes) / double(ro_all);
+    double rw_perc = 100.0 * double(rw_bytes) / double(rw_all);
+    double perc    = 100.0 * double(bytes)    / double(ro_all + rw_all);
+
+    tty->print_cr(fmt, name,
+                  ro_count, ro_bytes, ro_perc,
+                  rw_count, rw_bytes, rw_perc,
+                  count, bytes, perc);
+
+    all_ro_count += ro_count;
+    all_ro_bytes += ro_bytes;
+    all_rw_count += rw_count;
+    all_rw_bytes += rw_bytes;
+  }
+
+  int all_count = all_ro_count + all_rw_count;
+  int all_bytes = all_ro_bytes + all_rw_bytes;
+
+  double all_ro_perc = 100.0 * double(all_ro_bytes) / double(ro_all);
+  double all_rw_perc = 100.0 * double(all_rw_bytes) / double(rw_all);
+  double all_perc    = 100.0 * double(all_bytes)    / double(ro_all + rw_all);
+
+  tty->print_cr(sep);
+  tty->print_cr(fmt, "Total",
+                all_ro_count, all_ro_bytes, all_ro_perc,
+                all_rw_count, all_rw_bytes, all_rw_perc,
+                all_count, all_bytes, all_perc);
+
+  assert(all_ro_bytes == ro_all, "everything should have been counted");
+  assert(all_rw_bytes == rw_all, "everything should have been counted");
+}
 
 // Populate the shared space.
 
@@ -454,6 +595,14 @@
   mapinfo->close();
 
   memmove(vtbl_list, saved_vtbl, vtbl_list_size * sizeof(void*));
+
+  if (PrintSharedSpaces) {
+    DumpAllocClosure dac;
+    dac.iterate_metaspace(_loader_data->ro_metaspace(), DumpAllocClosure::RO);
+    dac.iterate_metaspace(_loader_data->rw_metaspace(), DumpAllocClosure::RW);
+
+    dac.dump_stats(int(ro_bytes), int(rw_bytes), int(md_bytes), int(mc_bytes));
+  }
 }
 
 static void link_shared_classes(Klass* obj, TRAPS) {
--- a/hotspot/src/share/vm/memory/sharedHeap.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/memory/sharedHeap.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -45,6 +45,7 @@
   SH_PS_FlatProfiler_oops_do,
   SH_PS_Management_oops_do,
   SH_PS_SystemDictionary_oops_do,
+  SH_PS_ClassLoaderDataGraph_oops_do,
   SH_PS_jvmti_oops_do,
   SH_PS_StringTable_oops_do,
   SH_PS_CodeCache_oops_do,
@@ -173,15 +174,21 @@
   if (!_process_strong_tasks->is_task_claimed(SH_PS_SystemDictionary_oops_do)) {
     if (so & SO_AllClasses) {
       SystemDictionary::oops_do(roots);
-      ClassLoaderDataGraph::oops_do(roots, klass_closure, !is_scavenging);
     } else if (so & SO_SystemClasses) {
       SystemDictionary::always_strong_oops_do(roots);
-      ClassLoaderDataGraph::always_strong_oops_do(roots, klass_closure, !is_scavenging);
     } else {
       fatal("We should always have selected either SO_AllClasses or SO_SystemClasses");
     }
   }
 
+  if (!_process_strong_tasks->is_task_claimed(SH_PS_ClassLoaderDataGraph_oops_do)) {
+    if (so & SO_AllClasses) {
+      ClassLoaderDataGraph::oops_do(roots, klass_closure, !is_scavenging);
+    } else if (so & SO_SystemClasses) {
+      ClassLoaderDataGraph::always_strong_oops_do(roots, klass_closure, !is_scavenging);
+    }
+  }
+
   if (!_process_strong_tasks->is_task_claimed(SH_PS_StringTable_oops_do)) {
     if (so & SO_Strings) {
       StringTable::oops_do(roots);
--- a/hotspot/src/share/vm/memory/universe.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/memory/universe.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -228,11 +228,8 @@
 
 void Universe::check_alignment(uintx size, uintx alignment, const char* name) {
   if (size < alignment || size % alignment != 0) {
-    ResourceMark rm;
-    stringStream st;
-    st.print("Size of %s (" UINTX_FORMAT " bytes) must be aligned to " UINTX_FORMAT " bytes", name, size, alignment);
-    char* error = st.as_string();
-    vm_exit_during_initialization(error);
+    vm_exit_during_initialization(
+      err_msg("Size of %s (" UINTX_FORMAT " bytes) must be aligned to " UINTX_FORMAT " bytes", name, size, alignment));
   }
 }
 
@@ -916,7 +913,7 @@
   }
 
   if (!total_rs.is_reserved()) {
-    vm_exit_during_initialization(err_msg("Could not reserve enough space for object heap %d bytes", total_reserved));
+    vm_exit_during_initialization(err_msg("Could not reserve enough space for " SIZE_FORMAT "KB object heap", total_reserved/K));
     return total_rs;
   }
 
--- a/hotspot/src/share/vm/oops/annotations.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/annotations.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -33,7 +33,7 @@
 
 // Allocate annotations in metadata area
 Annotations* Annotations::allocate(ClassLoaderData* loader_data, TRAPS) {
-  return new (loader_data, size(), true, THREAD) Annotations();
+  return new (loader_data, size(), true, MetaspaceObj::AnnotationType, THREAD) Annotations();
 }
 
 // helper
--- a/hotspot/src/share/vm/oops/arrayKlass.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/arrayKlass.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -94,7 +94,7 @@
   ResourceMark rm(THREAD);
   k->initialize_supers(super_klass(), CHECK);
   k->vtable()->initialize_vtable(false, CHECK);
-  java_lang_Class::create_mirror(k, CHECK);
+  java_lang_Class::create_mirror(k, Handle(NULL), CHECK);
 }
 
 GrowableArray<Klass*>* ArrayKlass::compute_secondary_supers(int num_extra_slots) {
--- a/hotspot/src/share/vm/oops/constMethod.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/constMethod.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -40,7 +40,7 @@
                                    MethodType method_type,
                                    TRAPS) {
   int size = ConstMethod::size(byte_code_size, sizes);
-  return new (loader_data, size, true, THREAD) ConstMethod(
+  return new (loader_data, size, true, MetaspaceObj::ConstMethodType, THREAD) ConstMethod(
       byte_code_size, sizes, method_type, size);
 }
 
--- a/hotspot/src/share/vm/oops/constantPool.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/constantPool.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -55,7 +55,7 @@
   // the resolved_references array, which is recreated at startup time.
   // But that could be moved to InstanceKlass (although a pain to access from
   // assembly code).  Maybe it could be moved to the cpCache which is RW.
-  return new (loader_data, size, false, THREAD) ConstantPool(tags);
+  return new (loader_data, size, false, MetaspaceObj::ConstantPoolType, THREAD) ConstantPool(tags);
 }
 
 ConstantPool::ConstantPool(Array<u1>* tags) {
@@ -1063,9 +1063,10 @@
     int k2 = cp2->invoke_dynamic_name_and_type_ref_index_at(index2);
     int i1 = invoke_dynamic_bootstrap_specifier_index(index1);
     int i2 = cp2->invoke_dynamic_bootstrap_specifier_index(index2);
-    bool match = compare_entry_to(k1, cp2, k2, CHECK_false) &&
-                 compare_operand_to(i1, cp2, i2, CHECK_false);
-    return match;
+    // separate statements and variables because CHECK_false is used
+    bool match_entry = compare_entry_to(k1, cp2, k2, CHECK_false);
+    bool match_operand = compare_operand_to(i1, cp2, i2, CHECK_false);
+    return (match_entry && match_operand);
   } break;
 
   case JVM_CONSTANT_String:
--- a/hotspot/src/share/vm/oops/cpCache.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/cpCache.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -542,7 +542,8 @@
                                      const intStack& invokedynamic_map, TRAPS) {
   int size = ConstantPoolCache::size(length);
 
-  return new (loader_data, size, false, THREAD) ConstantPoolCache(length, index_map, invokedynamic_map);
+  return new (loader_data, size, false, MetaspaceObj::ConstantPoolCacheType, THREAD)
+    ConstantPoolCache(length, index_map, invokedynamic_map);
 }
 
 void ConstantPoolCache::initialize(const intArray& inverse_index_map,
--- a/hotspot/src/share/vm/oops/instanceKlass.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/instanceKlass.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -268,8 +268,6 @@
   set_fields(NULL, 0);
   set_constants(NULL);
   set_class_loader_data(NULL);
-  set_protection_domain(NULL);
-  set_signers(NULL);
   set_source_file_name(NULL);
   set_source_debug_extension(NULL, 0);
   set_array_name(NULL);
@@ -279,7 +277,6 @@
   set_is_marked_dependent(false);
   set_init_state(InstanceKlass::allocated);
   set_init_thread(NULL);
-  set_init_lock(NULL);
   set_reference_type(rt);
   set_oop_map_cache(NULL);
   set_jni_ids(NULL);
@@ -408,12 +405,6 @@
   }
   set_inner_classes(NULL);
 
-  // Null out Java heap objects, although these won't be walked to keep
-  // alive once this InstanceKlass is deallocated.
-  set_protection_domain(NULL);
-  set_signers(NULL);
-  set_init_lock(NULL);
-
   // We should deallocate the Annotations instance
   MetadataFactory::free_metadata(loader_data, annotations());
   set_annotations(NULL);
@@ -451,6 +442,24 @@
   }
 }
 
+// JVMTI spec thinks there are signers and protection domain in the
+// instanceKlass.  These accessors pretend these fields are there.
+// The hprof specification also thinks these fields are in InstanceKlass.
+oop InstanceKlass::protection_domain() const {
+  // return the protection_domain from the mirror
+  return java_lang_Class::protection_domain(java_mirror());
+}
+
+// Removing these requires an incompatible change and a CCC request.
+objArrayOop InstanceKlass::signers() const {
+  // return the signers from the mirror
+  return java_lang_Class::signers(java_mirror());
+}
+
+volatile oop InstanceKlass::init_lock() const {
+  // return the init lock from the mirror
+  return java_lang_Class::init_lock(java_mirror());
+}
 
 void InstanceKlass::eager_initialize_impl(instanceKlassHandle this_oop) {
   EXCEPTION_MARK;
@@ -1883,16 +1892,6 @@
 
 // Garbage collection
 
-void InstanceKlass::oops_do(OopClosure* cl) {
-  Klass::oops_do(cl);
-
-  cl->do_oop(adr_protection_domain());
-  cl->do_oop(adr_signers());
-  cl->do_oop(adr_init_lock());
-
-  // Don't walk the arrays since they are walked from the ClassLoaderData objects.
-}
-
 #ifdef ASSERT
 template <class T> void assert_is_in(T *p) {
   T heap_oop = oopDesc::load_heap_oop(p);
@@ -2241,9 +2240,6 @@
     m->remove_unshareable_info();
   }
 
-  // Need to reinstate when reading back the class.
-  set_init_lock(NULL);
-
   // do array classes also.
   array_klasses_do(remove_unshareable_in_class);
 }
@@ -2275,13 +2271,6 @@
     ik->itable()->initialize_itable(false, CHECK);
   }
 
-  // Allocate a simple java object for a lock.
-  // This needs to be a java object because during class initialization
-  // it can be held across a java call.
-  typeArrayOop r = oopFactory::new_typeArray(T_INT, 0, CHECK);
-  Handle h(THREAD, (oop)r);
-  ik->set_init_lock(h());
-
   // restore constant pool resolved references
   ik->constants()->restore_unshareable_info(CHECK);
 
@@ -2331,10 +2320,15 @@
     FreeHeap(jmeths);
   }
 
-  MemberNameTable* mnt = member_names();
-  if (mnt != NULL) {
-    delete mnt;
-    set_member_names(NULL);
+  // Deallocate MemberNameTable
+  {
+    Mutex* lock_or_null = SafepointSynchronize::is_at_safepoint() ? NULL : MemberNameTable_lock;
+    MutexLockerEx ml(lock_or_null, Mutex::_no_safepoint_check_flag);
+    MemberNameTable* mnt = member_names();
+    if (mnt != NULL) {
+      delete mnt;
+      set_member_names(NULL);
+    }
   }
 
   int* indices = methods_cached_itable_indices_acquire();
@@ -2765,15 +2759,28 @@
   return NULL;
 }
 
-void InstanceKlass::add_member_name(Handle mem_name) {
+void InstanceKlass::add_member_name(int index, Handle mem_name) {
   jweak mem_name_wref = JNIHandles::make_weak_global(mem_name);
   MutexLocker ml(MemberNameTable_lock);
+  assert(0 <= index && index < idnum_allocated_count(), "index is out of bounds");
   DEBUG_ONLY(No_Safepoint_Verifier nsv);
 
   if (_member_names == NULL) {
-    _member_names = new (ResourceObj::C_HEAP, mtClass) MemberNameTable();
+    _member_names = new (ResourceObj::C_HEAP, mtClass) MemberNameTable(idnum_allocated_count());
   }
-  _member_names->add_member_name(mem_name_wref);
+  _member_names->add_member_name(index, mem_name_wref);
+}
+
+oop InstanceKlass::get_member_name(int index) {
+  MutexLocker ml(MemberNameTable_lock);
+  assert(0 <= index && index < idnum_allocated_count(), "index is out of bounds");
+  DEBUG_ONLY(No_Safepoint_Verifier nsv);
+
+  if (_member_names == NULL) {
+    return NULL;
+  }
+  oop mem_name = _member_names->get_member_name(index);
+  return mem_name;
 }
 
 // -----------------------------------------------------------------------------------------------------
@@ -2836,10 +2843,7 @@
     class_loader_data()->print_value_on(st);
     st->cr();
   }
-  st->print(BULLET"protection domain: "); ((InstanceKlass*)this)->protection_domain()->print_value_on(st); st->cr();
   st->print(BULLET"host class:        "); host_klass()->print_value_on_maybe_null(st); st->cr();
-  st->print(BULLET"signers:           "); signers()->print_value_on(st);               st->cr();
-  st->print(BULLET"init_lock:         "); ((oop)_init_lock)->print_value_on(st);       st->cr();
   if (source_file_name() != NULL) {
     st->print(BULLET"source file:       ");
     source_file_name()->print_value_on(st);
@@ -3040,7 +3044,6 @@
   n += (sz->_method_ordering_bytes       = sz->count_array(method_ordering()));
   n += (sz->_local_interfaces_bytes      = sz->count_array(local_interfaces()));
   n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces()));
-  n += (sz->_signers_bytes               = sz->count_array(signers()));
   n += (sz->_fields_bytes                = sz->count_array(fields()));
   n += (sz->_inner_classes_bytes         = sz->count_array(inner_classes()));
   sz->_ro_bytes += n;
@@ -3206,17 +3209,11 @@
     guarantee(constants()->is_metadata(), "should be in metaspace");
     guarantee(constants()->is_constantPool(), "should be constant pool");
   }
-  if (protection_domain() != NULL) {
-    guarantee(protection_domain()->is_oop(), "should be oop");
-  }
   const Klass* host = host_klass();
   if (host != NULL) {
     guarantee(host->is_metadata(), "should be in metaspace");
     guarantee(host->is_klass(), "should be klass");
   }
-  if (signers() != NULL) {
-    guarantee(signers()->is_objArray(), "should be obj array");
-  }
 }
 
 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
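
For illustration only (not part of this changeset): a minimal Java sketch of the Java-visible view of the state that now lives in the mirror. Class.getSigners() bottoms out in JVM_GetClassSigners and Class.getProtectionDomain() in JVM_GetProtectionDomain, both of which now read the java.lang.Class fields instead of the removed InstanceKlass slots.

    import java.security.ProtectionDomain;

    public class MirrorFieldsDemo {
        public static void main(String[] args) {
            Class<?> c = MirrorFieldsDemo.class;
            // Resolved via JVM_GetProtectionDomain, which now reads the mirror.
            ProtectionDomain pd = c.getProtectionDomain();
            // Resolved via JVM_GetClassSigners; null unless the class came from a signed jar.
            Object[] signers = c.getSigners();
            System.out.println("protection domain: " + pd);
            System.out.println("signers: " + (signers == null ? "none" : signers.length));
        }
    }
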
--- a/hotspot/src/share/vm/oops/instanceKlass.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/instanceKlass.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -58,8 +58,6 @@
 //    [fields                     ]
 //    [constants                  ]
 //    [class loader               ]
-//    [protection domain          ]
-//    [signers                    ]
 //    [source file name           ]
 //    [inner classes              ]
 //    [static field size          ]
@@ -180,16 +178,6 @@
   static volatile int _total_instanceKlass_count;
 
  protected:
-  // Protection domain.
-  oop             _protection_domain;
-  // Class signers.
-  objArrayOop     _signers;
-  // Lock for (1) initialization; (2) access to the ConstantPool of this class.
-  // Must be one per class and it has to be a VM internal object so java code
-  // cannot lock it (like the mirror).
-  // It has to be an object not a Mutex because it's held through java calls.
-  volatile oop    _init_lock;
-
   // Annotations for this class
   Annotations*    _annotations;
   // Array classes holding elements of this class.
@@ -527,8 +515,10 @@
   void set_constants(ConstantPool* c)    { _constants = c; }
 
   // protection domain
-  oop protection_domain()                  { return _protection_domain; }
-  void set_protection_domain(oop pd)       { klass_oop_store(&_protection_domain, pd); }
+  oop protection_domain() const;
+
+  // signers
+  objArrayOop signers() const;
 
   // host class
   Klass* host_klass() const              {
@@ -575,10 +565,6 @@
     }
   }
 
-  // signers
-  objArrayOop signers() const              { return _signers; }
-  void set_signers(objArrayOop s)          { klass_oop_store((oop*)&_signers, s); }
-
   // source file name
   Symbol* source_file_name() const         { return _source_file_name; }
   void set_source_file_name(Symbol* n);
@@ -912,8 +898,6 @@
   Method* method_at_itable(Klass* holder, int index, TRAPS);
 
   // Garbage collection
-  virtual void oops_do(OopClosure* cl);
-
   void oop_follow_contents(oop obj);
   int  oop_adjust_pointers(oop obj);
 
@@ -999,14 +983,12 @@
 
   // Lock during initialization
 public:
-  volatile oop init_lock() const     {return _init_lock; }
+  // Lock for (1) initialization; (2) access to the ConstantPool of this class.
+  // Must be one per class and it has to be a VM internal object so java code
+  // cannot lock it (like the mirror).
+  // It has to be an object not a Mutex because it's held through java calls.
+  volatile oop init_lock() const;
 private:
-  void set_init_lock(oop value) { klass_oop_store(&_init_lock, value); }
-
-  // Offsets for memory management
-  oop* adr_protection_domain() const { return (oop*)&this->_protection_domain;}
-  oop* adr_signers() const           { return (oop*)&this->_signers;}
-  oop* adr_init_lock() const         { return (oop*)&this->_init_lock;}
 
   // Static methods that are used to implement member methods where an exposed this pointer
   // is needed due to possible GCs
@@ -1040,7 +1022,8 @@
   // JSR-292 support
   MemberNameTable* member_names() { return _member_names; }
   void set_member_names(MemberNameTable* member_names) { _member_names = member_names; }
-  void add_member_name(Handle member_name);
+  void add_member_name(int index, Handle member_name);
+  oop  get_member_name(int index);
 
 public:
   // JVMTI support
--- a/hotspot/src/share/vm/oops/klass.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/klass.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -140,7 +140,7 @@
 
 void* Klass::operator new(size_t size, ClassLoaderData* loader_data, size_t word_size, TRAPS) {
   return Metaspace::allocate(loader_data, word_size, /*read_only*/false,
-                             Metaspace::ClassType, CHECK_NULL);
+                             MetaspaceObj::ClassType, CHECK_NULL);
 }
 
 Klass::Klass() {
@@ -511,8 +511,9 @@
   // (same order as class file parsing)
   loader_data->add_class(this);
 
-  // Recreate the class mirror
-  java_lang_Class::create_mirror(this, CHECK);
+  // Recreate the class mirror.  The protection_domain is always null for
+  // the boot loader, for now.
+  java_lang_Class::create_mirror(this, Handle(NULL), CHECK);
 }
 
 Klass* Klass::array_klass_or_null(int rank) {
--- a/hotspot/src/share/vm/oops/klass.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/klass.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -445,7 +445,7 @@
   Klass* array_klass_or_null(int rank);
   Klass* array_klass_or_null();
 
-  virtual oop protection_domain()       { return NULL; }
+  virtual oop protection_domain() const = 0;
 
   oop class_loader() const;
 
--- a/hotspot/src/share/vm/oops/method.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/method.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -74,7 +74,7 @@
 
   int size = Method::size(access_flags.is_native());
 
-  return new (loader_data, size, false, THREAD) Method(cm, access_flags, size);
+  return new (loader_data, size, false, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, size);
 }
 
 Method::Method(ConstMethod* xconst, AccessFlags access_flags, int size) {
--- a/hotspot/src/share/vm/oops/method.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/method.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -671,13 +671,15 @@
                                                    Symbol* signature, //anything at all
                                                    TRAPS);
   static Klass* check_non_bcp_klass(Klass* klass);
-  // these operate only on invoke methods:
+
+  // How many extra stack entries for invokedynamic when it's enabled
+  static const int extra_stack_entries_for_jsr292 = 1;
+
+  // this operates only on invoke methods:
   // presize interpreter frames for extra interpreter stack entries, if needed
-  // method handles want to be able to push a few extra values (e.g., a bound receiver), and
-  // invokedynamic sometimes needs to push a bootstrap method, call site, and arglist,
-  // all without checking for a stack overflow
-  static int extra_stack_entries() { return EnableInvokeDynamic ? 2 : 0; }
-  static int extra_stack_words();  // = extra_stack_entries() * Interpreter::stackElementSize()
+  // Account for the extra appendix argument for invokehandle/invokedynamic
+  static int extra_stack_entries() { return EnableInvokeDynamic ? extra_stack_entries_for_jsr292 : 0; }
+  static int extra_stack_words();  // = extra_stack_entries() * Interpreter::stackElementSize
 
   // RedefineClasses() support:
   bool is_old() const                               { return access_flags().is_old(); }
--- a/hotspot/src/share/vm/oops/methodCounters.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/methodCounters.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -26,7 +26,7 @@
 #include "runtime/thread.inline.hpp"
 
 MethodCounters* MethodCounters::allocate(ClassLoaderData* loader_data, TRAPS) {
-  return new(loader_data, size(), false, THREAD) MethodCounters();
+  return new(loader_data, size(), false, MetaspaceObj::MethodCountersType, THREAD) MethodCounters();
 }
 
 void MethodCounters::clear_counters() {
--- a/hotspot/src/share/vm/oops/methodData.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/methodData.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -388,7 +388,8 @@
 MethodData* MethodData::allocate(ClassLoaderData* loader_data, methodHandle method, TRAPS) {
   int size = MethodData::compute_allocation_size_in_words(method);
 
-  return new (loader_data, size, false, THREAD) MethodData(method(), size, CHECK_NULL);
+  return new (loader_data, size, false, MetaspaceObj::MethodDataType, THREAD)
+    MethodData(method(), size, CHECK_NULL);
 }
 
 int MethodData::bytecode_cell_count(Bytecodes::Code code) {
--- a/hotspot/src/share/vm/oops/objArrayKlass.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/objArrayKlass.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -75,7 +75,7 @@
   void  copy_array(arrayOop s, int src_pos, arrayOop d, int dst_pos, int length, TRAPS);
 
   // Compute protection domain
-  oop protection_domain() { return bottom_klass()->protection_domain(); }
+  oop protection_domain() const { return bottom_klass()->protection_domain(); }
 
  private:
   // Either oop or narrowOop depending on UseCompressedOops.
--- a/hotspot/src/share/vm/oops/symbol.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/symbol.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -55,7 +55,7 @@
   address res;
   int alloc_size = size(len)*HeapWordSize;
   res = (address) Metaspace::allocate(loader_data, size(len), true,
-                                      Metaspace::NonClassType, CHECK_NULL);
+                                      MetaspaceObj::SymbolType, CHECK_NULL);
   return res;
 }
 
--- a/hotspot/src/share/vm/oops/typeArrayKlass.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/oops/typeArrayKlass.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -67,6 +67,8 @@
   typeArrayOop allocate(int length, TRAPS) { return allocate_common(length, true, THREAD); }
   oop multi_allocate(int rank, jint* sizes, TRAPS);
 
+  oop protection_domain() const { return NULL; }
+
   // Copying
   void  copy_array(arrayOop s, int src_pos, arrayOop d, int dst_pos, int length, TRAPS);
 
--- a/hotspot/src/share/vm/opto/escape.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/opto/escape.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -2202,7 +2202,7 @@
     int opcode = uncast_base->Opcode();
     assert(opcode == Op_ConP || opcode == Op_ThreadLocal ||
            opcode == Op_CastX2P || uncast_base->is_DecodeNarrowPtr() ||
-           (uncast_base->is_Mem() && uncast_base->bottom_type() == TypeRawPtr::NOTNULL) ||
+           (uncast_base->is_Mem() && (uncast_base->bottom_type()->isa_rawptr() != NULL)) ||
            (uncast_base->is_Proj() && uncast_base->in(0)->is_Allocate()), "sanity");
   }
   return base;
--- a/hotspot/src/share/vm/opto/matcher.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/opto/matcher.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1282,16 +1282,6 @@
     mcall->_argsize = out_arg_limit_per_call - begin_out_arg_area;
   }
 
-  if (is_method_handle_invoke) {
-    // Kill some extra stack space in case method handles want to do
-    // a little in-place argument insertion.
-    // FIXME: Is this still necessary?
-    int regs_per_word  = NOT_LP64(1) LP64_ONLY(2); // %%% make a global const!
-    out_arg_limit_per_call += Method::extra_stack_entries() * regs_per_word;
-    // Do not update mcall->_argsize because (a) the extra space is not
-    // pushed as arguments and (b) _argsize is dead (not used anywhere).
-  }
-
   // Compute the max stack slot killed by any call.  These will not be
   // available for debug info, and will be used to adjust FIRST_STACK_mask
   // after all call sites have been visited.
--- a/hotspot/src/share/vm/opto/reg_split.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/opto/reg_split.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -51,6 +51,15 @@
 
 static const char out_of_nodes[] = "out of nodes during split";
 
+static bool contains_no_live_range_input(const Node* def) {
+  for (uint i = 1; i < def->req(); ++i) {
+    if (def->in(i) != NULL && def->in_RegMask(i).is_NotEmpty()) {
+      return false;
+    }
+  }
+  return true;
+}
+
 //------------------------------get_spillcopy_wide-----------------------------
 // Get a SpillCopy node with wide-enough masks.  Use the 'wide-mask', the
 // wide ideal-register spill-mask if possible.  If the 'wide-mask' does
@@ -1312,7 +1321,7 @@
       Node *def = Reaches[pidx][slidx];
       assert( def, "must have reaching def" );
       // If input up/down sense and reg-pressure DISagree
-      if( def->rematerialize() ) {
+      if (def->rematerialize() && contains_no_live_range_input(def)) {
         // Place the rematerialized node above any MSCs created during
         // phi node splitting.  end_idx points at the insertion point
         // so look at the node before it.
--- a/hotspot/src/share/vm/prims/forte.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/prims/forte.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -35,6 +35,19 @@
 #include "runtime/vframe.hpp"
 #include "runtime/vframeArray.hpp"
 
+// call frame copied from old .h file and renamed
+typedef struct {
+    jint lineno;                      // line number in the source file
+    jmethodID method_id;              // method executed in this frame
+} ASGCT_CallFrame;
+
+// call trace copied from old .h file and renamed
+typedef struct {
+    JNIEnv *env_id;                   // Env where trace was recorded
+    jint num_frames;                  // number of frames in this trace
+    ASGCT_CallFrame *frames;          // frames
+} ASGCT_CallTrace;
+
 // These name match the names reported by the forte quality kit
 enum {
   ticks_no_Java_frame         =  0,
@@ -50,6 +63,8 @@
   ticks_safepoint             = -10
 };
 
+#if INCLUDE_JVMTI
+
 //-------------------------------------------------------
 
 // Native interfaces for use by Forte tools.
@@ -360,20 +375,6 @@
 
 }
 
-
-// call frame copied from old .h file and renamed
-typedef struct {
-    jint lineno;                      // line number in the source file
-    jmethodID method_id;              // method executed in this frame
-} ASGCT_CallFrame;
-
-// call trace copied from old .h file and renamed
-typedef struct {
-    JNIEnv *env_id;                   // Env where trace was recorded
-    jint num_frames;                  // number of frames in this trace
-    ASGCT_CallFrame *frames;          // frames
-} ASGCT_CallTrace;
-
 static void forte_fill_call_trace_given_top(JavaThread* thd,
                                             ASGCT_CallTrace* trace,
                                             int depth,
@@ -634,3 +635,12 @@
     pointer_delta(end, start, sizeof(jbyte)), 0, NULL);
 #endif // !_WINDOWS && !IA64
 }
+
+#else // INCLUDE_JVMTI
+extern "C" {
+  JNIEXPORT
+  void AsyncGetCallTrace(ASGCT_CallTrace *trace, jint depth, void* ucontext) {
+    trace->num_frames = ticks_no_class_load; // -1
+  }
+}
+#endif // INCLUDE_JVMTI
--- a/hotspot/src/share/vm/prims/jvm.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/prims/jvm.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1072,11 +1072,7 @@
     return NULL;
   }
 
-  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(cls));
-  objArrayOop signers = NULL;
-  if (k->oop_is_instance()) {
-    signers = InstanceKlass::cast(k)->signers();
-  }
+  objArrayOop signers = java_lang_Class::signers(JNIHandles::resolve_non_null(cls));
 
   // If there are no signers set in the class, or if the class
   // is an array, return NULL.
@@ -1102,7 +1098,7 @@
     // be called with an array.  Only the bootstrap loader creates arrays.
     Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(cls));
     if (k->oop_is_instance()) {
-      InstanceKlass::cast(k)->set_signers(objArrayOop(JNIHandles::resolve(signers)));
+      java_lang_Class::set_signers(k->java_mirror(), objArrayOop(JNIHandles::resolve(signers)));
     }
   }
 JVM_END
@@ -1119,8 +1115,8 @@
     return NULL;
   }
 
-  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve(cls));
-  return (jobject) JNIHandles::make_local(env, k->protection_domain());
+  oop pd = java_lang_Class::protection_domain(JNIHandles::resolve(cls));
+  return (jobject) JNIHandles::make_local(env, pd);
 JVM_END
 
 
@@ -1139,7 +1135,7 @@
     if (k->oop_is_instance()) {
       oop pd = JNIHandles::resolve(protection_domain);
       assert(pd == NULL || pd->is_oop(), "just checking");
-      InstanceKlass::cast(k)->set_protection_domain(pd);
+      java_lang_Class::set_protection_domain(k->java_mirror(), pd);
     }
   }
 JVM_END
--- a/hotspot/src/share/vm/prims/jvmtiRedefineClasses.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/prims/jvmtiRedefineClasses.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1349,12 +1349,11 @@
         CHECK_0);
     }
 
-    finalize_operands_merge(*merge_cp_p, THREAD);
-
     RC_TRACE_WITH_THREAD(0x00020000, THREAD,
       ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
       *merge_cp_length_p, scratch_i, _index_map_count));
   }
+  finalize_operands_merge(*merge_cp_p, THREAD);
 
   return true;
 } // end merge_constant_pools()
--- a/hotspot/src/share/vm/prims/methodHandles.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/prims/methodHandles.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2008, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -232,7 +232,8 @@
   // This is done eagerly, since it is readily available without
   // constructing any new objects.
   // TO DO: maybe intern mname_oop
-  m->method_holder()->add_member_name(mname);
+  m->method_holder()->add_member_name(m->method_idnum(), mname);
+
   return mname();
 }
 
@@ -301,7 +302,6 @@
   // Although the fieldDescriptor::_index would also identify the field,
   // we do not use it, because it is harder to decode.
   // TO DO: maybe intern mname_oop
-  InstanceKlass::cast(field_holder())->add_member_name(mname);
   return mname();
 }
 
@@ -943,7 +943,8 @@
 // MemberNameTable
 //
 
-MemberNameTable::MemberNameTable() : GrowableArray<jweak>(10, true) {
+MemberNameTable::MemberNameTable(int methods_cnt)
+                  : GrowableArray<jweak>(methods_cnt, true) {
   assert_locked_or_safepoint(MemberNameTable_lock);
 }
 
@@ -957,29 +958,18 @@
   }
 }
 
-// Return entry index if found, return -1 otherwise.
-int MemberNameTable::find_member_name(oop mem_name) {
+void MemberNameTable::add_member_name(int index, jweak mem_name_wref) {
   assert_locked_or_safepoint(MemberNameTable_lock);
-  int len = this->length();
-
-  for (int idx = 0; idx < len; idx++) {
-    jweak ref = this->at(idx);
-    oop entry = JNIHandles::resolve(ref);
-    if (entry == mem_name) {
-      return idx;
-    }
-  }
-  return -1;
+  this->at_put_grow(index, mem_name_wref);
 }
 
-void MemberNameTable::add_member_name(jweak mem_name_wref) {
+// Return a member name oop or NULL.
+oop MemberNameTable::get_member_name(int index) {
   assert_locked_or_safepoint(MemberNameTable_lock);
-  oop mem_name = JNIHandles::resolve(mem_name_wref);
 
-  // Each member name may appear just once: add only if not found
-  if (find_member_name(mem_name) == -1) {
-    this->append(mem_name_wref);
-  }
+  jweak ref = this->at(index);
+  oop mem_name = JNIHandles::resolve(ref);
+  return mem_name;
 }
 
 #if INCLUDE_JVMTI
--- a/hotspot/src/share/vm/prims/methodHandles.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/prims/methodHandles.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2008, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -219,7 +219,6 @@
   }
 };
 
-
 //------------------------------------------------------------------------------
 // MethodHandlesAdapterGenerator
 //
@@ -233,13 +232,13 @@
 //------------------------------------------------------------------------------
 // MemberNameTable
 //
+
 class MemberNameTable : public GrowableArray<jweak> {
  public:
-  MemberNameTable();
+  MemberNameTable(int methods_cnt);
   ~MemberNameTable();
-  void add_member_name(jweak mem_name_ref);
- private:
-  int find_member_name(oop mem_name);
+  void add_member_name(int index, jweak mem_name_ref);
+  oop  get_member_name(int index);
 
 #if INCLUDE_JVMTI
  public:
--- a/hotspot/src/share/vm/prims/unsafe.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/prims/unsafe.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -115,8 +115,6 @@
 
 inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
   jlong byte_offset = field_offset_to_byte_offset(field_offset);
-  // Don't allow unsafe to be used to read or write the header word of oops
-  assert(p == NULL || field_offset >= oopDesc::header_size(), "offset must be outside of header");
 #ifdef ASSERT
   if (p != NULL) {
     assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
--- a/hotspot/src/share/vm/prims/whitebox.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/prims/whitebox.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -37,6 +37,7 @@
 #include "runtime/os.hpp"
 #include "utilities/debug.hpp"
 #include "utilities/macros.hpp"
+#include "utilities/exceptions.hpp"
 
 #if INCLUDE_ALL_GCS
 #include "gc_implementation/g1/concurrentMark.hpp"
@@ -330,8 +331,18 @@
 WB_END
 
 
-WB_ENTRY(jlong, WB_ReserveMemory(JNIEnv* env, jobject o, jlong size))
-  return (jlong)os::reserve_memory(size, NULL, 0);
+WB_ENTRY(void, WB_ReadReservedMemory(JNIEnv* env, jobject o))
+  // static+volatile in order to force the read to happen
+  // (not be eliminated by the compiler)
+  static char c;
+  static volatile char* p;
+
+  p = os::reserve_memory(os::vm_allocation_granularity(), NULL, 0);
+  if (p == NULL) {
+    THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Failed to reserve memory");
+  }
+
+  c = *p;
 WB_END
 
 //Some convenience methods to deal with objects from java
@@ -437,7 +448,7 @@
   {CC"isInStringTable",   CC"(Ljava/lang/String;)Z",  (void*)&WB_IsInStringTable  },
   {CC"fullGC",   CC"()V",                             (void*)&WB_FullGC },
 
-  {CC"reserveMemory", CC"(J)J", (void*)&WB_ReserveMemory },
+  {CC"readReservedMemory", CC"()V",                   (void*)&WB_ReadReservedMemory },
 };
 
 #undef CC
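
A hedged sketch of how the renamed whitebox entry point might be driven from Java (assumptions: the Java-side declaration lives in sun.hotspot.WhiteBox as readReservedMemory(), matching the native registration above, and the test runs with the whitebox classes on the boot class path plus -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI):

    import sun.hotspot.WhiteBox;

    public class ReadReservedMemoryDemo {
        public static void main(String[] args) {
            WhiteBox wb = WhiteBox.getWhiteBox();
            // WB_ReadReservedMemory reserves one allocation granule, reads a byte from it,
            // and throws OutOfMemoryError if the reservation fails. (Method name assumed
            // from the "readReservedMemory" native registration above.)
            wb.readReservedMemory();
            System.out.println("readReservedMemory() returned normally");
        }
    }
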
--- a/hotspot/src/share/vm/runtime/arguments.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/arguments.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -2217,6 +2217,13 @@
     status = false;
   }
 
+  if (ReservedCodeCacheSize < InitialCodeCacheSize) {
+    jio_fprintf(defaultStream::error_stream(),
+                "Invalid ReservedCodeCacheSize: %dK. Must be at least InitialCodeCacheSize=%dK\n",
+                ReservedCodeCacheSize/K, InitialCodeCacheSize/K);
+    status = false;
+  }
+
   return status;
 }
 
@@ -2619,13 +2626,10 @@
     } else if (match_option(option, "-Xmaxjitcodesize", &tail) ||
                match_option(option, "-XX:ReservedCodeCacheSize=", &tail)) {
       julong long_ReservedCodeCacheSize = 0;
-      ArgsRange errcode = parse_memory_size(tail, &long_ReservedCodeCacheSize,
-                                            (size_t)InitialCodeCacheSize);
+      ArgsRange errcode = parse_memory_size(tail, &long_ReservedCodeCacheSize, 1);
       if (errcode != arg_in_range) {
         jio_fprintf(defaultStream::error_stream(),
-                    "Invalid maximum code cache size: %s. Should be greater than InitialCodeCacheSize=%dK\n",
-                    option->optionString, InitialCodeCacheSize/K);
-        describe_range_error(errcode);
+                    "Invalid maximum code cache size: %s.\n", option->optionString);
         return JNI_EINVAL;
       }
       FLAG_SET_CMDLINE(uintx, ReservedCodeCacheSize, (uintx)long_ReservedCodeCacheSize);
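
A rough illustration of the new consistency check (a sketch, assuming a "java" launcher on PATH; the sizes are arbitrary): a VM started with ReservedCodeCacheSize smaller than InitialCodeCacheSize should now be rejected with a message naming both flags, regardless of the order in which the flags appear.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;

    public class CodeCacheFlagOrderDemo {
        public static void main(String[] args) throws Exception {
            // ReservedCodeCacheSize < InitialCodeCacheSize is rejected by
            // Arguments::check_vm_args_consistency() after all options are parsed,
            // so the flag order on the command line no longer matters.
            ProcessBuilder pb = new ProcessBuilder(
                "java", "-XX:ReservedCodeCacheSize=1m", "-XX:InitialCodeCacheSize=2m", "-version");
            pb.redirectErrorStream(true);
            Process p = pb.start();
            try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
                for (String line; (line = r.readLine()) != null; ) {
                    System.out.println(line);
                }
            }
            System.out.println("exit code: " + p.waitFor());
        }
    }
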
--- a/hotspot/src/share/vm/runtime/deoptimization.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/deoptimization.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -635,18 +635,22 @@
       // at an uncommon trap for an invoke (where the compiler
       // generates debug info before the invoke has executed)
       Bytecodes::Code cur_code = str.next();
-      if (cur_code == Bytecodes::_invokevirtual ||
-          cur_code == Bytecodes::_invokespecial ||
-          cur_code == Bytecodes::_invokestatic  ||
-          cur_code == Bytecodes::_invokeinterface) {
+      if (cur_code == Bytecodes::_invokevirtual   ||
+          cur_code == Bytecodes::_invokespecial   ||
+          cur_code == Bytecodes::_invokestatic    ||
+          cur_code == Bytecodes::_invokeinterface ||
+          cur_code == Bytecodes::_invokedynamic) {
         Bytecode_invoke invoke(mh, iframe->interpreter_frame_bci());
         Symbol* signature = invoke.signature();
         ArgumentSizeComputer asc(signature);
         cur_invoke_parameter_size = asc.size();
-        if (cur_code != Bytecodes::_invokestatic) {
+        if (invoke.has_receiver()) {
           // Add in receiver
           ++cur_invoke_parameter_size;
         }
+        if (i != 0 && !invoke.is_invokedynamic() && MethodHandles::has_member_arg(invoke.klass(), invoke.name())) {
+          callee_size_of_parameters++;
+        }
       }
       if (str.bci() < max_bci) {
         Bytecodes::Code bc = str.next();
@@ -661,6 +665,7 @@
             case Bytecodes::_invokespecial:
             case Bytecodes::_invokestatic:
             case Bytecodes::_invokeinterface:
+            case Bytecodes::_invokedynamic:
             case Bytecodes::_athrow:
               break;
             default: {
--- a/hotspot/src/share/vm/runtime/frame.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/frame.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1008,6 +1008,7 @@
   OopClosure*     _f;
   int             _offset;        // the current offset, incremented with each argument
   bool            _has_receiver;  // true if the callee has a receiver
+  bool            _has_appendix;  // true if the call has an appendix
   frame           _fr;
   RegisterMap*    _reg_map;
   int             _arg_size;
@@ -1027,19 +1028,20 @@
   }
 
  public:
-  CompiledArgumentOopFinder(Symbol* signature, bool has_receiver, OopClosure* f, frame fr,  const RegisterMap* reg_map)
+  CompiledArgumentOopFinder(Symbol* signature, bool has_receiver, bool has_appendix, OopClosure* f, frame fr,  const RegisterMap* reg_map)
     : SignatureInfo(signature) {
 
     // initialize CompiledArgumentOopFinder
     _f         = f;
     _offset    = 0;
     _has_receiver = has_receiver;
+    _has_appendix = has_appendix;
     _fr        = fr;
     _reg_map   = (RegisterMap*)reg_map;
-    _arg_size  = ArgumentSizeComputer(signature).size() + (has_receiver ? 1 : 0);
+    _arg_size  = ArgumentSizeComputer(signature).size() + (has_receiver ? 1 : 0) + (has_appendix ? 1 : 0);
 
     int arg_size;
-    _regs = SharedRuntime::find_callee_arguments(signature, has_receiver, &arg_size);
+    _regs = SharedRuntime::find_callee_arguments(signature, has_receiver, has_appendix, &arg_size);
     assert(arg_size == _arg_size, "wrong arg size");
   }
 
@@ -1049,12 +1051,16 @@
       _offset++;
     }
     iterate_parameters();
+    if (_has_appendix) {
+      handle_oop_offset();
+      _offset++;
+    }
   }
 };
 
-void frame::oops_compiled_arguments_do(Symbol* signature, bool has_receiver, const RegisterMap* reg_map, OopClosure* f) {
+void frame::oops_compiled_arguments_do(Symbol* signature, bool has_receiver, bool has_appendix, const RegisterMap* reg_map, OopClosure* f) {
   ResourceMark rm;
-  CompiledArgumentOopFinder finder(signature, has_receiver, f, *this, reg_map);
+  CompiledArgumentOopFinder finder(signature, has_receiver, has_appendix, f, *this, reg_map);
   finder.oops_do();
 }
 
--- a/hotspot/src/share/vm/runtime/frame.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/frame.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -411,7 +411,7 @@
   oop* oopmapreg_to_location(VMReg reg, const RegisterMap* regmap) const;
 
   // Oops-do's
-  void oops_compiled_arguments_do(Symbol* signature, bool has_receiver, const RegisterMap* reg_map, OopClosure* f);
+  void oops_compiled_arguments_do(Symbol* signature, bool has_receiver, bool has_appendix, const RegisterMap* reg_map, OopClosure* f);
   void oops_interpreted_do(OopClosure* f, CLDToOopClosure* cld_f, const RegisterMap* map, bool query_oop_map_cache = true);
 
  private:
--- a/hotspot/src/share/vm/runtime/reflection.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/reflection.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -44,8 +44,6 @@
 #include "runtime/signature.hpp"
 #include "runtime/vframe.hpp"
 
-#define JAVA_1_5_VERSION                  49
-
 static void trace_class_resolution(Klass* to_class) {
   ResourceMark rm;
   int line_number = -1;
@@ -375,7 +373,7 @@
     }
   }
   klass = klass->array_klass(dim, CHECK_NULL);
-  oop obj = ArrayKlass::cast(klass)->multi_allocate(len, dimensions, THREAD);
+  oop obj = ArrayKlass::cast(klass)->multi_allocate(len, dimensions, CHECK_NULL);
   assert(obj->is_array(), "just checking");
   return arrayOop(obj);
 }
@@ -507,9 +505,11 @@
       under_host_klass(accessee_ik, accessor))
     return true;
 
-  if (RelaxAccessControlCheck ||
-      (accessor_ik->major_version() < JAVA_1_5_VERSION &&
-       accessee_ik->major_version() < JAVA_1_5_VERSION)) {
+  if ((RelaxAccessControlCheck &&
+        accessor_ik->major_version() < Verifier::NO_RELAX_ACCESS_CTRL_CHECK_VERSION &&
+        accessee_ik->major_version() < Verifier::NO_RELAX_ACCESS_CTRL_CHECK_VERSION) ||
+      (accessor_ik->major_version() < Verifier::STRICTER_ACCESS_CTRL_CHECK_VERSION &&
+       accessee_ik->major_version() < Verifier::STRICTER_ACCESS_CTRL_CHECK_VERSION)) {
     return classloader_only &&
       Verifier::relax_verify_for(accessor_ik->class_loader()) &&
       accessor_ik->protection_domain() == accessee_ik->protection_domain() &&
@@ -817,6 +817,10 @@
     typeArrayOop an_oop = Annotations::make_java_array(method->parameter_annotations(), CHECK_NULL);
     java_lang_reflect_Constructor::set_parameter_annotations(ch(), an_oop);
   }
+  if (java_lang_reflect_Constructor::has_type_annotations_field()) {
+    typeArrayOop an_oop = Annotations::make_java_array(method->type_annotations(), CHECK_NULL);
+    java_lang_reflect_Constructor::set_type_annotations(ch(), an_oop);
+  }
   return ch();
 }
 
--- a/hotspot/src/share/vm/runtime/sharedRuntime.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/sharedRuntime.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -2726,7 +2726,7 @@
   return regs.first();
 }
 
-VMRegPair *SharedRuntime::find_callee_arguments(Symbol* sig, bool has_receiver, int* arg_size) {
+VMRegPair *SharedRuntime::find_callee_arguments(Symbol* sig, bool has_receiver, bool has_appendix, int* arg_size) {
   // This method is returning a data structure allocating as a
   // ResourceObject, so do not put any ResourceMarks in here.
   char *s = sig->as_C_string();
@@ -2770,6 +2770,11 @@
     default : ShouldNotReachHere();
     }
   }
+
+  if (has_appendix) {
+    sig_bt[cnt++] = T_OBJECT;
+  }
+
   assert( cnt < 256, "grow table size" );
 
   int comp_args_on_stack;
--- a/hotspot/src/share/vm/runtime/sharedRuntime.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/sharedRuntime.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -410,7 +410,7 @@
 
   // Convert a sig into a calling convention register layout
   // and find interesting things about it.
-  static VMRegPair* find_callee_arguments(Symbol* sig, bool has_receiver, int *arg_size);
+  static VMRegPair* find_callee_arguments(Symbol* sig, bool has_receiver, bool has_appendix, int *arg_size);
   static VMReg     name_for_receiver();
 
   // "Top of Stack" slots that may be unused by the calling convention but must
--- a/hotspot/src/share/vm/runtime/thread.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/thread.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -638,9 +638,6 @@
   jint _hashStateZ ;
   void * _schedctl ;
 
-  intptr_t _ScratchA, _ScratchB ;              // Scratch locations for fast-path sync code
-  static ByteSize ScratchA_offset()            { return byte_offset_of(Thread, _ScratchA ); }
-  static ByteSize ScratchB_offset()            { return byte_offset_of(Thread, _ScratchB ); }
 
   volatile jint rng [4] ;                      // RNG for spin loop
 
--- a/hotspot/src/share/vm/runtime/vmStructs.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/runtime/vmStructs.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -292,10 +292,8 @@
   nonstatic_field(InstanceKlass,               _transitive_interfaces,                        Array<Klass*>*)                        \
   nonstatic_field(InstanceKlass,               _fields,                                       Array<u2>*)                            \
   nonstatic_field(InstanceKlass,               _java_fields_count,                            u2)                                    \
-  nonstatic_field(InstanceKlass,               _constants,                                    ConstantPool*)                  \
+  nonstatic_field(InstanceKlass,               _constants,                                    ConstantPool*)                         \
   nonstatic_field(InstanceKlass,               _class_loader_data,                            ClassLoaderData*)                      \
-  nonstatic_field(InstanceKlass,               _protection_domain,                            oop)                                   \
-  nonstatic_field(InstanceKlass,               _signers,                                      objArrayOop)                           \
   nonstatic_field(InstanceKlass,               _source_file_name,                             Symbol*)                               \
   nonstatic_field(InstanceKlass,               _source_debug_extension,                       char*)                                 \
   nonstatic_field(InstanceKlass,               _inner_classes,                               Array<jushort>*)                       \
--- a/hotspot/src/share/vm/services/memTracker.cpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/services/memTracker.cpp	Wed Jul 05 18:59:05 2017 +0200
@@ -34,6 +34,7 @@
 #include "services/memReporter.hpp"
 #include "services/memTracker.hpp"
 #include "utilities/decoder.hpp"
+#include "utilities/defaultStream.hpp"
 #include "utilities/globalDefinitions.hpp"
 
 bool NMT_track_callsite = false;
@@ -77,7 +78,15 @@
   if (strcmp(option_line, "=summary") == 0) {
     _tracking_level = NMT_summary;
   } else if (strcmp(option_line, "=detail") == 0) {
-    _tracking_level = NMT_detail;
+    // detail relies on a stack-walking ability that may not
+    // be available depending on platform and/or compiler flags
+    if (PLATFORM_NMT_DETAIL_SUPPORTED) {
+      _tracking_level = NMT_detail;
+    } else {
+      jio_fprintf(defaultStream::error_stream(),
+        "NMT detail is not supported on this platform.  Using NMT summary instead.");
+      _tracking_level = NMT_summary;
+    }
   } else if (strcmp(option_line, "=off") != 0) {
     vm_exit_during_initialization("Syntax error, expecting -XX:NativeMemoryTracking=[off|summary|detail]", NULL);
   }
--- a/hotspot/src/share/vm/utilities/array.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/utilities/array.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2000, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -320,7 +320,7 @@
   void* operator new(size_t size, ClassLoaderData* loader_data, int length, bool read_only, TRAPS) {
     size_t word_size = Array::size(length);
     return (void*) Metaspace::allocate(loader_data, word_size, read_only,
-                        Metaspace::NonClassType, CHECK_NULL);
+                                       MetaspaceObj::array_type(sizeof(T)), CHECK_NULL);
   }
 
   static size_t byte_sizeof(int length) { return sizeof(Array<T>) + MAX2(length - 1, 0) * sizeof(T); }
--- a/hotspot/src/share/vm/utilities/globalDefinitions.hpp	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/src/share/vm/utilities/globalDefinitions.hpp	Wed Jul 05 18:59:05 2017 +0200
@@ -380,6 +380,14 @@
 # include "globalDefinitions_ppc.hpp"
 #endif
 
+/*
+ * If a platform does not support NMT_detail,
+ * the platform-specific globalDefinitions header (included above)
+ * can set PLATFORM_NMT_DETAIL_SUPPORTED to false.
+ */
+#ifndef PLATFORM_NMT_DETAIL_SUPPORTED
+#define PLATFORM_NMT_DETAIL_SUPPORTED true
+#endif
 
 // The byte alignment to be used by Arena::Amalloc.  See bugid 4169348.
 // Note: this value must be a power of 2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/8011771/Test8011771.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8011771
+ * @summary Array bound check elimination's in-block motion doesn't always reset its data structures from one step to the next.
+ * @run main/othervm -XX:-BackgroundCompilation Test8011771
+ *
+ */
+
+public class Test8011771 {
+
+    static void m(int[] a, int[] b, int j) {
+        // Array bound check elimination inserts a predicate before
+        // the loop. We'll have the predicate fail, so the method is
+        // recompiled without optimistic optimizations
+        for (int i = 0; i < 10; i++) {
+            a[i] = i;
+        }
+
+        // The test itself
+        a[j] = 0;
+        a[j+5] = 0;
+        b[j+4] = 0; // this range check shouldn't be eliminated
+    }
+
+    static public void main(String[] args) {
+        int[] arr1 = new int[10], arr2 = new int[10];
+        // force compilation:
+        for (int i = 0; i < 5000; i++) {
+            m(arr1, arr2, 0);
+        }
+
+        try {
+            m(new int[1], null, 0); // force predicate failure
+        } catch(ArrayIndexOutOfBoundsException e) {}
+
+        // force compilation again (no optimistic opts):
+        for (int i = 0; i < 5000; i++) {
+            m(arr1, arr2, 0);
+        }
+
+        // Check that the range check on the second array wasn't optimized out
+        boolean success = false;
+        try {
+            m(arr1, new int[1], 0);
+        } catch(ArrayIndexOutOfBoundsException e) {
+            success = true;
+        }
+        if (success) {
+            System.out.println("TEST PASSED");
+        } else {
+            throw new RuntimeException("TEST FAILED: erroneous bound check elimination");
+        }
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/8013496/Test8013496.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,55 @@
+#!/bin/sh
+# 
+# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+# 
+# This code is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License version 2 only, as
+# published by the Free Software Foundation.
+# 
+# This code is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# version 2 for more details (a copy is included in the LICENSE file that
+# accompanied this code).
+# 
+# You should have received a copy of the GNU General Public License version
+# 2 along with this work; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+# 
+# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+# or visit www.oracle.com if you need additional information or have any
+# questions.
+# 
+#
+# @test
+# @bug 8013496
+# @summary Test checks that the order in which ReservedCodeCacheSize and
+#          InitialCodeCacheSize are passed to the VM is irrelevant.
+# @run shell Test8013496.sh
+#
+#
+## some tests require path to find test source dir
+if [ "${TESTSRC}" = "" ]
+then
+  TESTSRC=${PWD}
+  echo "TESTSRC not set.  Using "${TESTSRC}" as default"
+fi
+echo "TESTSRC=${TESTSRC}"
+## Adding common setup Variables for running shell tests.
+. ${TESTSRC}/../../test_env.sh
+set -x
+
+${TESTJAVA}/bin/java ${TESTVMOPTS} -XX:ReservedCodeCacheSize=2m -XX:InitialCodeCacheSize=500K -version > 1.out 2>&1
+${TESTJAVA}/bin/java ${TESTVMOPTS} -XX:InitialCodeCacheSize=500K -XX:ReservedCodeCacheSize=2m -version > 2.out 2>&1
+
+diff 1.out 2.out
+
+result=$?
+if [ $result -eq 0 ] ; then  
+  echo "Test Passed"
+  exit 0
+else
+  echo "Test Failed"
+  exit 1
+fi
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/8015436/Test8015436.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8015436
+ * @summary the IK _initial_method_idnum value must be adjusted if overpass methods are added
+ * @run main Test8015436
+ *
+ */
+
+/*
+ * The test checks that a MemberName for the defaultMethod() is cached in
+ * the class MemberNameTable without a crash in the VM fastdebug mode.
+ * The original issue was that the InstanceKlass _initial_method_idnum was
+ * not adjusted properly when the overpass methods are added to the class.
+ * The expected/correct behavior: The test neither crashes nor throws any exceptions.
+ * All the invocations of the defaultMethod() must be completed successfully.
+ */
+
+import java.lang.invoke.*;
+
+interface InterfaceWithDefaultMethod {
+    public void someMethod();
+
+    default public void defaultMethod(String str){
+        System.out.println("defaultMethod() " + str);
+    }
+}
+
+public class Test8015436 implements InterfaceWithDefaultMethod {
+    @Override
+    public void someMethod() {
+        System.out.println("someMethod() invoked");
+    }
+
+    public static void main(String[] args) throws Throwable {
+        Test8015436 testObj = new Test8015436();
+        testObj.someMethod();
+        testObj.defaultMethod("invoked directly");
+
+        MethodHandles.Lookup lookup = MethodHandles.lookup();
+        MethodType   mt = MethodType.methodType(void.class, String.class);
+        MethodHandle mh = lookup.findVirtual(Test8015436.class, "defaultMethod", mt);
+        mh.invokeExact(testObj, "invoked via a MethodHandle");
+    }
+}
+
+/*
+ * A successful execution gives the output:
+ *   someMethod() invoked
+ *   defaultMethod() invoked directly
+ *   defaultMethod() invoked via a MethodHandle
+ */
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/gc/g1/TestSummarizeRSetStats.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test TestSummarizeRSetStats.java
+ * @bug 8013895
+ * @library /testlibrary
+ * @build TestSummarizeRSetStats
+ * @summary Verify output of -XX:+G1SummarizeRSetStats
+ * @run main TestSummarizeRSetStats
+ *
+ * Test the output of G1SummarizeRSetStats in conjunction with G1SummarizeRSetStatsPeriod.
+ */
+
+import com.oracle.java.testlibrary.*;
+import java.lang.Thread;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+class RunSystemGCs {
+    // 4M size, both are directly allocated into the old gen
+    static Object[] largeObject1 = new Object[1024 * 1024];
+    static Object[] largeObject2 = new Object[1024 * 1024];
+
+    static int[] temp;
+
+    public static void main(String[] args) {
+        // create some cross-references between these objects
+        for (int i = 0; i < largeObject1.length; i++) {
+            largeObject1[i] = largeObject2;
+        }
+
+        for (int i = 0; i < largeObject2.length; i++) {
+            largeObject2[i] = largeObject1;
+        }
+
+        int numGCs = Integer.parseInt(args[0]);
+
+        if (numGCs > 0) {
+            // try to force a minor collection: the young gen is 4M, the
+            // amount of data allocated below is roughly that (4*1024*1024 +
+            // some header data)
+            for (int i = 0; i < 1024 ; i++) {
+                temp = new int[1024];
+            }
+        }
+
+        for (int i = 0; i < numGCs - 1; i++) {
+            System.gc();
+        }
+    }
+}
+
+public class TestSummarizeRSetStats {
+
+    public static String runTest(String[] additionalArgs, int numGCs) throws Exception {
+        ArrayList<String> finalargs = new ArrayList<String>();
+        String[] defaultArgs = new String[] {
+            "-XX:+UseG1GC",
+            "-Xmn4m",
+            "-Xmx20m",
+            "-XX:InitiatingHeapOccupancyPercent=100", // we don't want the additional GCs due to initial marking
+            "-XX:+PrintGC",
+            "-XX:+UnlockDiagnosticVMOptions",
+            "-XX:G1HeapRegionSize=1M",
+        };
+
+        finalargs.addAll(Arrays.asList(defaultArgs));
+
+        if (additionalArgs != null) {
+            finalargs.addAll(Arrays.asList(additionalArgs));
+        }
+
+        finalargs.add(RunSystemGCs.class.getName());
+        finalargs.add(String.valueOf(numGCs));
+
+        ProcessBuilder pb = ProcessTools.createJavaProcessBuilder(
+            finalargs.toArray(new String[0]));
+        OutputAnalyzer output = new OutputAnalyzer(pb.start());
+
+        output.shouldHaveExitValue(0);
+
+        String result = output.getStdout();
+        return result;
+    }
+
+    private static void expectStatistics(String result, int expectedCumulative, int expectedPeriodic) throws Exception {
+        int actualTotal = result.split("Concurrent RS processed").length - 1;
+        int actualCumulative = result.split("Cumulative RS summary").length - 1;
+
+        if (expectedCumulative != actualCumulative) {
+            throw new Exception("Incorrect amount of RSet summaries at the end. Expected " + expectedCumulative + ", got " + actualCumulative);
+        }
+
+        if (expectedPeriodic != (actualTotal - actualCumulative)) {
+            throw new Exception("Incorrect amount of per-period RSet summaries at the end. Expected " + expectedPeriodic + ", got " + (actualTotal - actualCumulative));
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        String result;
+
+        // no RSet statistics output
+        result = runTest(null, 0);
+        expectStatistics(result, 0, 0);
+
+        // no RSet statistics output
+        result = runTest(null, 2);
+        expectStatistics(result, 0, 0);
+
+        // no RSet statistics output
+        result = runTest(new String[] { "-XX:G1SummarizeRSetStatsPeriod=1" }, 3);
+        expectStatistics(result, 0, 0);
+
+        // single RSet statistics output at the end
+        result = runTest(new String[] { "-XX:+G1SummarizeRSetStats" }, 0);
+        expectStatistics(result, 1, 0);
+
+        // single RSet statistics output at the end
+        result = runTest(new String[] { "-XX:+G1SummarizeRSetStats" }, 2);
+        expectStatistics(result, 1, 0);
+
+        // single RSet statistics output
+        result = runTest(new String[] { "-XX:+G1SummarizeRSetStats", "-XX:G1SummarizeRSetStatsPeriod=1" }, 0);
+        expectStatistics(result, 1, 0);
+
+        // two times RSet statistics output
+        result = runTest(new String[] { "-XX:+G1SummarizeRSetStats", "-XX:G1SummarizeRSetStatsPeriod=1" }, 1);
+        expectStatistics(result, 1, 1);
+
+        // four times RSet statistics output
+        result = runTest(new String[] { "-XX:+G1SummarizeRSetStats", "-XX:G1SummarizeRSetStatsPeriod=1" }, 3);
+        expectStatistics(result, 1, 3);
+
+        // three times RSet statistics output
+        result = runTest(new String[] { "-XX:+G1SummarizeRSetStats", "-XX:G1SummarizeRSetStatsPeriod=2" }, 3);
+        expectStatistics(result, 1, 2);
+
+        // single RSet statistics output
+        result = runTest(new String[] { "-XX:+G1SummarizeRSetStats", "-XX:G1SummarizeRSetStatsPeriod=100" }, 3);
+        expectStatistics(result, 1, 1);
+    }
+}
+
--- a/hotspot/test/runtime/8007320/ConstMethodTest.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/test/runtime/8007320/ConstMethodTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -23,7 +23,7 @@
 
 /*
  * @test
- * @bug 8007320
+ * @bug 8007320 8014709
  * @summary Test all optional fields in ConstMethod
  * @compile -g -parameters ConstMethodTest.java
  * @run main ConstMethodTest
@@ -74,6 +74,11 @@
 
 @MyAnnotation(name="someName", value = "Hello World")
 public class ConstMethodTest {
+    public @TypeAnno("constructor") ConstMethodTest() { }
+
+    public ConstMethodTest(int i) {
+        // needs a second unannotated constructor
+    }
 
     private static void check(boolean b) {
         if (!b)
@@ -139,10 +144,26 @@
         }
     }
 
+    private static void testConstructor() throws Exception {
+        for (Constructor c : ConstMethodTest.class.getDeclaredConstructors()) {
+            Annotation[] aa = c.getAnnotatedReturnType().getAnnotations();
+            if (c.getParameterTypes().length == 1) { // should be un-annotated
+                check(aa.length == 0);
+            } else if (c.getParameterTypes().length == 0) { // should be annotated
+                check(aa.length == 1);
+                check(((TypeAnno)aa[0]).value().equals("constructor"));
+            } else {
+                //should not happen
+                check(false);
+            }
+        }
+    }
+
     public static void main(java.lang.String[] unused) throws Throwable {
         // pass 5 so kitchenSinkFunc is instantiated with an int
         kitchenSinkFunc("parameter", "param2", 5);
         test1();
+        testConstructor();
     }
 };
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/runtime/Metaspace/FragmentMetaspace.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * @test
+ * @library /runtime/testlibrary
+ * @build GeneratedClassLoader
+ * @run main/othervm/timeout=200 FragmentMetaspace
+ */
+
+import java.io.IOException;
+
+/**
+ * Test that tries to fragment the native memory used by class loaders.
+ * This test creates class loaders that load classes of increasing size for every
+ * iteration. By increasing the size of the class meta data needed for every iteration
+ * we stress the subsystem for allocating native memory for meta data.
+ */
+public class FragmentMetaspace {
+
+    public static void main(String... args) {
+        runGrowing(Long.valueOf(System.getProperty("time", "80000")));
+        // try to clean up and unload classes to decrease
+        // class verification time in debug vm
+        System.gc();
+    }
+
+    private static void runGrowing(long time) {
+        long startTime = System.currentTimeMillis();
+        for (int i = 0; System.currentTimeMillis() < startTime + time; ++i) {
+            try {
+                GeneratedClassLoader gcl = new GeneratedClassLoader();
+
+                Class<?> c = gcl.getGeneratedClasses(i, 100)[0];
+                c.newInstance();
+                c = null;
+
+                gcl = null;
+            } catch (IOException|InstantiationException|IllegalAccessException ex) {
+                throw new RuntimeException(ex);
+            }
+        }
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/runtime/Metaspace/FragmentMetaspaceSimple.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * @test
+ * @library /runtime/testlibrary
+ * @library classes
+ * @build test.Empty ClassUnloadCommon
+ * @run main/othervm/timeout=200 FragmentMetaspaceSimple
+ */
+
+import java.util.ArrayList;
+
+/**
+ * Test that tries to fragment the native memory used by class loaders.
+ * Keeps every other class loader alive in order to fragment the memory space
+ * used to store classes and metadata. Because that memory is typically allocated
+ * in chunks per class loader, keeping every other loader alive leaves every other
+ * chunk unused and causes heavy fragmentation unless it is handled properly.
+ */
+public class FragmentMetaspaceSimple {
+    public static void main(String... args) {
+        runSimple(Long.valueOf(System.getProperty("time", "80000")));
+        System.gc();
+    }
+
+    private static void runSimple(long time) {
+        long startTime = System.currentTimeMillis();
+        ArrayList<ClassLoader> cls = new ArrayList<>();
+        for (int i = 0; System.currentTimeMillis() < startTime + time; ++i) {
+            ClassLoader ldr = ClassUnloadCommon.newClassLoader();
+            if (i % 1000 == 0) {
+                cls.clear();
+            }
+            // only keep every other class loader alive
+            if (i % 2 == 1) {
+                cls.add(ldr);
+            }
+            Class<?> c = null;
+            try {
+                c = ldr.loadClass("test.Empty");
+            } catch (ClassNotFoundException ex) {
+                throw new RuntimeException(ex);
+            }
+            c = null;
+        }
+        cls = null;
+    }
+}
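
ClassUnloadCommon.newClassLoader() comes from the shared /runtime/testlibrary and is not part of this change. As a rough stand-in only (the real helper may differ), it can be pictured as a loader with a null parent over the compiled test classes, so that test.Empty is defined freshly by each loader:

    import java.net.MalformedURLException;
    import java.net.URL;
    import java.net.URLClassLoader;
    import java.nio.file.Paths;

    class NewClassLoaderSketch {
        // Hypothetical stand-in for ClassUnloadCommon.newClassLoader().
        static ClassLoader newClassLoader() {
            try {
                URL classesDir = Paths.get("classes").toUri().toURL(); // assumed location
                // A null parent forces test.Empty to be defined by this loader,
                // giving every iteration its own class metadata.
                return new URLClassLoader(new URL[] { classesDir }, null);
            } catch (MalformedURLException e) {
                throw new RuntimeException(e);
            }
        }
    }
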
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/runtime/Metaspace/classes/test/Empty.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package test;
+
+public class Empty {
+public String toString() { return "nothing"; }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/runtime/contended/HasNonStatic.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.lang.Class;
+import java.lang.String;
+import java.lang.System;
+import java.lang.management.ManagementFactory;
+import java.lang.management.RuntimeMXBean;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CyclicBarrier;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import sun.misc.Unsafe;
+import sun.misc.Contended;
+
+/*
+ * @test
+ * @bug     8015270
+ * @summary \@Contended: fix multiple issues in the layout code
+ *
+ * @run main/othervm -XX:-RestrictContended HasNonStatic
+ */
+public class HasNonStatic {
+
+    public static void main(String[] args) throws Exception {
+        R1 r1 = new R1();
+        R2 r2 = new R2();
+        R3 r3 = new R3();
+        R4 r4 = new R4();
+    }
+
+    public static class R1 {
+        @Contended
+        Object o;
+    }
+
+    @Contended
+    public static class R2 {
+        Object o;
+    }
+
+    @Contended
+    public static class R3 {
+    }
+
+    public static class R4 extends R3 {
+    }
+
+}
+
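
HasNonStatic only verifies that classes carrying @Contended on instance fields load and instantiate without tripping the layout code. To actually observe the padding, one could print field offsets with sun.misc.Unsafe; this is a sketch, not part of the test, and it too must run with -XX:-RestrictContended:

    import java.lang.reflect.Field;
    import sun.misc.Unsafe;

    class ContendedOffsetSketch {
        public static void main(String[] args) throws Exception {
            Field f = Unsafe.class.getDeclaredField("theUnsafe");
            f.setAccessible(true);
            Unsafe unsafe = (Unsafe) f.get(null);
            // With contention padding applied, the @Contended field's offset is
            // pushed well past the object header.
            Field contended = HasNonStatic.R1.class.getDeclaredField("o");
            System.out.println("R1.o offset = " + unsafe.objectFieldOffset(contended));
        }
    }
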
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/runtime/contended/OopMaps.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,166 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.lang.Class;
+import java.lang.String;
+import java.lang.System;
+import java.lang.management.ManagementFactory;
+import java.lang.management.RuntimeMXBean;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CyclicBarrier;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import sun.misc.Unsafe;
+import sun.misc.Contended;
+
+/*
+ * @test
+ * @bug     8015270
+ * @bug     8015493
+ * @summary \@Contended: fix multiple issues in the layout code
+ *
+ * @run main/othervm -XX:-RestrictContended -XX:ContendedPaddingWidth=128 -Xmx128m OopMaps
+ */
+public class OopMaps {
+
+    public static final int COUNT = 10000;
+
+    public static void main(String[] args) throws Exception {
+        Object o01 = new Object();
+        Object o02 = new Object();
+        Object o03 = new Object();
+        Object o04 = new Object();
+        Object o05 = new Object();
+        Object o06 = new Object();
+        Object o07 = new Object();
+        Object o08 = new Object();
+        Object o09 = new Object();
+        Object o10 = new Object();
+        Object o11 = new Object();
+        Object o12 = new Object();
+        Object o13 = new Object();
+        Object o14 = new Object();
+
+        R1[] rs = new R1[COUNT];
+
+        for (int i = 0; i < COUNT; i++) {
+           R1 r1 = new R1();
+           r1.o01 = o01;
+           r1.o02 = o02;
+           r1.o03 = o03;
+           r1.o04 = o04;
+           r1.o05 = o05;
+           r1.o06 = o06;
+           r1.o07 = o07;
+           r1.o08 = o08;
+           r1.o09 = o09;
+           r1.o10 = o10;
+           r1.o11 = o11;
+           r1.o12 = o12;
+           r1.o13 = o13;
+           r1.o14 = o14;
+           r1.i1 = 1;
+           r1.i2 = 2;
+           r1.i3 = 3;
+           r1.i4 = 4;
+           rs[i] = r1;
+        }
+
+        System.gc();
+
+        for (int i = 0; i < COUNT; i++) {
+           R1 r1 = rs[i];
+           if (r1.o01 != o01) throw new Error("Test Error: o01");
+           if (r1.o02 != o02) throw new Error("Test Error: o02");
+           if (r1.o03 != o03) throw new Error("Test Error: o03");
+           if (r1.o04 != o04) throw new Error("Test Error: o04");
+           if (r1.o05 != o05) throw new Error("Test Error: o05");
+           if (r1.o06 != o06) throw new Error("Test Error: o06");
+           if (r1.o07 != o07) throw new Error("Test Error: o07");
+           if (r1.o08 != o08) throw new Error("Test Error: o08");
+           if (r1.o09 != o09) throw new Error("Test Error: o09");
+           if (r1.o10 != o10) throw new Error("Test Error: o10");
+           if (r1.o11 != o11) throw new Error("Test Error: o11");
+           if (r1.o12 != o12) throw new Error("Test Error: o12");
+           if (r1.o13 != o13) throw new Error("Test Error: o13");
+           if (r1.o14 != o14) throw new Error("Test Error: o14");
+           if (r1.i1 != 1)    throw new Error("Test Error: i1");
+           if (r1.i2 != 2)    throw new Error("Test Error: i2");
+           if (r1.i3 != 3)    throw new Error("Test Error: i3");
+           if (r1.i4 != 4)    throw new Error("Test Error: i4");
+        }
+    }
+
+    public static class R0 {
+        int i1;
+        int i2;
+
+        Object o01;
+        Object o02;
+
+        @Contended
+        Object o03;
+
+        @Contended
+        Object o04;
+
+        @Contended
+        Object o05;
+
+        @Contended
+        Object o06;
+
+        @Contended
+        Object o07;
+   }
+
+   public static class R1 extends R0 {
+        int i3;
+        int i4;
+
+        Object o08;
+        Object o09;
+
+        @Contended
+        Object o10;
+
+        @Contended
+        Object o11;
+
+        @Contended
+        Object o12;
+
+        @Contended
+        Object o13;
+
+        @Contended
+        Object o14;
+   }
+
+}
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/runtime/memory/MultiAllocateNullCheck.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test MultiAllocateNullCheck
+ * @bug 6726963
+ * @summary multi_allocate() call does not CHECK_NULL and causes crash in fastdebug bits
+ * @run main/othervm -Xmx32m MultiAllocateNullCheck
+ */
+
+import java.lang.reflect.Array;
+
+public class MultiAllocateNullCheck {
+      public static void main(String[] args) throws Exception {
+        Object x = null;
+        try
+        {
+            x = Array.newInstance(String.class, new int[]
+                {Integer.MAX_VALUE, Integer.MAX_VALUE});
+            System.out.println("Array was created");
+        } catch (OutOfMemoryError e) {
+            System.out.println("Out of memory occured, which is OK in this case");
+        }
+    }
+}
--- a/hotspot/test/runtime/memory/ReserveMemory.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/test/runtime/memory/ReserveMemory.java	Wed Jul 05 18:59:05 2017 +0200
@@ -34,29 +34,20 @@
 
 import com.oracle.java.testlibrary.*;
 
-import java.lang.reflect.Field;
 import sun.hotspot.WhiteBox;
-import sun.misc.Unsafe;
 
 public class ReserveMemory {
-  private static Unsafe getUnsafe() throws Exception {
-    Field f = Unsafe.class.getDeclaredField("theUnsafe");
-    f.setAccessible(true);
-    return (Unsafe)f.get(null);
-  }
-
   private static boolean isWindows() {
     return System.getProperty("os.name").toLowerCase().startsWith("win");
   }
 
+  private static boolean isOsx() {
+    return System.getProperty("os.name").toLowerCase().startsWith("mac");
+  }
+
   public static void main(String args[]) throws Exception {
     if (args.length > 0) {
-      long address = WhiteBox.getWhiteBox().reserveMemory(4096);
-
-      System.out.println("Reserved memory at address: 0x" + Long.toHexString(address));
-      System.out.println("Will now read from the address, expecting a crash!");
-
-      int x = getUnsafe().getInt(address);
+      WhiteBox.getWhiteBox().readReservedMemory();
 
       throw new Exception("Read of reserved/uncommitted memory unexpectedly succeeded, expected crash!");
     }
@@ -71,6 +62,8 @@
     OutputAnalyzer output = new OutputAnalyzer(pb.start());
     if (isWindows()) {
       output.shouldContain("EXCEPTION_ACCESS_VIOLATION");
+    } else if (isOsx()) {
+      output.shouldContain("SIGBUS");
     } else {
       output.shouldContain("SIGSEGV");
     }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/runtime/testlibrary/GeneratedClassLoader.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,202 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+import java.io.DataInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import javax.tools.JavaCompiler;
+import javax.tools.ToolProvider;
+
+/**
+ * A class loader that generates new classes.
+ * The generated classes are produced by first emitting Java sources containing
+ * nested static classes; these are then compiled and the class files read back.
+ * Some effort is made to keep each generated class unique and of non-trivial
+ * size.
+ */
+public class GeneratedClassLoader extends ClassLoader {
+    /**
+     * Holds a pair of class bytecodes and class name (for use with defineClass).
+     */
+    private static class GeneratedClass {
+        public byte[] bytes;
+        public String name;
+        public GeneratedClass(byte[] bytes, String name) {
+            this.bytes = bytes; this.name = name;
+        }
+    }
+
+    /**
+     * Used to uniquely name every class generated.
+     */
+    private static int count = 0;
+    /**
+     * Controls whether the class files and Java sources for the generated
+     * classes are deleted after use (they are deleted by default).
+     */
+    private static boolean deleteFiles = Boolean.parseBoolean(
+        System.getProperty("GeneratedClassLoader.deleteFiles", "true"));
+
+    private static String bigstr =
+        "Lorem ipsum dolor sit amet, consectetur adipiscing elit. "
+        + "In facilisis scelerisque vehicula. Donec congue nisi a "
+        + "leo posuere placerat lobortis felis ultrices. Pellentesque "
+        + "habitant morbi tristique senectus et netus et malesuada "
+        + "fames ac turpis egestas. Nam tristique velit at felis "
+        + "iaculis at tempor sem vestibulum. Sed adipiscing lectus "
+        + "non mi molestie sagittis. Morbi eu purus urna. Nam tempor "
+        + "tristique massa eget semper. Mauris cursus, nulla et ornare "
+        + "vehicula, leo dolor scelerisque metus, sit amet rutrum erat "
+        + "sapien quis dui. Nullam eleifend risus et velit accumsan sed "
+        + "suscipit felis pulvinar. Nullam faucibus suscipit gravida. "
+        + "Pellentesque habitant morbi tristique senectus et netus et "
+        + "malesuada fames ac turpis egestas. Nullam ut massa augue, "
+        + "nec viverra mauris.";
+
+    private static int getNextCount() {
+        return count++;
+    }
+
+    ////// end statics
+
+    private JavaCompiler javac;
+    private String nameBase;
+
+    public GeneratedClassLoader() {
+        javac = ToolProvider.getSystemJavaCompiler();
+        nameBase = "TestSimpleClass";
+    }
+
+    private long getBigValue(int which) {
+        // > 65536 is too large to encode in the bytecode
+        // so this will force us to emit a constant pool entry for this int
+        return (long)which + 65537;
+    }
+
+    private String getBigString(int which) {
+        return bigstr + which;
+    }
+
+    private String getClassName(int count) {
+        return nameBase + count;
+    }
+
+    private String generateSource(int count, int sizeFactor, int numClasses) {
+        StringBuilder sb = new StringBuilder();
+        sb.append("public class ").append(getClassName(count)).append("{\n");
+        for (int j = 0; j < numClasses; ++j) {
+            sb.append("public static class ")
+              .append("Class")
+              .append(j)
+              .append("{\n");
+            for (int i = 0; i < sizeFactor; ++i) {
+                int value = i;
+                sb.append("private long field")
+                  .append(i).append(" = ")
+                  .append(getBigValue(value++))
+                  .append(";\n");
+                sb.append("public long method")
+                  .append(i)
+                  .append("() {\n");
+                sb.append("return ")
+                  .append(getBigValue(value++))
+                  .append(";");
+                sb.append("}\n");
+                sb.append("private String str").append(i)
+                  .append(" = \"")
+                  .append(getBigString(i))
+                  .append("\";");
+            }
+            sb.append("\n}");
+        }
+        sb.append("\n}");
+        return sb.toString();
+    }
+
+    private GeneratedClass[] getGeneratedClass(int sizeFactor, int numClasses) throws IOException {
+        int uniqueCount = getNextCount();
+        String src = generateSource(uniqueCount, sizeFactor, numClasses);
+        String className = getClassName(uniqueCount);
+        File file = new File(className + ".java");
+        try (PrintWriter pw = new PrintWriter(new FileWriter(file))) {
+            pw.append(src);
+            pw.flush();
+        }
+        int exitcode = javac.run(null, null, null, file.getCanonicalPath());
+        if (exitcode != 0) {
+            throw new RuntimeException("javac failure when compiling: " +
+                    file.getCanonicalPath());
+        } else {
+            if (deleteFiles) {
+                file.delete();
+            }
+        }
+        GeneratedClass[] gc = new GeneratedClass[numClasses];
+        for (int i = 0; i < numClasses; ++i) {
+            String name = className + "$" + "Class" + i;
+            File classFile = new File(name + ".class");
+            byte[] bytes;
+            try (DataInputStream dis = new DataInputStream(new FileInputStream(classFile))) {
+                bytes = new byte[dis.available()];
+                dis.readFully(bytes);
+            }
+            if (deleteFiles) {
+                classFile.delete();
+            }
+            gc[i] = new GeneratedClass(bytes, name);
+        }
+        if (deleteFiles) {
+            new File(className + ".class").delete();
+        }
+        return gc;
+    }
+
+    /**
+     * Generate a single class, compile it and load it.
+     * @param sizeFactor Fuzzy measure of how large the class should be.
+     * @return the Class instance.
+     * @throws IOException
+     */
+    public Class<?> generateClass(int sizeFactor) throws IOException {
+        return getGeneratedClasses(sizeFactor, 1)[0];
+    }
+
+    /**
+     * Generate several classes, compile and load them.
+     * @param sizeFactor Fuzzy measure of how large each class should be.
+     * @param numClasses The number of classes to create
+     * @return an array of the Class instances.
+     * @throws IOException
+     */
+    public Class<?>[] getGeneratedClasses(int sizeFactor, int numClasses) throws IOException {
+        GeneratedClass[] gc = getGeneratedClass(sizeFactor, numClasses);
+        Class<?>[] classes = new Class[numClasses];
+        for (int i = 0; i < numClasses; ++i) {
+            classes[i] = defineClass(gc[i].name, gc[i].bytes, 0 , gc[i].bytes.length);
+        }
+        return classes;
+    }
+}
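
For context, FragmentMetaspace above is the intended consumer of this loader; a minimal standalone use (the sizeFactor and numClasses values here are arbitrary) would be:

    // Minimal usage sketch for GeneratedClassLoader; the numbers are arbitrary.
    public class GeneratedClassLoaderDemo {
        public static void main(String[] args) throws Exception {
            GeneratedClassLoader gcl = new GeneratedClassLoader();
            // Emit, compile and load three nested classes, each with ten
            // field/method/string triples.
            Class<?>[] classes = gcl.getGeneratedClasses(10, 3);
            for (Class<?> c : classes) {
                System.out.println(c.getName() + " loaded by " + c.getClassLoader());
            }
        }
    }
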
--- a/hotspot/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/hotspot/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java	Wed Jul 05 18:59:05 2017 +0200
@@ -115,7 +115,7 @@
   public native boolean isInStringTable(String str);
 
   // Memory
-  public native long reserveMemory(long size);
+  public native void readReservedMemory();
 
   // force Full GC
   public native void fullGC();
--- a/jaxp/.hgtags	Mon Jun 10 17:04:18 2013 -0700
+++ b/jaxp/.hgtags	Wed Jul 05 18:59:05 2017 +0200
@@ -214,3 +214,4 @@
 668acc0e1034bc1bec6d02be92e0dd4a63d0667e jdk8-b90
 e3065fb07877c7e96e8b93416fe2ab9a4c9eb2a5 jdk8-b91
 1ab5d8d6eab81e65c6c3cf21739474cd67a0e7cf jdk8-b92
+d583a491d63c49eeda4869525048075da1cb596e jdk8-b93
--- a/jaxp/src/com/sun/org/apache/xalan/internal/XalanConstants.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jaxp/src/com/sun/org/apache/xalan/internal/XalanConstants.java	Wed Jul 05 18:59:05 2017 +0200
@@ -80,59 +80,6 @@
     /**
      * FEATURE_SECURE_PROCESSING (FSP) is false by default
      */
-    public static final String EXTERNAL_ACCESS_DEFAULT = getExternalAccessDefault(false);
-
-    /**
-     * Determine the default value of the external access properties
-     *
-     * jaxp 1.5 does not require implementations to restrict by default
-     *
-     * For JDK8:
-     * The default value is 'file' (including jar:file); The keyword "all" grants permission
-     * to all protocols. When {@link javax.xml.XMLConstants#FEATURE_SECURE_PROCESSING} is on,
-     * the default value is an empty string indicating no access is allowed.
-     *
-     * For JDK7:
-     * The default value is 'all' granting permission to all protocols. If by default,
-     * {@link javax.xml.XMLConstants#FEATURE_SECURE_PROCESSING} is true, it should
-     * not change the default value. However, if {@link javax.xml.XMLConstants#FEATURE_SECURE_PROCESSING}
-     * is set explicitly, the values of the properties shall be set to an empty string
-     * indicating no access is allowed.
-     *
-     * @param isSecureProcessing indicating if Secure Processing is set
-     * @return default value
-     */
-    public static String getExternalAccessDefault(boolean isSecureProcessing) {
-        String defaultValue = "all";
-        if (isJDKandAbove(RESTRICT_BY_DEFAULT_JDK_VERSION)) {
-            defaultValue = "file";
-            if (isSecureProcessing) {
-                defaultValue = EXTERNAL_ACCESS_DEFAULT_FSP;
-            }
-        }
-        return defaultValue;
-    }
-
-    /*
-     * Check the version of the current JDK against that specified in the
-     * parameter
-     *
-     * There is a proposal to change the java version string to:
-     * MAJOR.MINOR.FU.CPU.PSU-BUILDNUMBER_BUGIDNUMBER_OPTIONAL
-     * This method would work with both the current format and that proposed
-     *
-     * @param compareTo a JDK version to be compared to
-     * @return true if the current version is the same or above that represented
-     * by the parameter
-     */
-    public static boolean isJDKandAbove(int compareTo) {
-        String javaVersion = SecuritySupport.getSystemProperty("java.version");
-        String versions[] = javaVersion.split("\\.", 3);
-        if (Integer.parseInt(versions[0]) >= compareTo ||
-            Integer.parseInt(versions[1]) >= compareTo) {
-            return true;
-        }
-        return false;
-    }
+    public static final String EXTERNAL_ACCESS_DEFAULT = ACCESS_EXTERNAL_ALL;
 
 } // class Constants
--- a/jaxp/src/com/sun/org/apache/xalan/internal/xsltc/trax/TransformerFactoryImpl.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jaxp/src/com/sun/org/apache/xalan/internal/xsltc/trax/TransformerFactoryImpl.java	Wed Jul 05 18:59:05 2017 +0200
@@ -253,7 +253,6 @@
         if (System.getSecurityManager() != null) {
             _isSecureMode = true;
             _isNotSecureProcessing = false;
-            defaultAccess = XalanConstants.getExternalAccessDefault(true);
         }
         _accessExternalStylesheet =  SecuritySupport.getDefaultAccessProperty(
                 XalanConstants.SP_ACCESS_EXTERNAL_STYLESHEET, defaultAccess);
--- a/jaxp/src/com/sun/org/apache/xerces/internal/impl/Constants.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jaxp/src/com/sun/org/apache/xerces/internal/impl/Constants.java	Wed Jul 05 18:59:05 2017 +0200
@@ -202,7 +202,7 @@
     /**
      * FEATURE_SECURE_PROCESSING (FSP) is true by default
      */
-    public static final String EXTERNAL_ACCESS_DEFAULT = getExternalAccessDefault(true);
+    public static final String EXTERNAL_ACCESS_DEFAULT = ACCESS_EXTERNAL_ALL;
 
     //
     // DOM features
@@ -697,58 +697,6 @@
         ? new ArrayEnumeration(fgXercesProperties) : fgEmptyEnumeration;
     } // getXercesProperties():Enumeration
 
-    /**
-     * Determine the default value of the external access properties
-     *
-     * jaxp 1.5 does not require implementations to restrict by default
-     *
-     * For JDK8:
-     * The default value is 'file' (including jar:file); The keyword "all" grants permission
-     * to all protocols. When {@link javax.xml.XMLConstants#FEATURE_SECURE_PROCESSING} is on,
-     * the default value is an empty string indicating no access is allowed.
-     *
-     * For JDK7:
-     * The default value is 'all' granting permission to all protocols. If by default,
-     * {@link javax.xml.XMLConstants#FEATURE_SECURE_PROCESSING} is true, it should
-     * not change the default value. However, if {@link javax.xml.XMLConstants#FEATURE_SECURE_PROCESSING}
-     * is set explicitly, the values of the properties shall be set to an empty string
-     * indicating no access is allowed.
-     *
-     * @param isSecureProcessing indicating if Secure Processing is set
-     * @return default value
-     */
-    public static String getExternalAccessDefault(boolean isSecureProcessing) {
-        String defaultValue = "all";
-        if (isJDKandAbove(RESTRICT_BY_DEFAULT_JDK_VERSION)) {
-            defaultValue = "file";
-            if (isSecureProcessing) {
-                defaultValue = EXTERNAL_ACCESS_DEFAULT_FSP;
-            }
-        }
-        return defaultValue;
-    }
-
-    /*
-     * Check the version of the current JDK against that specified in the
-     * parameter
-     *
-     * There is a proposal to change the java version string to:
-     * MAJOR.MINOR.FU.CPU.PSU-BUILDNUMBER_BUGIDNUMBER_OPTIONAL
-     * This method would work with both the current format and that proposed
-     *
-     * @param compareTo a JDK version to be compared to
-     * @return true if the current version is the same or above that represented
-     * by the parameter
-     */
-    public static boolean isJDKandAbove(int compareTo) {
-        String javaVersion = SecuritySupport.getSystemProperty("java.version");
-        String versions[] = javaVersion.split("\\.", 3);
-        if (Integer.parseInt(versions[0]) >= compareTo ||
-            Integer.parseInt(versions[1]) >= compareTo) {
-            return true;
-        }
-        return false;
-    }
 
     //
     // Classes
--- a/jaxp/src/com/sun/org/apache/xerces/internal/jaxp/validation/XMLSchemaFactory.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jaxp/src/com/sun/org/apache/xerces/internal/jaxp/validation/XMLSchemaFactory.java	Wed Jul 05 18:59:05 2017 +0200
@@ -364,10 +364,15 @@
                         SAXMessageFormatter.formatMessage(null,
                         "jaxp-secureprocessing-feature", null));
             }
-            fSecurityManager = value ? new SecurityManager() : null;
+            if (value) {
+                fSecurityManager = new SecurityManager();
+                fXMLSchemaLoader.setProperty(ACCESS_EXTERNAL_DTD, Constants.EXTERNAL_ACCESS_DEFAULT_FSP);
+                fXMLSchemaLoader.setProperty(ACCESS_EXTERNAL_SCHEMA, Constants.EXTERNAL_ACCESS_DEFAULT_FSP);
+            } else {
+                fSecurityManager = null;
+            }
+
             fXMLSchemaLoader.setProperty(SECURITY_MANAGER, fSecurityManager);
-            fXMLSchemaLoader.setProperty(ACCESS_EXTERNAL_DTD, Constants.EXTERNAL_ACCESS_DEFAULT_FSP);
-            fXMLSchemaLoader.setProperty(ACCESS_EXTERNAL_SCHEMA, Constants.EXTERNAL_ACCESS_DEFAULT_FSP);
             return;
         } else if (name.equals(Constants.ORACLE_FEATURE_SERVICE_MECHANISM)) {
             //in secure mode, let _useServicesMechanism be determined by the constructor
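
The net effect of this change is that enabling FEATURE_SECURE_PROCESSING now tightens the external-access properties to the restrictive FSP default, while disabling it no longer does so unconditionally. From an application's point of view, the standard JAXP knobs involved look like this (a usage sketch, not code from this changeset):

    import javax.xml.XMLConstants;
    import javax.xml.validation.SchemaFactory;

    class SecureSchemaFactorySketch {
        public static void main(String[] args) throws Exception {
            SchemaFactory sf =
                SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
            // With this fix, turning on FSP also restricts external DTD and
            // schema access to the secure-processing default.
            sf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
            // Applications can still widen access explicitly afterwards,
            // e.g. to local files only:
            sf.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "file");
            sf.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "file");
        }
    }
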
--- a/jaxws/.hgtags	Mon Jun 10 17:04:18 2013 -0700
+++ b/jaxws/.hgtags	Wed Jul 05 18:59:05 2017 +0200
@@ -214,3 +214,4 @@
 3e5b9ea5ac35ea7096da484e24a863cf4552179f jdk8-b90
 0bb1a9fa56b037d072efdaae5f5b73a0f23c966c jdk8-b91
 a0f604766ca14818e2a7b1558cc399499caabf75 jdk8-b92
+7386eca865e1f7216637cdf8dcf3f5d5c255f208 jdk8-b93
--- a/jdk/.hgtags	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/.hgtags	Wed Jul 05 18:59:05 2017 +0200
@@ -214,3 +214,4 @@
 c63eda8f63008a4398d2c22ac8d72f7fef6f9238 jdk8-b90
 169451cf0cc53bde5af24f9820ea3f35ec4b4df4 jdk8-b91
 a2a2a91075ad85becbe10a39d7fd04ef9bea8df5 jdk8-b92
+691d6c6cd332d98b0f0221445a73906776f31f72 jdk8-b93
--- a/jdk/make/java/management/Exportedfiles.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/make/java/management/Exportedfiles.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2003, 2005, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 #
 # This code is free software; you can redistribute it and/or modify it
@@ -29,6 +29,7 @@
 
 FILES_export = \
 	sun/management/ClassLoadingImpl.java \
+	sun/management/DiagnosticCommandImpl.java \
 	sun/management/FileSystemImpl.java \
 	sun/management/Flag.java \
 	sun/management/GarbageCollectorImpl.java \
--- a/jdk/make/java/management/FILES_c.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/make/java/management/FILES_c.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2003, 2005, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 #
 # This code is free software; you can redistribute it and/or modify it
@@ -25,6 +25,7 @@
 
 FILES_c = \
 	ClassLoadingImpl.c \
+	DiagnosticCommandImpl.c \
 	FileSystemImpl.c \
 	Flag.c \
 	GarbageCollectorImpl.c \
--- a/jdk/make/java/management/mapfile-vers	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/make/java/management/mapfile-vers	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2005, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 #
 # This code is free software; you can redistribute it and/or modify it
@@ -39,6 +39,10 @@
 	    Java_com_sun_management_UnixOperatingSystem_getTotalSwapSpaceSize;
 	    Java_com_sun_management_UnixOperatingSystem_initialize;
 	    Java_sun_management_ClassLoadingImpl_setVerboseClass;
+            Java_sun_management_DiagnosticCommandImpl_executeDiagnosticCommand;
+            Java_sun_management_DiagnosticCommandImpl_getDiagnosticCommands;
+            Java_sun_management_DiagnosticCommandImpl_getDiagnosticCommandInfo;
+	    Java_sun_management_DiagnosticCommandImpl_setNotificationEnabled;
 	    Java_sun_management_FileSystemImpl_isAccessUserOnly0;
 	    Java_sun_management_Flag_getAllFlagNames;
 	    Java_sun_management_Flag_getFlags;
--- a/jdk/make/sun/awt/Makefile	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/make/sun/awt/Makefile	Wed Jul 05 18:59:05 2017 +0200
@@ -390,12 +390,9 @@
 # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv LINUX
 ifdef OPENJDK
 
-FONTCONFIGS_SRC	= $(PLATFORM_SRC)/classes/sun/awt/fontconfigs
-_FONTCONFIGS	= \
-	fontconfig.properties				\
-	fontconfig.SuSE.properties                      \
-	fontconfig.Ubuntu.properties                    \
-	fontconfig.Fedora.properties
+FONTCONFIGS_SRC	= 
+_FONTCONFIGS	= 
+
 else
 
 FONTCONFIGS_SRC	= $(CLOSED_SRC)/solaris/classes/sun/awt/fontconfigs
@@ -441,7 +438,11 @@
 FONTCONFIGS     = $(_FONTCONFIGS:%=$(LIBDIR)/%.src)
 BINARYFONTCONFIGS = $(_FONTCONFIGS:%.properties=$(LIBDIR)/%.bfc)
 
+ifneq ("x$(_FONTCONFIGS)", "x")
 fontconfigs: $(FONTCONFIGS) $(BINARYFONTCONFIGS)
+else
+fontconfigs:
+endif
 
 $(LIBDIR)/%.src: $(FONTCONFIGS_SRC)/$(FONTCONFIGS_SRC_PREFIX)%
 	$(install-file)
@@ -455,9 +456,13 @@
 	$(call chmod-file, 444)
 	@$(java-vm-cleanup)
 
+ifneq ("x$(_FONTCONFIGS)", "x")
 fontconfigs.clean :
 	$(RM) $(FONTCONFIGS)
 	$(RM) $(BINARYFONTCONFIGS)
+else
+fontconfigs.clean :
+endif
 
 ifeq ($(PLATFORM), windows)
 # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv WINDOWS
--- a/jdk/make/tools/CharsetMapping/EUC_KR.map	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/make/tools/CharsetMapping/EUC_KR.map	Wed Jul 05 18:59:05 2017 +0200
@@ -5,6 +5,8 @@
 # (2)Added 2 new codepoints (KS X 1001:1998)
 #     0xA2E6	0x20AC	# EURO Sign
 #     0xA2E7	0x00AE	# Registered Sign
+# (3) KS X 1001:2002
+#     0xA2E8	0x327E  # CIRCLED KOREAN CHARACTER JUEUI (Postal Code Mark)
 #
 0x00	0x0000
 0x01	0x0001
@@ -295,6 +297,7 @@
 #
 0xA2E6	0x20AC	# EURO Sign
 0xA2E7	0x00AE	# Registered Sign
+0xA2E8	0x327E  # CIRCLED KOREAN CHARACTER JUEUI
 #
 0xA2E0	0x2116	# NUMERO SIGN
 0xA2E1	0x33C7	# SQUARE CO
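
With the added mapping, the EUC-KR byte pair 0xA2 0xE8 decodes to U+327E, the circled postal code mark added in KS X 1001:2002. A quick way to exercise the regenerated charset (a sketch; it assumes a JDK built with this map):

    import java.nio.charset.Charset;

    class EucKrPostalMarkCheck {
        public static void main(String[] args) {
            byte[] postalMark = { (byte) 0xA2, (byte) 0xE8 };
            String decoded = new String(postalMark, Charset.forName("EUC-KR"));
            // Expected with this change: 327e
            System.out.println(Integer.toHexString(decoded.charAt(0)));
        }
    }
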
--- a/jdk/make/tools/src/build/tools/generatebreakiteratordata/CharSet.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/make/tools/src/build/tools/generatebreakiteratordata/CharSet.java	Wed Jul 05 18:59:05 2017 +0200
@@ -39,6 +39,7 @@
 
 package build.tools.generatebreakiteratordata;
 
+import java.util.Arrays;
 import java.util.Hashtable;
 
 /**
@@ -701,7 +702,14 @@
      * the exact same characters as this one
      */
     public boolean equals(Object that) {
-        return (that instanceof CharSet) && chars.equals(((CharSet)that).chars);
+        return (that instanceof CharSet) && Arrays.equals(chars, ((CharSet)that).chars);
+    }
+
+    /**
+     * Returns the hash code for this set of characters
+     */
+    public int hashCode() {
+       return Arrays.hashCode(chars);
     }
 
     /**
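
The equals() fix matters because chars is an array: Object.equals on arrays compares references, so two CharSets holding identical contents previously compared unequal. Once equals() is value-based, hashCode() has to agree with it for the class to behave correctly in hash-based collections, which is what the added Arrays.hashCode() override provides. A tiny illustration of the contract (not taken from the build tool):

    import java.util.Arrays;

    class ArrayEqualityNote {
        public static void main(String[] args) {
            int[] a = { 1, 2, 3 };
            int[] b = { 1, 2, 3 };
            System.out.println(a.equals(b));         // false: reference comparison
            System.out.println(Arrays.equals(a, b)); // true: element-wise comparison
            System.out.println(Arrays.hashCode(a) == Arrays.hashCode(b)); // true
        }
    }
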
--- a/jdk/makefiles/CompileJavaClasses.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/makefiles/CompileJavaClasses.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -342,7 +342,7 @@
 		DISABLE_SJAVAC:=true,\
 		SRC:=$(JDK_TOPDIR)/src/macosx/native/jobjc/src/core/java \
 		     $(JDK_TOPDIR)/src/macosx/native/jobjc/src/runtime-additions/java \
-		     $(JDK_OUTPUTDIR)/gensrc, \
+		     $(JDK_OUTPUTDIR)/gensrc_jobjc/src, \
 		INCLUDES := com/apple/jobjc,\
                 EXCLUDES := tests/java/com/apple/jobjc,\
 		BIN:=$(JDK_OUTPUTDIR)/jobjc_classes,\
@@ -355,7 +355,7 @@
 		SETUP:=GENERATE_JDKBYTECODE,\
 		SRC:=$(JDK_TOPDIR)/src/macosx/native/jobjc/src/core/java \
 		     $(JDK_TOPDIR)/src/macosx/native/jobjc/src/runtime-additions/java \
-		     $(JDK_OUTPUTDIR)/gensrc, \
+		     $(JDK_OUTPUTDIR)/gensrc_jobjc/src, \
 		INCLUDES := com/apple/jobjc,\
                 EXCLUDES := tests/java/com/apple/jobjc,\
 		BIN:=$(JDK_OUTPUTDIR)/jobjc_classes_headers,\
--- a/jdk/makefiles/CompileNativeLibraries.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/makefiles/CompileNativeLibraries.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -2609,21 +2609,22 @@
 
 ##########################################################################################
 
-BUILD_LIBKRB5_NAME:=
-ifeq ($(OPENJDK_TARGET_OS), windows)
+ifneq ($(BUILD_CRYPTO),no)
+  BUILD_LIBKRB5_NAME:=
+  ifeq ($(OPENJDK_TARGET_OS), windows)
      BUILD_LIBKRB5_NAME:=w2k_lsa_auth
      BUILD_LIBKRB5_SRC:=$(JDK_TOPDIR)/src/$(OPENJDK_TARGET_OS_API_DIR)/native/sun/security/krb5
      BUILD_LIBKRB5_LIBS:=advapi32.lib Secur32.lib netapi32.lib kernel32.lib user32.lib \
 			 gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib \
 			 ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib wsock32.lib
-else ifeq ($(OPENJDK_TARGET_OS), macosx)
+  else ifeq ($(OPENJDK_TARGET_OS), macosx)
      BUILD_LIBKRB5_NAME:=osxkrb5
      BUILD_LIBKRB5_SRC:=$(JDK_TOPDIR)/src/share/native/sun/security/krb5
      BUILD_LIBKRB5_LIBS:=-framework Kerberos
-endif
-
-ifneq ($(BUILD_LIBKRB5_NAME),)
-$(eval $(call SetupNativeCompilation,BUILD_LIBKRB5,\
+  endif
+
+  ifneq ($(BUILD_LIBKRB5_NAME),)
+    $(eval $(call SetupNativeCompilation,BUILD_LIBKRB5,\
 		LIBRARY:=$(BUILD_LIBKRB5_NAME),\
                 OUTPUT_DIR:=$(INSTALL_LIBRARIES_HERE),\
 		SRC:=$(BUILD_LIBKRB5_SRC),\
@@ -2643,7 +2644,8 @@
 		OBJECT_DIR:=$(JDK_OUTPUTDIR)/objs/libkrb5,\
 		DEBUG_SYMBOLS:=$(DEBUG_ALL_BINARIES)))
 
-BUILD_LIBRARIES += $(BUILD_LIBKRB5)
+    BUILD_LIBRARIES += $(BUILD_LIBKRB5)
+  endif
 endif
 
 ##########################################################################################
--- a/jdk/makefiles/CreateJars.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/makefiles/CreateJars.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -467,10 +467,15 @@
 	$(MV) $@.tmp $@
 
 ##########################################################################################
-# For all security jars, always build the jar, but for closed, install the prebuilt signed
-# version instead of the newly built jar. Unsigned jars are treated as intermediate targets
-# and explicitly added to the JARS list. For open, signing is not needed. See SignJars.gmk
-# for more information.
+# For security and crypto jars, always build the jar, but for closed, install the prebuilt 
+# signed version instead of the newly built jar. Unsigned jars are treated as intermediate 
+# targets and explicitly added to the JARS list. For open, signing is not needed. See 
+# SignJars.gmk for more information.
+#
+# The source for the crypto jars is not available for all licensees. The BUILD_CRYPTO
+# variable is set to no if these jars can't be built to skip that step of the build.
+# Note that for OPENJDK, the build will fail if BUILD_CRYPTO=no since then there is no 
+# other way to get the jars than to build them.
 
 SUNPKCS11_JAR_DST := $(IMAGES_OUTPUTDIR)/lib/ext/sunpkcs11.jar
 SUNPKCS11_JAR_UNSIGNED := $(IMAGES_OUTPUTDIR)/unsigned/sunpkcs11.jar
@@ -540,7 +545,8 @@
 SUNJCE_PROVIDER_JAR_DST := $(IMAGES_OUTPUTDIR)/lib/ext/sunjce_provider.jar
 SUNJCE_PROVIDER_JAR_UNSIGNED := $(IMAGES_OUTPUTDIR)/unsigned/sunjce_provider.jar
 
-$(eval $(call SetupArchive,BUILD_SUNJCE_PROVIDER_JAR,,\
+ifneq ($(BUILD_CRYPTO),no)
+  $(eval $(call SetupArchive,BUILD_SUNJCE_PROVIDER_JAR,,\
 		SRCS:=$(JDK_OUTPUTDIR)/classes, \
 		SUFFIXES:=.class,\
 		INCLUDES:= com/sun/crypto/provider,\
@@ -548,7 +554,10 @@
                 MANIFEST:=$(JCE_MANIFEST), \
 		SKIP_METAINF := true))
 
-$(SUNJCE_PROVIDER_JAR_UNSIGNED): $(JCE_MANIFEST)
+  $(SUNJCE_PROVIDER_JAR_UNSIGNED): $(JCE_MANIFEST)
+
+  JARS += $(SUNJCE_PROVIDER_JAR_UNSIGNED)
+endif
 
 ifndef OPENJDK
     SUNJCE_PROVIDER_JAR_SRC := $(JDK_TOPDIR)/make/closed/tools/crypto/jce/sunjce_provider.jar
@@ -560,14 +569,13 @@
 	$(install-file)
 endif
 
-JARS += $(SUNJCE_PROVIDER_JAR_UNSIGNED)
-
 ##########################################################################################
 
 JCE_JAR_DST := $(IMAGES_OUTPUTDIR)/lib/jce.jar
 JCE_JAR_UNSIGNED := $(IMAGES_OUTPUTDIR)/unsigned/jce.jar
 
-$(eval $(call SetupArchive,BUILD_JCE_JAR,,\
+ifneq ($(BUILD_CRYPTO),no)
+  $(eval $(call SetupArchive,BUILD_JCE_JAR,,\
 		SRCS:=$(JDK_OUTPUTDIR)/classes, \
 		SUFFIXES:=.class,\
 		INCLUDES:= javax/crypto sun/security/internal,\
@@ -575,101 +583,106 @@
                 MANIFEST:=$(JCE_MANIFEST), \
 		SKIP_METAINF := true))
 
-$(JCE_JAR_UNSIGNED): $(JCE_MANIFEST)
+  $(JCE_JAR_UNSIGNED): $(JCE_MANIFEST)
+
+  JARS +=  $(JCE_JAR_UNSIGNED)
+endif
 
 ifndef OPENJDK
-    JCE_JAR_SRC := $(JDK_TOPDIR)/make/closed/tools/crypto/jce/jce.jar
-    $(JCE_JAR_DST) : $(JCE_JAR_SRC)
+  JCE_JAR_SRC := $(JDK_TOPDIR)/make/closed/tools/crypto/jce/jce.jar
+  $(JCE_JAR_DST) : $(JCE_JAR_SRC)
 	@$(ECHO) $(LOG_INFO) "\n>>>Installing prebuilt jce.jar..."
 	$(install-file)
 else
-    $(JCE_JAR_DST) : $(JCE_JAR_UNSIGNED)
+  $(JCE_JAR_DST) : $(JCE_JAR_UNSIGNED)
 	$(install-file)
 endif
 
-JARS +=  $(JCE_JAR_UNSIGNED)
-
 ##########################################################################################
 
 US_EXPORT_POLICY_JAR_DST := $(IMAGES_OUTPUTDIR)/lib/security/US_export_policy.jar
 US_EXPORT_POLICY_JAR_UNSIGNED := $(IMAGES_OUTPUTDIR)/unsigned/US_export_policy.jar
 
-#
-# TODO fix so that SetupArchive does not write files into SRCS
-#   then we don't need this extra copying
-#
-# NOTE:  We currently do not place restrictions on our limited export
-# policy.  This was not a typo.
-#
-US_EXPORT_POLICY_JAR_SRC_DIR := $(JDK_TOPDIR)/make/javax/crypto/policy/unlimited
-US_EXPORT_POLICY_JAR_TMP := $(IMAGES_OUTPUTDIR)/US_export_policy_jar.tmp
+ifneq ($(BUILD_CRYPTO),no)
+  #
+  # TODO fix so that SetupArchive does not write files into SRCS
+  #   then we don't need this extra copying
 
-$(US_EXPORT_POLICY_JAR_TMP)/% : $(US_EXPORT_POLICY_JAR_SRC_DIR)/%
+  # NOTE:  We currently do not place restrictions on our limited export
+  # policy.  This was not a typo.
+  #
+  US_EXPORT_POLICY_JAR_SRC_DIR := $(JDK_TOPDIR)/make/javax/crypto/policy/unlimited
+  US_EXPORT_POLICY_JAR_TMP := $(IMAGES_OUTPUTDIR)/US_export_policy_jar.tmp
+
+  $(US_EXPORT_POLICY_JAR_TMP)/% : $(US_EXPORT_POLICY_JAR_SRC_DIR)/%
 	$(install-file)
 
-US_EXPORT_POLICY_JAR_DEPS := $(US_EXPORT_POLICY_JAR_TMP)/default_US_export.policy
+  US_EXPORT_POLICY_JAR_DEPS := $(US_EXPORT_POLICY_JAR_TMP)/default_US_export.policy
 
-$(eval $(call SetupArchive,BUILD_US_EXPORT_POLICY_JAR,$(US_EXPORT_POLICY_JAR_DEPS),\
+  $(eval $(call SetupArchive,BUILD_US_EXPORT_POLICY_JAR,$(US_EXPORT_POLICY_JAR_DEPS),\
 		SRCS:=$(US_EXPORT_POLICY_JAR_TMP), \
 		SUFFIXES:= .policy,\
 		JAR:=$(US_EXPORT_POLICY_JAR_UNSIGNED), \
 		EXTRA_MANIFEST_ATTR := Crypto-Strength: unlimited, \
 		SKIP_METAINF := true))
 
+  JARS += $(US_EXPORT_POLICY_JAR_UNSIGNED)
+endif
+
 ifndef OPENJDK
-    $(US_EXPORT_POLICY_JAR_DST): $(JDK_TOPDIR)/make/closed/tools/crypto/jce/US_export_policy.jar
+  $(US_EXPORT_POLICY_JAR_DST): $(JDK_TOPDIR)/make/closed/tools/crypto/jce/US_export_policy.jar
 	$(ECHO) $(LOG_INFO) Copying $(@F)
 	$(install-file)
 else
-    $(US_EXPORT_POLICY_JAR_DST): $(US_EXPORT_POLICY_JAR_UNSIGNED)
+  $(US_EXPORT_POLICY_JAR_DST): $(US_EXPORT_POLICY_JAR_UNSIGNED)
 	$(install-file)
 endif
 
-JARS += $(US_EXPORT_POLICY_JAR_UNSIGNED)
-
 ##########################################################################################
 
 LOCAL_POLICY_JAR_DST := $(IMAGES_OUTPUTDIR)/lib/security/local_policy.jar
 LOCAL_POLICY_JAR_UNSIGNED := $(IMAGES_OUTPUTDIR)/unsigned/local_policy.jar
 
-#
-# TODO fix so that SetupArchive does not write files into SRCS
-#   then we don't need this extra copying
-#
-LOCAL_POLICY_JAR_TMP := $(IMAGES_OUTPUTDIR)/local_policy_jar.tmp
+ifneq ($(BUILD_CRYPTO),no)
+  #
+  # TODO fix so that SetupArchive does not write files into SRCS
+  #   then we don't need this extra copying
+  #
+  LOCAL_POLICY_JAR_TMP := $(IMAGES_OUTPUTDIR)/local_policy_jar.tmp
 
-ifeq ($(UNLIMITED_CRYPTO), true)
+  ifeq ($(UNLIMITED_CRYPTO), true)
     LOCAL_POLICY_JAR_SRC_DIR := $(JDK_TOPDIR)/make/javax/crypto/policy/unlimited
     LOCAL_POLICY_JAR_DEPS := $(LOCAL_POLICY_JAR_TMP)/default_local.policy
     LOCAL_POLICY_JAR_ATTR := Crypto-Strength: unlimited
-else
+  else
     LOCAL_POLICY_JAR_SRC_DIR := $(JDK_TOPDIR)/make/javax/crypto/policy/limited
     LOCAL_POLICY_JAR_DEPS := $(LOCAL_POLICY_JAR_TMP)/exempt_local.policy \
                              $(LOCAL_POLICY_JAR_TMP)/default_local.policy
     LOCAL_POLICY_JAR_ATTR := Crypto-Strength: limited
-endif
+  endif
 
-$(LOCAL_POLICY_JAR_TMP)/% : $(LOCAL_POLICY_JAR_SRC_DIR)/%
+  $(LOCAL_POLICY_JAR_TMP)/% : $(LOCAL_POLICY_JAR_SRC_DIR)/%
 	$(install-file)
 
-$(eval $(call SetupArchive,BUILD_LOCAL_POLICY_JAR,$(LOCAL_POLICY_JAR_DEPS),\
+  $(eval $(call SetupArchive,BUILD_LOCAL_POLICY_JAR,$(LOCAL_POLICY_JAR_DEPS),\
 		SRCS:=$(LOCAL_POLICY_JAR_TMP),\
 		SUFFIXES:= .policy,\
 		JAR:=$(LOCAL_POLICY_JAR_UNSIGNED), \
 		EXTRA_MANIFEST_ATTR := $(LOCAL_POLICY_JAR_ATTR), \
 		SKIP_METAINF := true))
 
+  JARS += $(LOCAL_POLICY_JAR_UNSIGNED)
+endif
+
 ifndef OPENJDK
-    $(LOCAL_POLICY_JAR_DST): $(JDK_TOPDIR)/make/closed/tools/crypto/jce/local_policy.jar
+  $(LOCAL_POLICY_JAR_DST): $(JDK_TOPDIR)/make/closed/tools/crypto/jce/local_policy.jar
 	$(ECHO) $(LOG_INFO) Copying $(@F)
 	$(install-file)
 else
-    $(LOCAL_POLICY_JAR_DST): $(LOCAL_POLICY_JAR_UNSIGNED)
+  $(LOCAL_POLICY_JAR_DST): $(LOCAL_POLICY_JAR_UNSIGNED)
 	$(install-file)
 endif
 
-JARS += $(LOCAL_POLICY_JAR_UNSIGNED)
-
 ##########################################################################################
 
 ifeq ($(OPENJDK_TARGET_OS),windows)
--- a/jdk/makefiles/GendataFontConfig.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/makefiles/GendataFontConfig.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -36,11 +36,9 @@
   ifdef OPENJDK
     GENDATA_FONT_CONFIG_SRC_DIR   := \
         $(JDK_TOPDIR)/src/solaris/classes/sun/awt/fontconfigs
-    GENDATA_FONT_CONFIG_SRC_FILES := \
-	fontconfig.properties \
-	fontconfig.SuSE.properties \
-	fontconfig.Ubuntu.properties \
-	fontconfig.Fedora.properties
+    # This is a placeholder for possible fontconfig files which may be
+    # useful for some highly specialized Linux distributions
+    GENDATA_FONT_CONFIG_SRC_FILES := 
   else
     GENDATA_FONT_CONFIG_SRC_DIR   := \
         $(JDK_TOPDIR)/src/closed/solaris/classes/sun/awt/fontconfigs
--- a/jdk/makefiles/GensrcBuffer.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/makefiles/GensrcBuffer.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -69,6 +69,9 @@
 		$1_fulltype := character
 		$1_Fulltype := Character
 		$1_category := integralType
+                $1_streams  := streamableType
+                $1_streamtype := int
+                $1_Streamtype := Int
 		$1_LBPV     := 1
 	endif
 
@@ -97,7 +100,7 @@
 		$1_Type     := Long
 		$1_fulltype := long
 		$1_Fulltype := Long
-		$1_category := integralType
+		$1_category := integralType	
 		$1_LBPV     := 3
 	endif
 
@@ -231,10 +234,13 @@
 	$(TOOL_SPP) < $$($1_SRC) > $$($1_OUT).tmp \
 		-K$$($1_type) \
 		-K$$($1_category) \
+		-K$$($1_streams) \
 		-Dtype=$$($1_type) \
 		-DType=$$($1_Type) \
 		-Dfulltype=$$($1_fulltype) \
 		-DFulltype=$$($1_Fulltype) \
+                -Dstreamtype=$$($1_streamtype) \
+                -DStreamtype=$$($1_Streamtype) \
 		-Dx=$$($1_x) \
 		-Dmemtype=$$($1_memtype) \
 		-DMemtype=$$($1_Memtype) \
--- a/jdk/makefiles/Import.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/makefiles/Import.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -200,6 +200,46 @@
 	$(RM) $(basename $@).debuginfo
 	$(MV) $@.tmp $@
 
-#######
+##########################################################################################
+# Unpack the binary distributions of the crypto classes if they exist.
+SEC_FILES_ZIP:=$(JDK_TOPDIR)/make/tools/crypto/sec-bin.zip
+SEC_FILES_WIN_ZIP:=$(JDK_TOPDIR)/make/tools/crypto/sec-windows-bin.zip
+JGSS_WIN32_FILES_ZIP:=$(JDK_TOPDIR)/make/tools/crypto/jgss-windows-i586-bin.zip
+JGSS_WIN64_FILES_ZIP:=$(JDK_TOPDIR)/make/tools/crypto/jgss-windows-x64-bin.zip
+
+define unzip-sec-file
+	$(ECHO) Unzipping $(<F)
+	$(MKDIR) -p $(@D)
+	$(RM) $@
+	($(CD) $(JDK_OUTPUTDIR) && $(UNZIP) $< > $@.tmp)
+	$(MV) $@.tmp $@
+endef
+
+$(JDK_OUTPUTDIR)/classes/_the.sec-bin.unzipped: $(SEC_FILES_ZIP)
+	$(call unzip-sec-file)
+
+$(JDK_OUTPUTDIR)/classes/_the.sec-windows-bin.unzipped: $(SEC_FILES_WIN_ZIP)
+	$(call unzip-sec-file)
+
+$(JDK_OUTPUTDIR)/classes/_the.jgss-windows-i586-bin.unzipped: $(JGSS_WIN32_FILES_ZIP)
+	$(call unzip-sec-file)
+
+$(JDK_OUTPUTDIR)/classes/_the.jgss-windows-x64-bin.unzipped: $(JGSS_WIN64_FILES_ZIP)
+	$(call unzip-sec-file)
+
+ifneq ($(wildcard $(SEC_FILES_ZIP)),)
+  IMPORT_TARGET_FILES += $(JDK_OUTPUTDIR)/classes/_the.sec-bin.unzipped
+  ifeq ($(OPENJDK_TARGET_OS),windows)
+    IMPORT_TARGET_FILES += $(JDK_OUTPUTDIR)/classes/_the.sec-windows-bin.unzipped
+    ifeq ($(OPENJDK_TARGET_CPU),x86)
+      IMPORT_TARGET_FILES += $(JDK_OUTPUTDIR)/classes/_the.jgss-windows-i586-bin.unzipped
+    endif
+    ifeq ($(OPENJDK_TARGET_CPU),x86_64)
+      IMPORT_TARGET_FILES += $(JDK_OUTPUTDIR)/classes/_the.jgss-windows-x64-bin.unzipped
+    endif
+  endif
+endif
+
+##########################################################################################
 
 all: $(IMPORT_TARGET_FILES)
--- a/jdk/makefiles/Setup.gmk	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/makefiles/Setup.gmk	Wed Jul 05 18:59:05 2017 +0200
@@ -40,7 +40,9 @@
 $(eval $(call SetupJavaCompiler,GENERATE_JDKBYTECODE,\
      JVM:=$(JAVA),\
      JAVAC:=$(NEW_JAVAC),\
-     FLAGS:=-bootclasspath $(JDK_OUTPUTDIR)/classes -source 8 -target 8 -encoding ascii -XDignore.symbol.file=true $(DISABLE_WARNINGS),\
+     FLAGS:=-bootclasspath $(JDK_OUTPUTDIR)/classes -source 8 -target 8 \
+	    -encoding ascii -XDignore.symbol.file=true $(DISABLE_WARNINGS) \
+	    $(GENERATE_JDKBYTECODE_EXTRA_FLAGS),\
      SERVER_DIR:=$(SJAVAC_SERVER_DIR),\
      SERVER_JVM:=$(SJAVAC_SERVER_JAVA)))
 
--- a/jdk/makefiles/mapfiles/libmanagement/mapfile-vers	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/makefiles/mapfiles/libmanagement/mapfile-vers	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2005, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 #
 # This code is free software; you can redistribute it and/or modify it
@@ -39,6 +39,10 @@
 	    Java_com_sun_management_UnixOperatingSystem_getTotalSwapSpaceSize;
 	    Java_com_sun_management_UnixOperatingSystem_initialize;
 	    Java_sun_management_ClassLoadingImpl_setVerboseClass;
+            Java_sun_management_DiagnosticCommandImpl_executeDiagnosticCommand;
+            Java_sun_management_DiagnosticCommandImpl_getDiagnosticCommands;
+            Java_sun_management_DiagnosticCommandImpl_getDiagnosticCommandInfo;
+            Java_sun_management_DiagnosticCommandImpl_setNotificationEnabled;
 	    Java_sun_management_FileSystemImpl_isAccessUserOnly0;
 	    Java_sun_management_Flag_getAllFlagNames;
 	    Java_sun_management_Flag_getFlags;
--- a/jdk/src/macosx/classes/sun/font/CCharToGlyphMapper.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/macosx/classes/sun/font/CCharToGlyphMapper.java	Wed Jul 05 18:59:05 2017 +0200
@@ -130,7 +130,17 @@
     }
 
     public synchronized int charToGlyph(int unicode) {
-        return charToGlyph((char)unicode);
+        if (unicode >= 0x10000) {
+            int[] glyphs = new int[2];
+            char[] surrogates = new char[2];
+            int base = unicode - 0x10000;
+            surrogates[0] = (char)((base >>> 10) + HI_SURROGATE_START);
+            surrogates[1] = (char)((base % 0x400) + LO_SURROGATE_START);
+            charsToGlyphs(2, surrogates, glyphs);
+            return glyphs[0];
+         } else {
+             return charToGlyph((char)unicode);
+         }
     }
 
     public synchronized void charsToGlyphs(int count, char[] unicodes, int[] glyphs) {
@@ -138,9 +148,9 @@
     }
 
     public synchronized void charsToGlyphs(int count, int[] unicodes, int[] glyphs) {
-        final char[] unicodeChars = new char[count];
-        for (int i = 0; i < count; i++) unicodeChars[i] = (char)unicodes[i];
-        cache.get(count, unicodeChars, glyphs);
+        for (int i = 0; i < count; i++) {
+            glyphs[i] = charToGlyph(unicodes[i]);
+        };
     }
 
     // This mapper returns either the glyph code, or if the character can be
@@ -166,7 +176,7 @@
             firstLayerCache[1] = 1;
         }
 
-        public int get(final char index) {
+        public synchronized int get(final int index) {
             if (index < FIRST_LAYER_SIZE) {
                 // catch common glyphcodes
                 return firstLayerCache[index];
@@ -179,12 +189,12 @@
             }
 
             if (generalCache == null) return 0;
-            final Integer value = generalCache.get(new Integer(index));
+            final Integer value = generalCache.get(index);
             if (value == null) return 0;
             return value.intValue();
         }
 
-        public void put(final char index, final int value) {
+        public synchronized void put(final int index, final int value) {
             if (index < FIRST_LAYER_SIZE) {
                 // catch common glyphcodes
                 firstLayerCache[index] = value;
@@ -204,7 +214,7 @@
                 generalCache = new HashMap<Integer, Integer>();
             }
 
-            generalCache.put(new Integer(index), new Integer(value));
+            generalCache.put(index, value);
         }
 
         private class SparseBitShiftingTwoLayerArray {
@@ -220,14 +230,14 @@
                 this.secondLayerLength = size >> shift;
             }
 
-            public int get(final char index) {
+            public int get(final int index) {
                 final int firstIndex = index >> shift;
                 final int[] firstLayerRow = cache[firstIndex];
                 if (firstLayerRow == null) return 0;
                 return firstLayerRow[index - (firstIndex * (1 << shift))];
             }
 
-            public void put(final char index, final int value) {
+            public void put(final int index, final int value) {
                 final int firstIndex = index >> shift;
                 int[] firstLayerRow = cache[firstIndex];
                 if (firstLayerRow == null) {
@@ -237,77 +247,81 @@
             }
         }
 
-        public void get(int count, char[] indicies, int[] values){
+        public synchronized void get(int count, char[] indicies, int[] values)
+        {
+            // "missed" is the count of 'char' that are not mapped.
+            // Surrogates count for 2.
+            // unmappedChars is the unique list of these chars.
+            // unmappedCharIndices is the location in the original array
             int missed = 0;
-            for(int i = 0; i < count; i++){
-                char code = indicies[i];
+            char[] unmappedChars = null;
+            int [] unmappedCharIndices = null;
+
+            for (int i = 0; i < count; i++){
+                int code = indicies[i];
+                if (code >= HI_SURROGATE_START &&
+                    code <= HI_SURROGATE_END && i < count - 1)
+                {
+                    char low = indicies[i + 1];
+                    if (low >= LO_SURROGATE_START && low <= LO_SURROGATE_END) {
+                        code = (code - HI_SURROGATE_START) * 0x400 +
+                            low - LO_SURROGATE_START + 0x10000;
+                    }
+                }
 
                 final int value = get(code);
-                if(value != 0){
+                if (value != 0 && value != -1) {
                     values[i] = value;
-                }else{
-                    // zero this element out, because the caller does not
-                    // promise to keep it clean
+                    if (code >= 0x10000) {
+                        values[i+1] = INVISIBLE_GLYPH_ID;
+                        i++;
+                    }
+                } else {
                     values[i] = 0;
+                    put(code, -1);
+                    if (unmappedChars == null) {
+                        // This is likely to be longer than we need,
+                        // but is the simplest and cheapest option.
+                        unmappedChars = new char[indicies.length];
+                        unmappedCharIndices = new int[indicies.length];
+                    }
+                    unmappedChars[missed] = indicies[i];
+                    unmappedCharIndices[missed] = i;
+                    if (code >= 0x10000) { // was a surrogate pair
+                        unmappedChars[++missed] = indicies[++i];
+                    }
                     missed++;
                 }
             }
 
-            if (missed == 0) return; // horray! everything is already cached!
-
-            final char[] filteredCodes = new char[missed]; // all index codes requested (partially filled)
-            final int[] filteredIndicies = new int[missed]; // local indicies into filteredCodes array (totally filled)
-
-            // scan, mark, and store the index codes again to send into native
-            int j = 0;
-            int dupes = 0;
-            for (int i = 0; i < count; i++){
-                if (values[i] != 0L) continue; // already filled
-
-                final char code = indicies[i];
-
-                // we have already promised to fill this code - this is a dupe
-                if (get(code) == -1){
-                    filteredIndicies[j] = -1;
-                    dupes++;
-                    j++;
-                    continue;
-                }
-
-                // this is a code we have not obtained before
-                // mark this one as "promise to get" in the global cache with a -1
-                final int k = j - dupes;
-                filteredCodes[k] = code;
-                put(code, -1);
-                filteredIndicies[j] = k;
-                j++;
+            if (missed == 0) {
+                return;
             }
 
-            final int filteredRunLen = j - dupes;
-            final int[] filteredValues = new int[filteredRunLen];
-
-            // bulk call to fill in the distinct values
-            nativeCharsToGlyphs(fFont.getNativeFontPtr(), filteredRunLen, filteredCodes, filteredValues);
+            final int[] glyphCodes = new int[missed];
 
-            // scan the requested list, and fill in values from our
-            // distinct code list which has been filled from "getDistinct"
-            j = 0;
-            for (int i = 0; i < count; i++){
-                if (values[i] != 0L && values[i] != -1L) continue; // already placed
+            // bulk call to fill in the unmapped code points.
+            nativeCharsToGlyphs(fFont.getNativeFontPtr(),
+                                missed, unmappedChars, glyphCodes);
 
-                final int k = filteredIndicies[j]; // index into filteredImages array
-                final char code = indicies[i];
-                if(k == -1L){
-                    // we should have already filled the cache with this value
-                    values[i] = get(code);
-                }else{
-                    // fill the particular code request, and store in the cache
-                    final int ptr = filteredValues[k];
-                    values[i] = ptr;
-                    put(code, ptr);
+            for (int m = 0; m < missed; m++){
+                int i = unmappedCharIndices[m];
+                int code = unmappedChars[m];
+                if (code >= HI_SURROGATE_START &&
+                    code <= HI_SURROGATE_END && m < missed - 1)
+                {
+                    char low = unmappedChars[m + 1];
+                    if (low >= LO_SURROGATE_START && low <= LO_SURROGATE_END) {
+                        code = (code - HI_SURROGATE_START) * 0x400 +
+                            low - LO_SURROGATE_START + 0x10000;
+                    }
                 }
-
-                j++;
+                values[i] = glyphCodes[m];
+                put(code, values[i]);
+                if (code >= 0x10000) {
+                    m++;
+                    values[i + 1] = INVISIBLE_GLYPH_ID;
+                }
             }
         }
     }
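
The replacement charToGlyph(int) and the rewritten cache handle code points outside the Basic
Multilingual Plane by decomposing them into a UTF-16 surrogate pair before the native lookup, then
recombining the pair with the (code - HI_SURROGATE_START) * 0x400 + low - LO_SURROGATE_START + 0x10000
arithmetic. A minimal, self-contained sketch of that arithmetic (class and method names are
illustrative, not part of the patch); Character.toCodePoint is the standard-library equivalent:

    public class SurrogateMathSketch {
        static final int HI_SURROGATE_START = 0xD800;
        static final int LO_SURROGATE_START = 0xDC00;

        // Split a supplementary code point (>= 0x10000) into a UTF-16 surrogate pair,
        // mirroring the arithmetic used in charToGlyph(int) above.
        static char[] toSurrogates(int codePoint) {
            int base = codePoint - 0x10000;
            char high = (char) ((base >>> 10) + HI_SURROGATE_START);
            char low  = (char) ((base & 0x3FF) + LO_SURROGATE_START);
            return new char[] { high, low };
        }

        public static void main(String[] args) {
            int cp = 0x1D11E;                       // MUSICAL SYMBOL G CLEF
            char[] pair = toSurrogates(cp);
            int recombined = (pair[0] - HI_SURROGATE_START) * 0x400
                           + (pair[1] - LO_SURROGATE_START) + 0x10000;
            System.out.println(recombined == cp);                              // true
            System.out.println(Character.toCodePoint(pair[0], pair[1]) == cp); // true
        }
    }
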
--- a/jdk/src/macosx/classes/sun/lwawt/macosx/LWCToolkit.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/macosx/classes/sun/lwawt/macosx/LWCToolkit.java	Wed Jul 05 18:59:05 2017 +0200
@@ -77,8 +77,20 @@
         if (!GraphicsEnvironment.isHeadless()) {
             initIDs();
         }
+        inAWT = AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
+            @Override
+            public Boolean run() {
+                return !Boolean.parseBoolean(System.getProperty("javafx.embed.singleThread", "false"));
+            }
+        });
     }
 
+    /*
+     * If true, we operate in normal mode and the nested run loop is executed in JavaRunLoopMode.
+     * If false, we operate in single-threaded FX/AWT interop mode and the nested loop uses NSDefaultRunLoopMode.
+     */
+    private static final boolean inAWT;
+
     public LWCToolkit() {
         SunToolkit.setDataTransfererClassName("sun.lwawt.macosx.CDataTransferer");
 
@@ -701,7 +713,10 @@
      *
      *                      if false - all events come after exit form the nested loop
      */
-    static native void doAWTRunLoop(long mediator, boolean processEvents);
+    static void doAWTRunLoop(long mediator, boolean processEvents) {
+        doAWTRunLoopImpl(mediator, processEvents, inAWT);
+    }
+    private static native void doAWTRunLoopImpl(long mediator, boolean processEvents, boolean inAWT);
     static native void stopAWTRunLoop(long mediator);
 
     private native boolean nativeSyncQueue(long timeout);
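
The nested run-loop mode is now chosen once from the javafx.embed.singleThread system property,
read inside a privileged block so the lookup still works under a security manager. A hedged sketch
of the same pattern in isolation (the property name comes from the patch; the class name is
illustrative):

    import java.security.AccessController;
    import java.security.PrivilegedAction;

    public class RunLoopModeFlag {
        // true  -> normal AWT mode (nested loops run in JavaRunLoopMode natively)
        // false -> single-threaded FX/AWT interop (nested loops use NSDefaultRunLoopMode)
        static final boolean IN_AWT = AccessController.doPrivileged(
                (PrivilegedAction<Boolean>) () ->
                        !Boolean.parseBoolean(
                                System.getProperty("javafx.embed.singleThread", "false")));

        public static void main(String[] args) {
            // Launch with -Djavafx.embed.singleThread=true to flip the mode.
            System.out.println("inAWT = " + IN_AWT);
        }
    }
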
--- a/jdk/src/macosx/native/sun/awt/AWTWindow.m	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/macosx/native/sun/awt/AWTWindow.m	Wed Jul 05 18:59:05 2017 +0200
@@ -539,7 +539,7 @@
     AWTWindow *opposite = [AWTWindow lastKeyWindow];
     if (!IS(self.styleBits, IS_DIALOG)) {
         [CMenuBar activate:self.javaMenuBar modallyDisabled:NO];
-    } else if (IS(self.styleBits, IS_MODAL)) {
+    } else if ((opposite != NULL) && IS(self.styleBits, IS_MODAL)) {
         [CMenuBar activate:opposite->javaMenuBar modallyDisabled:YES];        
     }
     [AWTWindow setLastKeyWindow:nil];
--- a/jdk/src/macosx/native/sun/awt/CDropTargetContextPeer.m	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/macosx/native/sun/awt/CDropTargetContextPeer.m	Wed Jul 05 18:59:05 2017 +0200
@@ -61,7 +61,6 @@
 JNIEXPORT jlong JNICALL Java_sun_lwawt_macosx_CDropTargetContextPeer_startTransfer
   (JNIEnv *env, jobject jthis, jlong jdroptarget, jlong jformat)
 {
-    AWT_ASSERT_NOT_APPKIT_THREAD;
 
     jlong result = (jlong) 0L;
 
--- a/jdk/src/macosx/native/sun/awt/LWCToolkit.m	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/macosx/native/sun/awt/LWCToolkit.m	Wed Jul 05 18:59:05 2017 +0200
@@ -295,11 +295,11 @@
 
 /*
  * Class:     sun_lwawt_macosx_LWCToolkit
- * Method:    doAWTRunLoop
+ * Method:    doAWTRunLoopImpl
  * Signature: (JZZ)V
  */
-JNIEXPORT void JNICALL Java_sun_lwawt_macosx_LWCToolkit_doAWTRunLoop
-(JNIEnv *env, jclass clz, jlong mediator, jboolean processEvents)
+JNIEXPORT void JNICALL Java_sun_lwawt_macosx_LWCToolkit_doAWTRunLoopImpl
+(JNIEnv *env, jclass clz, jlong mediator, jboolean processEvents, jboolean inAWT)
 {
 AWT_ASSERT_APPKIT_THREAD;
 JNF_COCOA_ENTER(env);
@@ -311,7 +311,7 @@
     // Don't use acceptInputForMode because that doesn't setup autorelease pools properly
     BOOL isRunning = true;
     while (![mediatorObject shouldEndRunLoop] && isRunning) {
-        isRunning = [[NSRunLoop currentRunLoop] runMode:[JNFRunLoop javaRunLoopMode]
+        isRunning = [[NSRunLoop currentRunLoop] runMode:(inAWT ? [JNFRunLoop javaRunLoopMode] : NSDefaultRunLoopMode)
                                              beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.010]];
         if (processEvents) {
             //We do not spin a runloop here as date is nil, so does not matter which mode to use
@@ -340,7 +340,6 @@
 JNIEXPORT void JNICALL Java_sun_lwawt_macosx_LWCToolkit_stopAWTRunLoop
 (JNIEnv *env, jclass clz, jlong mediator)
 {
-AWT_ASSERT_NOT_APPKIT_THREAD;
 JNF_COCOA_ENTER(env);
 
     AWTRunLoopObject* mediatorObject = (AWTRunLoopObject*)jlong_to_ptr(mediator);
--- a/jdk/src/macosx/native/sun/osxapp/QueuingApplicationDelegate.m	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/macosx/native/sun/osxapp/QueuingApplicationDelegate.m	Wed Jul 05 18:59:05 2017 +0200
@@ -110,8 +110,14 @@
 
 - (void)_handleOpenURLEvent:(NSAppleEventDescriptor *)openURLEvent withReplyEvent:(NSAppleEventDescriptor *)replyEvent
 {
+    // Make an explicit copy of the passed events as they may be invalidated by the time they're processed
+    NSAppleEventDescriptor *openURLEventCopy = [openURLEvent copy];
+    NSAppleEventDescriptor *replyEventCopy = [replyEvent copy];
+
     [self.queue addObject:[^(){
-        [self.realDelegate _handleOpenURLEvent:openURLEvent withReplyEvent:replyEvent];
+        [self.realDelegate _handleOpenURLEvent:openURLEventCopy withReplyEvent:replyEventCopy];
+        [openURLEventCopy release];
+        [replyEventCopy release];
     } copy]];
 }
 
--- a/jdk/src/share/classes/com/sun/crypto/provider/DHKeyAgreement.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/com/sun/crypto/provider/DHKeyAgreement.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -403,8 +403,9 @@
             }
             return skey;
         } else if (algorithm.equals("TlsPremasterSecret")) {
-            // return entire secret
-            return new SecretKeySpec(secret, "TlsPremasterSecret");
+            // remove leading zero bytes per RFC 5246 Section 8.1.2
+            return new SecretKeySpec(
+                        KeyUtil.trimZeroes(secret), "TlsPremasterSecret");
         } else {
             throw new NoSuchAlgorithmException("Unsupported secret key "
                                                + "algorithm: "+ algorithm);
--- a/jdk/src/share/classes/com/sun/crypto/provider/HmacPKCS12PBESHA1.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/com/sun/crypto/provider/HmacPKCS12PBESHA1.java	Wed Jul 05 18:59:05 2017 +0200
@@ -86,12 +86,13 @@
             throw new InvalidKeyException("SecretKey of PBE type required");
         }
         if (params == null) {
-            // generate default for salt and iteration count if necessary
-            if (salt == null) {
-                salt = new byte[20];
-                SunJCE.getRandom().nextBytes(salt);
+            // Should not auto-generate default values, since the current
+            // javax.crypto.Mac API does not provide any way for the caller
+            // to retrieve the generated defaults.
+            if ((salt == null) || (iCount == 0)) {
+                throw new InvalidAlgorithmParameterException
+                    ("PBEParameterSpec required for salt and iteration count");
             }
-            if (iCount == 0) iCount = 100;
         } else if (!(params instanceof PBEParameterSpec)) {
             throw new InvalidAlgorithmParameterException
                 ("PBEParameterSpec type required");
--- a/jdk/src/share/classes/com/sun/crypto/provider/PBMAC1Core.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/com/sun/crypto/provider/PBMAC1Core.java	Wed Jul 05 18:59:05 2017 +0200
@@ -42,12 +42,10 @@
  */
 abstract class PBMAC1Core extends HmacCore {
 
-    private static final int DEFAULT_SALT_LENGTH = 20;
-    private static final int DEFAULT_COUNT = 4096;
-
+    // NOTE: this class inherits the Cloneable interface from HmacCore
+    // Need to override clone() if mutable fields are added.
     private final String kdfAlgo;
     private final String hashAlgo;
-    private final PBKDF2Core kdf;
     private final int blockLength; // in octets
 
     /**
@@ -56,13 +54,15 @@
      */
     PBMAC1Core(String kdfAlgo, String hashAlgo, int blockLength)
         throws NoSuchAlgorithmException {
-
         super(hashAlgo, blockLength);
         this.kdfAlgo = kdfAlgo;
         this.hashAlgo = hashAlgo;
         this.blockLength = blockLength;
+    }
 
-        switch(kdfAlgo) {
+    private static PBKDF2Core getKDFImpl(String algo) {
+        PBKDF2Core kdf = null;
+        switch(algo) {
         case "HmacSHA1":
                 kdf = new PBKDF2Core.HmacSHA1();
                 break;
@@ -79,9 +79,10 @@
                 kdf = new PBKDF2Core.HmacSHA512();
                 break;
         default:
-                throw new NoSuchAlgorithmException(
-                    "No MAC implementation for " + kdfAlgo);
+                throw new ProviderException(
+                    "No MAC implementation for " + algo);
         }
+        return kdf;
     }
 
     /**
@@ -120,12 +121,13 @@
             throw new InvalidKeyException("SecretKey of PBE type required");
         }
         if (params == null) {
-            // generate default for salt and iteration count if necessary
-            if (salt == null) {
-                salt = new byte[DEFAULT_SALT_LENGTH];
-                SunJCE.getRandom().nextBytes(salt);
+            // Should not auto-generate default values, since the current
+            // javax.crypto.Mac API does not provide any way for the caller
+            // to retrieve the generated defaults.
+            if ((salt == null) || (iCount == 0)) {
+                throw new InvalidAlgorithmParameterException
+                    ("PBEParameterSpec required for salt and iteration count");
             }
-            if (iCount == 0) iCount = DEFAULT_COUNT;
         } else if (!(params instanceof PBEParameterSpec)) {
             throw new InvalidAlgorithmParameterException
                 ("PBEParameterSpec type required");
@@ -168,7 +170,7 @@
         java.util.Arrays.fill(passwdChars, ' ');
 
         SecretKey s = null;
-
+        PBKDF2Core kdf = getKDFImpl(kdfAlgo);
         try {
             s = kdf.engineGenerateSecret(pbeSpec);
 
--- a/jdk/src/share/classes/com/sun/crypto/provider/SunJCE.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/com/sun/crypto/provider/SunJCE.java	Wed Jul 05 18:59:05 2017 +0200
@@ -731,10 +731,11 @@
                     put("Mac.HmacSHA384 SupportedKeyFormats", "RAW");
                     put("Mac.HmacSHA512 SupportedKeyFormats", "RAW");
                     put("Mac.HmacPBESHA1 SupportedKeyFormats", "RAW");
-                    put("Mac.HmacPBESHA224 SupportedKeyFormats", "RAW");
-                    put("Mac.HmacPBESHA256 SupportedKeyFormats", "RAW");
-                    put("Mac.HmacPBESHA384 SupportedKeyFormats", "RAW");
-                    put("Mac.HmacPBESHA512 SupportedKeyFormats", "RAW");
+                    put("Mac.PBEWithHmacSHA1 SupportedKeyFormatS", "RAW");
+                    put("Mac.PBEWithHmacSHA224 SupportedKeyFormats", "RAW");
+                    put("Mac.PBEWithHmacSHA256 SupportedKeyFormats", "RAW");
+                    put("Mac.PBEWithHmacSHA384 SupportedKeyFormats", "RAW");
+                    put("Mac.PBEWithHmacSHA512 SupportedKeyFormats", "RAW");
                     put("Mac.SslMacMD5 SupportedKeyFormats", "RAW");
                     put("Mac.SslMacSHA1 SupportedKeyFormats", "RAW");
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/src/share/classes/com/sun/management/DiagnosticCommandMBean.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,220 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package com.sun.management;
+
+import java.lang.management.PlatformManagedObject;
+import javax.management.DynamicMBean;
+
+/**
+ * Management interface for the diagnostic commands for the HotSpot Virtual Machine.
+ *
+ * <p>The {@code DiagnosticCommandMBean} is registered with the
+ * {@linkplain java.lang.management.ManagementFactory#getPlatformMBeanServer
+ * platform MBeanServer} as are other platform MBeans.
+ *
+ * <p>The {@link javax.management.ObjectName ObjectName} for uniquely identifying
+ * the diagnostic MBean within an MBeanServer is:
+ * <blockquote>
+ *    {@code com.sun.management:type=DiagnosticCommand}
+ * </blockquote>
+ *
+ * <p>This MBean is a {@link javax.management.DynamicMBean DynamicMBean}
+ * and also a {@link javax.management.NotificationEmitter}.
+ * The {@code DiagnosticCommandMBean} is generated at runtime and is subject to
+ * modifications during the lifetime of the Java virtual machine.
+ *
+ * A <em>diagnostic command</em> is represented as an operation of
+ * the {@code DiagnosticCommandMBean} interface. Each diagnostic command has:
+ * <ul>
+ * <li>the diagnostic command name which is the name being referenced in
+ *     the HotSpot Virtual Machine</li>
+ * <li>the MBean operation name which is the
+ *     {@linkplain javax.management.MBeanOperationInfo#getName() name}
+ *     generated for the diagnostic command operation invocation.
+ *     The MBean operation name is implementation dependent</li>
+ * </ul>
+ *
+ * The recommended way to transform a diagnostic command name into an MBean
+ * operation name is as follows:
+ * <ul>
+ * <li>All characters from the first one to the first dot are set to be
+ *      lower-case characters</li>
+ * <li>Every dot or underline character is removed and the following
+ *   character is set to be an upper-case character</li>
+ * <li>All other characters are copied without modification</li>
+ * </ul>
+ *
+ * <p>The diagnostic command name is always provided with the meta-data on the
+ * operation in a field named {@code dcmd.name} (see below).
+ *
+ * <p>A diagnostic command may or may not support options or arguments.
+ * All the operations return {@code String} and either take
+ * no parameter for operations that do not support any option or argument,
+ * or take a {@code String[]} parameter for operations that support at least
+ * one option or argument.
+ * Each option or argument must be stored in a single String.
+ * Options or arguments split across several String instances are not supported.
+ *
+ * <p>The distinction between options and arguments is that options are
+ * identified by their name, while arguments are identified by their position
+ * in the command line. Options and arguments are processed in the order of
+ * the array passed to the invocation method.
+ *
+ * <p>Like any operation of a dynamic MBean, each of these operations is
+ * described by an {@link javax.management.MBeanOperationInfo MBeanOperationInfo}
+ * instance. The values returned by this object are:
+ * <ul>
+ *  <li>{@link javax.management.MBeanOperationInfo#getName() getName()}
+ *      returns the operation name generated from the diagnostic command name</li>
+ *  <li>{@link javax.management.MBeanOperationInfo#getDescription() getDescription()}
+ *      returns the diagnostic command description
+ *      (the same as the one returned by the 'help' command)</li>
+ *  <li>{@link javax.management.MBeanOperationInfo#getImpact() getImpact()}
+ *      returns <code>ACTION_INFO</code></li>
+ *  <li>{@link javax.management.MBeanOperationInfo#getReturnType() getReturnType()}
+ *      returns {@code java.lang.String}</li>
+ *  <li>{@link javax.management.MBeanOperationInfo#getDescriptor() getDescriptor()}
+ *      returns a Descriptor instance (see below)</li>
+ * </ul>
+ *
+ * <p>The {@link javax.management.Descriptor Descriptor}
+ * is a collection of fields containing additional
+ * meta-data for a JMX element. A field is a name and an associated value.
+ * The additional meta-data provided for an operation associated with a
+ * diagnostic command is described in the table below:
+ * <p>
+ *
+ * <table border="1" cellpadding="5">
+ *   <tr>
+ *     <th>Name</th><th>Type</th><th>Description</th>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.name</td><td>String</td>
+ *     <td>The original diagnostic command name (not the operation name)</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.description</td><td>String</td>
+ *     <td>The diagnostic command description</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.help</td><td>String</td>
+ *     <td>The full help message for this diagnostic command (same output as
+ *          the one produced by the 'help' command)</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.vmImpact</td><td>String</td>
+ *     <td>The impact of the diagnostic command.
+ *      This value is the same as the one printed in the 'impact'
+ *      section of the help message of the diagnostic command, and it
+ *      is different from the getImpact() of the MBeanOperationInfo</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.enabled</td><td>boolean</td>
+ *     <td>True if the diagnostic command is enabled, false otherwise</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.permissionClass</td><td>String</td>
+ *     <td>Some diagnostic commands might require a specific permission to be
+ *          executed, in addition to the MBeanPermission to invoke their
+ *          associated MBean operation. This field returns the fully qualified
+ *          name of the permission class or null if no permission is required
+ *   </td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.permissionName</td><td>String</td>
+ *     <td>The first argument of the permission required to execute this
+ *          diagnostic command or null if no permission is required</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.permissionAction</td><td>String</td>
+ *     <td>The second argument of the permission required to execute this
+ *          diagnostic command or null if the permission constructor has only
+ *          one argument (like the ManagementPermission) or if no permission
+ *          is required</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.arguments</td><td>Descriptor</td>
+ *     <td>A Descriptor instance containing the descriptions of options and
+ *          arguments supported by the diagnostic command (see below)</td>
+ *   </tr>
+ * </table>
+ * <p>
+ *
+ * <p>The description of parameters (options or arguments) of a diagnostic
+ * command is provided within a Descriptor instance. In this Descriptor,
+ * each field name is a parameter name, and each field value is itself
+ * a Descriptor instance. The fields provided in this second Descriptor
+ * instance are described in the table below:
+ *
+ * <table border="1" cellpadding="5">
+ *   <tr>
+ *     <th>Name</th><th>Type</th><th>Description</th>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.arg.name</td><td>String</td>
+ *     <td>The name of the parameter</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.arg.type</td><td>String</td>
+ *     <td>The type of the parameter. The returned String is the name of a type
+ *          recognized by the diagnostic command parser. These types are not
+ *          Java types and are implementation dependent.
+ *          </td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.arg.description</td><td>String</td>
+ *     <td>The parameter description</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.arg.isMandatory</td><td>boolean</td>
+ *     <td>True if the parameter is mandatory, false otherwise</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.arg.isOption</td><td>boolean</td>
+ *     <td>True if the parameter is an option, false if it is an argument</td>
+ *   </tr>
+ *   <tr>
+ *     <td>dcmd.arg.isMultiple</td><td>boolean</td>
+ *     <td>True if the parameter can be specified several times, false
+ *          otherwise</td>
+ *   </tr>
+ * </table>
+ *
+ * <p>When the set of diagnostic commands currently supported by the Java
+ * Virtual Machine is modified, the {@code DiagnosticCommandMBean} emits
+ * a {@link javax.management.Notification} with a
+ * {@linkplain javax.management.Notification#getType() type} of
+ * <a href="{@docRoot}/../../../../api/javax/management/MBeanInfo.html#info-changed">
+ * {@code "jmx.mbean.info.changed"}</a> and a
+ * {@linkplain javax.management.Notification#getUserData() userData} that
+ * is the new {@code MBeanInfo}.
+ *
+ * @since 8
+ */
+public interface DiagnosticCommandMBean extends DynamicMBean
+{
+
+}
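
Under the documented name transformation, a HotSpot command such as Thread.print would surface as
an operation named threadPrint on the com.sun.management:type=DiagnosticCommand MBean. A hedged
sketch of invoking it through the platform MBeanServer (the operation name and the "-l" option are
derived from the rules above and from the jcmd help output, not taken verbatim from this patch):

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class DcmdInvokeSketch {
        public static void main(String[] args) throws Exception {
            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
            ObjectName name = new ObjectName("com.sun.management:type=DiagnosticCommand");

            // Operations that accept options/arguments take a single String[] parameter.
            String result = (String) server.invoke(
                    name,
                    "threadPrint",                              // from "Thread.print"
                    new Object[] { new String[] { "-l" } },     // e.g. also print lock info
                    new String[] { String[].class.getName() }); // signature: (String[])
            System.out.println(result);
        }
    }
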
--- a/jdk/src/share/classes/java/awt/ContainerOrderFocusTraversalPolicy.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/awt/ContainerOrderFocusTraversalPolicy.java	Wed Jul 05 18:59:05 2017 +0200
@@ -42,7 +42,7 @@
  * Component to focus. This behavior can be disabled using the
  * <code>setImplicitDownCycleTraversal</code> method.
  * <p>
- * By default, methods of this class with return a Component only if it is
+ * By default, methods of this class will return a Component only if it is
  * visible, displayable, enabled, and focusable. Subclasses can modify this
  * behavior by overriding the <code>accept</code> method.
  * <p>
--- a/jdk/src/share/classes/java/awt/DefaultKeyboardFocusManager.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/awt/DefaultKeyboardFocusManager.java	Wed Jul 05 18:59:05 2017 +0200
@@ -559,6 +559,7 @@
                         } else {
                             restoreFocus(fe, newFocusedWindow);
                         }
+                        setMostRecentFocusOwner(newFocusedWindow, null); // see: 8013773
                     }
                     break;
                 }
--- a/jdk/src/share/classes/java/awt/EventQueue.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/awt/EventQueue.java	Wed Jul 05 18:59:05 2017 +0200
@@ -37,16 +37,10 @@
 
 import java.util.EmptyStackException;
 
+import sun.awt.*;
 import sun.awt.dnd.SunDropTargetEvent;
 import sun.util.logging.PlatformLogger;
 
-import sun.awt.AppContext;
-import sun.awt.AWTAutoShutdown;
-import sun.awt.PeerEvent;
-import sun.awt.SunToolkit;
-import sun.awt.EventQueueItem;
-import sun.awt.AWTAccessor;
-
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -181,6 +175,8 @@
 
     private final String name = "AWT-EventQueue-" + threadInitNumber.getAndIncrement();
 
+    private FwDispatcher fwDispatcher;
+
     private static final PlatformLogger eventLog = PlatformLogger.getLogger("java.awt.event.EventQueue");
 
     static {
@@ -209,6 +205,10 @@
                 {
                     EventQueue.invokeAndWait(source, r);
                 }
+                public void setFwDispatcher(EventQueue eventQueue,
+                                            FwDispatcher dispatcher) {
+                    eventQueue.setFwDispatcher(dispatcher);
+                }
             });
     }
 
@@ -684,7 +684,16 @@
         final Object src = event.getSource();
         final PrivilegedAction<Void> action = new PrivilegedAction<Void>() {
             public Void run() {
-                dispatchEventImpl(event, src);
+                if (fwDispatcher == null) {
+                    dispatchEventImpl(event, src);
+                } else {
+                    fwDispatcher.scheduleDispatch(new Runnable() {
+                        @Override
+                        public void run() {
+                            dispatchEventImpl(event, src);
+                        }
+                    });
+                }
                 return null;
             }
         };
@@ -844,7 +853,9 @@
             while (topQueue.nextQueue != null) {
                 topQueue = topQueue.nextQueue;
             }
-
+            if (topQueue.fwDispatcher != null) {
+                throw new RuntimeException("push() to queue with fwDispatcher");
+            }
             if ((topQueue.dispatchThread != null) &&
                 (topQueue.dispatchThread.getEventQueue() == this))
             {
@@ -975,6 +986,9 @@
                 // Forward the request to the top of EventQueue stack
                 return nextQueue.createSecondaryLoop(cond, filter, interval);
             }
+            if (fwDispatcher != null) {
+                return fwDispatcher.createSecondaryLoop();
+            }
             if (dispatchThread == null) {
                 initDispatchThread();
             }
@@ -1018,6 +1032,9 @@
                 eq = next;
                 next = eq.nextQueue;
             }
+            if (eq.fwDispatcher != null) {
+                return eq.fwDispatcher.isDispatchThread();
+            }
             return (Thread.currentThread() == eq.dispatchThread);
         } finally {
             pushPopLock.unlock();
@@ -1303,6 +1320,15 @@
             pushPopLock.unlock();
         }
     }
+
+    // This method is used by AWTAccessor for the JavaFX/AWT single-threaded mode.
+    private void setFwDispatcher(FwDispatcher dispatcher) {
+        if (nextQueue != null) {
+            nextQueue.setFwDispatcher(dispatcher);
+        } else {
+            fwDispatcher = dispatcher;
+        }
+    }
 }
 
 /**
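
FwDispatcher (in sun.awt) lets an embedding framework take over event dispatch: when it is set,
dispatchEvent hands work to scheduleDispatch, isDispatchThread defers to the dispatcher, and
secondary loops are created by it. A hedged sketch of an implementation that funnels dispatch onto
a single executor thread; the interface shape is inferred from the calls in this patch and mirrored
locally, and createSecondaryLoop is left as a stub:

    import java.awt.SecondaryLoop;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicReference;

    public class SingleThreadDispatcherSketch {

        // Local mirror of the dispatcher shape used above; the real sun.awt.FwDispatcher
        // interface may differ in detail.
        interface Dispatcher {
            boolean isDispatchThread();
            void scheduleDispatch(Runnable r);
            SecondaryLoop createSecondaryLoop();
        }

        static class ExecutorDispatcher implements Dispatcher {
            private final AtomicReference<Thread> dispatchThread = new AtomicReference<>();
            private final ExecutorService executor = Executors.newSingleThreadExecutor(r -> {
                Thread t = new Thread(r, "fx-awt-dispatch");
                dispatchThread.set(t);
                return t;
            });

            @Override
            public boolean isDispatchThread() {
                return Thread.currentThread() == dispatchThread.get();
            }

            @Override
            public void scheduleDispatch(Runnable r) {
                executor.execute(r);
            }

            @Override
            public SecondaryLoop createSecondaryLoop() {
                // A real implementation would return a loop that keeps pumping this
                // dispatcher's queue until exit() is called; omitted in this sketch.
                throw new UnsupportedOperationException("not sketched");
            }
        }
    }
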
--- a/jdk/src/share/classes/java/awt/Window.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/awt/Window.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1200,6 +1200,7 @@
             }
         }
     }
+        boolean fireWindowClosedEvent = isDisplayable();
         DisposeAction action = new DisposeAction();
         if (EventQueue.isDispatchThread()) {
             action.run();
@@ -1220,7 +1221,9 @@
         // Execute outside the Runnable because postWindowEvent is
         // synchronized on (this). We don't need to synchronize the call
         // on the EventQueue anyways.
-        postWindowEvent(WindowEvent.WINDOW_CLOSED);
+        if (fireWindowClosedEvent) {
+            postWindowEvent(WindowEvent.WINDOW_CLOSED);
+        }
     }
 
     /*
--- a/jdk/src/share/classes/java/beans/XMLEncoder.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/beans/XMLEncoder.java	Wed Jul 05 18:59:05 2017 +0200
@@ -487,6 +487,12 @@
         }
         indentation--;
 
+        Statement statement = getMissedStatement();
+        while (statement != null) {
+            outputStatement(statement, this, false);
+            statement = getMissedStatement();
+        }
+
         try {
             out.flush();
         }
@@ -503,6 +509,17 @@
         targetToStatementList.clear();
     }
 
+    Statement getMissedStatement() {
+        for (List<Statement> statements : this.targetToStatementList.values()) {
+            for (int i = 0; i < statements.size(); i++) {
+                if (Statement.class == statements.get(i).getClass()) {
+                    return statements.remove(i);
+                }
+            }
+        }
+        return null;
+    }
+
 
     /**
      * This method calls <code>flush</code>, writes the closing
@@ -597,7 +614,7 @@
                                                 "methodName") + " should not be null");
             }
 
-            if (target instanceof Field && methodName.equals("get")) {
+            if (isArgument && target instanceof Field && methodName.equals("get")) {
                 Field f = (Field)target;
                 writeln("<object class=" + quote(f.getDeclaringClass().getName()) +
                         " field=" + quote(f.getName()) + "/>");
--- a/jdk/src/share/classes/java/lang/Integer.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/lang/Integer.java	Wed Jul 05 18:59:05 2017 +0200
@@ -26,7 +26,6 @@
 package java.lang;
 
 import java.lang.annotation.Native;
-import java.util.Properties;
 
 /**
  * The {@code Integer} class wraps a value of the primitive type
@@ -185,7 +184,7 @@
      * @since 1.8
      */
     public static String toUnsignedString(int i, int radix) {
-        return Long.toString(toUnsignedLong(i), radix);
+        return Long.toUnsignedString(toUnsignedLong(i), radix);
     }
 
     /**
@@ -307,20 +306,39 @@
     /**
      * Convert the integer to an unsigned number.
      */
-    private static String toUnsignedString0(int i, int shift) {
-        char[] buf = new char[32];
-        int charPos = 32;
+    private static String toUnsignedString0(int val, int shift) {
+        // assert shift > 0 && shift <=5 : "Illegal shift value";
+        int mag = Integer.SIZE - Integer.numberOfLeadingZeros(val);
+        int chars = Math.max(((mag + (shift - 1)) / shift), 1);
+        char[] buf = new char[chars];
+
+        formatUnsignedInt(val, shift, buf, 0, chars);
+
+        // Use special constructor which takes over "buf".
+        return new String(buf, true);
+    }
+
+    /**
+     * Format an int (treated as unsigned) into a character buffer.
+     * @param val the unsigned int to format
+     * @param shift the log2 of the base to format in (4 for hex, 3 for octal, 1 for binary)
+     * @param buf the character buffer to write to
+     * @param offset the offset in the destination buffer to start at
+     * @param len the number of characters to write
+     * @return the lowest character location used
+     */
+     static int formatUnsignedInt(int val, int shift, char[] buf, int offset, int len) {
+        int charPos = len;
         int radix = 1 << shift;
         int mask = radix - 1;
         do {
-            buf[--charPos] = digits[i & mask];
-            i >>>= shift;
-        } while (i != 0);
+            buf[offset + --charPos] = Integer.digits[val & mask];
+            val >>>= shift;
+        } while (val != 0 && charPos > 0);
 
-        return new String(buf, charPos, (32 - charPos));
+        return charPos;
     }
 
-
     final static char [] DigitTens = {
         '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
         '1', '1', '1', '1', '1', '1', '1', '1', '1', '1',
@@ -875,6 +893,7 @@
      * Returns the value of this {@code Integer} as a {@code long}
      * after a widening primitive conversion.
      * @jls 5.1.2 Widening Primitive Conversions
+     * @see Integer#toUnsignedLong(int)
      */
     public long longValue() {
         return (long)value;
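
The rewritten toUnsignedString0 sizes the output buffer exactly instead of always allocating 32
chars: a value with mag significant bits printed in base 2^shift needs ceil(mag / shift)
characters, and at least one. A small worked check of that arithmetic against the public API
(illustrative only):

    public class UnsignedSizingSketch {
        // Characters needed to print 'val' as an unsigned number in base 2^shift.
        static int chars(int val, int shift) {
            int mag = Integer.SIZE - Integer.numberOfLeadingZeros(val);
            return Math.max((mag + (shift - 1)) / shift, 1);
        }

        public static void main(String[] args) {
            System.out.println(chars(-1, 4));                      // 8
            System.out.println(Integer.toUnsignedString(-1, 16));  // ffffffff (8 digits)
            System.out.println(chars(0, 1));                       // 1 ("0" still needs a char)
            System.out.println(chars(255, 3));                     // 3 (octal 377)
        }
    }
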
--- a/jdk/src/share/classes/java/lang/Long.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/lang/Long.java	Wed Jul 05 18:59:05 2017 +0200
@@ -28,6 +28,7 @@
 import java.lang.annotation.Native;
 import java.math.*;
 
+
 /**
  * The {@code Long} class wraps a value of the primitive type {@code
  * long} in an object. An object of type {@code Long} contains a
@@ -344,18 +345,39 @@
     }
 
     /**
-     * Convert the integer to an unsigned number.
+     * Format a long (treated as unsigned) into a String.
+     * @param val the value to format
+     * @param shift the log2 of the base to format in (4 for hex, 3 for octal, 1 for binary)
      */
-    private static String toUnsignedString0(long i, int shift) {
-        char[] buf = new char[64];
-        int charPos = 64;
+    static String toUnsignedString0(long val, int shift) {
+        // assert shift > 0 && shift <=5 : "Illegal shift value";
+        int mag = Long.SIZE - Long.numberOfLeadingZeros(val);
+        int chars = Math.max(((mag + (shift - 1)) / shift), 1);
+        char[] buf = new char[chars];
+
+        formatUnsignedLong(val, shift, buf, 0, chars);
+        return new String(buf, true);
+    }
+
+    /**
+     * Format a long (treated as unsigned) into a character buffer.
+     * @param val the unsigned long to format
+     * @param shift the log2 of the base to format in (4 for hex, 3 for octal, 1 for binary)
+     * @param buf the character buffer to write to
+     * @param offset the offset in the destination buffer to start at
+     * @param len the number of characters to write
+     * @return the lowest character location used
+     */
+     static int formatUnsignedLong(long val, int shift, char[] buf, int offset, int len) {
+        int charPos = len;
         int radix = 1 << shift;
-        long mask = radix - 1;
+        int mask = radix - 1;
         do {
-            buf[--charPos] = Integer.digits[(int)(i & mask)];
-            i >>>= shift;
-        } while (i != 0);
-        return new String(buf, charPos, (64 - charPos));
+            buf[offset + --charPos] = Integer.digits[((int) val) & mask];
+            val >>>= shift;
+        } while (val != 0 && charPos > 0);
+
+        return charPos;
     }
 
     /**
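
The same reshaping is applied on the long side, and toUnsignedString0 is the helper behind the
radix-2^n shortcuts (toHexString, toOctalString, toBinaryString), so the generic unsigned
conversion and those shortcuts should keep producing identical digits. A quick illustrative check
via the public API:

    public class UnsignedLongFormatCheck {
        public static void main(String[] args) {
            long v = -2L;  // 0xFFFFFFFFFFFFFFFE when treated as unsigned
            System.out.println(Long.toHexString(v));           // fffffffffffffffe
            System.out.println(Long.toUnsignedString(v, 16));  // fffffffffffffffe
            System.out.println(Long.toHexString(v).equals(Long.toUnsignedString(v, 16))); // true
        }
    }
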
--- a/jdk/src/share/classes/java/lang/management/ManagementFactory.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/lang/management/ManagementFactory.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -42,7 +42,9 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Set;
+import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Map;
 import java.security.AccessController;
 import java.security.Permission;
 import java.security.PrivilegedAction;
@@ -482,6 +484,11 @@
                     }
                 }
             }
+            HashMap<ObjectName, DynamicMBean> dynmbeans =
+                    ManagementFactoryHelper.getPlatformDynamicMBeans();
+            for (Map.Entry<ObjectName, DynamicMBean> e : dynmbeans.entrySet()) {
+                addDynamicMBean(platformMBeanServer, e.getValue(), e.getKey());
+            }
         }
         return platformMBeanServer;
     }
@@ -825,4 +832,24 @@
         }
     }
 
+    /**
+     * Registers a DynamicMBean.
+     */
+    private static void addDynamicMBean(final MBeanServer mbs,
+                                        final DynamicMBean dmbean,
+                                        final ObjectName on) {
+        try {
+            AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
+                @Override
+                public Void run() throws InstanceAlreadyExistsException,
+                                         MBeanRegistrationException,
+                                         NotCompliantMBeanException {
+                    mbs.registerMBean(dmbean, on);
+                    return null;
+                }
+            });
+        } catch (PrivilegedActionException e) {
+            throw new RuntimeException(e.getException());
+        }
+    }
 }
--- a/jdk/src/share/classes/java/net/HttpCookie.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/net/HttpCookie.java	Wed Jul 05 18:59:05 2017 +0200
@@ -128,8 +128,7 @@
      *         a {@code String} specifying the value of the cookie
      *
      * @throws  IllegalArgumentException
-     *          if the cookie name contains illegal characters or it is one of
-     *          the tokens reserved for use by the cookie protocol
+     *          if the cookie name contains illegal characters
      * @throws  NullPointerException
      *          if {@code name} is {@code null}
      *
@@ -142,7 +141,7 @@
 
     private HttpCookie(String name, String value, String header) {
         name = name.trim();
-        if (name.length() == 0 || !isToken(name)) {
+        if (name.length() == 0 || !isToken(name) || name.charAt(0) == '$') {
             throw new IllegalArgumentException("Illegal cookie name");
         }
 
@@ -170,9 +169,8 @@
      * @return  a List of cookie parsed from header line string
      *
      * @throws  IllegalArgumentException
-     *          if header string violates the cookie specification's syntax, or
-     *          the cookie name contains illegal characters, or the cookie name
-     *          is one of the tokens reserved for use by the cookie protocol
+     *          if the header string violates the cookie specification's syntax or
+     *          the cookie name contains illegal characters.
      * @throws  NullPointerException
      *          if the header string is {@code null}
      */
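
The constructor now rejects names that start with '$' directly, and the javadoc no longer promises
a broader reserved-token check. A small illustrative check of the post-change behaviour:

    import java.net.HttpCookie;

    public class CookieNameCheck {
        public static void main(String[] args) {
            HttpCookie ok = new HttpCookie("sessionId", "abc123");  // accepted
            System.out.println(ok.getName());

            try {
                new HttpCookie("$Version", "1");                    // leading '$' is rejected
            } catch (IllegalArgumentException expected) {
                System.out.println("rejected: " + expected.getMessage());
            }
        }
    }
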
--- a/jdk/src/share/classes/java/net/HttpURLPermission.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/net/HttpURLPermission.java	Wed Jul 05 18:59:05 2017 +0200
@@ -377,7 +377,7 @@
             throw new IllegalArgumentException ("unexpected URL scheme");
         }
         if (!u.getSchemeSpecificPart().equals("*")) {
-            u = URI.create(scheme + "://" + u.getAuthority() + u.getPath());
+            u = URI.create(scheme + "://" + u.getRawAuthority() + u.getRawPath());
         }
         return u;
     }
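
Switching to getRawAuthority()/getRawPath() keeps percent-encoded octets intact when the
permission's URI is rebuilt; getPath() would decode them, so distinct encoded paths could collapse
into the same string. A short illustration of the difference with java.net.URI:

    import java.net.URI;

    public class RawPathDemo {
        public static void main(String[] args) {
            URI u = URI.create("http://example.com/a%20b/%2F");
            System.out.println(u.getPath());     // "/a b//"     - decoded; %2F becomes a slash
            System.out.println(u.getRawPath());  // "/a%20b/%2F" - encoding preserved
        }
    }
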
--- a/jdk/src/share/classes/java/nio/Buffer.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/nio/Buffer.java	Wed Jul 05 18:59:05 2017 +0200
@@ -25,6 +25,7 @@
 
 package java.nio;
 
+import java.util.Spliterator;
 
 /**
  * A container for data of a specific primitive type.
@@ -173,6 +174,13 @@
 
 public abstract class Buffer {
 
+    /**
+     * The characteristics of Spliterators that traverse and split elements
+     * maintained in Buffers.
+     */
+    static final int SPLITERATOR_CHARACTERISTICS =
+        Spliterator.SIZED | Spliterator.SUBSIZED | Spliterator.ORDERED;
+
     // Invariants: mark <= position <= limit <= capacity
     private int mark = -1;
     private int position = 0;
--- a/jdk/src/share/classes/java/nio/ByteBufferAs-X-Buffer.java.template	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/nio/ByteBufferAs-X-Buffer.java.template	Wed Jul 05 18:59:05 2017 +0200
@@ -115,6 +115,12 @@
         return Bits.get$Type$$BO$(bb, ix(checkIndex(i)));
     }
 
+#if[streamableType]
+   $type$ getUnchecked(int i) {
+        return Bits.get$Type$$BO$(bb, ix(i));
+    }
+#end[streamableType]
+
 #end[rw]
 
     public $Type$Buffer put($type$ x) {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/src/share/classes/java/nio/CharBufferSpliterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package java.nio;
+
+import java.util.Comparator;
+import java.util.Spliterator;
+import java.util.function.IntConsumer;
+
+/**
+ * A Spliterator.OfInt for sources that traverse and split elements
+ * maintained in a CharBuffer.
+ *
+ * @implNote
+ * The implementation is based on the code for the Array-based spliterators.
+ */
+class CharBufferSpliterator implements Spliterator.OfInt {
+    private final CharBuffer buffer;
+    private int index;   // current index, modified on advance/split
+    private final int limit;
+
+    CharBufferSpliterator(CharBuffer buffer) {
+        this(buffer, buffer.position(), buffer.limit());
+    }
+
+    CharBufferSpliterator(CharBuffer buffer, int origin, int limit) {
+        assert origin <= limit;
+        this.buffer = buffer;
+        this.index = (origin <= limit) ? origin : limit;
+        this.limit = limit;
+    }
+
+    @Override
+    public OfInt trySplit() {
+        int lo = index, mid = (lo + limit) >>> 1;
+        return (lo >= mid)
+               ? null
+               : new CharBufferSpliterator(buffer, lo, index = mid);
+    }
+
+    @Override
+    public void forEachRemaining(IntConsumer action) {
+        if (action == null)
+            throw new NullPointerException();
+        CharBuffer cb = buffer;
+        int i = index;
+        int hi = limit;
+        index = hi;
+        while (i < hi) {
+            action.accept(cb.getUnchecked(i++));
+        }
+    }
+
+    @Override
+    public boolean tryAdvance(IntConsumer action) {
+        if (action == null)
+            throw new NullPointerException();
+        if (index >= 0 && index < limit) {
+            action.accept(buffer.getUnchecked(index++));
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public long estimateSize() {
+        return (long)(limit - index);
+    }
+
+    @Override
+    public int characteristics() {
+        return Buffer.SPLITERATOR_CHARACTERISTICS;
+    }
+}
--- a/jdk/src/share/classes/java/nio/Direct-X-Buffer.java.template	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/nio/Direct-X-Buffer.java.template	Wed Jul 05 18:59:05 2017 +0200
@@ -253,6 +253,12 @@
         return $fromBits$($swap$(unsafe.get$Swaptype$(ix(checkIndex(i)))));
     }
 
+#if[streamableType]
+    $type$ getUnchecked(int i) {
+        return $fromBits$($swap$(unsafe.get$Swaptype$(ix(i))));
+    }
+#end[streamableType]
+
     public $Type$Buffer get($type$[] dst, int offset, int length) {
 #if[rw]
         if ((length << $LG_BYTES_PER_VALUE$) > Bits.JNI_COPY_TO_ARRAY_THRESHOLD) {
--- a/jdk/src/share/classes/java/nio/Heap-X-Buffer.java.template	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/nio/Heap-X-Buffer.java.template	Wed Jul 05 18:59:05 2017 +0200
@@ -139,6 +139,12 @@
         return hb[ix(checkIndex(i))];
     }
 
+#if[streamableType]
+    $type$ getUnchecked(int i) {
+        return hb[ix(i)];
+    }
+#end[streamableType]
+
     public $Type$Buffer get($type$[] dst, int offset, int length) {
         checkBounds(offset, length, dst.length);
         if (length > remaining())
--- a/jdk/src/share/classes/java/nio/StringCharBuffer.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/nio/StringCharBuffer.java	Wed Jul 05 18:59:05 2017 +0200
@@ -77,6 +77,10 @@
         return str.charAt(checkIndex(index) + offset);
     }
 
+    char getUnchecked(int index) {
+        return str.charAt(index + offset);
+    }
+
     // ## Override bulk get methods for better performance
 
     public final CharBuffer put(char c) {
--- a/jdk/src/share/classes/java/nio/X-Buffer.java.template	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/nio/X-Buffer.java.template	Wed Jul 05 18:59:05 2017 +0200
@@ -30,6 +30,11 @@
 #if[char]
 import java.io.IOException;
 #end[char]
+#if[streamableType]
+import java.util.Spliterator;
+import java.util.stream.StreamSupport;
+import java.util.stream.$Streamtype$Stream;
+#end[streamableType]
 
 /**
  * $A$ $type$ buffer.
@@ -589,6 +594,19 @@
      */
     public abstract $type$ get(int index);
 
+#if[streamableType]
+    /**
+     * Absolute <i>get</i> method.  Reads the $type$ at the given
+     * index without any validation of the index.
+     *
+     * @param  index
+     *         The index from which the $type$ will be read
+     *
+     * @return  The $type$ at the given index
+     */
+    abstract $type$ getUnchecked(int index);   // package-private
+#end[streamableType]
+
     /**
      * Absolute <i>put</i> method&nbsp;&nbsp;<i>(optional operation)</i>.
      *
@@ -1458,4 +1476,16 @@
 
 #end[byte]
 
+#if[streamableType]
+
+#if[char]
+    @Override
+#end[char]
+    public $Streamtype$Stream $type$s() {
+        return StreamSupport.$streamtype$Stream(() -> new $Type$BufferSpliterator(this),
+            Buffer.SPLITERATOR_CHARACTERISTICS);
+    }
+
+#end[streamableType]
+
 }
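
For the char specialization this template adds a CharBuffer.chars() override backed by the new
getUnchecked accessor and the buffer spliterator, streaming the characters between the buffer's
position and its limit. A short usage sketch of the public API:

    import java.nio.CharBuffer;
    import java.util.stream.Collectors;

    public class CharBufferStreamSketch {
        public static void main(String[] args) {
            CharBuffer cb = CharBuffer.wrap("hello, streams");
            cb.position(7);                        // the stream starts at the current position

            String rest = cb.chars()               // IntStream of the remaining chars
                            .map(Character::toUpperCase)
                            .mapToObj(c -> String.valueOf((char) c))
                            .collect(Collectors.joining());
            System.out.println(rest);              // STREAMS
        }
    }
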
--- a/jdk/src/share/classes/java/security/AccessControlContext.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/security/AccessControlContext.java	Wed Jul 05 18:59:05 2017 +0200
@@ -85,6 +85,15 @@
 
     private DomainCombiner combiner = null;
 
+    // limited privilege scope
+    private Permission permissions[];
+    private AccessControlContext parent;
+    private boolean isWrapped;
+
+    // is constrained by limited privilege scope?
+    private boolean isLimited;
+    private ProtectionDomain limitedContext[];
+
     private static boolean debugInit = false;
     private static Debug debug = null;
 
@@ -178,14 +187,79 @@
 
     /**
      * package private for AccessController
+     *
+     * This "argument wrapper" context will be passed as the actual context
+     * parameter on an internal doPrivileged() call used in the implementation.
      */
-    AccessControlContext(ProtectionDomain context[], DomainCombiner combiner) {
+    AccessControlContext(ProtectionDomain caller, DomainCombiner combiner,
+        AccessControlContext parent, AccessControlContext context,
+        Permission[] perms)
+    {
+        /*
+         * Combine the domains from the doPrivileged() context into our
+         * wrapper context, if necessary.
+         */
+        ProtectionDomain[] callerPDs = null;
+        if (caller != null) {
+             callerPDs = new ProtectionDomain[] { caller };
+        }
         if (context != null) {
-            this.context = context.clone();
+            if (combiner != null) {
+                this.context = combiner.combine(callerPDs, context.context);
+            } else {
+                this.context = combine(callerPDs, context.context);
+            }
+        } else {
+            /*
+             * Call combiner even if there is seemingly nothing to combine.
+             */
+            if (combiner != null) {
+                this.context = combiner.combine(callerPDs, null);
+            } else {
+                this.context = combine(callerPDs, null);
+            }
         }
         this.combiner = combiner;
+
+        Permission[] tmp = null;
+        if (perms != null) {
+            tmp = new Permission[perms.length];
+            for (int i=0; i < perms.length; i++) {
+                if (perms[i] == null) {
+                    throw new NullPointerException("permission can't be null");
+                }
+
+                /*
+                 * An AllPermission argument is equivalent to calling
+                 * doPrivileged() without any limit permissions.
+                 */
+                if (perms[i].getClass() == AllPermission.class) {
+                    parent = null;
+                }
+                tmp[i] = perms[i];
+            }
+        }
+
+        /*
+         * For a doPrivileged() with limited privilege scope, initialize
+         * the relevant fields.
+         *
+         * The limitedContext field contains the union of all domains which
+         * are enclosed by this limited privilege scope. In other words,
+         * it contains all of the domains which could potentially be checked
+         * if none of the limiting permissions implied a requested permission.
+         */
+        if (parent != null) {
+            this.limitedContext = combine(parent.context, parent.limitedContext);
+            this.isLimited = true;
+            this.isWrapped = true;
+            this.permissions = tmp;
+            this.parent = parent;
+            this.privilegedContext = context; // used in checkPermission2()
+        }
     }
 
+
     /**
      * package private constructor for AccessController.getContext()
      */
@@ -260,6 +334,13 @@
         if (sm != null) {
             sm.checkPermission(SecurityConstants.GET_COMBINER_PERMISSION);
         }
+        return getCombiner();
+    }
+
+    /**
+     * package private for AccessController
+     */
+    DomainCombiner getCombiner() {
         return combiner;
     }
 
@@ -335,8 +416,10 @@
            or the first domain was a Privileged system domain. This
            is to make the common case for system code very fast */
 
-        if (context == null)
+        if (context == null) {
+            checkPermission2(perm);
             return;
+        }
 
         for (int i=0; i< context.length; i++) {
             if (context[i] != null &&  !context[i].implies(perm)) {
@@ -370,20 +453,108 @@
             debug.println("access allowed "+perm);
         }
 
-        return;
+        checkPermission2(perm);
+    }
+
+    /*
+     * Check the domains associated with the limited privilege scope.
+     */
+    private void checkPermission2(Permission perm) {
+        if (!isLimited) {
+            return;
+        }
+
+        /*
+         * Check the doPrivileged() context parameter, if present.
+         */
+        if (privilegedContext != null) {
+            privilegedContext.checkPermission2(perm);
+        }
+
+        /*
+         * Ignore the limited permissions and parent fields of a wrapper
+         * context since they were already carried down into the unwrapped
+         * context.
+         */
+        if (isWrapped) {
+            return;
+        }
+
+        /*
+         * Try to match any limited privilege scope.
+         */
+        if (permissions != null) {
+            Class<?> permClass = perm.getClass();
+            for (int i=0; i < permissions.length; i++) {
+                Permission limit = permissions[i];
+                if (limit.getClass().equals(permClass) && limit.implies(perm)) {
+                    return;
+                }
+            }
+        }
+
+        /*
+         * Check the limited privilege scope up the call stack or the inherited
+         * parent thread call stack of this ACC.
+         */
+        if (parent != null) {
+            /*
+             * As an optimization, if the parent context is the inherited call
+             * stack context from a parent thread, then checking the protection
+             * domains of the parent context is redundant since they have
+             * already been merged into the child thread's context by
+             * optimize(). When parent is set to an inherited context, this
+             * context was not directly created by a limited scope
+             * doPrivileged() and it does not have its own limited permissions.
+             */
+            if (permissions == null) {
+                parent.checkPermission2(perm);
+            } else {
+                parent.checkPermission(perm);
+            }
+        }
     }
 
     /**
      * Take the stack-based context (this) and combine it with the
-     * privileged or inherited context, if need be.
+     * privileged or inherited context, if need be. Any limited
+     * privilege scope is flagged regardless of whether the assigned
+     * context comes from an immediately enclosing limited doPrivileged().
+     * The limited privilege scope can indirectly flow from the inherited
+     * parent thread or from an assigned context previously captured by getContext().
      */
     AccessControlContext optimize() {
         // the assigned (privileged or inherited) context
         AccessControlContext acc;
+        DomainCombiner combiner = null;
+        AccessControlContext parent = null;
+        Permission[] permissions = null;
+
         if (isPrivileged) {
             acc = privilegedContext;
+            if (acc != null) {
+                /*
+                 * If the context is from a limited scope doPrivileged() then
+                 * copy the permissions and parent fields out of the wrapper
+                 * context that was created to hold them.
+                 */
+                if (acc.isWrapped) {
+                    permissions = acc.permissions;
+                    parent = acc.parent;
+                }
+            }
         } else {
             acc = AccessController.getInheritedAccessControlContext();
+            if (acc != null) {
+                /*
+                 * If the inherited context is constrained by a limited scope
+                 * doPrivileged() then set it as our parent so we will process
+                 * the non-domain-related state.
+                 */
+                if (acc.isLimited) {
+                    parent = acc;
+                }
+            }
         }
 
         // this.context could be null if only system code is on the stack;
@@ -393,53 +564,98 @@
         // acc.context could be null if only system code was involved;
         // in that case, ignore the assigned context
         boolean skipAssigned = (acc == null || acc.context == null);
+        ProtectionDomain[] assigned = (skipAssigned) ? null : acc.context;
+        ProtectionDomain[] pd;
+
+        // if there is no enclosing limited privilege scope on the stack or
+        // inherited from a parent thread
+        boolean skipLimited = ((acc == null || !acc.isWrapped) && parent == null);
 
         if (acc != null && acc.combiner != null) {
             // let the assigned acc's combiner do its thing
-            return goCombiner(context, acc);
+            if (getDebug() != null) {
+                debug.println("AccessControlContext invoking the Combiner");
+            }
+
+            // No need to clone current and assigned.context
+            // combine() will not update them
+            combiner = acc.combiner;
+            pd = combiner.combine(context, assigned);
+        } else {
+            if (skipStack) {
+                if (skipAssigned) {
+                    calculateFields(acc, parent, permissions);
+                    return this;
+                } else if (skipLimited) {
+                    return acc;
+                }
+            } else if (assigned != null) {
+                if (skipLimited) {
+                    // optimization: if there is a single stack domain and
+                    // that domain is already in the assigned context; no
+                    // need to combine
+                    if (context.length == 1 && context[0] == assigned[0]) {
+                        return acc;
+                    }
+                }
+            }
+
+            pd = combine(context, assigned);
+            if (skipLimited && !skipAssigned && pd == assigned) {
+                return acc;
+            } else if (skipAssigned && pd == context) {
+                calculateFields(acc, parent, permissions);
+                return this;
+            }
         }
 
-        // optimization: if neither have contexts; return acc if possible
-        // rather than this, because acc might have a combiner
-        if (skipAssigned && skipStack) {
-            return this;
-        }
+        // Reuse existing ACC
+        this.context = pd;
+        this.combiner = combiner;
+        this.isPrivileged = false;
+
+        calculateFields(acc, parent, permissions);
+        return this;
+    }
+
 
-        // optimization: if there is no stack context; there is no reason
-        // to compress the assigned context, it already is compressed
-        if (skipStack) {
-            return acc;
-        }
+    /*
+     * Combine the current (stack) and assigned domains.
+     */
+    private static ProtectionDomain[] combine(ProtectionDomain[] current,
+        ProtectionDomain[] assigned) {
 
-        int slen = context.length;
+        // current could be null if only system code is on the stack;
+        // in that case, ignore the stack context
+        boolean skipStack = (current == null);
+
+        // assigned could be null if only system code was involved;
+        // in that case, ignore the assigned context
+        boolean skipAssigned = (assigned == null);
+
+        int slen = (skipStack) ? 0 : current.length;
 
         // optimization: if there is no assigned context and the stack length
         // is less than or equal to two; there is no reason to compress the
         // stack context, it already is
         if (skipAssigned && slen <= 2) {
-            return this;
+            return current;
         }
 
-        // optimization: if there is a single stack domain and that domain
-        // is already in the assigned context; no need to combine
-        if ((slen == 1) && (context[0] == acc.context[0])) {
-            return acc;
-        }
-
-        int n = (skipAssigned) ? 0 : acc.context.length;
+        int n = (skipAssigned) ? 0 : assigned.length;
 
         // now we combine both of them, and create a new context
         ProtectionDomain pd[] = new ProtectionDomain[slen + n];
 
         // first copy in the assigned context domains, no need to compress
         if (!skipAssigned) {
-            System.arraycopy(acc.context, 0, pd, 0, n);
+            System.arraycopy(assigned, 0, pd, 0, n);
         }
 
         // now add the stack context domains, discarding nulls and duplicates
     outer:
-        for (int i = 0; i < context.length; i++) {
-            ProtectionDomain sd = context[i];
+        for (int i = 0; i < slen; i++) {
+            ProtectionDomain sd = current[i];
             if (sd != null) {
                 for (int j = 0; j < n; j++) {
                     if (sd == pd[j]) {
@@ -453,53 +669,47 @@
         // if length isn't equal, we need to shorten the array
         if (n != pd.length) {
             // optimization: if we didn't really combine anything
-            if (!skipAssigned && n == acc.context.length) {
-                return acc;
+            if (!skipAssigned && n == assigned.length) {
+                return assigned;
             } else if (skipAssigned && n == slen) {
-                return this;
+                return current;
             }
             ProtectionDomain tmp[] = new ProtectionDomain[n];
             System.arraycopy(pd, 0, tmp, 0, n);
             pd = tmp;
         }
 
-        //      return new AccessControlContext(pd, false);
-
-        // Reuse existing ACC
-
-        this.context = pd;
-        this.combiner = null;
-        this.isPrivileged = false;
-
-        return this;
+        return pd;
     }
 
-    private AccessControlContext goCombiner(ProtectionDomain[] current,
-                                        AccessControlContext assigned) {
-
-        // the assigned ACC's combiner is not null --
-        // let the combiner do its thing
-
-        // XXX we could add optimizations to 'current' here ...
-
-        if (getDebug() != null) {
-            debug.println("AccessControlContext invoking the Combiner");
-        }
 
-        // No need to clone current and assigned.context
-        // combine() will not update them
-        ProtectionDomain[] combinedPds = assigned.combiner.combine(
-            current, assigned.context);
-
-        // return new AccessControlContext(combinedPds, assigned.combiner);
+    /*
+     * Calculate the additional domains that could potentially be reached via
+     * limited privilege scope. Mark the context as being subject to limited
+     * privilege scope unless the reachable domains (if any) are already
+     * contained in this domain context (in which case any limited
+     * privilege scope checking would be redundant).
+     */
+    private void calculateFields(AccessControlContext assigned,
+        AccessControlContext parent, Permission[] permissions)
+    {
+        ProtectionDomain[] parentLimit = null;
+        ProtectionDomain[] assignedLimit = null;
+        ProtectionDomain[] newLimit;
 
-        // Reuse existing ACC
-        this.context = combinedPds;
-        this.combiner = assigned.combiner;
-        this.isPrivileged = false;
+        parentLimit = (parent != null) ? parent.limitedContext : null;
+        assignedLimit = (assigned != null) ? assigned.limitedContext : null;
+        newLimit = combine(parentLimit, assignedLimit);
+        if (newLimit != null) {
+            if (context == null || !containsAllPDs(newLimit, context)) {
+                this.limitedContext = newLimit;
+                this.permissions = permissions;
+                this.parent = parent;
+                this.isLimited = true;
+            }
+        }
+    }
 
-        return this;
-    }
 
     /**
      * Checks two AccessControlContext objects for equality.
@@ -520,31 +730,131 @@
 
         AccessControlContext that = (AccessControlContext) obj;
 
+        if (!equalContext(that))
+            return false;
 
-        if (context == null) {
-            return (that.context == null);
-        }
+        if (!equalLimitedContext(that))
+            return false;
+
+        return true;
+    }
 
-        if (that.context == null)
+    /*
+     * Compare for equality based on state that is free of limited
+     * privilege complications.
+     */
+    private boolean equalContext(AccessControlContext that) {
+        if (!equalPDs(this.context, that.context))
             return false;
 
-        if (!(this.containsAllPDs(that) && that.containsAllPDs(this)))
+        if (this.combiner == null && that.combiner != null)
+            return false;
+
+        if (this.combiner != null && !this.combiner.equals(that.combiner))
             return false;
 
-        if (this.combiner == null)
-            return (that.combiner == null);
+        return true;
+    }
 
-        if (that.combiner == null)
+    private boolean equalPDs(ProtectionDomain[] a, ProtectionDomain[] b) {
+        if (a == null) {
+            return (b == null);
+        }
+
+        if (b == null)
             return false;
 
-        if (!this.combiner.equals(that.combiner))
+        if (!(containsAllPDs(a, b) && containsAllPDs(b, a)))
             return false;
 
         return true;
     }
 
-    private boolean containsAllPDs(AccessControlContext that) {
+    /*
+     * Compare for equality based on state that is captured during a
+     * call to AccessController.getContext() when a limited privilege
+     * scope is in effect.
+     */
+    private boolean equalLimitedContext(AccessControlContext that) {
+        if (that == null)
+            return false;
+
+        /*
+         * If neither instance has limited privilege scope then we're done.
+         */
+        if (!this.isLimited && !that.isLimited)
+            return true;
+
+        /*
+         * If only one instance has limited privilege scope then we're done.
+         */
+        if (!(this.isLimited && that.isLimited))
+            return false;
+
+        /*
+         * Wrapped instances should never escape outside the implementation of
+         * this class and AccessController, so this will probably never happen,
+         * but it only makes sense to compare them if they both have the same
+         * isWrapped state.
+         */
+        if ((this.isWrapped && !that.isWrapped) ||
+            (!this.isWrapped && that.isWrapped)) {
+            return false;
+        }
+
+        if (this.permissions == null && that.permissions != null)
+            return false;
+
+        if (this.permissions != null && that.permissions == null)
+            return false;
+
+        if (!(this.containsAllLimits(that) && that.containsAllLimits(this)))
+            return false;
+
+        /*
+         * Skip through any wrapped contexts.
+         */
+        AccessControlContext thisNextPC = getNextPC(this);
+        AccessControlContext thatNextPC = getNextPC(that);
+
+        /*
+         * The protection domains and combiner of a privilegedContext are
+         * not relevant because they have already been included in the context
+         * of this instance by optimize(), so we only care about any limited
+         * privilege state they may have.
+         */
+        if (thisNextPC == null && thatNextPC != null && thatNextPC.isLimited)
+            return false;
+
+        if (thisNextPC != null && !thisNextPC.equalLimitedContext(thatNextPC))
+            return false;
+
+        if (this.parent == null && that.parent != null)
+            return false;
+
+        if (this.parent != null && !this.parent.equals(that.parent))
+            return false;
+
+        return true;
+    }
+
+    /*
+     * Follow the privilegedContext link making our best effort to skip
+     * through any wrapper contexts.
+     */
+    private static AccessControlContext getNextPC(AccessControlContext acc) {
+        while (acc != null && acc.privilegedContext != null) {
+            acc = acc.privilegedContext;
+            if (!acc.isWrapped)
+                return acc;
+        }
+        return null;
+    }
+
+    private static boolean containsAllPDs(ProtectionDomain[] thisContext,
+        ProtectionDomain[] thatContext) {
         boolean match = false;
+
         //
         // ProtectionDomains within an ACC currently cannot be null
         // and this is enforced by the constructor and the various
@@ -552,17 +862,17 @@
         // to support the notion of a null PD and therefore this logic continues
         // to support that notion.
         ProtectionDomain thisPd;
-        for (int i = 0; i < context.length; i++) {
+        for (int i = 0; i < thisContext.length; i++) {
             match = false;
-            if ((thisPd = context[i]) == null) {
-                for (int j = 0; (j < that.context.length) && !match; j++) {
-                    match = (that.context[j] == null);
+            if ((thisPd = thisContext[i]) == null) {
+                for (int j = 0; (j < thatContext.length) && !match; j++) {
+                    match = (thatContext[j] == null);
                 }
             } else {
                 Class<?> thisPdClass = thisPd.getClass();
                 ProtectionDomain thatPd;
-                for (int j = 0; (j < that.context.length) && !match; j++) {
-                    thatPd = that.context[j];
+                for (int j = 0; (j < thatContext.length) && !match; j++) {
+                    thatPd = thatContext[j];
 
                     // Class check required to avoid PD exposure (4285406)
                     match = (thatPd != null &&
@@ -573,6 +883,29 @@
         }
         return match;
     }
+
+    private boolean containsAllLimits(AccessControlContext that) {
+        boolean match = false;
+        Permission thisPerm;
+
+        if (this.permissions == null && that.permissions == null)
+            return true;
+
+        for (int i = 0; i < this.permissions.length; i++) {
+            Permission limit = this.permissions[i];
+            Class<?> limitClass = limit.getClass();
+            match = false;
+            for (int j = 0; (j < that.permissions.length) && !match; j++) {
+                Permission perm = that.permissions[j];
+                match = (limitClass.equals(perm.getClass()) &&
+                    limit.equals(perm));
+            }
+            if (!match) return false;
+        }
+        return match;
+    }
+
+
     /**
      * Returns the hash code value for this context. The hash code
      * is computed by exclusive or-ing the hash code of all the protection
@@ -591,6 +924,7 @@
             if (context[i] != null)
                 hashCode ^= context[i].hashCode();
         }
+
         return hashCode;
     }
 }
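
The limited privilege scope machinery added above is exercised through the new variable-arity doPrivileged overloads that this changeset adds to AccessController (below). The following is a minimal usage sketch, not part of the patch: a permission check that is implied by the limiting FilePermission can stop at the privileged frame, while any other check continues past it into the callers' domains, as described for checkPermission2() above. The class name, file paths, and the assumption that a SecurityManager and policy are installed are hypothetical.

import java.io.FilePermission;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedAction;

public class LimitedScopeSketch {
    static void readScratchFile() {
        // Snapshot of the calling context; with this patch it also records
        // any enclosing limited privilege scope (limitedContext/isLimited).
        AccessControlContext acc = AccessController.getContext();

        AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
            SecurityManager sm = System.getSecurityManager();
            if (sm != null) {
                // Implied by the limiting permission below, so the stack walk
                // can stop at this privileged frame (assuming this class's
                // ProtectionDomain has the permission).
                sm.checkPermission(new FilePermission("/tmp/scratch", "read"));
                // A check not implied by the limit would instead continue
                // past this doPrivileged frame into the callers' domains.
            }
            return null;
        }, acc, new FilePermission("/tmp/-", "read")); // limiting permission
    }
}
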
--- a/jdk/src/share/classes/java/security/AccessController.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/security/AccessController.java	Wed Jul 05 18:59:05 2017 +0200
@@ -82,9 +82,15 @@
  *     else if (caller i is marked as privileged) {
  *         if (a context was specified in the call to doPrivileged)
  *             context.checkPermission(permission)
- *         return;
+ *         if (limited permissions were specified in the call to doPrivileged) {
+ *             for (each limited permission) {
+ *                 if (the limited permission implies the requested permission)
+ *                     return;
+ *             }
+ *         } else
+ *             return;
  *     }
- * };
+ * }
  *
  * // Next, check the context inherited when the thread was created.
  * // Whenever a new thread is created, the AccessControlContext at
@@ -101,11 +107,16 @@
  * was marked as "privileged" via a <code>doPrivileged</code>
  * call without a context argument (see below for information about a
  * context argument). If that caller's domain has the
- * specified permission, no further checking is done and
+ * specified permission and at least one limiting permission argument (if any)
+ * implies the requested permission, no further checking is done and
  * <code>checkPermission</code>
  * returns quietly, indicating that the requested access is allowed.
  * If that domain does not have the specified permission, an exception
- * is thrown, as usual.
+ * is thrown, as usual. If the caller's domain had the specified permission
+ * but it was not implied by any limiting permission arguments given in the call
+ * to <code>doPrivileged</code>, then the permission checking continues
+ * until there are no more callers or until another <code>doPrivileged</code>
+ * call matches the requested permission and returns normally.
  *
  * <p> The normal use of the "privileged" feature is as follows. If you
  * don't need to return a value from within the "privileged" block, do
@@ -180,6 +191,9 @@
  *
  * <p> Be *very* careful in your use of the "privileged" construct, and
  * always remember to make the privileged code section as small as possible.
+ * You can pass <code>Permission</code> arguments to further limit the
+ * scope of the "privilege" (see below).
+ *
  *
  * <p> Note that <code>checkPermission</code> always performs security checks
  * within the context of the currently executing thread.
@@ -215,7 +229,9 @@
  *
  * <p> There are also times where you don't know a priori which permissions
  * to check the context against. In these cases you can use the
- * doPrivileged method that takes a context:
+ * doPrivileged method that takes a context. You can also limit the scope
+ * of the privileged code by passing additional <code>Permission</code>
+ * parameters.
  *
  *  <pre> {@code
  * somemethod() {
@@ -223,12 +239,21 @@
  *         public Object run() {
  *             // Code goes here. Any permission checks within this
  *             // run method will require that the intersection of the
- *             // callers protection domain and the snapshot's
- *             // context have the desired permission.
+ *             // caller's protection domain and the snapshot's
+ *             // context have the desired permission. If a requested
+ *             // permission is not implied by the limiting FilePermission
+ *             // argument then checking of the thread continues beyond the
+ *             // caller of doPrivileged.
  *         }
- *     }, acc);
+ *     }, acc, new FilePermission("/temp/*", "read"));
  *     ...normal code here...
  * }}</pre>
+ * <p> Passing an instance of <code>AllPermission</code> as a limiting
+ * <code>Permission</code> argument is equivalent to calling the corresponding
+ * <code>doPrivileged</code> method without limiting <code>Permission</code>
+ * arguments. Passing a zero-length array of <code>Permission</code> disables
+ * the code privileges, so that checking always continues beyond the caller of
+ * that <code>doPrivileged</code> method.
  *
  * @see AccessControlContext
  *
@@ -334,6 +359,112 @@
     public static native <T> T doPrivileged(PrivilegedAction<T> action,
                                             AccessControlContext context);
 
+
+    /**
+     * Performs the specified <code>PrivilegedAction</code> with privileges
+     * enabled and restricted by the specified
+     * <code>AccessControlContext</code> and with a privilege scope limited
+     * by specified <code>Permission</code> arguments.
+     *
+     * The action is performed with the intersection of the permissions
+     * possessed by the caller's protection domain, and those possessed
+     * by the domains represented by the specified
+     * <code>AccessControlContext</code>.
+     * <p>
+     * If the action's <code>run</code> method throws an (unchecked) exception,
+     * it will propagate through this method.
+     *
+     * @param action the action to be performed.
+     * @param context an <i>access control context</i>
+     *                representing the restriction to be applied to the
+     *                caller's domain's privileges before performing
+     *                the specified action.  If the context is
+     *                <code>null</code>,
+     *                then no additional restriction is applied.
+     * @param perms the <code>Permission</code> arguments which limit the
+     *              scope of the caller's privileges. The number of arguments
+     *              is variable.
+     *
+     * @return the value returned by the action's <code>run</code> method.
+     *
+     * @throws NullPointerException if action or perms or any element of
+     *         perms is <code>null</code>
+     *
+     * @see #doPrivileged(PrivilegedAction)
+     * @see #doPrivileged(PrivilegedExceptionAction,AccessControlContext)
+     *
+     * @since 1.8
+     */
+    @CallerSensitive
+    public static <T> T doPrivileged(PrivilegedAction<T> action,
+        AccessControlContext context, Permission... perms) {
+
+        AccessControlContext parent = getContext();
+        if (perms == null) {
+            throw new NullPointerException("null permissions parameter");
+        }
+        Class<?> caller = Reflection.getCallerClass();
+        return AccessController.doPrivileged(action, createWrapper(null,
+            caller, parent, context, perms));
+    }
+
+
+    /**
+     * Performs the specified <code>PrivilegedAction</code> with privileges
+     * enabled and restricted by the specified
+     * <code>AccessControlContext</code> and with a privilege scope limited
+     * by specified <code>Permission</code> arguments.
+     *
+     * The action is performed with the intersection of the permissions
+     * possessed by the caller's protection domain, and those possessed
+     * by the domains represented by the specified
+     * <code>AccessControlContext</code>.
+     * <p>
+     * If the action's <code>run</code> method throws an (unchecked) exception,
+     * it will propagate through this method.
+     *
+     * <p> This method preserves the current AccessControlContext's
+     * DomainCombiner (which may be null) while the action is performed.
+     *
+     * @param action the action to be performed.
+     * @param context an <i>access control context</i>
+     *                representing the restriction to be applied to the
+     *                caller's domain's privileges before performing
+     *                the specified action.  If the context is
+     *                <code>null</code>,
+     *                then no additional restriction is applied.
+     * @param perms the <code>Permission</code> arguments which limit the
+     *              scope of the caller's privileges. The number of arguments
+     *              is variable.
+     *
+     * @return the value returned by the action's <code>run</code> method.
+     *
+     * @throws NullPointerException if action or perms or any element of
+     *         perms is <code>null</code>
+     *
+     * @see #doPrivileged(PrivilegedAction)
+     * @see #doPrivileged(PrivilegedExceptionAction,AccessControlContext)
+     * @see java.security.DomainCombiner
+     *
+     * @since 1.8
+     */
+    @CallerSensitive
+    public static <T> T doPrivilegedWithCombiner(PrivilegedAction<T> action,
+        AccessControlContext context, Permission... perms) {
+
+        AccessControlContext parent = getContext();
+        DomainCombiner dc = parent.getCombiner();
+        if (dc == null && context != null) {
+            dc = context.getCombiner();
+        }
+        if (perms == null) {
+            throw new NullPointerException("null permissions parameter");
+        }
+        Class<?> caller = Reflection.getCallerClass();
+        return AccessController.doPrivileged(action, createWrapper(dc, caller,
+            parent, context, perms));
+    }
+
     /**
      * Performs the specified <code>PrivilegedExceptionAction</code> with
      * privileges enabled.  The action is performed with <i>all</i> of the
@@ -408,6 +539,22 @@
     private static AccessControlContext preserveCombiner(DomainCombiner combiner,
                                                          Class<?> caller)
     {
+        return createWrapper(combiner, caller, null, null, null);
+    }
+
+    /**
+     * Create a wrapper to contain the limited privilege scope data.
+     */
+    private static AccessControlContext
+        createWrapper(DomainCombiner combiner, Class<?> caller,
+                      AccessControlContext parent, AccessControlContext context,
+                      Permission[] perms)
+    {
+        return new AccessControlContext(getCallerPD(caller), combiner, parent,
+                                        context, perms);
+    }
+
+    private static ProtectionDomain getCallerPD(final Class<?> caller) {
         ProtectionDomain callerPd = doPrivileged
             (new PrivilegedAction<ProtectionDomain>() {
             public ProtectionDomain run() {
@@ -415,18 +562,9 @@
             }
         });
 
-        // perform 'combine' on the caller of doPrivileged,
-        // even if the caller is from the bootclasspath
-        ProtectionDomain[] pds = new ProtectionDomain[] {callerPd};
-        if (combiner == null) {
-            return new AccessControlContext(pds);
-        } else {
-            return new AccessControlContext(combiner.combine(pds, null),
-                                            combiner);
-        }
+        return callerPd;
     }
 
-
     /**
      * Performs the specified <code>PrivilegedExceptionAction</code> with
      * privileges enabled and restricted by the specified
@@ -454,7 +592,7 @@
      * @exception NullPointerException if the action is <code>null</code>
      *
      * @see #doPrivileged(PrivilegedAction)
-     * @see #doPrivileged(PrivilegedExceptionAction,AccessControlContext)
+     * @see #doPrivileged(PrivilegedAction,AccessControlContext)
      */
     @CallerSensitive
     public static native <T> T
@@ -462,6 +600,118 @@
                      AccessControlContext context)
         throws PrivilegedActionException;
 
+
+    /**
+     * Performs the specified <code>PrivilegedExceptionAction</code> with
+     * privileges enabled and restricted by the specified
+     * <code>AccessControlContext</code> and with a privilege scope limited by
+     * specified <code>Permission</code> arguments.
+     *
+     * The action is performed with the intersection of the permissions
+     * possessed by the caller's protection domain, and those possessed
+     * by the domains represented by the specified
+     * <code>AccessControlContext</code>.
+     * <p>
+     * If the action's <code>run</code> method throws an (unchecked) exception,
+     * it will propagate through this method.
+     *
+     * @param action the action to be performed.
+     * @param context an <i>access control context</i>
+     *                representing the restriction to be applied to the
+     *                caller's domain's privileges before performing
+     *                the specified action.  If the context is
+     *                <code>null</code>,
+     *                then no additional restriction is applied.
+     * @param perms the <code>Permission</code> arguments which limit the
+     *              scope of the caller's privileges. The number of arguments
+     *              is variable.
+     *
+     * @return the value returned by the action's <code>run</code> method.
+     *
+     * @throws PrivilegedActionException if the specified action's
+     *         <code>run</code> method threw a <i>checked</i> exception
+     * @throws NullPointerException if action or perms or any element of
+     *         perms is <code>null</code>
+     *
+     * @see #doPrivileged(PrivilegedAction)
+     * @see #doPrivileged(PrivilegedAction,AccessControlContext)
+     *
+     * @since 1.8
+     */
+    @CallerSensitive
+    public static <T> T doPrivileged(PrivilegedExceptionAction<T> action,
+                                     AccessControlContext context, Permission... perms)
+        throws PrivilegedActionException
+    {
+        AccessControlContext parent = getContext();
+        if (perms == null) {
+            throw new NullPointerException("null permissions parameter");
+        }
+        Class<?> caller = Reflection.getCallerClass();
+        return AccessController.doPrivileged(action, createWrapper(null, caller, parent, context, perms));
+    }
+
+
+    /**
+     * Performs the specified <code>PrivilegedExceptionAction</code> with
+     * privileges enabled and restricted by the specified
+     * <code>AccessControlContext</code> and with a privilege scope limited by
+     * specified <code>Permission</code> arguments.
+     *
+     * The action is performed with the intersection of the permissions
+     * possessed by the caller's protection domain, and those possessed
+     * by the domains represented by the specified
+     * <code>AccessControlContext</code>.
+     * <p>
+     * If the action's <code>run</code> method throws an (unchecked) exception,
+     * it will propagate through this method.
+     *
+     * <p> This method preserves the current AccessControlContext's
+     * DomainCombiner (which may be null) while the action is performed.
+     *
+     * @param action the action to be performed.
+     * @param context an <i>access control context</i>
+     *                representing the restriction to be applied to the
+     *                caller's domain's privileges before performing
+     *                the specified action.  If the context is
+     *                <code>null</code>,
+     *                then no additional restriction is applied.
+     * @param perms the <code>Permission</code> arguments which limit the
+     *              scope of the caller's privileges. The number of arguments
+     *              is variable.
+     *
+     * @return the value returned by the action's <code>run</code> method.
+     *
+     * @throws PrivilegedActionException if the specified action's
+     *         <code>run</code> method threw a <i>checked</i> exception
+     * @throws NullPointerException if action or perms or any element of
+     *         perms is <code>null</code>
+     *
+     * @see #doPrivileged(PrivilegedAction)
+     * @see #doPrivileged(PrivilegedAction,AccessControlContext)
+     * @see java.security.DomainCombiner
+     *
+     * @since 1.8
+     */
+    @CallerSensitive
+    public static <T> T doPrivilegedWithCombiner(PrivilegedExceptionAction<T> action,
+                                                 AccessControlContext context,
+                                                 Permission... perms)
+        throws PrivilegedActionException
+    {
+        AccessControlContext parent = getContext();
+        DomainCombiner dc = parent.getCombiner();
+        if (dc == null && context != null) {
+            dc = context.getCombiner();
+        }
+        if (perms == null) {
+            throw new NullPointerException("null permissions parameter");
+        }
+        Class<?> caller = Reflection.getCallerClass();
+        return AccessController.doPrivileged(action, createWrapper(dc, caller,
+            parent, context, perms));
+    }
+
     /**
      * Returns the AccessControl context. i.e., it gets
      * the protection domains of all the callers on the stack,
@@ -474,6 +724,7 @@
 
     private static native AccessControlContext getStackAccessControlContext();
 
+
     /**
      * Returns the "inherited" AccessControl context. This is the context
      * that existed when the thread was created. Package private so
@@ -484,9 +735,9 @@
 
     /**
      * This method takes a "snapshot" of the current calling context, which
-     * includes the current Thread's inherited AccessControlContext,
-     * and places it in an AccessControlContext object. This context may then
-     * be checked at a later point, possibly in another thread.
+     * includes the current Thread's inherited AccessControlContext and any
+     * limited privilege scope, and places it in an AccessControlContext object.
+     * This context may then be checked at a later point, possibly in another thread.
      *
      * @see AccessControlContext
      *
@@ -524,7 +775,7 @@
      */
 
     public static void checkPermission(Permission perm)
-                 throws AccessControlException
+        throws AccessControlException
     {
         //System.err.println("checkPermission "+perm);
         //Thread.currentThread().dumpStack();
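
The javadoc added above also documents the degenerate case: a zero-length Permission array grants no privileges at all, so checking always continues beyond the caller of that doPrivileged. Below is a short hedged sketch of that case; the class name and the property being read are arbitrary, and the surrounding policy is assumed rather than shown.

import java.security.AccessController;
import java.security.Permission;
import java.security.PrivilegedAction;

public class ZeroLimitSketch {
    static String readProperty(String name) {
        // Zero-length limit: this frame adds no privileges, so the property
        // read is checked against every caller on the stack, just as if
        // doPrivileged had not been used here at all.
        return AccessController.doPrivileged(
                (PrivilegedAction<String>) () -> System.getProperty(name),
                null,                  // no restricting AccessControlContext
                new Permission[0]);    // empty limiting-permission list
    }
}
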
--- a/jdk/src/share/classes/java/security/DigestOutputStream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/security/DigestOutputStream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1996, 1999, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -112,10 +112,10 @@
      * @see MessageDigest#update(byte)
      */
     public void write(int b) throws IOException {
+        out.write(b);
         if (on) {
             digest.update((byte)b);
         }
-        out.write(b);
     }
 
     /**
@@ -142,10 +142,10 @@
      * @see MessageDigest#update(byte[], int, int)
      */
     public void write(byte[] b, int off, int len) throws IOException {
+        out.write(b, off, len);
         if (on) {
             digest.update(b, off, len);
         }
-        out.write(b, off, len);
     }
 
     /**
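
The DigestOutputStream change above reorders each write so the underlying stream is written first and the digest is updated afterwards; if out.write() throws, the digest no longer reflects bytes that were never written. A small usage sketch follows, with a hypothetical output file name.

import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class DigestWriteSketch {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        try (DigestOutputStream dos =
                 new DigestOutputStream(new FileOutputStream("out.bin"), md)) {
            dos.write(new byte[] { 1, 2, 3 }, 0, 3); // stream first, then digest
            dos.write(42);
        }
        // md now reflects exactly the bytes that reached out.bin.
        System.out.printf("SHA-256: %064x%n", new BigInteger(1, md.digest()));
    }
}
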
--- a/jdk/src/share/classes/java/util/HashMap.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/HashMap.java	Wed Jul 05 18:59:05 2017 +0200
@@ -26,6 +26,8 @@
 package java.util;
 
 import java.io.*;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
 import java.util.function.Consumer;
 import java.util.function.BiFunction;
 import java.util.function.Function;
@@ -126,7 +128,7 @@
  */
 
 public class HashMap<K,V>
-    extends AbstractMap<K,V>
+        extends AbstractMap<K,V>
     implements Map<K,V>, Cloneable, Serializable
 {
 
@@ -150,12 +152,12 @@
     /**
      * An empty table instance to share when the table is not inflated.
      */
-    static final Entry<?,?>[] EMPTY_TABLE = {};
+    static final Object[] EMPTY_TABLE = {};
 
     /**
      * The table, resized as necessary. Length MUST Always be a power of two.
      */
-    transient Entry<?,?>[] table = EMPTY_TABLE;
+    transient Object[] table = EMPTY_TABLE;
 
     /**
      * The number of key-value mappings contained in this map.
@@ -186,10 +188,10 @@
      */
     transient int modCount;
 
+    /**
+     * Holds values which can't be initialized until after the VM is booted.
+     */
     private static class Holder {
-         /**
-         *
-         */
         static final sun.misc.Unsafe UNSAFE;
 
         /**
@@ -198,22 +200,616 @@
          */
         static final long HASHSEED_OFFSET;
 
+        static final boolean USE_HASHSEED;
+
         static {
-            try {
-                UNSAFE = sun.misc.Unsafe.getUnsafe();
-                HASHSEED_OFFSET = UNSAFE.objectFieldOffset(
-                    HashMap.class.getDeclaredField("hashSeed"));
-            } catch (NoSuchFieldException | SecurityException e) {
-                throw new InternalError("Failed to record hashSeed offset", e);
+            String hashSeedProp = java.security.AccessController.doPrivileged(
+                    new sun.security.action.GetPropertyAction(
+                        "jdk.map.useRandomSeed"));
+            boolean localBool = (null != hashSeedProp)
+                    ? Boolean.parseBoolean(hashSeedProp) : false;
+            USE_HASHSEED = localBool;
+
+            if (USE_HASHSEED) {
+                try {
+                    UNSAFE = sun.misc.Unsafe.getUnsafe();
+                    HASHSEED_OFFSET = UNSAFE.objectFieldOffset(
+                        HashMap.class.getDeclaredField("hashSeed"));
+                } catch (NoSuchFieldException | SecurityException e) {
+                    throw new InternalError("Failed to record hashSeed offset", e);
+                }
+            } else {
+                UNSAFE = null;
+                HASHSEED_OFFSET = 0;
             }
         }
     }
 
-    /**
+    /*
      * A randomizing value associated with this instance that is applied to
      * hash code of keys to make hash collisions harder to find.
+     *
+     * Non-final so it can be set lazily, but be sure not to set more than once.
      */
-    transient final int hashSeed = sun.misc.Hashing.randomHashSeed(this);
+    transient final int hashSeed;
+
+    /*
+     * TreeBin/TreeNode code from CHM doesn't handle the null key.  Store the
+     * null key entry here.
+     */
+    transient Entry<K,V> nullKeyEntry = null;
+
+    /*
+     * In order to improve performance under high hash-collision conditions,
+     * HashMap will switch to storing a bin's entries in a balanced tree
+     * (TreeBin) instead of a linked-list once the number of entries in the bin
+     * passes a certain threshold (TreeBin.TREE_THRESHOLD), if at least one of
+     * the keys in the bin implements Comparable.  This technique is borrowed
+     * from ConcurrentHashMap.
+     */
+
+    /*
+     * Code based on CHMv8
+     *
+     * Node type for TreeBin
+     */
+    final static class TreeNode<K,V> {
+        TreeNode parent;  // red-black tree links
+        TreeNode left;
+        TreeNode right;
+        TreeNode prev;    // needed to unlink next upon deletion
+        boolean red;
+        final HashMap.Entry<K,V> entry;
+
+        TreeNode(HashMap.Entry<K,V> entry, Object next, TreeNode parent) {
+            this.entry = entry;
+            this.entry.next = next;
+            this.parent = parent;
+        }
+    }
+
+    /**
+     * Returns a Class of the form "class C implements Comparable<C>" for the
+     * given object, if one exists, else null.  See the TreeBin
+     * docs, below, for explanation.
+     */
+    static Class<?> comparableClassFor(Object x) {
+        Class<?> c, s, cmpc; Type[] ts, as; Type t; ParameterizedType p;
+        if ((c = x.getClass()) == String.class) // bypass checks
+            return c;
+        if ((cmpc = Comparable.class).isAssignableFrom(c)) {
+            while (cmpc.isAssignableFrom(s = c.getSuperclass()))
+                c = s; // find topmost comparable class
+            if ((ts  = c.getGenericInterfaces()) != null) {
+                for (int i = 0; i < ts.length; ++i) {
+                    if (((t = ts[i]) instanceof ParameterizedType) &&
+                        ((p = (ParameterizedType)t).getRawType() == cmpc) &&
+                        (as = p.getActualTypeArguments()) != null &&
+                        as.length == 1 && as[0] == c) // type arg is c
+                        return c;
+                }
+            }
+        }
+        return null;
+    }
+
+    /*
+     * Code based on CHMv8
+     *
+     * A specialized form of red-black tree for use in bins
+     * whose size exceeds a threshold.
+     *
+     * TreeBins use a special form of comparison for search and
+     * related operations (which is the main reason we cannot use
+     * existing collections such as TreeMaps). TreeBins contain
+     * Comparable elements, but may contain others, as well as
+     * elements that are Comparable but not necessarily Comparable<T>
+     * for the same T, so we cannot invoke compareTo among them. To
+     * handle this, the tree is ordered primarily by hash value, then
+     * by Comparable.compareTo order if applicable.  On lookup at a
+     * node, if elements are not comparable or compare as 0 then both
+     * left and right children may need to be searched in the case of
+     * tied hash values. (This corresponds to the full list search
+     * that would be necessary if all elements were non-Comparable and
+     * had tied hashes.)  The red-black balancing code is updated from
+     * pre-jdk-collections
+     * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
+     * based in turn on Cormen, Leiserson, and Rivest "Introduction to
+     * Algorithms" (CLR).
+     */
+    final class TreeBin {
+        /*
+         * The bin count threshold for using a tree rather than list for a bin. The
+         * value reflects the approximate break-even point for using tree-based
+         * operations.
+         */
+        static final int TREE_THRESHOLD = 16;
+
+        TreeNode<K,V> root;  // root of tree
+        TreeNode<K,V> first; // head of next-pointer list
+
+        /*
+         * Split a TreeBin into lo and hi parts and install them in the given table.
+         *
+         * Existing Entry objects are re-used, which maintains the before/after links for
+         * LinkedHashMap.Entry.
+         *
+         * No check for Comparable, though this is the same as CHM.
+         */
+        final void splitTreeBin(Object[] newTable, int i, TreeBin loTree, TreeBin hiTree) {
+            TreeBin oldTree = this;
+            int bit = newTable.length >>> 1;
+            int loCount = 0, hiCount = 0;
+            TreeNode<K,V> e = oldTree.first;
+            TreeNode<K,V> next;
+
+            // This method is called when the table has just increased capacity,
+            // so indexFor() is now taking one additional bit of hash into
+            // account ("bit").  Entries in this TreeBin now belong in one of
+            // two bins, "i" or "i+bit", depending on if the new top bit of the
+            // hash is set.  The trees for the two bins are loTree and hiTree.
+            // If either tree ends up containing fewer than TREE_THRESHOLD
+            // entries, it is converted back to a linked list.
+            while (e != null) {
+                // Save entry.next - it will get overwritten in putTreeNode()
+                next = (TreeNode<K,V>)e.entry.next;
+
+                int h = e.entry.hash;
+                K k = (K) e.entry.key;
+                V v = e.entry.value;
+                if ((h & bit) == 0) {
+                    ++loCount;
+                    // Re-using e.entry
+                    loTree.putTreeNode(h, k, v, e.entry);
+                } else {
+                    ++hiCount;
+                    hiTree.putTreeNode(h, k, v, e.entry);
+                }
+                // Iterate using the saved 'next'
+                e = next;
+            }
+            if (loCount < TREE_THRESHOLD) { // too small, convert back to list
+                HashMap.Entry loEntry = null;
+                TreeNode<K,V> p = loTree.first;
+                while (p != null) {
+                    @SuppressWarnings("unchecked")
+                    TreeNode<K,V> savedNext = (TreeNode<K,V>) p.entry.next;
+                    p.entry.next = loEntry;
+                    loEntry = p.entry;
+                    p = savedNext;
+                }
+                // assert newTable[i] == null;
+                newTable[i] = loEntry;
+            } else {
+                // assert newTable[i] == null;
+                newTable[i] = loTree;
+            }
+            if (hiCount < TREE_THRESHOLD) { // too small, convert back to list
+                HashMap.Entry hiEntry = null;
+                TreeNode<K,V> p = hiTree.first;
+                while (p != null) {
+                    @SuppressWarnings("unchecked")
+                    TreeNode<K,V> savedNext = (TreeNode<K,V>) p.entry.next;
+                    p.entry.next = hiEntry;
+                    hiEntry = p.entry;
+                    p = savedNext;
+                }
+                // assert newTable[i + bit] == null;
+                newTable[i + bit] = hiEntry;
+            } else {
+                // assert newTable[i + bit] == null;
+                newTable[i + bit] = hiTree;
+            }
+        }
+
+        /*
+         * Populate the TreeBin with entries from the linked list e.
+         *
+         * Assumes 'this' is a new/empty TreeBin
+         *
+         * Note: no check for Comparable
+         * Note: I believe this changes iteration order
+         */
+        @SuppressWarnings("unchecked")
+        void populate(HashMap.Entry e) {
+            // assert root == null;
+            // assert first == null;
+            HashMap.Entry next;
+            while (e != null) {
+                // Save entry.next - it will get overwritten in putTreeNode()
+                next = (HashMap.Entry)e.next;
+                // Re-using Entry e will maintain before/after in LinkedHM
+                putTreeNode(e.hash, (K)e.key, (V)e.value, e);
+                // Iterate using the saved 'next'
+                e = next;
+            }
+        }
+
+        /**
+         * Copied from CHMv8
+         * From CLR
+         */
+        private void rotateLeft(TreeNode p) {
+            if (p != null) {
+                TreeNode r = p.right, pp, rl;
+                if ((rl = p.right = r.left) != null) {
+                    rl.parent = p;
+                }
+                if ((pp = r.parent = p.parent) == null) {
+                    root = r;
+                } else if (pp.left == p) {
+                    pp.left = r;
+                } else {
+                    pp.right = r;
+                }
+                r.left = p;
+                p.parent = r;
+            }
+        }
+
+        /**
+         * Copied from CHMv8
+         * From CLR
+         */
+        private void rotateRight(TreeNode p) {
+            if (p != null) {
+                TreeNode l = p.left, pp, lr;
+                if ((lr = p.left = l.right) != null) {
+                    lr.parent = p;
+                }
+                if ((pp = l.parent = p.parent) == null) {
+                    root = l;
+                } else if (pp.right == p) {
+                    pp.right = l;
+                } else {
+                    pp.left = l;
+                }
+                l.right = p;
+                p.parent = l;
+            }
+        }
+
+        /**
+         * Returns the TreeNode (or null if not found) for the given
+         * key.  A front-end for recursive version.
+         */
+        final TreeNode getTreeNode(int h, K k) {
+            return getTreeNode(h, k, root, comparableClassFor(k));
+        }
+
+        /**
+         * Returns the TreeNode (or null if not found) for the given key
+         * starting at given root.
+         */
+        @SuppressWarnings("unchecked")
+        final TreeNode getTreeNode(int h, K k, TreeNode p, Class<?> cc) {
+            // assert k != null;
+            while (p != null) {
+                int dir, ph;  Object pk;
+                if ((ph = p.entry.hash) != h)
+                    dir = (h < ph) ? -1 : 1;
+                else if ((pk = p.entry.key) == k || k.equals(pk))
+                    return p;
+                else if (cc == null || comparableClassFor(pk) != cc ||
+                         (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
+                    // assert pk != null;
+                    TreeNode r, pl, pr; // check both sides
+                    if ((pr = p.right) != null &&
+                        (r = getTreeNode(h, k, pr, cc)) != null)
+                        return r;
+                    else if ((pl = p.left) != null)
+                        dir = -1;
+                    else // nothing there
+                        break;
+                }
+                p = (dir > 0) ? p.right : p.left;
+            }
+            return null;
+        }
+
+        /*
+         * Finds or adds a node.
+         *
+         * 'entry' should be used to recycle an existing Entry (e.g. in the case
+         * of converting a linked-list bin to a TreeBin).
+         * If entry is null, a new Entry will be created for the new TreeNode
+         *
+         * @return the TreeNode containing the mapping, or null if a new
+         * TreeNode was added
+         */
+        @SuppressWarnings("unchecked")
+        TreeNode putTreeNode(int h, K k, V v, HashMap.Entry<K,V> entry) {
+            // assert k != null;
+            //if (entry != null) {
+                // assert h == entry.hash;
+                // assert k == entry.key;
+                // assert v == entry.value;
+            // }
+            Class<?> cc = comparableClassFor(k);
+            TreeNode pp = root, p = null;
+            int dir = 0;
+            while (pp != null) { // find existing node or leaf to insert at
+                int ph;  Object pk;
+                p = pp;
+                if ((ph = p.entry.hash) != h)
+                    dir = (h < ph) ? -1 : 1;
+                else if ((pk = p.entry.key) == k || k.equals(pk))
+                    return p;
+                else if (cc == null || comparableClassFor(pk) != cc ||
+                         (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
+                    TreeNode r, pr;
+                    if ((pr = p.right) != null &&
+                        (r = getTreeNode(h, k, pr, cc)) != null)
+                        return r;
+                    else // continue left
+                        dir = -1;
+                }
+                pp = (dir > 0) ? p.right : p.left;
+            }
+
+            // Didn't find the mapping in the tree, so add it
+            TreeNode f = first;
+            TreeNode x;
+            if (entry != null) {
+                x = new TreeNode(entry, f, p);
+            } else {
+                x = new TreeNode(newEntry(h, k, v, null), f, p);
+            }
+            first = x;
+
+            if (p == null) {
+                root = x;
+            } else { // attach and rebalance; adapted from CLR
+                TreeNode xp, xpp;
+                if (f != null) {
+                    f.prev = x;
+                }
+                if (dir <= 0) {
+                    p.left = x;
+                } else {
+                    p.right = x;
+                }
+                x.red = true;
+                while (x != null && (xp = x.parent) != null && xp.red
+                        && (xpp = xp.parent) != null) {
+                    TreeNode xppl = xpp.left;
+                    if (xp == xppl) {
+                        TreeNode y = xpp.right;
+                        if (y != null && y.red) {
+                            y.red = false;
+                            xp.red = false;
+                            xpp.red = true;
+                            x = xpp;
+                        } else {
+                            if (x == xp.right) {
+                                rotateLeft(x = xp);
+                                xpp = (xp = x.parent) == null ? null : xp.parent;
+                            }
+                            if (xp != null) {
+                                xp.red = false;
+                                if (xpp != null) {
+                                    xpp.red = true;
+                                    rotateRight(xpp);
+                                }
+                            }
+                        }
+                    } else {
+                        TreeNode y = xppl;
+                        if (y != null && y.red) {
+                            y.red = false;
+                            xp.red = false;
+                            xpp.red = true;
+                            x = xpp;
+                        } else {
+                            if (x == xp.left) {
+                                rotateRight(x = xp);
+                                xpp = (xp = x.parent) == null ? null : xp.parent;
+                            }
+                            if (xp != null) {
+                                xp.red = false;
+                                if (xpp != null) {
+                                    xpp.red = true;
+                                    rotateLeft(xpp);
+                                }
+                            }
+                        }
+                    }
+                }
+                TreeNode r = root;
+                if (r != null && r.red) {
+                    r.red = false;
+                }
+            }
+            return null;
+        }
+
+        /*
+         * From CHMv8
+         *
+         * Removes the given node, that must be present before this
+         * call.  This is messier than typical red-black deletion code
+         * because we cannot swap the contents of an interior node
+         * with a leaf successor that is pinned by "next" pointers
+         * that are accessible independently of lock. So instead we
+         * swap the tree linkages.
+         */
+        final void deleteTreeNode(TreeNode p) {
+            TreeNode next = (TreeNode) p.entry.next; // unlink traversal pointers
+            TreeNode pred = p.prev;
+            if (pred == null) {
+                first = next;
+            } else {
+                pred.entry.next = next;
+            }
+            if (next != null) {
+                next.prev = pred;
+            }
+            TreeNode replacement;
+            TreeNode pl = p.left;
+            TreeNode pr = p.right;
+            if (pl != null && pr != null) {
+                TreeNode s = pr, sl;
+                while ((sl = s.left) != null) // find successor
+                {
+                    s = sl;
+                }
+                boolean c = s.red;
+                s.red = p.red;
+                p.red = c; // swap colors
+                TreeNode sr = s.right;
+                TreeNode pp = p.parent;
+                if (s == pr) { // p was s's direct parent
+                    p.parent = s;
+                    s.right = p;
+                } else {
+                    TreeNode sp = s.parent;
+                    if ((p.parent = sp) != null) {
+                        if (s == sp.left) {
+                            sp.left = p;
+                        } else {
+                            sp.right = p;
+                        }
+                    }
+                    if ((s.right = pr) != null) {
+                        pr.parent = s;
+                    }
+                }
+                p.left = null;
+                if ((p.right = sr) != null) {
+                    sr.parent = p;
+                }
+                if ((s.left = pl) != null) {
+                    pl.parent = s;
+                }
+                if ((s.parent = pp) == null) {
+                    root = s;
+                } else if (p == pp.left) {
+                    pp.left = s;
+                } else {
+                    pp.right = s;
+                }
+                replacement = sr;
+            } else {
+                replacement = (pl != null) ? pl : pr;
+            }
+            TreeNode pp = p.parent;
+            if (replacement == null) {
+                if (pp == null) {
+                    root = null;
+                    return;
+                }
+                replacement = p;
+            } else {
+                replacement.parent = pp;
+                if (pp == null) {
+                    root = replacement;
+                } else if (p == pp.left) {
+                    pp.left = replacement;
+                } else {
+                    pp.right = replacement;
+                }
+                p.left = p.right = p.parent = null;
+            }
+            if (!p.red) { // rebalance, from CLR
+                TreeNode x = replacement;
+                while (x != null) {
+                    TreeNode xp, xpl;
+                    if (x.red || (xp = x.parent) == null) {
+                        x.red = false;
+                        break;
+                    }
+                    if (x == (xpl = xp.left)) {
+                        TreeNode sib = xp.right;
+                        if (sib != null && sib.red) {
+                            sib.red = false;
+                            xp.red = true;
+                            rotateLeft(xp);
+                            sib = (xp = x.parent) == null ? null : xp.right;
+                        }
+                        if (sib == null) {
+                            x = xp;
+                        } else {
+                            TreeNode sl = sib.left, sr = sib.right;
+                            if ((sr == null || !sr.red)
+                                    && (sl == null || !sl.red)) {
+                                sib.red = true;
+                                x = xp;
+                            } else {
+                                if (sr == null || !sr.red) {
+                                    if (sl != null) {
+                                        sl.red = false;
+                                    }
+                                    sib.red = true;
+                                    rotateRight(sib);
+                                    sib = (xp = x.parent) == null ?
+                                        null : xp.right;
+                                }
+                                if (sib != null) {
+                                    sib.red = (xp == null) ? false : xp.red;
+                                    if ((sr = sib.right) != null) {
+                                        sr.red = false;
+                                    }
+                                }
+                                if (xp != null) {
+                                    xp.red = false;
+                                    rotateLeft(xp);
+                                }
+                                x = root;
+                            }
+                        }
+                    } else { // symmetric
+                        TreeNode sib = xpl;
+                        if (sib != null && sib.red) {
+                            sib.red = false;
+                            xp.red = true;
+                            rotateRight(xp);
+                            sib = (xp = x.parent) == null ? null : xp.left;
+                        }
+                        if (sib == null) {
+                            x = xp;
+                        } else {
+                            TreeNode sl = sib.left, sr = sib.right;
+                            if ((sl == null || !sl.red)
+                                    && (sr == null || !sr.red)) {
+                                sib.red = true;
+                                x = xp;
+                            } else {
+                                if (sl == null || !sl.red) {
+                                    if (sr != null) {
+                                        sr.red = false;
+                                    }
+                                    sib.red = true;
+                                    rotateLeft(sib);
+                                    sib = (xp = x.parent) == null ?
+                                        null : xp.left;
+                                }
+                                if (sib != null) {
+                                    sib.red = (xp == null) ? false : xp.red;
+                                    if ((sl = sib.left) != null) {
+                                        sl.red = false;
+                                    }
+                                }
+                                if (xp != null) {
+                                    xp.red = false;
+                                    rotateRight(xp);
+                                }
+                                x = root;
+                            }
+                        }
+                    }
+                }
+            }
+            if (p == replacement && (pp = p.parent) != null) {
+                if (p == pp.left) // detach pointers
+                {
+                    pp.left = null;
+                } else if (p == pp.right) {
+                    pp.right = null;
+                }
+                p.parent = null;
+            }
+        }
+    }
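
The comment above deleteTreeNode() explains why the code swaps tree linkages instead of using the textbook trick of copying the successor's contents into the deleted node: each TreeNode wraps an Entry that iterators reach through the "next" chain, so the Entry identity cannot change. For contrast, a minimal sketch of the textbook approach that is not usable here (hypothetical unbalanced BST node, illustrative names):

    // Textbook BST deletion of a node with two children: copy the in-order
    // successor's key/value into the node, then unlink the successor. This
    // changes which payload a given node object carries, which is exactly what
    // the patch must avoid because Entry objects are pinned by "next" pointers.
    static final class Node {
        int key; String value;
        Node left, right;
    }

    static void deleteWithTwoChildren(Node n) {   // assumes n.left and n.right are non-null
        Node parent = n, s = n.right;
        while (s.left != null) { parent = s; s = s.left; }  // in-order successor
        n.key = s.key;                                      // contents move...
        n.value = s.value;
        if (parent == n) parent.right = s.right;            // ...then unlink s
        else parent.left = s.right;
    }
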
 
     /**
      * Constructs an empty <tt>HashMap</tt> with the specified initial
@@ -233,9 +829,9 @@
         if (loadFactor <= 0 || Float.isNaN(loadFactor))
             throw new IllegalArgumentException("Illegal load factor: " +
                                                loadFactor);
-
         this.loadFactor = loadFactor;
         threshold = initialCapacity;
+        hashSeed = initHashSeed();
         init();
     }
 
@@ -269,10 +865,11 @@
      */
     public HashMap(Map<? extends K, ? extends V> m) {
         this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1,
-                      DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
+                DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
         inflateTable(threshold);
 
         putAllForCreate(m);
+        // assert size == m.size();
     }
 
     private static int roundUpToPowerOf2(int number) {
@@ -294,7 +891,7 @@
         int capacity = roundUpToPowerOf2(toSize);
 
         threshold = (int) Math.min(capacity * loadFactor, MAXIMUM_CAPACITY + 1);
-        table = new Entry[capacity];
+        table = new Object[capacity];
     }
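
roundUpToPowerOf2() is only referenced here; its body falls outside the hunk. The requirement is simply that inflateTable() always produces a power-of-two capacity so that indexFor()'s masking works. One common way to compute it, shown as a sketch rather than the elided implementation; MAX_CAPACITY stands in for the class's MAXIMUM_CAPACITY constant.

    // Round number up to the next power of two, capped at MAX_CAPACITY.
    static final int MAX_CAPACITY = 1 << 30;

    static int roundUpToPowerOf2(int number) {
        if (number >= MAX_CAPACITY) return MAX_CAPACITY;
        return (number > 1) ? Integer.highestOneBit((number - 1) << 1) : 1;
    }
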
 
     // internal utilities
@@ -310,17 +907,24 @@
     }
 
     /**
+     * Return an initial value for the hashSeed, or 0 if the random seed is not
+     * enabled.
+     */
+    final int initHashSeed() {
+        if (sun.misc.VM.isBooted() && Holder.USE_HASHSEED) {
+            return sun.misc.Hashing.randomHashSeed(this);
+        }
+        return 0;
+    }
+
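
Most of hash() and all of indexFor() lie outside this hunk. The idea described by the javadoc below is that hashCode() bits are mixed so that a table indexed by the low bits (the length is always a power of two) still spreads keys whose hash codes differ only in the high bits. A hedged sketch of that pattern, using the kind of shift-and-XOR mixing the JDK 7 HashMap used; the exact constants in this patch may differ.

    // Illustrative bit spreading plus power-of-two masking, not the patch's exact code.
    // hashSeed is XORed in first (0 when randomized hashing is disabled).
    static int spread(int hashSeed, Object k) {
        int h = hashSeed ^ k.hashCode();
        h ^= (h >>> 20) ^ (h >>> 12);        // fold high bits downward
        return h ^ (h >>> 7) ^ (h >>> 4);
    }

    static int indexFor(int h, int length) {
        return h & (length - 1);             // length is always a power of two
    }
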
+    /**
     * Retrieves the object's hash code and applies a supplemental hash function to the
-     * result hash, which defends against poor quality hash functions.  This is
+     * result hash, which defends against poor quality hash functions. This is
      * critical because HashMap uses power-of-two length hash tables, that
      * otherwise encounter collisions for hashCodes that do not differ
      * in lower bits.
      */
     final int hash(Object k) {
-        if (k instanceof String) {
-            return ((String) k).hash32();
-        }
-
         int  h = hashSeed ^ k.hashCode();
 
         // This function ensures that hashCodes that differ only by
@@ -409,19 +1013,35 @@
         if (isEmpty()) {
             return null;
         }
+        if (key == null) {
+            return nullKeyEntry;
+        }
+        int hash = hash(key);
+        int bin = indexFor(hash, table.length);
 
-        int hash = (key == null) ? 0 : hash(key);
-        for (Entry<?,?> e = table[indexFor(hash, table.length)];
-             e != null;
-             e = e.next) {
-            Object k;
-            if (e.hash == hash &&
-                ((k = e.key) == key || (key != null && key.equals(k))))
-                return (Entry<K,V>)e;
+        if (table[bin] instanceof Entry) {
+            Entry<K,V> e = (Entry<K,V>) table[bin];
+            for (; e != null; e = (Entry<K,V>)e.next) {
+                Object k;
+                if (e.hash == hash &&
+                    ((k = e.key) == key || key.equals(k))) {
+                    return e;
+                }
+            }
+        } else if (table[bin] != null) {
+            TreeBin e = (TreeBin)table[bin];
+            TreeNode p = e.getTreeNode(hash, (K)key);
+            if (p != null) {
+                // assert p.entry.hash == hash && p.entry.key.equals(key);
+                return (Entry<K,V>)p.entry;
+            } else {
+                return null;
+            }
         }
         return null;
     }
 
+
     /**
      * Associates the specified value with the specified key in this map.
      * If the map previously contained a mapping for the key, the old
@@ -434,28 +1054,57 @@
      *         (A <tt>null</tt> return can also indicate that the map
      *         previously associated <tt>null</tt> with <tt>key</tt>.)
      */
+    @SuppressWarnings("unchecked")
     public V put(K key, V value) {
         if (table == EMPTY_TABLE) {
             inflateTable(threshold);
         }
-        if (key == null)
+        if (key == null)
             return putForNullKey(value);
         int hash = hash(key);
         int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> e = (Entry<K,V>)table[i];
-        for(; e != null; e = e.next) {
-            Object k;
-            if (e.hash == hash && ((k = e.key) == key || key.equals(k))) {
-                V oldValue = e.value;
-                e.value = value;
-                e.recordAccess(this);
-                return oldValue;
+        boolean checkIfNeedTree = false; // Might we convert bin to a TreeBin?
+
+        if (table[i] instanceof Entry) {
+            // Bin contains ordinary Entries.  Search for key in the linked list
+            // of entries, counting the number of entries.  Only check for
+            // TreeBin conversion if the list size is >= TREE_THRESHOLD.
+            // (The conversion still may not happen if the table gets resized.)
+            int listSize = 0;
+            Entry<K,V> e = (Entry<K,V>) table[i];
+            for (; e != null; e = (Entry<K,V>)e.next) {
+                Object k;
+                if (e.hash == hash && ((k = e.key) == key || key.equals(k))) {
+                    V oldValue = e.value;
+                    e.value = value;
+                    e.recordAccess(this);
+                    return oldValue;
+                }
+                listSize++;
+            }
+            // Didn't find, so fall through and call addEntry() to add the
+            // Entry and check for TreeBin conversion.
+            checkIfNeedTree = listSize >= TreeBin.TREE_THRESHOLD;
+        } else if (table[i] != null) {
+            TreeBin e = (TreeBin)table[i];
+            TreeNode p = e.putTreeNode(hash, key, value, null);
+            if (p == null) { // putTreeNode() added a new node
+                modCount++;
+                size++;
+                if (size >= threshold) {
+                    resize(2 * table.length);
+                }
+                return null;
+            } else { // putTreeNode() found an existing node
+                Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                V oldVal = pEntry.value;
+                pEntry.value = value;
+                pEntry.recordAccess(this);
+                return oldVal;
             }
         }
-
         modCount++;
-        addEntry(hash, key, value, i);
+        addEntry(hash, key, value, i, checkIfNeedTree);
         return null;
     }
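
The put() contract described in the javadoc above is unchanged by this patch: the previous value (or null) is returned, and a single null key is still supported, now held in the dedicated nullKeyEntry field rather than in bucket 0. A quick reminder against the public API:

    import java.util.HashMap;
    import java.util.Map;

    public class PutDemo {
        public static void main(String[] args) {
            Map<String, Integer> m = new HashMap<>();
            System.out.println(m.put("a", 1));   // null: no previous mapping
            System.out.println(m.put("a", 2));   // 1: the replaced value
            System.out.println(m.put(null, 3));  // null keys are allowed
            System.out.println(m.get(null));     // 3
        }
    }
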
 
@@ -463,47 +1112,79 @@
      * Offloaded version of put for null keys
      */
     private V putForNullKey(V value) {
-        @SuppressWarnings("unchecked")
-        Entry<K,V> e = (Entry<K,V>)table[0];
-        for(; e != null; e = e.next) {
-            if (e.key == null) {
-                V oldValue = e.value;
-                e.value = value;
-                e.recordAccess(this);
-                return oldValue;
-            }
+        if (nullKeyEntry != null) {
+            V oldValue = nullKeyEntry.value;
+            nullKeyEntry.value = value;
+            nullKeyEntry.recordAccess(this);
+            return oldValue;
         }
         modCount++;
-        addEntry(0, null, value, 0);
+        size++; // newEntry() skips size++
+        nullKeyEntry = newEntry(0, null, value, null);
         return null;
     }
 
+    private void putForCreateNullKey(V value) {
+        // Look for preexisting entry for key.  This will never happen for
+        // clone or deserialize.  It will only happen for construction if the
+        // input Map is a sorted map whose ordering is inconsistent w/ equals.
+        if (nullKeyEntry != null) {
+            nullKeyEntry.value = value;
+        } else {
+            nullKeyEntry = newEntry(0, null, value, null);
+            size++;
+        }
+    }
+
+
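
putForNullKey() and putForCreateNullKey() show the new shape of null-key handling: instead of threading null-keyed entries through bucket 0, the map keeps one dedicated nullKeyEntry field, so the null key never reaches hash() or the table at all. A minimal sketch of that pattern in isolation (hypothetical class, illustrative names):

    // Dedicated null-key slot: the null key bypasses hashing and the bucket array.
    final class NullSlot<V> {
        private V value;
        private boolean present;

        V put(V newValue) {              // returns the previous value, like Map.put
            V old = present ? value : null;
            value = newValue;
            present = true;
            return old;
        }

        V get()       { return present ? value : null; }
        boolean has() { return present; }
        V remove()    { V old = present ? value : null; value = null; present = false; return old; }
    }
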
     /**
      * This method is used instead of put by constructors and
      * pseudoconstructors (clone, readObject).  It does not resize the table,
-     * check for comodification, etc.  It calls createEntry rather than
-     * addEntry.
+     * check for comodification, etc., though it will convert bins to TreeBins
+     * as needed.  It calls createEntry rather than addEntry.
      */
+    @SuppressWarnings("unchecked")
     private void putForCreate(K key, V value) {
-        int hash = null == key ? 0 : hash(key);
+        if (null == key) {
+            putForCreateNullKey(value);
+            return;
+        }
+        int hash = hash(key);
         int i = indexFor(hash, table.length);
+        boolean checkIfNeedTree = false; // Might we convert bin to a TreeBin?
 
         /**
          * Look for preexisting entry for key.  This will never happen for
          * clone or deserialize.  It will only happen for construction if the
          * input Map is a sorted map whose ordering is inconsistent w/ equals.
          */
-        for (@SuppressWarnings("unchecked")
-             Entry<?,V> e = (Entry<?,V>)table[i]; e != null; e = e.next) {
-            Object k;
-            if (e.hash == hash &&
-                ((k = e.key) == key || (key != null && key.equals(k)))) {
-                e.value = value;
-                return;
+        if (table[i] instanceof Entry) {
+            int listSize = 0;
+            Entry<K,V> e = (Entry<K,V>) table[i];
+            for (; e != null; e = (Entry<K,V>)e.next) {
+                Object k;
+                if (e.hash == hash && ((k = e.key) == key || key.equals(k))) {
+                    e.value = value;
+                    return;
+                }
+                listSize++;
             }
+            // Didn't find, fall through to createEntry().
+            // Check for conversion to TreeBin done via createEntry().
+            checkIfNeedTree = listSize >= TreeBin.TREE_THRESHOLD;
+        } else if (table[i] != null) {
+            TreeBin e = (TreeBin)table[i];
+            TreeNode p = e.putTreeNode(hash, key, value, null);
+            if (p != null) {
+                p.entry.setValue(value); // Found an existing node, set value
+            } else {
+                size++; // Added a new TreeNode, so update size
+            }
+            // don't need modCount++/check for resize - just return
+            return;
         }
 
-        createEntry(hash, key, value, i);
+        createEntry(hash, key, value, i, checkIfNeedTree);
     }
 
     private void putAllForCreate(Map<? extends K, ? extends V> m) {
@@ -526,14 +1207,14 @@
      *        is irrelevant).
      */
     void resize(int newCapacity) {
-        Entry<?,?>[] oldTable = table;
+        Object[] oldTable = table;
         int oldCapacity = oldTable.length;
         if (oldCapacity == MAXIMUM_CAPACITY) {
             threshold = Integer.MAX_VALUE;
             return;
         }
 
-        Entry<?,?>[] newTable = new Entry<?,?>[newCapacity];
+        Object[] newTable = new Object[newCapacity];
         transfer(newTable);
         table = newTable;
         threshold = (int)Math.min(newCapacity * loadFactor, MAXIMUM_CAPACITY + 1);
@@ -541,19 +1222,31 @@
 
     /**
      * Transfers all entries from current table to newTable.
+     *
+     * Assumes newTable is larger than table
      */
     @SuppressWarnings("unchecked")
-    void transfer(Entry<?,?>[] newTable) {
-        Entry<?,?>[] src = table;
+    void transfer(Object[] newTable) {
+        Object[] src = table;
+        // assert newTable.length > src.length : "newTable.length(" +
+        //   newTable.length + ") expected to be > src.length("+src.length+")";
         int newCapacity = newTable.length;
-        for (int j = 0; j < src.length; j++ ) {
-            Entry<K,V> e = (Entry<K,V>) src[j];
-            while(null != e) {
-                Entry<K,V> next = e.next;
-                int i = indexFor(e.hash, newCapacity);
-                e.next = (Entry<K,V>) newTable[i];
-                newTable[i] = e;
-                e = next;
+        for (int j = 0; j < src.length; j++) {
+             if (src[j] instanceof Entry) {
+                // Assume: since wasn't TreeBin before, won't need TreeBin now
+                Entry<K,V> e = (Entry<K,V>) src[j];
+                while (null != e) {
+                    Entry<K,V> next = (Entry<K,V>)e.next;
+                    int i = indexFor(e.hash, newCapacity);
+                    e.next = (Entry<K,V>) newTable[i];
+                    newTable[i] = e;
+                    e = next;
+                }
+            } else if (src[j] != null) {
+                TreeBin e = (TreeBin) src[j];
+                TreeBin loTree = new TreeBin();
+                TreeBin hiTree = new TreeBin();
+                e.splitTreeBin(newTable, j, loTree, hiTree);
             }
         }
         Arrays.fill(table, null);
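
The loTree/hiTree split above relies on a property of power-of-two tables: when the capacity doubles, indexFor() exposes exactly one more hash bit, so every entry in old bucket j lands either back in j or in j + oldCapacity. A small, self-contained check of that property (illustrative only):

    public class ResizeSplitDemo {
        static int indexFor(int h, int length) { return h & (length - 1); }

        public static void main(String[] args) {
            int oldCap = 16, newCap = 32;
            for (int h : new int[] {7, 23, 100, -5, 0x7fffffff}) {
                int lo = indexFor(h, oldCap);
                int hi = indexFor(h, newCap);
                // hi is either lo (new bit is 0) or lo + oldCap (new bit is 1)
                System.out.println(h + ": " + lo + " -> " + hi
                        + (hi == lo ? "  (stays)" : "  (moves up by oldCap)"));
            }
        }
    }
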
@@ -585,20 +1278,13 @@
          * By using the conservative calculation, we subject ourself
          * to at most one extra resize.
          */
-        if (numKeysToBeAdded > threshold) {
-            int targetCapacity = (int)(numKeysToBeAdded / loadFactor + 1);
-            if (targetCapacity > MAXIMUM_CAPACITY)
-                targetCapacity = MAXIMUM_CAPACITY;
-            int newCapacity = table.length;
-            while (newCapacity < targetCapacity)
-                newCapacity <<= 1;
-            if (newCapacity > table.length)
-                resize(newCapacity);
+        if (numKeysToBeAdded > threshold && table.length < MAXIMUM_CAPACITY) {
+            resize(table.length * 2);
         }
 
         for (Map.Entry<? extends K, ? extends V> e : m.entrySet())
             put(e.getKey(), e.getValue());
     }
 
     /**
      * Removes the mapping for the specified key from this map if present.
@@ -621,24 +1307,57 @@
         if (table == EMPTY_TABLE) {
             inflateTable(threshold);
         }
-        int hash = (key == null) ? 0 : hash(key);
-        int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> e = (Entry<K,V>)table[i];
-        for(; e != null; e = e.next) {
-            if (e.hash == hash && Objects.equals(e.key, key)) {
-                if(e.value != null) {
-                    return e.value;
-                }
-                e.value = value;
-                modCount++;
-                e.recordAccess(this);
+        if (key == null) {
+            if (nullKeyEntry == null || nullKeyEntry.value == null) {
+                putForNullKey(value);
                 return null;
+            } else {
+                return nullKeyEntry.value;
             }
         }
+        int hash = hash(key);
+        int i = indexFor(hash, table.length);
+        boolean checkIfNeedTree = false; // Might we convert bin to a TreeBin?
 
+        if (table[i] instanceof Entry) {
+            int listSize = 0;
+            Entry<K,V> e = (Entry<K,V>) table[i];
+            for (; e != null; e = (Entry<K,V>)e.next) {
+                if (e.hash == hash && Objects.equals(e.key, key)) {
+                    if (e.value != null) {
+                        return e.value;
+                    }
+                    e.value = value;
+                    e.recordAccess(this);
+                    return null;
+                }
+                listSize++;
+            }
+            // Didn't find, so fall through and call addEntry() to add the
+            // Entry and check for TreeBin conversion.
+            checkIfNeedTree = listSize >= TreeBin.TREE_THRESHOLD;
+        } else if (table[i] != null) {
+            TreeBin e = (TreeBin)table[i];
+            TreeNode p = e.putTreeNode(hash, key, value, null);
+            if (p == null) { // not found, putTreeNode() added a new node
+                modCount++;
+                size++;
+                if (size >= threshold) {
+                    resize(2 * table.length);
+                }
+                return null;
+            } else { // putTreeNode() found an existing node
+                Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                V oldVal = pEntry.value;
+                if (oldVal == null) { // only replace if maps to null
+                    pEntry.value = value;
+                    pEntry.recordAccess(this);
+                }
+                return oldVal;
+            }
+        }
         modCount++;
-        addEntry(hash, key, value, i);
+        addEntry(hash, key, value, i, checkIfNeedTree);
         return null;
     }
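
As in the Entry-chain branch, the TreeBin branch above only overwrites a mapping whose current value is null, matching the Map.putIfAbsent contract. Usage against the public API:

    import java.util.HashMap;
    import java.util.Map;

    public class PutIfAbsentDemo {
        public static void main(String[] args) {
            Map<String, Integer> m = new HashMap<>();
            System.out.println(m.putIfAbsent("a", 1)); // null: mapping added
            System.out.println(m.putIfAbsent("a", 2)); // 1: existing value kept
            m.put("b", null);
            System.out.println(m.putIfAbsent("b", 3)); // null: a null value counts as absent
            System.out.println(m.get("b"));            // 3
        }
    }
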
 
@@ -647,31 +1366,61 @@
         if (isEmpty()) {
             return false;
         }
-        int hash = (key == null) ? 0 : hash(key);
-        int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> prev = (Entry<K,V>)table[i];
-        Entry<K,V> e = prev;
-
-        while (e != null) {
-            Entry<K,V> next = e.next;
-            if (e.hash == hash && Objects.equals(e.key, key)) {
-                if (!Objects.equals(e.value, value)) {
-                    return false;
-                }
-                modCount++;
-                size--;
-                if (prev == e)
-                    table[i] = next;
-                else
-                    prev.next = next;
-                e.recordRemoval(this);
+        if (key == null) {
+            if (nullKeyEntry != null &&
+                 Objects.equals(nullKeyEntry.value, value)) {
+                removeNullKey();
                 return true;
             }
-            prev = e;
-            e = next;
+            return false;
         }
+        int hash = hash(key);
+        int i = indexFor(hash, table.length);
 
+        if (table[i] instanceof Entry) {
+            @SuppressWarnings("unchecked")
+            Entry<K,V> prev = (Entry<K,V>) table[i];
+            Entry<K,V> e = prev;
+            while (e != null) {
+                @SuppressWarnings("unchecked")
+                Entry<K,V> next = (Entry<K,V>) e.next;
+                if (e.hash == hash && Objects.equals(e.key, key)) {
+                    if (!Objects.equals(e.value, value)) {
+                        return false;
+                    }
+                    modCount++;
+                    size--;
+                    if (prev == e)
+                        table[i] = next;
+                    else
+                        prev.next = next;
+                    e.recordRemoval(this);
+                    return true;
+                }
+                prev = e;
+                e = next;
+            }
+        } else if (table[i] != null) {
+            TreeBin tb = ((TreeBin) table[i]);
+            TreeNode p = tb.getTreeNode(hash, (K)key);
+            if (p != null) {
+                Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                // assert pEntry.key.equals(key);
+                if (Objects.equals(pEntry.value, value)) {
+                    modCount++;
+                    size--;
+                    tb.deleteTreeNode(p);
+                    pEntry.recordRemoval(this);
+                    if (tb.root == null || tb.first == null) {
+                        // assert tb.root == null && tb.first == null :
+                        //         "TreeBin.first and root should both be null";
+                        // TreeBin is now empty, we should blank this bin
+                        table[i] = null;
+                    }
+                    return true;
+                }
+            }
+        }
         return false;
     }
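
remove(Object, Object) above removes a mapping only when both the key and the current value match, for Entry chains, TreeBins, and the dedicated null-key slot alike. Usage:

    import java.util.HashMap;
    import java.util.Map;

    public class ConditionalRemoveDemo {
        public static void main(String[] args) {
            Map<String, Integer> m = new HashMap<>();
            m.put("a", 1);
            System.out.println(m.remove("a", 2));   // false: value does not match
            System.out.println(m.remove("a", 1));   // true: mapping removed
            System.out.println(m.containsKey("a")); // false
        }
    }
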
 
@@ -680,39 +1429,82 @@
         if (isEmpty()) {
             return false;
         }
-        int hash = (key == null) ? 0 : hash(key);
-        int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> e = (Entry<K,V>)table[i];
-        for (; e != null; e = e.next) {
-            if (e.hash == hash && Objects.equals(e.key, key) && Objects.equals(e.value, oldValue)) {
-                e.value = newValue;
-                e.recordAccess(this);
+        if (key == null) {
+            if (nullKeyEntry != null &&
+                 Objects.equals(nullKeyEntry.value, oldValue)) {
+                putForNullKey(newValue);
                 return true;
             }
+            return false;
         }
+        int hash = hash(key);
+        int i = indexFor(hash, table.length);
 
+        if (table[i] instanceof Entry) {
+            @SuppressWarnings("unchecked")
+            Entry<K,V> e = (Entry<K,V>) table[i];
+            for (; e != null; e = (Entry<K,V>)e.next) {
+                if (e.hash == hash && Objects.equals(e.key, key) && Objects.equals(e.value, oldValue)) {
+                    e.value = newValue;
+                    e.recordAccess(this);
+                    return true;
+                }
+            }
+            return false;
+        } else if (table[i] != null) {
+            TreeBin tb = ((TreeBin) table[i]);
+            TreeNode p = tb.getTreeNode(hash, key);
+            if (p != null) {
+                Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                // assert pEntry.key.equals(key);
+                if (Objects.equals(pEntry.value, oldValue)) {
+                    pEntry.value = newValue;
+                    pEntry.recordAccess(this);
+                    return true;
+                }
+            }
+        }
         return false;
     }
 
     @Override
     public V replace(K key, V value) {
         if (isEmpty()) {
             return null;
         }
-        int hash = (key == null) ? 0 : hash(key);
+        if (key == null) {
+            if (nullKeyEntry != null) {
+                return putForNullKey(value);
+            }
+            return null;
+        }
+        int hash = hash(key);
         int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> e = (Entry<K,V>)table[i];
-        for (; e != null; e = e.next) {
-            if (e.hash == hash && Objects.equals(e.key, key)) {
-                V oldValue = e.value;
-                e.value = value;
-                e.recordAccess(this);
+        if (table[i] instanceof Entry) {
+            @SuppressWarnings("unchecked")
+            Entry<K,V> e = (Entry<K,V>)table[i];
+            for (; e != null; e = (Entry<K,V>)e.next) {
+                if (e.hash == hash && Objects.equals(e.key, key)) {
+                    V oldValue = e.value;
+                    e.value = value;
+                    e.recordAccess(this);
+                    return oldValue;
+                }
+            }
+
+            return null;
+        } else if (table[i] != null) {
+            TreeBin tb = ((TreeBin) table[i]);
+            TreeNode p = tb.getTreeNode(hash, key);
+            if (p != null) {
+                Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                // assert pEntry.key.equals(key);
+                V oldValue = pEntry.value;
+                pEntry.value = value;
+                pEntry.recordAccess(this);
                 return oldValue;
             }
         }
-
         return null;
     }
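
The two replace() overloads above act only on keys that are already mapped; replace(key, oldValue, newValue) additionally requires the current value to match. Usage:

    import java.util.HashMap;
    import java.util.Map;

    public class ReplaceDemo {
        public static void main(String[] args) {
            Map<String, Integer> m = new HashMap<>();
            m.put("a", 1);
            System.out.println(m.replace("a", 1, 10)); // true: 1 matched, now 10
            System.out.println(m.replace("a", 1, 99)); // false: current value is 10
            System.out.println(m.replace("a", 20));    // 10: old value returned, now 20
            System.out.println(m.replace("b", 5));     // null: "b" is absent, nothing added
        }
    }
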
 
@@ -721,21 +1513,75 @@
         if (table == EMPTY_TABLE) {
             inflateTable(threshold);
         }
-        int hash = (key == null) ? 0 : hash(key);
+        if (key == null) {
+            if (nullKeyEntry == null || nullKeyEntry.value == null) {
+                V newValue = mappingFunction.apply(key);
+                if (newValue != null) {
+                    putForNullKey(newValue);
+                }
+                return newValue;
+            }
+            return nullKeyEntry.value;
+        }
+        int hash = hash(key);
         int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> e = (Entry<K,V>)table[i];
-        for (; e != null; e = e.next) {
-            if (e.hash == hash && Objects.equals(e.key, key)) {
-                V oldValue = e.value;
-                return oldValue == null ? (e.value = mappingFunction.apply(key)) : oldValue;
+        boolean checkIfNeedTree = false; // Might we convert bin to a TreeBin?
+
+        if (table[i] instanceof Entry) {
+            int listSize = 0;
+            @SuppressWarnings("unchecked")
+            Entry<K,V> e = (Entry<K,V>)table[i];
+            for (; e != null; e = (Entry<K,V>)e.next) {
+                if (e.hash == hash && Objects.equals(e.key, key)) {
+                    V oldValue = e.value;
+                    if (oldValue == null) {
+                        V newValue = mappingFunction.apply(key);
+                        if (newValue != null) {
+                            e.value = newValue;
+                            e.recordAccess(this);
+                        }
+                        return newValue;
+                    }
+                    return oldValue;
+                }
+                listSize++;
+            }
+            // Didn't find, fall through to call the mapping function
+            checkIfNeedTree = listSize >= TreeBin.TREE_THRESHOLD;
+        } else if (table[i] != null) {
+            TreeBin e = (TreeBin)table[i];
+            V value = mappingFunction.apply(key);
+            if (value == null) { // Return the existing value, if any
+                TreeNode p = e.getTreeNode(hash, key);
+                if (p != null) {
+                    return (V) p.entry.value;
+                }
+                return null;
+            } else { // Put the new value into the Tree, if absent
+                TreeNode p = e.putTreeNode(hash, key, value, null);
+                if (p == null) { // not found, new node was added
+                    modCount++;
+                    size++;
+                    if (size >= threshold) {
+                        resize(2 * table.length);
+                    }
+                    return value;
+                } else { // putTreeNode() found an existing node
+                    Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                    V oldVal = pEntry.value;
+                    if (oldVal == null) { // only replace if maps to null
+                        pEntry.value = value;
+                        pEntry.recordAccess(this);
+                        return value;
+                    }
+                    return oldVal;
+                }
             }
         }
-
         V newValue = mappingFunction.apply(key);
-        if (newValue != null) {
+        if (newValue != null) { // add Entry and check for TreeBin conversion
             modCount++;
-            addEntry(hash, key, newValue, i);
+            addEntry(hash, key, newValue, i, checkIfNeedTree);
         }
 
         return newValue;
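
computeIfAbsent() above runs the mapping function only when the key is absent (or mapped to null), and stores only a non-null result; the classic use is building multimaps. Usage:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ComputeIfAbsentDemo {
        public static void main(String[] args) {
            Map<String, List<Integer>> multi = new HashMap<>();
            multi.computeIfAbsent("a", k -> new ArrayList<>()).add(1);
            multi.computeIfAbsent("a", k -> new ArrayList<>()).add(2); // existing list reused
            System.out.println(multi); // {a=[1, 2]}
        }
    }
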
@@ -746,59 +1592,34 @@
         if (isEmpty()) {
             return null;
         }
-        int hash = (key == null) ? 0 : hash(key);
-        int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> prev = (Entry<K,V>)table[i];
-        Entry<K,V> e = prev;
-
-        while (e != null) {
-            Entry<K,V> next = e.next;
-            if (e.hash == hash && Objects.equals(e.key, key)) {
-                V oldValue = e.value;
-                if (oldValue == null)
-                    break;
+        if (key == null) {
+            V oldValue;
+            if (nullKeyEntry != null && (oldValue = nullKeyEntry.value) != null) {
                 V newValue = remappingFunction.apply(key, oldValue);
-                modCount++;
-                if (newValue == null) {
-                    size--;
-                    if (prev == e)
-                        table[i] = next;
-                    else
-                        prev.next = next;
-                    e.recordRemoval(this);
+                if (newValue != null) {
+                    putForNullKey(newValue);
+                    return newValue;
                 } else {
-                    e.value = newValue;
-                    e.recordAccess(this);
+                    removeNullKey();
                 }
-                return newValue;
             }
-            prev = e;
-            e = next;
-        }
-
-        return null;
-    }
-
-    @Override
-    public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
-        if (table == EMPTY_TABLE) {
-            inflateTable(threshold);
+            return null;
         }
-        int hash = (key == null) ? 0 : hash(key);
+        int hash = hash(key);
         int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> prev = (Entry<K,V>)table[i];
-        Entry<K,V> e = prev;
-
-        while (e != null) {
-            Entry<K,V> next = e.next;
-            if (e.hash == hash && Objects.equals(e.key, key)) {
-                V oldValue = e.value;
-                V newValue = remappingFunction.apply(key, oldValue);
-                if (newValue != oldValue) {
-                    modCount++;
+        if (table[i] instanceof Entry) {
+            @SuppressWarnings("unchecked")
+            Entry<K,V> prev = (Entry<K,V>)table[i];
+            Entry<K,V> e = prev;
+            while (e != null) {
+                Entry<K,V> next = (Entry<K,V>)e.next;
+                if (e.hash == hash && Objects.equals(e.key, key)) {
+                    V oldValue = e.value;
+                    if (oldValue == null)
+                        break;
+                    V newValue = remappingFunction.apply(key, oldValue);
                     if (newValue == null) {
+                        modCount++;
                         size--;
                         if (prev == e)
                             table[i] = next;
@@ -809,17 +1630,136 @@
                         e.value = newValue;
                         e.recordAccess(this);
                     }
+                    return newValue;
                 }
-                return newValue;
+                prev = e;
+                e = next;
+            }
+        } else if (table[i] != null) {
+            TreeBin tb = (TreeBin)table[i];
+            TreeNode p = tb.getTreeNode(hash, key);
+            if (p != null) {
+                Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                // assert pEntry.key.equals(key);
+                V oldValue = pEntry.value;
+                if (oldValue != null) {
+                    V newValue = remappingFunction.apply(key, oldValue);
+                    if (newValue == null) { // remove mapping
+                        modCount++;
+                        size--;
+                        tb.deleteTreeNode(p);
+                        pEntry.recordRemoval(this);
+                        if (tb.root == null || tb.first == null) {
+                            // assert tb.root == null && tb.first == null :
+                            //     "TreeBin.first and root should both be null";
+                            // TreeBin is now empty, we should blank this bin
+                            table[i] = null;
+                        }
+                    } else {
+                        pEntry.value = newValue;
+                        pEntry.recordAccess(this);
+                    }
+                    return newValue;
+                }
+            }
+        }
+        return null;
+    }
+
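
computeIfPresent() above does the opposite: it runs the remapping function only for keys already mapped to a non-null value, and removes the mapping when the function returns null. A common counter-decrement pattern:

    import java.util.HashMap;
    import java.util.Map;

    public class ComputeIfPresentDemo {
        public static void main(String[] args) {
            Map<String, Integer> counts = new HashMap<>();
            counts.put("a", 1);
            counts.computeIfPresent("a", (k, v) -> v - 1 == 0 ? null : v - 1);
            System.out.println(counts.containsKey("a")); // false: reached zero, removed
            counts.computeIfPresent("b", (k, v) -> 42);  // no-op: "b" is absent
            System.out.println(counts);                  // {}
        }
    }
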
+    @Override
+    public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
+        if (table == EMPTY_TABLE) {
+            inflateTable(threshold);
+        }
+        if (key == null) {
+            V oldValue = nullKeyEntry == null ? null : nullKeyEntry.value;
+            V newValue = remappingFunction.apply(key, oldValue);
+            if (newValue != oldValue) {
+                if (newValue == null) {
+                    removeNullKey();
+                } else {
+                    putForNullKey(newValue);
+                }
             }
-            prev = e;
-            e = next;
+            return newValue;
+        }
+        int hash = hash(key);
+        int i = indexFor(hash, table.length);
+        boolean checkIfNeedTree = false; // Might we convert bin to a TreeBin?
+
+        if (table[i] instanceof Entry) {
+            int listSize = 0;
+            @SuppressWarnings("unchecked")
+            Entry<K,V> prev = (Entry<K,V>)table[i];
+            Entry<K,V> e = prev;
+
+            while (e != null) {
+                Entry<K,V> next = (Entry<K,V>)e.next;
+                if (e.hash == hash && Objects.equals(e.key, key)) {
+                    V oldValue = e.value;
+                    V newValue = remappingFunction.apply(key, oldValue);
+                    if (newValue != oldValue) {
+                        if (newValue == null) {
+                            modCount++;
+                            size--;
+                            if (prev == e)
+                                table[i] = next;
+                            else
+                                prev.next = next;
+                            e.recordRemoval(this);
+                        } else {
+                            e.value = newValue;
+                            e.recordAccess(this);
+                        }
+                    }
+                    return newValue;
+                }
+                prev = e;
+                e = next;
+                listSize++;
+            }
+            checkIfNeedTree = listSize >= TreeBin.TREE_THRESHOLD;
+        } else if (table[i] != null) {
+            TreeBin tb = (TreeBin)table[i];
+            TreeNode p = tb.getTreeNode(hash, key);
+            V oldValue = p == null ? null : (V)p.entry.value;
+            V newValue = remappingFunction.apply(key, oldValue);
+            if (newValue != oldValue) {
+                if (newValue == null) {
+                    Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                    modCount++;
+                    size--;
+                    tb.deleteTreeNode(p);
+                    pEntry.recordRemoval(this);
+                    if (tb.root == null || tb.first == null) {
+                        // assert tb.root == null && tb.first == null :
+                        //         "TreeBin.first and root should both be null";
+                        // TreeBin is now empty, we should blank this bin
+                        table[i] = null;
+                    }
+                } else {
+                    if (p != null) { // just update the value
+                        Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                        pEntry.value = newValue;
+                        pEntry.recordAccess(this);
+                    } else { // need to put new node
+                        p = tb.putTreeNode(hash, key, newValue, null);
+                        // assert p == null; // should have added a new node
+                        modCount++;
+                        size++;
+                        if (size >= threshold) {
+                            resize(2 * table.length);
+                        }
+                    }
+                }
+            }
+            return newValue;
         }
 
         V newValue = remappingFunction.apply(key, null);
         if (newValue != null) {
             modCount++;
-            addEntry(hash, key, newValue, i);
+            addEntry(hash, key, newValue, i, checkIfNeedTree);
         }
 
         return newValue;
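
compute() above always invokes the remapping function, whether or not the key is present, and treats a null result as "remove the mapping". Usage:

    import java.util.HashMap;
    import java.util.Map;

    public class ComputeDemo {
        public static void main(String[] args) {
            Map<String, Integer> counts = new HashMap<>();
            counts.compute("a", (k, v) -> v == null ? 1 : v + 1); // a=1
            counts.compute("a", (k, v) -> v == null ? 1 : v + 1); // a=2
            counts.compute("a", (k, v) -> null);                  // removes "a"
            System.out.println(counts);                           // {}
        }
    }
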
@@ -830,40 +1770,96 @@
         if (table == EMPTY_TABLE) {
             inflateTable(threshold);
         }
-        int hash = (key == null) ? 0 : hash(key);
+        if (key == null) {
+            V oldValue = nullKeyEntry == null ? null : nullKeyEntry.value;
+            V newValue = oldValue == null ? value : remappingFunction.apply(oldValue, value);
+            if (newValue != null) {
+                putForNullKey(newValue);
+            } else if (nullKeyEntry != null) {
+                removeNullKey();
+            }
+            return newValue;
+        }
+        int hash = hash(key);
         int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-        Entry<K,V> prev = (Entry<K,V>)table[i];
-        Entry<K,V> e = prev;
+        boolean checkIfNeedTree = false; // Might we convert bin to a TreeBin?
+
+        if (table[i] instanceof Entry) {
+            int listSize = 0;
+            @SuppressWarnings("unchecked")
+            Entry<K,V> prev = (Entry<K,V>)table[i];
+            Entry<K,V> e = prev;
 
-        while (e != null) {
-            Entry<K,V> next = e.next;
-            if (e.hash == hash && Objects.equals(e.key, key)) {
-                V oldValue = e.value;
-                V newValue = remappingFunction.apply(oldValue, value);
-                modCount++;
-                if (newValue == null) {
+            while (e != null) {
+                Entry<K,V> next = (Entry<K,V>)e.next;
+                if (e.hash == hash && Objects.equals(e.key, key)) {
+                    V oldValue = e.value;
+                    V newValue = (oldValue == null) ? value :
+                                 remappingFunction.apply(oldValue, value);
+                    if (newValue == null) {
+                        modCount++;
+                        size--;
+                        if (prev == e)
+                            table[i] = next;
+                        else
+                            prev.next = next;
+                        e.recordRemoval(this);
+                    } else {
+                        e.value = newValue;
+                        e.recordAccess(this);
+                    }
+                    return newValue;
+                }
+                prev = e;
+                e = next;
+                listSize++;
+            }
+            // Didn't find, so fall through and (maybe) call addEntry() to add
+            // the Entry and check for TreeBin conversion.
+            checkIfNeedTree = listSize >= TreeBin.TREE_THRESHOLD;
+        } else if (table[i] != null) {
+            TreeBin tb = (TreeBin)table[i];
+            TreeNode p = tb.getTreeNode(hash, key);
+            V oldValue = p == null ? null : (V)p.entry.value;
+            V newValue = (oldValue == null) ? value :
+                         remappingFunction.apply(oldValue, value);
+            if (newValue == null) {
+                if (p != null) {
+                    Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                    modCount++;
                     size--;
-                    if (prev == e)
-                        table[i] = next;
-                    else
-                        prev.next = next;
-                    e.recordRemoval(this);
-                } else {
-                    e.value = newValue;
-                    e.recordAccess(this);
+                    tb.deleteTreeNode(p);
+                    pEntry.recordRemoval(this);
+
+                    if (tb.root == null || tb.first == null) {
+                        // assert tb.root == null && tb.first == null :
+                        //         "TreeBin.first and root should both be null";
+                        // TreeBin is now empty, we should blank this bin
+                        table[i] = null;
+                    }
                 }
-                return newValue;
+                return null;
+            } else if (newValue != oldValue) {
+                if (p != null) { // just update the value
+                    Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                    pEntry.value = newValue;
+                    pEntry.recordAccess(this);
+                } else { // need to put new node
+                    p = tb.putTreeNode(hash, key, newValue, null);
+                    // assert p == null; // should have added a new node
+                    modCount++;
+                    size++;
+                    if (size >= threshold) {
+                        resize(2 * table.length);
+                    }
+                }
             }
-            prev = e;
-            e = next;
+            return newValue;
         }
-
         if (value != null) {
             modCount++;
-            addEntry(hash, key, value, i);
+            addEntry(hash, key, value, i, checkIfNeedTree);
         }
-
         return value;
     }
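
merge() above uses the given value directly when the key is absent (or mapped to null), otherwise combines old and new values with the remapping function, removing the mapping if the result is null. The standard word-count idiom:

    import java.util.HashMap;
    import java.util.Map;

    public class MergeDemo {
        public static void main(String[] args) {
            Map<String, Integer> freq = new HashMap<>();
            for (String w : new String[] {"to", "be", "or", "not", "to", "be"}) {
                freq.merge(w, 1, Integer::sum);
            }
            System.out.println(freq); // e.g. {not=1, to=2, or=1, be=2} (iteration order unspecified)
        }
    }
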
 
@@ -873,36 +1869,65 @@
      * Removes and returns the entry associated with the specified key
      * in the HashMap.  Returns null if the HashMap contains no mapping
      * for this key.
+     *
+     * We don't bother converting TreeBins back to Entry lists if the bin falls
+     * back below TREE_THRESHOLD, but we do clear bins when removing the last
+     * TreeNode in a TreeBin.
      */
     final Entry<K,V> removeEntryForKey(Object key) {
         if (isEmpty()) {
             return null;
         }
-        int hash = (key == null) ? 0 : hash(key);
+        if (key == null) {
+            if (nullKeyEntry != null) {
+                return removeNullKey();
+            }
+            return null;
+        }
+        int hash = hash(key);
         int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
+
+        if (table[i] instanceof Entry) {
+            @SuppressWarnings("unchecked")
             Entry<K,V> prev = (Entry<K,V>)table[i];
-        Entry<K,V> e = prev;
+            Entry<K,V> e = prev;
 
-        while (e != null) {
-            Entry<K,V> next = e.next;
-            Object k;
-            if (e.hash == hash &&
-                ((k = e.key) == key || (key != null && key.equals(k)))) {
+            while (e != null) {
+                @SuppressWarnings("unchecked")
+                Entry<K,V> next = (Entry<K,V>) e.next;
+                if (e.hash == hash && Objects.equals(e.key, key)) {
+                    modCount++;
+                    size--;
+                    if (prev == e)
+                        table[i] = next;
+                    else
+                        prev.next = next;
+                    e.recordRemoval(this);
+                    return e;
+                }
+                prev = e;
+                e = next;
+            }
+        } else if (table[i] != null) {
+            TreeBin tb = ((TreeBin) table[i]);
+            TreeNode p = tb.getTreeNode(hash, (K)key);
+            if (p != null) {
+                Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                // assert pEntry.key.equals(key);
                 modCount++;
                 size--;
-                if (prev == e)
-                    table[i] = next;
-                else
-                    prev.next = next;
-                e.recordRemoval(this);
-                return e;
+                tb.deleteTreeNode(p);
+                pEntry.recordRemoval(this);
+                if (tb.root == null || tb.first == null) {
+                    // assert tb.root == null && tb.first == null :
+                    //             "TreeBin.first and root should both be null";
+                    // TreeBin is now empty, we should blank this bin
+                    table[i] = null;
+                }
+                return pEntry;
             }
-            prev = e;
-            e = next;
         }
-
-        return e;
+        return null;
     }
 
     /**
@@ -915,29 +1940,75 @@
 
         Map.Entry<?,?> entry = (Map.Entry<?,?>) o;
         Object key = entry.getKey();
-        int hash = (key == null) ? 0 : hash(key);
+
+        if (key == null) {
+            if (entry.equals(nullKeyEntry)) {
+                return removeNullKey();
+            }
+            return null;
+        }
+
+        int hash = hash(key);
         int i = indexFor(hash, table.length);
-        @SuppressWarnings("unchecked")
-            Entry<K,V> prev = (Entry<K,V>)table[i];
-        Entry<K,V> e = prev;
+
+        if (table[i] instanceof Entry) {
+            @SuppressWarnings("unchecked")
+                Entry<K,V> prev = (Entry<K,V>)table[i];
+            Entry<K,V> e = prev;
 
-        while (e != null) {
-            Entry<K,V> next = e.next;
-            if (e.hash == hash && e.equals(entry)) {
+            while (e != null) {
+                @SuppressWarnings("unchecked")
+                Entry<K,V> next = (Entry<K,V>)e.next;
+                if (e.hash == hash && e.equals(entry)) {
+                    modCount++;
+                    size--;
+                    if (prev == e)
+                        table[i] = next;
+                    else
+                        prev.next = next;
+                    e.recordRemoval(this);
+                    return e;
+                }
+                prev = e;
+                e = next;
+            }
+        } else if (table[i] != null) {
+            TreeBin tb = ((TreeBin) table[i]);
+            TreeNode p = tb.getTreeNode(hash, (K)key);
+            if (p != null && p.entry.equals(entry)) {
+                @SuppressWarnings("unchecked")
+                Entry<K,V> pEntry = (Entry<K,V>)p.entry;
+                // assert pEntry.key.equals(key);
                 modCount++;
                 size--;
-                if (prev == e)
-                    table[i] = next;
-                else
-                    prev.next = next;
-                e.recordRemoval(this);
-                return e;
+                tb.deleteTreeNode(p);
+                pEntry.recordRemoval(this);
+                if (tb.root == null || tb.first == null) {
+                    // assert tb.root == null && tb.first == null :
+                    //             "TreeBin.first and root should both be null";
+                    // TreeBin is now empty, we should blank this bin
+                    table[i] = null;
+                }
+                return pEntry;
             }
-            prev = e;
-            e = next;
         }
+        return null;
+    }
 
-        return e;
+    /*
+     * Remove the mapping for the null key, and update internal accounting
+     * (size, modCount, recordRemoval, etc.).
+     *
+     * Assumes nullKeyEntry is non-null.
+     */
+    private Entry<K,V> removeNullKey() {
+        // assert nullKeyEntry != null;
+        Entry<K,V> retVal = nullKeyEntry;
+        modCount++;
+        size--;
+        retVal.recordRemoval(this);
+        nullKeyEntry = null;
+        return retVal;
     }
 
     /**
@@ -946,6 +2017,9 @@
      */
     public void clear() {
         modCount++;
+        if (nullKeyEntry != null) {
+            nullKeyEntry = null;
+        }
         Arrays.fill(table, null);
         size = 0;
     }
@@ -959,27 +2033,58 @@
      *         specified value
      */
     public boolean containsValue(Object value) {
-        if (value == null)
+        if (value == null) {
             return containsNullValue();
-
-        Entry<?,?>[] tab = table;
-        for (int i = 0; i < tab.length; i++)
-            for (Entry<?,?> e = tab[i]; e != null; e = e.next)
-                if (value.equals(e.value))
-                    return true;
-        return false;
+        }
+        Object[] tab = table;
+        for (int i = 0; i < tab.length; i++) {
+            if (tab[i] instanceof Entry) {
+                Entry<?,?> e = (Entry<?,?>)tab[i];
+                for (; e != null; e = (Entry<?,?>)e.next) {
+                    if (value.equals(e.value)) {
+                        return true;
+                    }
+                }
+            } else if (tab[i] != null) {
+                TreeBin e = (TreeBin)tab[i];
+                TreeNode p = e.first;
+                for (; p != null; p = (TreeNode) p.entry.next) {
+                    if (value == p.entry.value || value.equals(p.entry.value)) {
+                        return true;
+                    }
+                }
+            }
+        }
+        // Didn't find value in table - could be in nullKeyEntry
+        return (nullKeyEntry != null && (value == nullKeyEntry.value ||
+                                         value.equals(nullKeyEntry.value)));
     }
 
     /**
      * Special-case code for containsValue with null argument
      */
     private boolean containsNullValue() {
-        Entry<?,?>[] tab = table;
-        for (int i = 0; i < tab.length; i++)
-            for (Entry<?,?> e = tab[i]; e != null; e = e.next)
-                if (e.value == null)
-                    return true;
-        return false;
+        Object[] tab = table;
+        for (int i = 0; i < tab.length; i++) {
+            if (tab[i] instanceof Entry) {
+                Entry<K,V> e = (Entry<K,V>)tab[i];
+                for (; e != null; e = (Entry<K,V>)e.next) {
+                    if (e.value == null) {
+                        return true;
+                    }
+                }
+            } else if (tab[i] != null) {
+                TreeBin e = (TreeBin)tab[i];
+                TreeNode p = e.first;
+                for (; p != null; p = (TreeNode) p.entry.next) {
+                    if (p.entry.value == null) {
+                        return true;
+                    }
+                }
+            }
+        }
+        // Didn't find value in table - could be in nullKeyEntry
+        return (nullKeyEntry != null && nullKeyEntry.value == null);
     }
 
     /**
@@ -1007,6 +2112,7 @@
         result.entrySet = null;
         result.modCount = 0;
         result.size = 0;
+        result.nullKeyEntry = null;
         result.init();
         result.putAllForCreate(this);
 
@@ -1016,13 +2122,13 @@
     static class Entry<K,V> implements Map.Entry<K,V> {
         final K key;
         V value;
-        Entry<K,V> next;
+        Object next; // an Entry, or a TreeNode
         final int hash;
 
         /**
          * Creates new entry.
          */
-        Entry(int h, K k, V v, Entry<K,V> n) {
+        Entry(int h, K k, V v, Object n) {
             value = v;
             next = n;
             key = k;
@@ -1054,7 +2160,7 @@
                 Object v2 = e.getValue();
                 if (v1 == v2 || (v1 != null && v1.equals(v2)))
                     return true;
             }
             return false;
         }
 
@@ -1068,8 +2174,7 @@
 
         /**
          * This method is invoked whenever the value in an entry is
-         * overwritten by an invocation of put(k,v) for a key k that's already
-         * in the HashMap.
+         * overwritten for a key that's already in the HashMap.
          */
         void recordAccess(HashMap<K,V> m) {
         }
@@ -1082,50 +2187,96 @@
         }
     }
 
+    void addEntry(int hash, K key, V value, int bucketIndex) {
+        addEntry(hash, key, value, bucketIndex, true);
+    }
+
     /**
      * Adds a new entry with the specified key, value and hash code to
      * the specified bucket.  It is the responsibility of this
-     * method to resize the table if appropriate.
+     * method to resize the table if appropriate.  The new entry is then
+     * created by calling createEntry().
      *
      * Subclass overrides this to alter the behavior of put method.
+     *
+     * If checkIfNeedTree is false, it is known that this bucket will not need
+     * to be converted to a TreeBin, so don't bother checking.
+     *
+     * Assumes key is not null.
      */
-    void addEntry(int hash, K key, V value, int bucketIndex) {
+    void addEntry(int hash, K key, V value, int bucketIndex, boolean checkIfNeedTree) {
+        // assert key != null;
         if ((size >= threshold) && (null != table[bucketIndex])) {
             resize(2 * table.length);
-            hash = (null != key) ? hash(key) : 0;
+            hash = hash(key);
             bucketIndex = indexFor(hash, table.length);
         }
-
-        createEntry(hash, key, value, bucketIndex);
+        createEntry(hash, key, value, bucketIndex, checkIfNeedTree);
     }
 
     /**
-     * Like addEntry except that this version is used when creating entries
+     * Called by addEntry(), and also used when creating entries
      * as part of Map construction or "pseudo-construction" (cloning,
-     * deserialization).  This version needn't worry about resizing the table.
+     * deserialization).  This version does not check for resizing of the table.
      *
-     * Subclass overrides this to alter the behavior of HashMap(Map),
-     * clone, and readObject.
+     * This method is responsible for converting a bucket to a TreeBin once
+     * TREE_THRESHOLD is reached. However, if checkIfNeedTree is false, it is known
+     * that this bucket will not need to be converted to a TreeBin, so don't
+     * bother checking.  The new entry is constructed by calling newEntry().
+     *
+     * Assumes key is not null.
+     *
+     * Note: buckets already converted to a TreeBin don't call this method, but
+     * instead call TreeBin.putTreeNode() to create new entries.
      */
-    void createEntry(int hash, K key, V value, int bucketIndex) {
+    void createEntry(int hash, K key, V value, int bucketIndex, boolean checkIfNeedTree) {
+        // assert key != null;
         @SuppressWarnings("unchecked")
             Entry<K,V> e = (Entry<K,V>)table[bucketIndex];
-        table[bucketIndex] = new Entry<>(hash, key, value, e);
+        table[bucketIndex] = newEntry(hash, key, value, e);
         size++;
+
+        if (checkIfNeedTree) {
+            int listSize = 0;
+            for (e = (Entry<K,V>) table[bucketIndex]; e != null; e = (Entry<K,V>)e.next) {
+                listSize++;
+                if (listSize >= TreeBin.TREE_THRESHOLD) { // Convert to TreeBin
+                    if (comparableClassFor(key) != null) {
+                        TreeBin t = new TreeBin();
+                        t.populate((Entry)table[bucketIndex]);
+                        table[bucketIndex] = t;
+                    }
+                    break;
+                }
+            }
+        }
     }
 
+    /*
+     * Factory method to create a new Entry object.
+     */
+    Entry<K,V> newEntry(int hash, K key, V value, Object next) {
+        return new HashMap.Entry<>(hash, key, value, next);
+    }
+
+
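
Illustrative sketch (not part of the patch): the createEntry() javadoc above describes converting an over-long bucket into a TreeBin once TREE_THRESHOLD is reached, provided the key type is Comparable. The user-visible effect is that lookups on heavily colliding Comparable keys stay close to O(log n). A minimal demonstration using only the public HashMap API; the key strings are chosen purely because they collide:

    import java.util.HashMap;
    import java.util.Map;

    public class CollidingKeysSketch {
        public static void main(String[] args) {
            // "Aa" and "BB" share the same hashCode (2112), so maps full of such
            // keys build long chains in a single bucket.  With this patch, once a
            // chain passes TreeBin.TREE_THRESHOLD and the keys are Comparable
            // (String is), the bucket is reorganized as a balanced tree, so get()
            // degrades to O(log n) rather than O(n) under collision-heavy input.
            Map<String, Integer> m = new HashMap<>();
            m.put("Aa", 1);
            m.put("BB", 2);
            m.put("AaAa", 3);   // collides with "BBBB", "AaBB", "BBAa", ...
            System.out.println(m.get("BB"));   // 2
        }
    }
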
     private abstract class HashIterator<E> implements Iterator<E> {
-        Entry<?,?> next;        // next entry to return
+        Object next;            // next entry to return, an Entry or a TreeNode
         int expectedModCount;   // For fast-fail
         int index;              // current slot
-        Entry<?,?> current;     // current entry
+        Object current;         // current entry, an Entry or a TreeNode
 
         HashIterator() {
             expectedModCount = modCount;
             if (size > 0) { // advance to first entry
-                Entry<?,?>[] t = table;
-                while (index < t.length && (next = t[index++]) == null)
-                    ;
+                if (nullKeyEntry != null) {
+                    // assert nullKeyEntry.next == null;
+                    // This works with nextEntry(): nullKeyEntry is an Entry, and
+                    // e.next will be null, so we'll hit the findNextBin() call.
+                    next = nullKeyEntry;
+                } else {
+                    findNextBin();
+                }
             }
         }
 
@@ -1135,19 +2286,28 @@
 
         @SuppressWarnings("unchecked")
         final Entry<K,V> nextEntry() {
-            if (modCount != expectedModCount)
+            if (modCount != expectedModCount) {
                 throw new ConcurrentModificationException();
-            Entry<?,?> e = next;
+            }
+            Object e = next;
+            Entry<K,V> retVal;
+
             if (e == null)
                 throw new NoSuchElementException();
 
-            if ((next = e.next) == null) {
-                Entry<?,?>[] t = table;
-                while (index < t.length && (next = t[index++]) == null)
-                    ;
+            if (e instanceof Entry) {
+                retVal = (Entry<K,V>)e;
+                next = ((Entry<K,V>)e).next;
+            } else { // TreeBin
+                retVal = (Entry<K,V>)((TreeNode)e).entry;
+                next = retVal.next;
+            }
+
+            if (next == null) { // Move to next bin
+                findNextBin();
             }
             current = e;
-            return (Entry<K,V>)e;
+            return retVal;
         }
 
         public void remove() {
@@ -1155,11 +2315,33 @@
                 throw new IllegalStateException();
             if (modCount != expectedModCount)
                 throw new ConcurrentModificationException();
-            Object k = current.key;
+            K k;
+
+            if (current instanceof Entry) {
+                k = ((Entry<K,V>)current).key;
+            } else {
+                k = ((Entry<K,V>)((TreeNode)current).entry).key;
+
+            }
             current = null;
             HashMap.this.removeEntryForKey(k);
             expectedModCount = modCount;
         }
+
+        /*
+         * Set 'next' to the first entry of the next non-empty bin in the table
+         */
+        private void findNextBin() {
+            // assert next == null;
+            Object[] t = table;
+
+            while (index < t.length && (next = t[index++]) == null)
+                ;
+            if (next instanceof HashMap.TreeBin) { // Point to the first TreeNode
+                next = ((TreeBin) next).first;
+                // assert next != null; // There should be no empty TreeBins
+            }
+        }
     }
 
     private final class ValueIterator extends HashIterator<V> {
@@ -1357,7 +2539,7 @@
         if (table==EMPTY_TABLE) {
             s.writeInt(roundUpToPowerOf2(threshold));
         } else {
-           s.writeInt(table.length);
+            s.writeInt(table.length);
         }
 
         // Write out size (number of Mappings)
@@ -1389,8 +2571,10 @@
         }
 
         // set other fields that need values
-        Holder.UNSAFE.putIntVolatile(this, Holder.HASHSEED_OFFSET,
-                sun.misc.Hashing.randomHashSeed(this));
+        if (Holder.USE_HASHSEED) {
+            Holder.UNSAFE.putIntVolatile(this, Holder.HASHSEED_OFFSET,
+                    sun.misc.Hashing.randomHashSeed(this));
+        }
         table = EMPTY_TABLE;
 
         // Read in number of buckets
@@ -1404,9 +2588,9 @@
 
         // capacity chosen by number of mappings and desired load (if >= 0.25)
         int capacity = (int) Math.min(
-                    mappings * Math.min(1 / loadFactor, 4.0f),
-                    // we have limits...
-                    HashMap.MAXIMUM_CAPACITY);
+                mappings * Math.min(1 / loadFactor, 4.0f),
+                // we have limits...
+                HashMap.MAXIMUM_CAPACITY);
 
         // allocate the bucket array;
         if (mappings > 0) {
@@ -1420,9 +2604,9 @@
         // Read the keys and values, and put the mappings in the HashMap
         for (int i=0; i<mappings; i++) {
             @SuppressWarnings("unchecked")
-                K key = (K) s.readObject();
+            K key = (K) s.readObject();
             @SuppressWarnings("unchecked")
-                V value = (V) s.readObject();
+            V value = (V) s.readObject();
             putForCreate(key, value);
         }
     }
@@ -1436,11 +2620,17 @@
      */
     static class HashMapSpliterator<K,V> {
         final HashMap<K,V> map;
-        HashMap.Entry<K,V> current; // current node
+        Object current;             // current node, can be Entry or TreeNode
         int index;                  // current index, modified on advance/split
         int fence;                  // one past last index
         int est;                    // size estimate
         int expectedModCount;       // for comodification checks
+        boolean acceptedNull;       // Have we accepted the null key?
+                                    // Without this, we can't distinguish
+                                    // between being at the very beginning (and
+                                    // needing to accept null) and being at the
+                                    // end of the list in bin 0.  In both cases,
+                                    // current == null && index == 0.
 
         HashMapSpliterator(HashMap<K,V> m, int origin,
                                int fence, int est,
@@ -1450,6 +2640,7 @@
             this.fence = fence;
             this.est = est;
             this.expectedModCount = expectedModCount;
+            this.acceptedNull = false;
         }
 
         final int getFence() { // initialize fence and size on first use
@@ -1479,9 +2670,15 @@
 
         public KeySpliterator<K,V> trySplit() {
             int hi = getFence(), lo = index, mid = (lo + hi) >>> 1;
-            return (lo >= mid || current != null) ? null :
-                new KeySpliterator<K,V>(map, lo, index = mid, est >>>= 1,
-                                        expectedModCount);
+            if (lo >= mid || current != null) {
+                return null;
+            } else {
+                KeySpliterator<K,V> retVal = new KeySpliterator<K,V>(map, lo,
+                                     index = mid, est >>>= 1, expectedModCount);
+                // Only 'this' Spliterator should check for null.
+                retVal.acceptedNull = true;
+                return retVal;
+            }
         }
 
         @SuppressWarnings("unchecked")
@@ -1490,21 +2687,39 @@
             if (action == null)
                 throw new NullPointerException();
             HashMap<K,V> m = map;
-            HashMap.Entry<K,V>[] tab = (HashMap.Entry<K,V>[])m.table;
+            Object[] tab = m.table;
             if ((hi = fence) < 0) {
                 mc = expectedModCount = m.modCount;
                 hi = fence = tab.length;
             }
             else
                 mc = expectedModCount;
-            if (tab.length >= hi && (i = index) >= 0 && i < (index = hi)) {
-                HashMap.Entry<K,V> p = current;
+
+            if (!acceptedNull) {
+                acceptedNull = true;
+                if (m.nullKeyEntry != null) {
+                    action.accept(m.nullKeyEntry.key);
+                }
+            }
+            if (tab.length >= hi && (i = index) >= 0 &&
+                (i < (index = hi) || current != null)) {
+                Object p = current;
+                current = null;
                 do {
-                    if (p == null)
+                    if (p == null) {
                         p = tab[i++];
-                    else {
-                        action.accept(p.getKey());
-                        p = p.next;
+                        if (p instanceof HashMap.TreeBin) {
+                            p = ((HashMap.TreeBin)p).first;
+                        }
+                    } else {
+                        HashMap.Entry<K,V> entry;
+                        if (p instanceof HashMap.Entry) {
+                            entry = (HashMap.Entry<K,V>)p;
+                        } else {
+                            entry = (HashMap.Entry<K,V>)((TreeNode)p).entry;
+                        }
+                        action.accept(entry.key);
+                        p = entry.next;
                     }
                 } while (p != null || i < hi);
                 if (m.modCount != mc)
@@ -1517,14 +2732,34 @@
             int hi;
             if (action == null)
                 throw new NullPointerException();
-            HashMap.Entry<K,V>[] tab = (HashMap.Entry<K,V>[])map.table;
-            if (tab.length >= (hi = getFence()) && index >= 0) {
+            Object[] tab = map.table;
+            hi = getFence();
+
+            if (!acceptedNull) {
+                acceptedNull = true;
+                if (map.nullKeyEntry != null) {
+                    action.accept(map.nullKeyEntry.key);
+                    if (map.modCount != expectedModCount)
+                        throw new ConcurrentModificationException();
+                    return true;
+                }
+            }
+            if (tab.length >= hi && index >= 0) {
                 while (current != null || index < hi) {
-                    if (current == null)
+                    if (current == null) {
                         current = tab[index++];
-                    else {
-                        K k = current.getKey();
-                        current = current.next;
+                        if (current instanceof HashMap.TreeBin) {
+                            current = ((HashMap.TreeBin)current).first;
+                        }
+                    } else {
+                        HashMap.Entry<K,V> entry;
+                        if (current instanceof HashMap.Entry) {
+                            entry = (HashMap.Entry<K,V>)current;
+                        } else {
+                            entry = (HashMap.Entry<K,V>)((TreeNode)current).entry;
+                        }
+                        K k = entry.key;
+                        current = entry.next;
                         action.accept(k);
                         if (map.modCount != expectedModCount)
                             throw new ConcurrentModificationException();
@@ -1551,9 +2786,15 @@
 
         public ValueSpliterator<K,V> trySplit() {
             int hi = getFence(), lo = index, mid = (lo + hi) >>> 1;
-            return (lo >= mid || current != null) ? null :
-                new ValueSpliterator<K,V>(map, lo, index = mid, est >>>= 1,
-                                          expectedModCount);
+            if (lo >= mid || current != null) {
+                return null;
+            } else {
+                ValueSpliterator<K,V> retVal = new ValueSpliterator<K,V>(map,
+                                 lo, index = mid, est >>>= 1, expectedModCount);
+                // Only 'this' Spliterator should check for null.
+                retVal.acceptedNull = true;
+                return retVal;
+            }
         }
 
         @SuppressWarnings("unchecked")
@@ -1562,21 +2803,39 @@
             if (action == null)
                 throw new NullPointerException();
             HashMap<K,V> m = map;
-            HashMap.Entry<K,V>[] tab = (HashMap.Entry<K,V>[])m.table;
+            Object[] tab = m.table;
             if ((hi = fence) < 0) {
                 mc = expectedModCount = m.modCount;
                 hi = fence = tab.length;
             }
             else
                 mc = expectedModCount;
-            if (tab.length >= hi && (i = index) >= 0 && i < (index = hi)) {
-                HashMap.Entry<K,V> p = current;
+
+            if (!acceptedNull) {
+                acceptedNull = true;
+                if (m.nullKeyEntry != null) {
+                    action.accept(m.nullKeyEntry.value);
+                }
+            }
+            if (tab.length >= hi && (i = index) >= 0 &&
+                (i < (index = hi) || current != null)) {
+                Object p = current;
+                current = null;
                 do {
-                    if (p == null)
+                    if (p == null) {
                         p = tab[i++];
-                    else {
-                        action.accept(p.getValue());
-                        p = p.next;
+                        if (p instanceof HashMap.TreeBin) {
+                            p = ((HashMap.TreeBin)p).first;
+                        }
+                    } else {
+                        HashMap.Entry<K,V> entry;
+                        if (p instanceof HashMap.Entry) {
+                            entry = (HashMap.Entry<K,V>)p;
+                        } else {
+                            entry = (HashMap.Entry<K,V>)((TreeNode)p).entry;
+                        }
+                        action.accept(entry.value);
+                        p = entry.next;
                     }
                 } while (p != null || i < hi);
                 if (m.modCount != mc)
@@ -1589,14 +2848,34 @@
             int hi;
             if (action == null)
                 throw new NullPointerException();
-            HashMap.Entry<K,V>[] tab = (HashMap.Entry<K,V>[])map.table;
-            if (tab.length >= (hi = getFence()) && index >= 0) {
+            Object[] tab = map.table;
+            hi = getFence();
+
+            if (!acceptedNull) {
+                acceptedNull = true;
+                if (map.nullKeyEntry != null) {
+                    action.accept(map.nullKeyEntry.value);
+                    if (map.modCount != expectedModCount)
+                        throw new ConcurrentModificationException();
+                    return true;
+                }
+            }
+            if (tab.length >= hi && index >= 0) {
                 while (current != null || index < hi) {
-                    if (current == null)
+                    if (current == null) {
                         current = tab[index++];
-                    else {
-                        V v = current.getValue();
-                        current = current.next;
+                        if (current instanceof HashMap.TreeBin) {
+                            current = ((HashMap.TreeBin)current).first;
+                        }
+                    } else {
+                        HashMap.Entry<K,V> entry;
+                        if (current instanceof HashMap.Entry) {
+                            entry = (Entry<K,V>)current;
+                        } else {
+                            entry = (Entry<K,V>)((TreeNode)current).entry;
+                        }
+                        V v = entry.value;
+                        current = entry.next;
                         action.accept(v);
                         if (map.modCount != expectedModCount)
                             throw new ConcurrentModificationException();
@@ -1622,9 +2901,15 @@
 
         public EntrySpliterator<K,V> trySplit() {
             int hi = getFence(), lo = index, mid = (lo + hi) >>> 1;
-            return (lo >= mid || current != null) ? null :
-                new EntrySpliterator<K,V>(map, lo, index = mid, est >>>= 1,
-                                          expectedModCount);
+            if (lo >= mid || current != null) {
+                return null;
+            } else {
+                EntrySpliterator<K,V> retVal = new EntrySpliterator<K,V>(map,
+                                 lo, index = mid, est >>>= 1, expectedModCount);
+                // Only 'this' Spliterator should check for null.
+                retVal.acceptedNull = true;
+                return retVal;
+            }
         }
 
         @SuppressWarnings("unchecked")
@@ -1633,21 +2918,40 @@
             if (action == null)
                 throw new NullPointerException();
             HashMap<K,V> m = map;
-            HashMap.Entry<K,V>[] tab = (HashMap.Entry<K,V>[])m.table;
+            Object[] tab = m.table;
             if ((hi = fence) < 0) {
                 mc = expectedModCount = m.modCount;
                 hi = fence = tab.length;
             }
             else
                 mc = expectedModCount;
-            if (tab.length >= hi && (i = index) >= 0 && i < (index = hi)) {
-                HashMap.Entry<K,V> p = current;
+
+            if (!acceptedNull) {
+                acceptedNull = true;
+                if (m.nullKeyEntry != null) {
+                    action.accept(m.nullKeyEntry);
+                }
+            }
+            if (tab.length >= hi && (i = index) >= 0 &&
+                (i < (index = hi) || current != null)) {
+                Object p = current;
+                current = null;
                 do {
-                    if (p == null)
+                    if (p == null) {
                         p = tab[i++];
-                    else {
-                        action.accept(p);
-                        p = p.next;
+                        if (p instanceof HashMap.TreeBin) {
+                            p = ((HashMap.TreeBin)p).first;
+                        }
+                    } else {
+                        HashMap.Entry<K,V> entry;
+                        if (p instanceof HashMap.Entry) {
+                            entry = (HashMap.Entry<K,V>)p;
+                        } else {
+                            entry = (HashMap.Entry<K,V>)((TreeNode)p).entry;
+                        }
+                        action.accept(entry);
+                        p = entry.next;
+
                     }
                 } while (p != null || i < hi);
                 if (m.modCount != mc)
@@ -1660,14 +2964,33 @@
             int hi;
             if (action == null)
                 throw new NullPointerException();
-            HashMap.Entry<K,V>[] tab = (HashMap.Entry<K,V>[])map.table;
-            if (tab.length >= (hi = getFence()) && index >= 0) {
+            Object[] tab = map.table;
+            hi = getFence();
+
+            if (!acceptedNull) {
+                acceptedNull = true;
+                if (map.nullKeyEntry != null) {
+                    action.accept(map.nullKeyEntry);
+                    if (map.modCount != expectedModCount)
+                        throw new ConcurrentModificationException();
+                    return true;
+                }
+            }
+            if (tab.length >= hi && index >= 0) {
                 while (current != null || index < hi) {
-                    if (current == null)
+                    if (current == null) {
                         current = tab[index++];
-                    else {
-                        HashMap.Entry<K,V> e = current;
-                        current = current.next;
+                        if (current instanceof HashMap.TreeBin) {
+                            current = ((HashMap.TreeBin)current).first;
+                        }
+                    } else {
+                        HashMap.Entry<K,V> e;
+                        if (current instanceof HashMap.Entry) {
+                            e = (Entry<K,V>)current;
+                        } else {
+                            e = (Entry<K,V>)((TreeNode)current).entry;
+                        }
+                        current = e.next;
                         action.accept(e);
                         if (map.modCount != expectedModCount)
                             throw new ConcurrentModificationException();
--- a/jdk/src/share/classes/java/util/Hashtable.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/Hashtable.java	Wed Jul 05 18:59:05 2017 +0200
@@ -180,13 +180,27 @@
          */
         static final long HASHSEED_OFFSET;
 
+        static final boolean USE_HASHSEED;
+
         static {
-            try {
-                UNSAFE = sun.misc.Unsafe.getUnsafe();
-                HASHSEED_OFFSET = UNSAFE.objectFieldOffset(
-                    Hashtable.class.getDeclaredField("hashSeed"));
-            } catch (NoSuchFieldException | SecurityException e) {
-                throw new InternalError("Failed to record hashSeed offset", e);
+            String hashSeedProp = java.security.AccessController.doPrivileged(
+                    new sun.security.action.GetPropertyAction(
+                        "jdk.map.useRandomSeed"));
+            boolean localBool = (null != hashSeedProp)
+                    ? Boolean.parseBoolean(hashSeedProp) : false;
+            USE_HASHSEED = localBool;
+
+            if (USE_HASHSEED) {
+                try {
+                    UNSAFE = sun.misc.Unsafe.getUnsafe();
+                    HASHSEED_OFFSET = UNSAFE.objectFieldOffset(
+                        Hashtable.class.getDeclaredField("hashSeed"));
+                } catch (NoSuchFieldException | SecurityException e) {
+                    throw new InternalError("Failed to record hashSeed offset", e);
+                }
+            } else {
+                UNSAFE = null;
+                HASHSEED_OFFSET = 0;
             }
         }
     }
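
For readers trying the new switch: the Holder block above (and the matching ones added to HashMap and WeakHashMap in this changeset) gates the per-instance hash seed behind the jdk.map.useRandomSeed system property, which defaults to off. A small standalone sketch of the same property logic, assuming no SecurityManager restrictions; run with -Djdk.map.useRandomSeed=true to enable it:

    public class ProbeRandomSeed {
        public static void main(String[] args) {
            // Mirrors the Holder logic: Boolean.parseBoolean(null) is false, so
            // the randomized seed stays disabled unless the property is "true".
            String prop = System.getProperty("jdk.map.useRandomSeed");
            boolean useHashSeed = (prop != null) && Boolean.parseBoolean(prop);
            System.out.println("randomized hash seed enabled: " + useHashSeed);
        }
    }
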
@@ -194,21 +208,24 @@
     /**
      * A randomizing value associated with this instance that is applied to
      * hash code of keys to make hash collisions harder to find.
+     *
+     * Set once via initHashSeed(); 0 if the random seed is not enabled.
      */
-    transient final int hashSeed = sun.misc.Hashing.randomHashSeed(this);
+    transient final int hashSeed;
+
+    /**
+     * Return an initial value for the hashSeed, or 0 if the random seed is not
+     * enabled.
+     */
+    final int initHashSeed() {
+        if (sun.misc.VM.isBooted() && Holder.USE_HASHSEED) {
+            return sun.misc.Hashing.randomHashSeed(this);
+        }
+        return 0;
+    }
 
     private int hash(Object k) {
-        if (k instanceof String) {
-            return ((String)k).hash32();
-        }
-
-        int h = hashSeed ^ k.hashCode();
-
-        // This function ensures that hashCodes that differ only by
-        // constant multiples at each bit position have a bounded
-        // number of collisions (approximately 8 at default load factor).
-        h ^= (h >>> 20) ^ (h >>> 12);
-        return h ^ (h >>> 7) ^ (h >>> 4);
+        return hashSeed ^ k.hashCode();
     }
 
     /**
@@ -232,6 +249,7 @@
         this.loadFactor = loadFactor;
         table = new Entry<?,?>[initialCapacity];
         threshold = (int)Math.min(initialCapacity * loadFactor, MAX_ARRAY_SIZE + 1);
+        hashSeed = initHashSeed();
     }
 
     /**
@@ -1187,8 +1205,10 @@
         s.defaultReadObject();
 
         // set hashMask
-        Holder.UNSAFE.putIntVolatile(this, Holder.HASHSEED_OFFSET,
-                sun.misc.Hashing.randomHashSeed(this));
+        if (Holder.USE_HASHSEED) {
+            Holder.UNSAFE.putIntVolatile(this, Holder.HASHSEED_OFFSET,
+                    sun.misc.Hashing.randomHashSeed(this));
+        }
 
         // Read the original length of the array and number of elements
         int origlength = s.readInt();
--- a/jdk/src/share/classes/java/util/IntSummaryStatistics.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/IntSummaryStatistics.java	Wed Jul 05 18:59:05 2017 +0200
@@ -159,7 +159,7 @@
      */
     public String toString() {
         return String.format(
-            "%s{count=%d, sum=%d, min=%d, average=%d, max=%d}",
+            "%s{count=%d, sum=%d, min=%d, average=%f, max=%d}",
             this.getClass().getSimpleName(),
             getCount(),
             getSum(),
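
A short illustration of the one-character fix above (not part of the patch): getAverage() returns a double, so formatting it with "%d" throws IllegalFormatConversionException the moment toString() is called, while "%f" renders it correctly. The same fix is applied to LongSummaryStatistics below.

    import java.util.IntSummaryStatistics;
    import java.util.stream.IntStream;

    public class AverageFormatDemo {
        public static void main(String[] args) {
            IntSummaryStatistics stats = IntStream.of(1, 2, 4).summaryStatistics();
            // With the corrected format string this prints something like:
            // IntSummaryStatistics{count=3, sum=7, min=1, average=2.333333, max=4}
            System.out.println(stats);
        }
    }
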
--- a/jdk/src/share/classes/java/util/LinkedHashMap.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/LinkedHashMap.java	Wed Jul 05 18:59:05 2017 +0200
@@ -55,9 +55,9 @@
  * order they were presented.)
  *
  * <p>A special {@link #LinkedHashMap(int,float,boolean) constructor} is
- * provided to create a linked hash map whose order of iteration is the order
- * in which its entries were last accessed, from least-recently accessed to
- * most-recently (<i>access-order</i>).  This kind of map is well-suited to
+ * provided to create a <tt>LinkedHashMap</tt> whose order of iteration is the
+ * order in which its entries were last accessed, from least-recently accessed
+ * to most-recently (<i>access-order</i>).  This kind of map is well-suited to
  * building LRU caches.  Invoking the <tt>put</tt> or <tt>get</tt> method
  * results in an access to the corresponding entry (assuming it exists after
  * the invocation completes).  The <tt>putAll</tt> method generates one entry
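
As a concrete companion to the access-order wording above, here is a minimal LRU-cache sketch built on the three-argument constructor and removeEldestEntry(); the capacity of 100 is only an illustration, not anything mandated by the class:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class LruCacheSketch {
        static final int MAX_ENTRIES = 100;   // arbitrary illustrative limit

        public static void main(String[] args) {
            Map<String, String> lru =
                new LinkedHashMap<String, String>(16, 0.75f, true) {
                    @Override
                    protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
                        return size() > MAX_ENTRIES;   // evict once over capacity
                    }
                };
            lru.put("k", "v");
            lru.get("k");   // get() counts as an access and moves "k" to the end
        }
    }
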
@@ -243,23 +243,6 @@
     }
 
     /**
-     * Transfers all entries to new table array.  This method is called
-     * by superclass resize.  It is overridden for performance, as it is
-     * faster to iterate using our linked list.
-     */
-    @Override
-    @SuppressWarnings("unchecked")
-    void transfer(HashMap.Entry[] newTable) {
-        int newCapacity = newTable.length;
-        for (Entry<K,V> e = header.after; e != header; e = e.after) {
-            int index = indexFor(e.hash, newCapacity);
-            e.next = (HashMap.Entry<K,V>)newTable[index];
-            newTable[index] = e;
-        }
-    }
-
-
-    /**
      * Returns <tt>true</tt> if this map maps one or more keys to the
      * specified value.
      *
@@ -320,7 +303,7 @@
         // These fields comprise the doubly linked list used for iteration.
         Entry<K,V> before, after;
 
-        Entry(int hash, K key, V value, HashMap.Entry<K,V> next) {
+        Entry(int hash, K key, V value, Object next) {
             super(hash, key, value, next);
         }
 
@@ -344,7 +327,7 @@
 
         /**
          * This method is invoked by the superclass whenever the value
-         * of a pre-existing entry is read by Map.get or modified by Map.set.
+         * of a pre-existing entry is read by Map.get or modified by Map.put.
          * If the enclosing Map is access-ordered, it moves the entry
          * to the end of the list; otherwise, it does nothing.
          */
@@ -422,8 +405,9 @@
      * allocated entry to get inserted at the end of the linked list and
      * removes the eldest entry if appropriate.
      */
-    void addEntry(int hash, K key, V value, int bucketIndex) {
-        super.addEntry(hash, key, value, bucketIndex);
+    @Override
+    void addEntry(int hash, K key, V value, int bucketIndex, boolean checkIfNeedTree) {
+        super.addEntry(hash, key, value, bucketIndex, checkIfNeedTree);
 
         // Remove eldest entry if instructed
         Entry<K,V> eldest = header.after;
@@ -432,17 +416,14 @@
         }
     }
 
-    /**
-     * This override differs from addEntry in that it doesn't resize the
-     * table or remove the eldest entry.
+    /*
+     * Create a new LinkedHashMap.Entry and set up the before/after pointers
      */
-    void createEntry(int hash, K key, V value, int bucketIndex) {
-        @SuppressWarnings("unchecked")
-            HashMap.Entry<K,V> old = (HashMap.Entry<K,V>)table[bucketIndex];
-        Entry<K,V> e = new Entry<>(hash, key, value, old);
-        table[bucketIndex] = e;
-        e.addBefore(header);
-        size++;
+    @Override
+    HashMap.Entry<K,V> newEntry(int hash, K key, V value, Object next) {
+        Entry<K,V> newEntry = new Entry<>(hash, key, value, next);
+        newEntry.addBefore(header);
+        return newEntry;
     }
 
     /**
--- a/jdk/src/share/classes/java/util/LongSummaryStatistics.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/LongSummaryStatistics.java	Wed Jul 05 18:59:05 2017 +0200
@@ -171,7 +171,7 @@
      */
     public String toString() {
         return String.format(
-            "%s{count=%d, sum=%d, min=%d, average=%d, max=%d}",
+            "%s{count=%d, sum=%d, min=%d, average=%f, max=%d}",
             this.getClass().getSimpleName(),
             getCount(),
             getSum(),
--- a/jdk/src/share/classes/java/util/PrimitiveIterator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/PrimitiveIterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -91,6 +91,7 @@
          * @throws NullPointerException if the specified action is null
          */
         default void forEachRemaining(IntConsumer action) {
+            Objects.requireNonNull(action);
             while (hasNext())
                 action.accept(nextInt());
         }
@@ -123,6 +124,8 @@
                 forEachRemaining((IntConsumer) action);
             }
             else {
+                // action::accept is never null, so null-check the action itself
+                Objects.requireNonNull(action);
                 if (Tripwire.ENABLED)
                     Tripwire.trip(getClass(), "{0} calling PrimitiveIterator.OfInt.forEachRemainingInt(action::accept)");
                 forEachRemaining((IntConsumer) action::accept);
@@ -162,6 +165,7 @@
          * @throws NullPointerException if the specified action is null
          */
         default void forEachRemaining(LongConsumer action) {
+            Objects.requireNonNull(action);
             while (hasNext())
                 action.accept(nextLong());
         }
@@ -194,6 +198,8 @@
                 forEachRemaining((LongConsumer) action);
             }
             else {
+                // action::accept is never null, so null-check the action itself
+                Objects.requireNonNull(action);
                 if (Tripwire.ENABLED)
                     Tripwire.trip(getClass(), "{0} calling PrimitiveIterator.OfLong.forEachRemainingLong(action::accept)");
                 forEachRemaining((LongConsumer) action::accept);
@@ -232,6 +238,7 @@
          * @throws NullPointerException if the specified action is null
          */
         default void forEachRemaining(DoubleConsumer action) {
+            Objects.requireNonNull(action);
             while (hasNext())
                 action.accept(nextDouble());
         }
@@ -265,6 +272,8 @@
                 forEachRemaining((DoubleConsumer) action);
             }
             else {
+                // action::accept is never null, so null-check the action itself
+                Objects.requireNonNull(action);
                 if (Tripwire.ENABLED)
                     Tripwire.trip(getClass(), "{0} calling PrimitiveIterator.OfDouble.forEachRemainingDouble(action::accept)");
                 forEachRemaining((DoubleConsumer) action::accept);
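
A brief illustration of the Objects.requireNonNull calls added above (not part of the patch): previously a null action passed to forEachRemaining over an exhausted iterator was silently ignored because the loop body never ran; with this change it fails fast.

    import java.util.PrimitiveIterator;
    import java.util.function.IntConsumer;
    import java.util.stream.IntStream;

    public class NullActionDemo {
        public static void main(String[] args) {
            PrimitiveIterator.OfInt it = IntStream.empty().iterator();
            // Even though nothing remains to iterate, the explicit null check
            // now throws NullPointerException here.
            it.forEachRemaining((IntConsumer) null);
        }
    }
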
--- a/jdk/src/share/classes/java/util/Spliterator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/Spliterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -394,9 +394,9 @@
      * Convenience method that returns {@link #estimateSize()} if this
      * Spliterator is {@link #SIZED}, else {@code -1}.
      * @implSpec
-     * The default returns the result of {@code estimateSize()} if the
-     * Spliterator reports a characteristic of {@code SIZED}, and {@code -1}
-     * otherwise.
+     * The default implementation returns the result of {@code estimateSize()}
+     * if the Spliterator reports a characteristic of {@code SIZED}, and
+     * {@code -1} otherwise.
      *
      * @return the exact size, if known, else {@code -1}.
      */
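
For readers unfamiliar with the method whose @implSpec is reworded above, a short usage sketch (the sample data is arbitrary):

    import java.util.Arrays;
    import java.util.Spliterator;

    public class ExactSizeDemo {
        public static void main(String[] args) {
            Spliterator<String> s = Arrays.asList("a", "b", "c").spliterator();
            // The list spliterator reports SIZED, so the default implementation
            // returns estimateSize(); a non-SIZED spliterator would return -1.
            System.out.println(s.getExactSizeIfKnown());   // 3
        }
    }
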
--- a/jdk/src/share/classes/java/util/StringJoiner.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/StringJoiner.java	Wed Jul 05 18:59:05 2017 +0200
@@ -29,14 +29,6 @@
  * by a delimiter and optionally starting with a supplied prefix
  * and ending with a supplied suffix.
  * <p>
- * For example, the String {@code "[George:Sally:Fred]"} may
- * be constructed as follows:
- * <pre> {@code
- *     StringJoiner sj = new StringJoiner(":", "[", "]");
- *     sj.add("George").add("Sally").add("Fred");
- *     String desiredString = sj.toString();
- * }</pre>
- * <p>
  * Prior to adding something to the {@code StringJoiner}, its
  * {@code sj.toString()} method will, by default, return {@code prefix + suffix}.
  * However, if the {@code setEmptyValue} method is called, the {@code emptyValue}
@@ -45,17 +37,28 @@
  * <code>"{}"</code>, where the {@code prefix} is <code>"{"</code>, the
  * {@code suffix} is <code>"}"</code> and nothing has been added to the
  * {@code StringJoiner}.
- * <p>
- * A {@code StringJoiner} may be employed to create formatted output from a
- * collection using lambda expressions as shown in the following example.
+ *
+ * @apiNote
+ * <p>The String {@code "[George:Sally:Fred]"} may be constructed as follows:
  *
  * <pre> {@code
- *     List<Person> people = ...
- *     String commaSeparatedNames =
- *         people.map(p -> p.getName()).into(new StringJoiner(", ")).toString();
+ * StringJoiner sj = new StringJoiner(":", "[", "]");
+ * sj.add("George").add("Sally").add("Fred");
+ * String desiredString = sj.toString();
+ * }</pre>
+ * <p>
+ * A {@code StringJoiner} may be employed to create formatted output from a
+ * {@link java.util.stream.Stream} using
+ * {@link java.util.stream.Collectors#toStringJoiner}. For example:
+ *
+ * <pre> {@code
+ * List<Integer> numbers = Arrays.asList(1, 2, 3, 4);
+ * String commaSeparatedNumbers = numbers.stream()
+ *     .map(i -> i.toString())
+ *     .collect(Collectors.toStringJoiner(", ")).toString();
  * }</pre>
  *
- * @author Jim Gish
+ * @see java.util.stream.Collectors#toStringJoiner
  * @since  1.8
 */
 public final class StringJoiner {
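
The reworked class comment above still describes setEmptyValue but no longer shows it in action; a brief example (strings chosen arbitrarily):

    import java.util.StringJoiner;

    public class EmptyValueDemo {
        public static void main(String[] args) {
            StringJoiner sj = new StringJoiner(":", "[", "]");
            sj.setEmptyValue("(none)");
            System.out.println(sj);             // (none)  -- nothing added yet
            sj.add("George").add("Sally");
            System.out.println(sj);             // [George:Sally]
        }
    }
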
--- a/jdk/src/share/classes/java/util/WeakHashMap.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/WeakHashMap.java	Wed Jul 05 18:59:05 2017 +0200
@@ -187,11 +187,37 @@
      */
     int modCount;
 
+    private static class Holder {
+        static final boolean USE_HASHSEED;
+
+        static {
+            String hashSeedProp = java.security.AccessController.doPrivileged(
+                    new sun.security.action.GetPropertyAction(
+                        "jdk.map.useRandomSeed"));
+            boolean localBool = (null != hashSeedProp)
+                    ? Boolean.parseBoolean(hashSeedProp) : false;
+            USE_HASHSEED = localBool;
+        }
+    }
+
     /**
      * A randomizing value associated with this instance that is applied to
      * hash code of keys to make hash collisions harder to find.
+     *
+     * Non-final so it can be set lazily, but be sure not to set more than once.
      */
-    transient final int hashSeed = sun.misc.Hashing.randomHashSeed(this);
+    transient int hashSeed;
+
+    /**
+     * Initialize the hashing mask value.
+     */
+    final void initHashSeed() {
+        if (sun.misc.VM.isBooted() && Holder.USE_HASHSEED) {
+            // Do not set hashSeed more than once!
+            // assert hashSeed == 0;
+            hashSeed = sun.misc.Hashing.randomHashSeed(this);
+        }
+    }
 
     @SuppressWarnings("unchecked")
     private Entry<K,V>[] newTable(int n) {
@@ -223,6 +249,7 @@
         table = newTable(capacity);
         this.loadFactor = loadFactor;
         threshold = (int)(capacity * loadFactor);
+        initHashSeed();
     }
 
     /**
@@ -298,10 +325,7 @@
      * in lower bits.
      */
     final int hash(Object k) {
-        if (k instanceof String) {
-            return ((String) k).hash32();
-        }
-        int  h = hashSeed ^ k.hashCode();
+        int h = hashSeed ^ k.hashCode();
 
         // This function ensures that hashCodes that differ only by
         // constant multiples at each bit position have a bounded
@@ -1076,9 +1100,10 @@
             }
             else
                 mc = expectedModCount;
-            if (tab.length >= hi && (i = index) >= 0 && i < hi) {
-                index = hi;
+            if (tab.length >= hi && (i = index) >= 0 &&
+                (i < (index = hi) || current != null)) {
                 WeakHashMap.Entry<K,V> p = current;
+                current = null; // exhaust
                 do {
                     if (p == null)
                         p = tab[i++];
@@ -1155,9 +1180,10 @@
             }
             else
                 mc = expectedModCount;
-            if (tab.length >= hi && (i = index) >= 0 && i < hi) {
-                index = hi;
+            if (tab.length >= hi && (i = index) >= 0 &&
+                (i < (index = hi) || current != null)) {
                 WeakHashMap.Entry<K,V> p = current;
+                current = null; // exhaust
                 do {
                     if (p == null)
                         p = tab[i++];
@@ -1232,9 +1258,10 @@
             }
             else
                 mc = expectedModCount;
-            if (tab.length >= hi && (i = index) >= 0 && i < hi) {
-                index = hi;
+            if (tab.length >= hi && (i = index) >= 0 &&
+                (i < (index = hi) || current != null)) {
                 WeakHashMap.Entry<K,V> p = current;
+                current = null; // exhaust
                 do {
                     if (p == null)
                         p = tab[i++];
--- a/jdk/src/share/classes/java/util/concurrent/ConcurrentHashMap.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/concurrent/ConcurrentHashMap.java	Wed Jul 05 18:59:05 2017 +0200
@@ -34,14 +34,47 @@
  */
 
 package java.util.concurrent;
-import java.io.ObjectInputStream;
-import java.util.concurrent.locks.*;
-import java.util.*;
 import java.io.Serializable;
+import java.io.ObjectStreamField;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.util.AbstractMap;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.ConcurrentModificationException;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.Spliterator;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.ForkJoinPool;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.concurrent.locks.StampedLock;
+import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
+import java.util.function.BinaryOperator;
+import java.util.function.Consumer;
+import java.util.function.DoubleBinaryOperator;
+import java.util.function.Function;
+import java.util.function.IntBinaryOperator;
+import java.util.function.LongBinaryOperator;
+import java.util.function.ToDoubleBiFunction;
+import java.util.function.ToDoubleFunction;
+import java.util.function.ToIntBiFunction;
+import java.util.function.ToIntFunction;
+import java.util.function.ToLongBiFunction;
+import java.util.function.ToLongFunction;
+import java.util.stream.Stream;
 
 /**
  * A hash table supporting full concurrency of retrievals and
- * adjustable expected concurrency for updates. This class obeys the
+ * high expected concurrency for updates. This class obeys the
  * same functional specification as {@link java.util.Hashtable}, and
  * includes versions of methods corresponding to each method of
  * {@code Hashtable}. However, even though all operations are
@@ -51,35 +84,61 @@
  * interoperable with {@code Hashtable} in programs that rely on its
  * thread safety but not on its synchronization details.
  *
- * <p> Retrieval operations (including {@code get}) generally do not
- * block, so may overlap with update operations (including
- * {@code put} and {@code remove}). Retrievals reflect the results
- * of the most recently <em>completed</em> update operations holding
- * upon their onset.  For aggregate operations such as {@code putAll}
- * and {@code clear}, concurrent retrievals may reflect insertion or
- * removal of only some entries.  Similarly, Iterators and
- * Enumerations return elements reflecting the state of the hash table
- * at some point at or since the creation of the iterator/enumeration.
- * They do <em>not</em> throw {@link ConcurrentModificationException}.
- * However, iterators are designed to be used by only one thread at a time.
+ * <p>Retrieval operations (including {@code get}) generally do not
+ * block, so may overlap with update operations (including {@code put}
+ * and {@code remove}). Retrievals reflect the results of the most
+ * recently <em>completed</em> update operations holding upon their
+ * onset. (More formally, an update operation for a given key bears a
+ * <em>happens-before</em> relation with any (non-null) retrieval for
+ * that key reporting the updated value.)  For aggregate operations
+ * such as {@code putAll} and {@code clear}, concurrent retrievals may
+ * reflect insertion or removal of only some entries.  Similarly,
+ * Iterators and Enumerations return elements reflecting the state of
+ * the hash table at some point at or since the creation of the
+ * iterator/enumeration.  They do <em>not</em> throw {@link
+ * ConcurrentModificationException}.  However, iterators are designed
+ * to be used by only one thread at a time.  Bear in mind that the
+ * results of aggregate status methods including {@code size}, {@code
+ * isEmpty}, and {@code containsValue} are typically useful only when
+ * a map is not undergoing concurrent updates in other threads.
+ * Otherwise the results of these methods reflect transient states
+ * that may be adequate for monitoring or estimation purposes, but not
+ * for program control.
  *
- * <p> The allowed concurrency among update operations is guided by
- * the optional {@code concurrencyLevel} constructor argument
- * (default {@code 16}), which is used as a hint for internal sizing.  The
- * table is internally partitioned to try to permit the indicated
- * number of concurrent updates without contention. Because placement
- * in hash tables is essentially random, the actual concurrency will
- * vary.  Ideally, you should choose a value to accommodate as many
- * threads as will ever concurrently modify the table. Using a
- * significantly higher value than you need can waste space and time,
- * and a significantly lower value can lead to thread contention. But
- * overestimates and underestimates within an order of magnitude do
- * not usually have much noticeable impact. A value of one is
- * appropriate when it is known that only one thread will modify and
- * all others will only read. Also, resizing this or any other kind of
- * hash table is a relatively slow operation, so, when possible, it is
- * a good idea to provide estimates of expected table sizes in
- * constructors.
+ * <p>The table is dynamically expanded when there are too many
+ * collisions (i.e., keys that have distinct hash codes but fall into
+ * the same slot modulo the table size), with the expected average
+ * effect of maintaining roughly two bins per mapping (corresponding
+ * to a 0.75 load factor threshold for resizing). There may be much
+ * variance around this average as mappings are added and removed, but
+ * overall, this maintains a commonly accepted time/space tradeoff for
+ * hash tables.  However, resizing this or any other kind of hash
+ * table may be a relatively slow operation. When possible, it is a
+ * good idea to provide a size estimate as an optional {@code
+ * initialCapacity} constructor argument. An additional optional
+ * {@code loadFactor} constructor argument provides a further means of
+ * customizing initial table capacity by specifying the table density
+ * to be used in calculating the amount of space to allocate for the
+ * given number of elements.  Also, for compatibility with previous
+ * versions of this class, constructors may optionally specify an
+ * expected {@code concurrencyLevel} as an additional hint for
+ * internal sizing.  Note that using many keys with exactly the same
+ * {@code hashCode()} is a sure way to slow down performance of any
+ * hash table. To ameliorate impact, when keys are {@link Comparable},
+ * this class may use comparison order among keys to help break ties.
+ *
+ * <p>A {@link Set} projection of a ConcurrentHashMap may be created
+ * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
+ * (using {@link #keySet(Object)}) when only keys are of interest, and the
+ * mapped values are (perhaps transiently) not used or all take the
+ * same mapping value.
+ *
+ * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
+ * form of histogram or multiset) by using {@link
+ * java.util.concurrent.atomic.LongAdder} values and initializing via
+ * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
+ * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
+ * {@code freqs.computeIfAbsent(key, k -> new LongAdder()).increment();}
  *
  * <p>This class and its views and iterators implement all of the
  * <em>optional</em> methods of the {@link Map} and {@link Iterator}
@@ -88,6 +147,114 @@
  * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
  * does <em>not</em> allow {@code null} to be used as a key or value.
  *
+ * <p>ConcurrentHashMaps support a set of sequential and parallel bulk
+ * operations that, unlike most {@link Stream} methods, are designed
+ * to be safely, and often sensibly, applied even with maps that are
+ * being concurrently updated by other threads; for example, when
+ * computing a snapshot summary of the values in a shared registry.
+ * There are three kinds of operation, each with four forms, accepting
+ * functions with Keys, Values, Entries, and (Key, Value) arguments
+ * and/or return values. Because the elements of a ConcurrentHashMap
+ * are not ordered in any particular way, and may be processed in
+ * different orders in different parallel executions, the correctness
+ * of supplied functions should not depend on any ordering, or on any
+ * other objects or values that may transiently change while
+ * computation is in progress; and except for forEach actions, should
+ * ideally be side-effect-free. Bulk operations on {@link java.util.Map.Entry}
+ * objects do not support method {@code setValue}.
+ *
+ * <ul>
+ * <li> forEach: Perform a given action on each element.
+ * A variant form applies a given transformation on each element
+ * before performing the action.</li>
+ *
+ * <li> search: Return the first available non-null result of
+ * applying a given function on each element; skipping further
+ * search when a result is found.</li>
+ *
+ * <li> reduce: Accumulate each element.  The supplied reduction
+ * function cannot rely on ordering (more formally, it should be
+ * both associative and commutative).  There are five variants:
+ *
+ * <ul>
+ *
+ * <li> Plain reductions. (There is not a form of this method for
+ * (key, value) function arguments since there is no corresponding
+ * return type.)</li>
+ *
+ * <li> Mapped reductions that accumulate the results of a given
+ * function applied to each element.</li>
+ *
+ * <li> Reductions to scalar doubles, longs, and ints, using a
+ * given basis value.</li>
+ *
+ * </ul>
+ * </li>
+ * </ul>
+ *
+ * <p>These bulk operations accept a {@code parallelismThreshold}
+ * argument. Methods proceed sequentially if the current map size is
+ * estimated to be less than the given threshold. Using a value of
+ * {@code Long.MAX_VALUE} suppresses all parallelism.  Using a value
+ * of {@code 1} results in maximal parallelism by partitioning into
+ * enough subtasks to fully utilize the {@link
+ * ForkJoinPool#commonPool()} that is used for all parallel
+ * computations. Normally, you would initially choose one of these
+ * extreme values, and then measure performance of using in-between
+ * values that trade off overhead versus throughput.
+ *
+ * <p>The concurrency properties of bulk operations follow
+ * from those of ConcurrentHashMap: Any non-null result returned
+ * from {@code get(key)} and related access methods bears a
+ * happens-before relation with the associated insertion or
+ * update.  The result of any bulk operation reflects the
+ * composition of these per-element relations (but is not
+ * necessarily atomic with respect to the map as a whole unless it
+ * is somehow known to be quiescent).  Conversely, because keys
+ * and values in the map are never null, null serves as a reliable
+ * atomic indicator of the current lack of any result.  To
+ * maintain this property, null serves as an implicit basis for
+ * all non-scalar reduction operations. For the double, long, and
+ * int versions, the basis should be one that, when combined with
+ * any other value, returns that other value (more formally, it
+ * should be the identity element for the reduction). Most common
+ * reductions have these properties; for example, computing a sum
+ * with basis 0 or a minimum with basis MAX_VALUE.
+ *
+ * <p>Search and transformation functions provided as arguments
+ * should similarly return null to indicate the lack of any result
+ * (in which case it is not used). In the case of mapped
+ * reductions, this also enables transformations to serve as
+ * filters, returning null (or, in the case of primitive
+ * specializations, the identity basis) if the element should not
+ * be combined. You can create compound transformations and
+ * filterings by composing them yourself under this "null means
+ * there is nothing there now" rule before using them in search or
+ * reduce operations.
+ *
+ * <p>Methods accepting and/or returning Entry arguments maintain
+ * key-value associations. They may be useful for example when
+ * finding the key for the greatest value. Note that "plain" Entry
+ * arguments can be supplied using {@code new
+ * AbstractMap.SimpleEntry(k,v)}.
+ *
+ * <p>Bulk operations may complete abruptly, throwing an
+ * exception encountered in the application of a supplied
+ * function. Bear in mind when handling such exceptions that other
+ * concurrently executing functions could also have thrown
+ * exceptions, or would have done so if the first exception had
+ * not occurred.
+ *
+ * <p>Speedups for parallel compared to sequential forms are common
+ * but not guaranteed.  Parallel operations involving brief functions
+ * on small maps may execute more slowly than sequential forms if the
+ * underlying work to parallelize the computation is more expensive
+ * than the computation itself.  Similarly, parallelization may not
+ * lead to much actual parallelism if all processors are busy
+ * performing unrelated tasks.
+ *
+ * <p>All arguments to all task methods must be non-null.
+ *
  * <p>This class is a member of the
  * <a href="{@docRoot}/../technotes/guides/collections/index.html">
  * Java Collections Framework</a>.
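
An illustrative sketch combining two idioms from the new class comment above: the LongAdder frequency map built with computeIfAbsent, and a parallel bulk reduction driven by a parallelismThreshold. Signatures follow the final JDK 8 release; the intermediate API in this changeset may differ slightly.

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.LongAdder;

    public class FrequencyMapSketch {
        public static void main(String[] args) {
            ConcurrentHashMap<String, LongAdder> freqs = new ConcurrentHashMap<>();
            for (String word : new String[] {"a", "b", "a"}) {
                // The frequency-map idiom from the javadoc; computeIfAbsent takes
                // the key plus a mapping function.
                freqs.computeIfAbsent(word, k -> new LongAdder()).increment();
            }
            // Parallel bulk reduction; parallelismThreshold = 1 requests maximal
            // parallelism, Long.MAX_VALUE would force a sequential pass.
            long total = freqs.reduceValuesToLong(1L, LongAdder::sum, 0L, Long::sum);
            System.out.println(total);   // 3
        }
    }
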
@@ -97,735 +264,2373 @@
  * @param <K> the type of keys maintained by this map
  * @param <V> the type of mapped values
  */
-public class ConcurrentHashMap<K, V> extends AbstractMap<K, V>
-        implements ConcurrentMap<K, V>, Serializable {
+@SuppressWarnings({"unchecked", "rawtypes", "serial"})
+public class ConcurrentHashMap<K,V> extends AbstractMap<K,V>
+    implements ConcurrentMap<K,V>, Serializable {
+
     private static final long serialVersionUID = 7249069246763182397L;
 
     /*
-     * The basic strategy is to subdivide the table among Segments,
-     * each of which itself is a concurrently readable hash table.  To
-     * reduce footprint, all but one segments are constructed only
-     * when first needed (see ensureSegment). To maintain visibility
-     * in the presence of lazy construction, accesses to segments as
-     * well as elements of segment's table must use volatile access,
-     * which is done via Unsafe within methods segmentAt etc
-     * below. These provide the functionality of AtomicReferenceArrays
-     * but reduce the levels of indirection. Additionally,
-     * volatile-writes of table elements and entry "next" fields
-     * within locked operations use the cheaper "lazySet" forms of
-     * writes (via putOrderedObject) because these writes are always
-     * followed by lock releases that maintain sequential consistency
-     * of table updates.
+     * Overview:
+     *
+     * The primary design goal of this hash table is to maintain
+     * concurrent readability (typically method get(), but also
+     * iterators and related methods) while minimizing update
+     * contention. Secondary goals are to keep space consumption about
+     * the same or better than java.util.HashMap, and to support high
+     * initial insertion rates on an empty table by many threads.
+     *
+     * Each key-value mapping is held in a Node.  Because Node key
+     * fields can contain special values, they are defined using plain
+     * Object types (not type "K"). This leads to a lot of explicit
+     * casting (and the use of class-wide warning suppressions).  It
+     * also allows some of the public methods to be factored into a
+     * smaller number of internal methods (although sadly not so for
+     * the five variants of put-related operations). The
+     * validation-based approach explained below leads to a lot of
+     * code sprawl because retry-control precludes factoring into
+     * smaller methods.
+     *
+     * The table is lazily initialized to a power-of-two size upon the
+     * first insertion.  Each bin in the table normally contains a
+     * list of Nodes (most often, the list has only zero or one Node).
+     * Table accesses require volatile/atomic reads, writes, and
+     * CASes.  Because there is no other way to arrange this without
+     * adding further indirections, we use intrinsics
+     * (sun.misc.Unsafe) operations.
+     *
+     * We use the top (sign) bit of Node hash fields for control
+     * purposes -- it is available anyway because of addressing
+     * constraints.  Nodes with negative hash fields are forwarding
+     * nodes to either TreeBins or resized tables.  The lower 31 bits
+     * of each normal Node's hash field contain a transformation of
+     * the key's hash code.
+     *
+     * Insertion (via put or its variants) of the first node in an
+     * empty bin is performed by just CASing it to the bin.  This is
+     * by far the most common case for put operations under most
+     * key/hash distributions.  Other update operations (insert,
+     * delete, and replace) require locks.  We do not want to waste
+     * the space required to associate a distinct lock object with
+     * each bin, so instead use the first node of a bin list itself as
+     * a lock. Locking support for these locks relies on builtin
+     * "synchronized" monitors.
+     *
+     * Using the first node of a list as a lock does not by itself
+     * suffice though: When a node is locked, any update must first
+     * validate that it is still the first node after locking it, and
+     * retry if not. Because new nodes are always appended to lists,
+     * once a node is first in a bin, it remains first until deleted
+     * or the bin becomes invalidated (upon resizing).
+     *
+     * The main disadvantage of per-bin locks is that other update
+     * operations on other nodes in a bin list protected by the same
+     * lock can stall, for example when user equals() or mapping
+     * functions take a long time.  However, statistically, under
+     * random hash codes, this is not a common problem.  Ideally, the
+     * frequency of nodes in bins follows a Poisson distribution
+     * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
+     * parameter of about 0.5 on average, given the resizing threshold
+     * of 0.75, although with a large variance because of resizing
+     * granularity. Ignoring variance, the expected occurrences of
+     * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
+     * first values are:
+     *
+     * 0:    0.60653066
+     * 1:    0.30326533
+     * 2:    0.07581633
+     * 3:    0.01263606
+     * 4:    0.00157952
+     * 5:    0.00015795
+     * 6:    0.00001316
+     * 7:    0.00000094
+     * 8:    0.00000006
+     * more: less than 1 in ten million
+     *
+     * Lock contention probability for two threads accessing distinct
+     * elements is roughly 1 / (8 * #elements) under random hashes.
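+     *
+     * (Illustrative sketch, not part of this class: the table of
+     * probabilities above can be reproduced with a few lines of Java.)
+     *
+     *   double p = Math.exp(-0.5);              // k = 0 term
+     *   for (int k = 0; k <= 8; ++k) {
+     *       System.out.printf("%d: %.8f%n", k, p);
+     *       p = p * 0.5 / (k + 1);              // next Poisson term
+     *   }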
      *
-     * Historical note: The previous version of this class relied
-     * heavily on "final" fields, which avoided some volatile reads at
-     * the expense of a large initial footprint.  Some remnants of
-     * that design (including forced construction of segment 0) exist
-     * to ensure serialization compatibility.
+     * Actual hash code distributions encountered in practice
+     * sometimes deviate significantly from uniform randomness.  This
+     * includes the case when N > (1<<30), so some keys MUST collide.
+     * Similarly for dumb or hostile usages in which multiple keys are
+     * designed to have identical hash codes. Also, although we guard
+     * against the worst effects of this (see method spread), sets of
+     * hashes may differ only in bits that do not impact their bin
+     * index for a given power-of-two mask.  So we use a secondary
+     * strategy that applies when the number of nodes in a bin exceeds
+     * a threshold, and at least one of the keys implements
+     * Comparable.  These TreeBins use a balanced tree to hold nodes
+     * (a specialized form of red-black trees), bounding search time
+     * to O(log N).  Each search step in a TreeBin is at least twice as
+     * slow as in a regular list, but given that N cannot exceed
+     * (1<<64) (before running out of addresses) this bounds search
+     * steps, lock hold times, etc, to reasonable constants (roughly
+     * 100 nodes inspected per operation worst case) so long as keys
+     * are Comparable (which is very common -- String, Long, etc).
+     * TreeBin nodes (TreeNodes) also maintain the same "next"
+     * traversal pointers as regular nodes, so can be traversed in
+     * iterators in the same way.
+     *
+     * The table is resized when occupancy exceeds a percentage
+     * threshold (nominally, 0.75, but see below).  Any thread
+     * noticing an overfull bin may assist in resizing after the
+     * initiating thread allocates and sets up the replacement
+     * array. However, rather than stalling, these other threads may
+     * proceed with insertions etc.  The use of TreeBins shields us
+     * from the worst case effects of overfilling while resizes are in
+     * progress.  Resizing proceeds by transferring bins, one by one,
+     * from the table to the next table. To enable concurrency, the
+     * next table must be (incrementally) prefilled with place-holders
+     * serving as reverse forwarders to the old table.  Because we are
+     * using power-of-two expansion, the elements from each bin must
+     * either stay at same index, or move with a power of two
+     * offset. We eliminate unnecessary node creation by catching
+     * cases where old nodes can be reused because their next fields
+     * won't change.  On average, only about one-sixth of them need
+     * cloning when a table doubles. The nodes they replace will be
+     * garbage collectable as soon as they are no longer referenced by
+     * any reader thread that may be in the midst of concurrently
+     * traversing the table.  Upon transfer, the old table bin contains
+     * only a special forwarding node (with hash field "MOVED") that
+     * contains the next table as its key. On encountering a
+     * forwarding node, access and update operations restart, using
+     * the new table.
+     *
+     * Each bin transfer requires its bin lock, which can stall
+     * waiting for locks while resizing. However, because other
+     * threads can join in and help resize rather than contend for
+     * locks, average aggregate waits become shorter as resizing
+     * progresses.  The transfer operation must also ensure that all
+     * accessible bins in both the old and new table are usable by any
+     * traversal.  This is arranged by proceeding from the last bin
+     * (table.length - 1) up towards the first.  Upon seeing a
+     * forwarding node, traversals (see class Traverser) arrange to
+     * move to the new table without revisiting nodes.  However, to
+     * ensure that no intervening nodes are skipped, bin splitting can
+     * only begin after the associated reverse-forwarders are in
+     * place.
+     *
+     * The traversal scheme also applies to partial traversals of
+     * ranges of bins (via an alternate Traverser constructor)
+     * to support partitioned aggregate operations.  Also, read-only
+     * operations give up if ever forwarded to a null table, which
+     * would provide support for shutdown-style clearing, although that
+     * is not currently implemented.
+     *
+     * Lazy table initialization minimizes footprint until first use,
+     * and also avoids resizings when the first operation is from a
+     * putAll, constructor with map argument, or deserialization.
+     * These cases attempt to override the initial capacity settings,
+     * but harmlessly fail to take effect in cases of races.
+     *
+     * The element count is maintained using a specialization of
+     * LongAdder. We need to incorporate a specialization rather than
+     * just use a LongAdder in order to access implicit
+     * contention-sensing that leads to creation of multiple
+     * Cells.  The counter mechanics avoid contention on
+     * updates but can encounter cache thrashing if read too
+     * frequently during concurrent access. To avoid reading so often,
+     * resizing under contention is attempted only upon adding to a
+     * bin already holding two or more nodes. Under uniform hash
+     * distributions, the probability of this occurring at threshold
+     * is around 13%, meaning that only about 1 in 8 puts check
+     * threshold (and after resizing, many fewer do so). The bulk
+     * putAll operation further reduces contention by only committing
+     * count updates upon these size checks.
+     *
+     * Maintaining API and serialization compatibility with previous
+     * versions of this class introduces several oddities. Mainly: We
+     * leave untouched but unused constructor arguments referring to
+     * concurrencyLevel. We accept a loadFactor constructor argument,
+     * but apply it only to initial table capacity (which is the only
+     * time that we can guarantee to honor it.) We also declare an
+     * unused "Segment" class that is instantiated in minimal form
+     * only when serializing.
      */
 
     /* ---------------- Constants -------------- */
 
     /**
-     * The default initial capacity for this table,
-     * used when not otherwise specified in a constructor.
+     * The largest possible table capacity.  This value must be
+     * exactly 1<<30 to stay within Java array allocation and indexing
+     * bounds for power of two table sizes, and is further required
+     * because the top two bits of 32bit hash fields are used for
+     * control purposes.
      */
-    static final int DEFAULT_INITIAL_CAPACITY = 16;
+    private static final int MAXIMUM_CAPACITY = 1 << 30;
+
+    /**
+     * The default initial table capacity.  Must be a power of 2
+     * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
+     */
+    private static final int DEFAULT_CAPACITY = 16;
 
     /**
-     * The default load factor for this table, used when not
-     * otherwise specified in a constructor.
+     * The largest possible (non-power of two) array size.
+     * Needed by toArray and related methods.
      */
-    static final float DEFAULT_LOAD_FACTOR = 0.75f;
+    static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
 
     /**
-     * The default concurrency level for this table, used when not
-     * otherwise specified in a constructor.
+     * The default concurrency level for this table. Unused but
+     * defined for compatibility with previous versions of this class.
      */
-    static final int DEFAULT_CONCURRENCY_LEVEL = 16;
+    private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
+
+    /**
+     * The load factor for this table. Overrides of this value in
+     * constructors affect only the initial table capacity.  The
+     * actual floating point value isn't normally used -- it is
+     * simpler to use expressions such as {@code n - (n >>> 2)} for
+     * the associated resizing threshold.
+     */
+    private static final float LOAD_FACTOR = 0.75f;
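+    // For example, the resizing threshold for a table of capacity n = 16
+    // is n - (n >>> 2) = 12, i.e. 0.75 * 16.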
 
     /**
-     * The maximum capacity, used if a higher value is implicitly
-     * specified by either of the constructors with arguments.  MUST
-     * be a power of two <= 1<<30 to ensure that entries are indexable
-     * using ints.
+     * The bin count threshold for using a tree rather than list for a
+     * bin.  The value reflects the approximate break-even point for
+     * using tree-based operations.
      */
-    static final int MAXIMUM_CAPACITY = 1 << 30;
+    private static final int TREE_THRESHOLD = 8;
 
     /**
-     * The minimum capacity for per-segment tables.  Must be a power
-     * of two, at least two to avoid immediate resizing on next use
-     * after lazy construction.
+     * Minimum number of rebinnings per transfer step. Ranges are
+     * subdivided to allow multiple resizer threads.  This value
+     * serves as a lower bound to avoid resizers encountering
+     * excessive memory contention.  The value should be at least
+     * DEFAULT_CAPACITY.
+     */
+    private static final int MIN_TRANSFER_STRIDE = 16;
+
+    /*
+     * Encodings for Node hash fields. See above for explanation.
      */
-    static final int MIN_SEGMENT_TABLE_CAPACITY = 2;
+    static final int MOVED     = 0x80000000; // hash field for forwarding nodes
+    static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
+
+    /** Number of CPUS, to place bounds on some sizings */
+    static final int NCPU = Runtime.getRuntime().availableProcessors();
+
+    /** For serialization compatibility. */
+    private static final ObjectStreamField[] serialPersistentFields = {
+        new ObjectStreamField("segments", Segment[].class),
+        new ObjectStreamField("segmentMask", Integer.TYPE),
+        new ObjectStreamField("segmentShift", Integer.TYPE)
+    };
 
     /**
-     * The maximum number of segments to allow; used to bound
-     * constructor arguments. Must be power of two less than 1 << 24.
+     * A padded cell for distributing counts.  Adapted from LongAdder
+     * and Striped64.  See their internal docs for explanation.
      */
-    static final int MAX_SEGMENTS = 1 << 16; // slightly conservative
-
-    /**
-     * Number of unsynchronized retries in size and containsValue
-     * methods before resorting to locking. This is used to avoid
-     * unbounded retries if tables undergo continuous modification
-     * which would make it impossible to obtain an accurate result.
-     */
-    static final int RETRIES_BEFORE_LOCK = 2;
+    @sun.misc.Contended static final class Cell {
+        volatile long value;
+        Cell(long x) { value = x; }
+    }
 
     /* ---------------- Fields -------------- */
 
     /**
-     * A randomizing value associated with this instance that is applied to
-     * hash code of keys to make hash collisions harder to find.
+     * The array of bins. Lazily initialized upon first insertion.
+     * Size is always a power of two. Accessed directly by iterators.
+     */
+    transient volatile Node<K,V>[] table;
+
+    /**
+     * The next table to use; non-null only while resizing.
+     */
+    private transient volatile Node<K,V>[] nextTable;
+
+    /**
+     * Base counter value, used mainly when there is no contention,
+     * but also as a fallback during table initialization
+     * races. Updated via CAS.
+     */
+    private transient volatile long baseCount;
+
+    /**
+     * Table initialization and resizing control.  When negative, the
+     * table is being initialized or resized: -1 for initialization,
+     * else -(1 + the number of active resizing threads).  Otherwise,
+     * when table is null, holds the initial table size to use upon
+     * creation, or 0 for default. After initialization, holds the
+     * next element count value upon which to resize the table.
+     */
+    private transient volatile int sizeCtl;
+
+    /**
+     * The next table index (plus one) to split while resizing.
+     */
+    private transient volatile int transferIndex;
+
+    /**
+     * The least available table index to split while resizing.
+     */
+    private transient volatile int transferOrigin;
+
+    /**
+     * Spinlock (locked via CAS) used when resizing and/or creating Cells.
      */
-   private transient final int hashSeed = sun.misc.Hashing.randomHashSeed(this);
+    private transient volatile int cellsBusy;
+
+    /**
+     * Table of counter cells. When non-null, size is a power of 2.
+     */
+    private transient volatile Cell[] counterCells;
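+
+    // Illustrative sketch (assumed shape of the summation described in the
+    // overview, not a verbatim excerpt of this class): the current element
+    // count is read as baseCount plus the values of any counter cells, e.g.
+    //
+    //   long sum = baseCount;
+    //   Cell[] as = counterCells;
+    //   if (as != null)
+    //       for (Cell a : as)
+    //           if (a != null)
+    //               sum += a.value;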
+
+    // views
+    private transient KeySetView<K,V> keySet;
+    private transient ValuesView<K,V> values;
+    private transient EntrySetView<K,V> entrySet;
+
+    /* ---------------- Table element access -------------- */
+
+    /*
+     * Volatile access methods are used for table elements as well as
+     * elements of in-progress next table while resizing.  Uses are
+     * null checked by callers, and implicitly bounds-checked, relying
+     * on the invariants that tab arrays have non-zero size, and all
+     * indices are masked with (tab.length - 1) which is never
+     * negative and always less than length. Note that, to be correct
+     * wrt arbitrary concurrency errors by users, bounds checks must
+     * operate on local variables, which accounts for some odd-looking
+     * inline assignments below.
+     */
+
+    static final <K,V> Node<K,V> tabAt(Node<K,V>[] tab, int i) {
+        return (Node<K,V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
+    }
+
+    static final <K,V> boolean casTabAt(Node<K,V>[] tab, int i,
+                                        Node<K,V> c, Node<K,V> v) {
+        return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
+    }
+
+    static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
+        U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
+    }
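+
+    // ABASE and ASHIFT (set up with the other Unsafe mechanics of this
+    // class, outside this hunk) hold the Node[] base offset and element
+    // shift, so ((long)i << ASHIFT) + ABASE addresses element i of tab.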
+
+    /* ---------------- Nodes -------------- */
 
     /**
-     * Mask value for indexing into segments. The upper bits of a
-     * key's hash code are used to choose the segment.
+     * Key-value entry.  This class is never exported out as a
+     * user-mutable Map.Entry (i.e., one supporting setValue; see
+     * MapEntry below), but can be used for read-only traversals used
+     * in bulk tasks.  Nodes with a hash field of MOVED are special,
+     * and do not contain user keys or values (and are never
+     * exported).  Otherwise, keys and vals are never null.
      */
-    final int segmentMask;
+    static class Node<K,V> implements Map.Entry<K,V> {
+        final int hash;
+        final Object key;
+        volatile V val;
+        Node<K,V> next;
+
+        Node(int hash, Object key, V val, Node<K,V> next) {
+            this.hash = hash;
+            this.key = key;
+            this.val = val;
+            this.next = next;
+        }
+
+        public final K getKey()       { return (K)key; }
+        public final V getValue()     { return val; }
+        public final int hashCode()   { return key.hashCode() ^ val.hashCode(); }
+        public final String toString(){ return key + "=" + val; }
+        public final V setValue(V value) {
+            throw new UnsupportedOperationException();
+        }
+
+        public final boolean equals(Object o) {
+            Object k, v, u; Map.Entry<?,?> e;
+            return ((o instanceof Map.Entry) &&
+                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
+                    (v = e.getValue()) != null &&
+                    (k == key || k.equals(key)) &&
+                    (v == (u = val) || v.equals(u)));
+        }
+    }
 
     /**
-     * Shift value for indexing within segments.
+     * Exported Entry for EntryIterator
      */
-    final int segmentShift;
+    static final class MapEntry<K,V> implements Map.Entry<K,V> {
+        final K key; // non-null
+        V val;       // non-null
+        final ConcurrentHashMap<K,V> map;
+        MapEntry(K key, V val, ConcurrentHashMap<K,V> map) {
+            this.key = key;
+            this.val = val;
+            this.map = map;
+        }
+        public K getKey()        { return key; }
+        public V getValue()      { return val; }
+        public int hashCode()    { return key.hashCode() ^ val.hashCode(); }
+        public String toString() { return key + "=" + val; }
+
+        public boolean equals(Object o) {
+            Object k, v; Map.Entry<?,?> e;
+            return ((o instanceof Map.Entry) &&
+                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
+                    (v = e.getValue()) != null &&
+                    (k == key || k.equals(key)) &&
+                    (v == val || v.equals(val)));
+        }
+
+        /**
+         * Sets our entry's value and writes through to the map. The
+         * value to return is somewhat arbitrary here. Since we do not
+         * necessarily track asynchronous changes, the most recent
+         * "previous" value could be different from what we return (or
+         * could even have been removed, in which case the put will
+         * re-establish). We do not and cannot guarantee more.
+         */
+        public V setValue(V value) {
+            if (value == null) throw new NullPointerException();
+            V v = val;
+            val = value;
+            map.put(key, value);
+            return v;
+        }
+    }
+
+
+    /* ---------------- TreeBins -------------- */
+
+    /**
+     * Nodes for use in TreeBins
+     */
+    static final class TreeNode<K,V> extends Node<K,V> {
+        TreeNode<K,V> parent;  // red-black tree links
+        TreeNode<K,V> left;
+        TreeNode<K,V> right;
+        TreeNode<K,V> prev;    // needed to unlink next upon deletion
+        boolean red;
+
+        TreeNode(int hash, Object key, V val, Node<K,V> next,
+                 TreeNode<K,V> parent) {
+            super(hash, key, val, next);
+            this.parent = parent;
+        }
+    }
+
+    /**
+     * Returns a Class for the given type of the form "class C
+     * implements Comparable<C>", if one exists, else null.  See below
+     * for explanation.
+     */
+    static Class<?> comparableClassFor(Class<?> c) {
+        Class<?> s, cmpc; Type[] ts, as; Type t; ParameterizedType p;
+        if (c == String.class) // bypass checks
+            return c;
+        if (c != null && (cmpc = Comparable.class).isAssignableFrom(c)) {
+            while (cmpc.isAssignableFrom(s = c.getSuperclass()))
+                c = s; // find topmost comparable class
+            if ((ts = c.getGenericInterfaces()) != null) {
+                for (int i = 0; i < ts.length; ++i) {
+                    if (((t = ts[i]) instanceof ParameterizedType) &&
+                        ((p = (ParameterizedType)t).getRawType() == cmpc) &&
+                        (as = p.getActualTypeArguments()) != null &&
+                        as.length == 1 && as[0] == c) // type arg is c
+                        return c;
+                }
+            }
+        }
+        return null;
+    }
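+
+    // For example, comparableClassFor(String.class) returns String.class
+    // via the fast path, while comparableClassFor(Object.class) returns
+    // null because Object does not implement Comparable.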
 
     /**
-     * The segments, each of which is a specialized hash table.
-     */
-    final Segment<K,V>[] segments;
-
-    transient Set<K> keySet;
-    transient Set<Map.Entry<K,V>> entrySet;
-    transient Collection<V> values;
-
-    /**
-     * ConcurrentHashMap list entry. Note that this is never exported
-     * out as a user-visible Map.Entry.
+     * A specialized form of red-black tree for use in bins
+     * whose size exceeds a threshold.
+     *
+     * TreeBins use a special form of comparison for search and
+     * related operations (which is the main reason we cannot use
+     * existing collections such as TreeMaps). TreeBins contain
+     * Comparable elements, but may contain others, as well as
+     * elements that are Comparable but not necessarily Comparable
+     * for the same T, so we cannot invoke compareTo among them. To
+     * handle this, the tree is ordered primarily by hash value, then
+     * by Comparable.compareTo order if applicable.  On lookup at a
+     * node, if elements are not comparable or compare as 0 then both
+     * left and right children may need to be searched in the case of
+     * tied hash values. (This corresponds to the full list search
+     * that would be necessary if all elements were non-Comparable and
+     * had tied hashes.)  The red-black balancing code is updated from
+     * pre-jdk-collections
+     * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
+     * based in turn on Cormen, Leiserson, and Rivest "Introduction to
+     * Algorithms" (CLR).
+     *
+     * TreeBins also maintain a locking discipline separate from that
+     * of regular bins. Because they are forwarded via special MOVED
+     * nodes at bin heads (which can never change once established),
+     * we cannot use those nodes as locks. Instead, TreeBin extends
+     * StampedLock to support a form of read-write lock. For update
+     * operations and table validation, the exclusive form of lock
+     * behaves in the same way as bin-head locks. However, lookups use
+     * shared read-lock mechanics to allow multiple readers in the
+     * absence of writers.  Additionally, these lookups do not ever
+     * block: While the lock is not available, they proceed along the
+     * slow traversal path (via next-pointers) until the lock becomes
+     * available or the list is exhausted, whichever comes
+     * first. These cases are not fast, but maximize aggregate
+     * expected throughput.
      */
-    static final class HashEntry<K,V> {
-        final int hash;
-        final K key;
-        volatile V value;
-        volatile HashEntry<K,V> next;
-
-        HashEntry(int hash, K key, V value, HashEntry<K,V> next) {
-            this.hash = hash;
-            this.key = key;
-            this.value = value;
-            this.next = next;
+    static final class TreeBin<K,V> extends StampedLock {
+        private static final long serialVersionUID = 2249069246763182397L;
+        transient TreeNode<K,V> root;  // root of tree
+        transient TreeNode<K,V> first; // head of next-pointer list
+
+        /** From CLR */
+        private void rotateLeft(TreeNode<K,V> p) {
+            if (p != null) {
+                TreeNode<K,V> r = p.right, pp, rl;
+                if ((rl = p.right = r.left) != null)
+                    rl.parent = p;
+                if ((pp = r.parent = p.parent) == null)
+                    root = r;
+                else if (pp.left == p)
+                    pp.left = r;
+                else
+                    pp.right = r;
+                r.left = p;
+                p.parent = r;
+            }
+        }
+
+        /** From CLR */
+        private void rotateRight(TreeNode<K,V> p) {
+            if (p != null) {
+                TreeNode<K,V> l = p.left, pp, lr;
+                if ((lr = p.left = l.right) != null)
+                    lr.parent = p;
+                if ((pp = l.parent = p.parent) == null)
+                    root = l;
+                else if (pp.right == p)
+                    pp.right = l;
+                else
+                    pp.left = l;
+                l.right = p;
+                p.parent = l;
+            }
+        }
+
+        /**
+         * Returns the TreeNode (or null if not found) for the given key
+         * starting at given root.
+         */
+        final TreeNode<K,V> getTreeNode(int h, Object k, TreeNode<K,V> p,
+                                        Class<?> cc) {
+            while (p != null) {
+                int dir, ph; Object pk; Class<?> pc;
+                if ((ph = p.hash) != h)
+                    dir = (h < ph) ? -1 : 1;
+                else if ((pk = p.key) == k || k.equals(pk))
+                    return p;
+                else if (cc == null || pk == null ||
+                         ((pc = pk.getClass()) != cc &&
+                          comparableClassFor(pc) != cc) ||
+                         (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
+                    TreeNode<K,V> r, pr; // check both sides
+                    if ((pr = p.right) != null &&
+                        (r = getTreeNode(h, k, pr, cc)) != null)
+                        return r;
+                    else // continue left
+                        dir = -1;
+                }
+                p = (dir > 0) ? p.right : p.left;
+            }
+            return null;
+        }
+
+        /**
+         * Wrapper for getTreeNode used by CHM.get. Tries to obtain the
+         * read lock to call getTreeNode; if the lock cannot be
+         * acquired, searches along next links instead.
+         */
+        final V getValue(int h, Object k) {
+            Class<?> cc = comparableClassFor(k.getClass());
+            Node<K,V> r = null;
+            for (Node<K,V> e = first; e != null; e = e.next) {
+                long s;
+                if ((s = tryReadLock()) != 0L) {
+                    try {
+                        r = getTreeNode(h, k, root, cc);
+                    } finally {
+                        unlockRead(s);
+                    }
+                    break;
+                }
+                else if (e.hash == h && k.equals(e.key)) {
+                    r = e;
+                    break;
+                }
+            }
+            return r == null ? null : r.val;
+        }
+
+        /**
+         * Finds or adds a node.
+         * @return null if added
+         */
+        final TreeNode<K,V> putTreeNode(int h, Object k, V v) {
+            Class<?> cc = comparableClassFor(k.getClass());
+            TreeNode<K,V> pp = root, p = null;
+            int dir = 0;
+            while (pp != null) { // find existing node or leaf to insert at
+                int ph; Object pk; Class<?> pc;
+                p = pp;
+                if ((ph = p.hash) != h)
+                    dir = (h < ph) ? -1 : 1;
+                else if ((pk = p.key) == k || k.equals(pk))
+                    return p;
+                else if (cc == null || pk == null ||
+                         ((pc = pk.getClass()) != cc &&
+                          comparableClassFor(pc) != cc) ||
+                         (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
+                    TreeNode<K,V> r, pr;
+                    if ((pr = p.right) != null &&
+                        (r = getTreeNode(h, k, pr, cc)) != null)
+                        return r;
+                    else // continue left
+                        dir = -1;
+                }
+                pp = (dir > 0) ? p.right : p.left;
+            }
+
+            TreeNode<K,V> f = first;
+            TreeNode<K,V> x = first = new TreeNode<K,V>(h, k, v, f, p);
+            if (p == null)
+                root = x;
+            else { // attach and rebalance; adapted from CLR
+                if (f != null)
+                    f.prev = x;
+                if (dir <= 0)
+                    p.left = x;
+                else
+                    p.right = x;
+                x.red = true;
+                for (TreeNode<K,V> xp, xpp, xppl, xppr;;) {
+                    if ((xp = x.parent) == null) {
+                        (root = x).red = false;
+                        break;
+                    }
+                    else if (!xp.red || (xpp = xp.parent) == null) {
+                        TreeNode<K,V> r = root;
+                        if (r != null && r.red)
+                            r.red = false;
+                        break;
+                    }
+                    else if ((xppl = xpp.left) == xp) {
+                        if ((xppr = xpp.right) != null && xppr.red) {
+                            xppr.red = false;
+                            xp.red = false;
+                            xpp.red = true;
+                            x = xpp;
+                        }
+                        else {
+                            if (x == xp.right) {
+                                rotateLeft(x = xp);
+                                xpp = (xp = x.parent) == null ? null : xp.parent;
+                            }
+                            if (xp != null) {
+                                xp.red = false;
+                                if (xpp != null) {
+                                    xpp.red = true;
+                                    rotateRight(xpp);
+                                }
+                            }
+                        }
+                    }
+                    else {
+                        if (xppl != null && xppl.red) {
+                            xppl.red = false;
+                            xp.red = false;
+                            xpp.red = true;
+                            x = xpp;
+                        }
+                        else {
+                            if (x == xp.left) {
+                                rotateRight(x = xp);
+                                xpp = (xp = x.parent) == null ? null : xp.parent;
+                            }
+                            if (xp != null) {
+                                xp.red = false;
+                                if (xpp != null) {
+                                    xpp.red = true;
+                                    rotateLeft(xpp);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+            assert checkInvariants();
+            return null;
+        }
+
+        /**
+         * Removes the given node, which must be present before this
+         * call.  This is messier than typical red-black deletion code
+         * because we cannot swap the contents of an interior node
+         * with a leaf successor that is pinned by "next" pointers
+         * that are accessible independently of lock. So instead we
+         * swap the tree linkages.
+         */
+        final void deleteTreeNode(TreeNode<K,V> p) {
+            TreeNode<K,V> next = (TreeNode<K,V>)p.next;
+            TreeNode<K,V> pred = p.prev;  // unlink traversal pointers
+            if (pred == null)
+                first = next;
+            else
+                pred.next = next;
+            if (next != null)
+                next.prev = pred;
+            else if (pred == null) {
+                root = null;
+                return;
+            }
+            TreeNode<K,V> replacement;
+            TreeNode<K,V> pl = p.left;
+            TreeNode<K,V> pr = p.right;
+            if (pl != null && pr != null) {
+                TreeNode<K,V> s = pr, sl;
+                while ((sl = s.left) != null) // find successor
+                    s = sl;
+                boolean c = s.red; s.red = p.red; p.red = c; // swap colors
+                TreeNode<K,V> sr = s.right;
+                TreeNode<K,V> pp = p.parent;
+                if (s == pr) { // p was s's direct parent
+                    p.parent = s;
+                    s.right = p;
+                }
+                else {
+                    TreeNode<K,V> sp = s.parent;
+                    if ((p.parent = sp) != null) {
+                        if (s == sp.left)
+                            sp.left = p;
+                        else
+                            sp.right = p;
+                    }
+                    if ((s.right = pr) != null)
+                        pr.parent = s;
+                }
+                p.left = null;
+                if ((p.right = sr) != null)
+                    sr.parent = p;
+                if ((s.left = pl) != null)
+                    pl.parent = s;
+                if ((s.parent = pp) == null)
+                    root = s;
+                else if (p == pp.left)
+                    pp.left = s;
+                else
+                    pp.right = s;
+                if (sr != null)
+                    replacement = sr;
+                else
+                    replacement = p;
+            }
+            else if (pl != null)
+                replacement = pl;
+            else if (pr != null)
+                replacement = pr;
+            else
+                replacement = p;
+            if (replacement != p) {
+                TreeNode<K,V> pp = replacement.parent = p.parent;
+                if (pp == null)
+                    root = replacement;
+                else if (p == pp.left)
+                    pp.left = replacement;
+                else
+                    pp.right = replacement;
+                p.left = p.right = p.parent = null;
+            }
+            if (!p.red) { // rebalance, from CLR
+                for (TreeNode<K,V> x = replacement; x != null; ) {
+                    TreeNode<K,V> xp, xpl, xpr;
+                    if (x.red || (xp = x.parent) == null) {
+                        x.red = false;
+                        break;
+                    }
+                    else if ((xpl = xp.left) == x) {
+                        if ((xpr = xp.right) != null && xpr.red) {
+                            xpr.red = false;
+                            xp.red = true;
+                            rotateLeft(xp);
+                            xpr = (xp = x.parent) == null ? null : xp.right;
+                        }
+                        if (xpr == null)
+                            x = xp;
+                        else {
+                            TreeNode<K,V> sl = xpr.left, sr = xpr.right;
+                            if ((sr == null || !sr.red) &&
+                                (sl == null || !sl.red)) {
+                                xpr.red = true;
+                                x = xp;
+                            }
+                            else {
+                                if (sr == null || !sr.red) {
+                                    if (sl != null)
+                                        sl.red = false;
+                                    xpr.red = true;
+                                    rotateRight(xpr);
+                                    xpr = (xp = x.parent) == null ?
+                                        null : xp.right;
+                                }
+                                if (xpr != null) {
+                                    xpr.red = (xp == null) ? false : xp.red;
+                                    if ((sr = xpr.right) != null)
+                                        sr.red = false;
+                                }
+                                if (xp != null) {
+                                    xp.red = false;
+                                    rotateLeft(xp);
+                                }
+                                x = root;
+                            }
+                        }
+                    }
+                    else { // symmetric
+                        if (xpl != null && xpl.red) {
+                            xpl.red = false;
+                            xp.red = true;
+                            rotateRight(xp);
+                            xpl = (xp = x.parent) == null ? null : xp.left;
+                        }
+                        if (xpl == null)
+                            x = xp;
+                        else {
+                            TreeNode<K,V> sl = xpl.left, sr = xpl.right;
+                            if ((sl == null || !sl.red) &&
+                                (sr == null || !sr.red)) {
+                                xpl.red = true;
+                                x = xp;
+                            }
+                            else {
+                                if (sl == null || !sl.red) {
+                                    if (sr != null)
+                                        sr.red = false;
+                                    xpl.red = true;
+                                    rotateLeft(xpl);
+                                    xpl = (xp = x.parent) == null ?
+                                        null : xp.left;
+                                }
+                                if (xpl != null) {
+                                    xpl.red = (xp == null) ? false : xp.red;
+                                    if ((sl = xpl.left) != null)
+                                        sl.red = false;
+                                }
+                                if (xp != null) {
+                                    xp.red = false;
+                                    rotateRight(xp);
+                                }
+                                x = root;
+                            }
+                        }
+                    }
+                }
+            }
+            if (p == replacement) {  // detach pointers
+                TreeNode<K,V> pp;
+                if ((pp = p.parent) != null) {
+                    if (p == pp.left)
+                        pp.left = null;
+                    else if (p == pp.right)
+                        pp.right = null;
+                    p.parent = null;
+                }
+            }
+            assert checkInvariants();
+        }
+
+        /**
+         * Checks linkage and balance invariants at root
+         */
+        final boolean checkInvariants() {
+            TreeNode<K,V> r = root;
+            if (r == null)
+                return (first == null);
+            else
+                return (first != null) && checkTreeNode(r);
         }
 
         /**
-         * Sets next field with volatile write semantics.  (See above
-         * about use of putOrderedObject.)
+         * Recursive invariant check
          */
-        final void setNext(HashEntry<K,V> n) {
-            UNSAFE.putOrderedObject(this, nextOffset, n);
+        final boolean checkTreeNode(TreeNode<K,V> t) {
+            TreeNode<K,V> tp = t.parent, tl = t.left, tr = t.right,
+                tb = t.prev, tn = (TreeNode<K,V>)t.next;
+            if (tb != null && tb.next != t)
+                return false;
+            if (tn != null && tn.prev != t)
+                return false;
+            if (tp != null && t != tp.left && t != tp.right)
+                return false;
+            if (tl != null && (tl.parent != t || tl.hash > t.hash))
+                return false;
+            if (tr != null && (tr.parent != t || tr.hash < t.hash))
+                return false;
+            if (t.red && tl != null && tl.red && tr != null && tr.red)
+                return false;
+            if (tl != null && !checkTreeNode(tl))
+                return false;
+            if (tr != null && !checkTreeNode(tr))
+                return false;
+            return true;
+        }
+    }
+
+    /* ---------------- Collision reduction methods -------------- */
+
+    /**
+     * Spreads higher bits to lower, and also forces top bit to 0.
+     * Because the table uses power-of-two masking, sets of hashes
+     * that vary only in bits above the current mask will always
+     * collide. (Among known examples are sets of Float keys holding
+     * consecutive whole numbers in small tables.)  To counter this,
+     * we apply a transform that spreads the impact of higher bits
+     * downward. There is a tradeoff between speed, utility, and
+     * quality of bit-spreading. Because many common sets of hashes
+     * are already reasonably distributed across bits (so don't benefit
+     * from spreading), and because we use trees to handle large sets
+     * of collisions in bins, we don't need excessively high quality.
+     */
+    private static final int spread(int h) {
+        h ^= (h >>> 18) ^ (h >>> 12);
+        return (h ^ (h >>> 10)) & HASH_BITS;
+    }
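+
+    // Example of the collisions described above: Float.hashCode for the
+    // whole numbers 1.0f, 2.0f, ... differs only in upper bits, so without
+    // this spreading all such keys would index bin 0 of a small table.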
+
+    /**
+     * Replaces a list bin with a tree bin if key is comparable.  Call
+     * only when locked.
+     */
+    private final void replaceWithTreeBin(Node<K,V>[] tab, int index, Object key) {
+        if (tab != null && comparableClassFor(key.getClass()) != null) {
+            TreeBin<K,V> t = new TreeBin<K,V>();
+            for (Node<K,V> e = tabAt(tab, index); e != null; e = e.next)
+                t.putTreeNode(e.hash, e.key, e.val);
+            setTabAt(tab, index, new Node<K,V>(MOVED, t, null, null));
+        }
+    }
+
+    /* ---------------- Internal access and update methods -------------- */
+
+    /** Implementation for get and containsKey */
+    private final V internalGet(Object k) {
+        int h = spread(k.hashCode());
+        V v = null;
+        Node<K,V>[] tab; Node<K,V> e;
+        if ((tab = table) != null &&
+            (e = tabAt(tab, (tab.length - 1) & h)) != null) {
+            for (;;) {
+                int eh; Object ek;
+                if ((eh = e.hash) < 0) {
+                    if ((ek = e.key) instanceof TreeBin) { // search TreeBin
+                        v = ((TreeBin<K,V>)ek).getValue(h, k);
+                        break;
+                    }
+                    else if (!(ek instanceof Node[]) ||    // try new table
+                             (e = tabAt(tab = (Node<K,V>[])ek,
+                                        (tab.length - 1) & h)) == null)
+                        break;
+                }
+                else if (eh == h && ((ek = e.key) == k || k.equals(ek))) {
+                    v = e.val;
+                    break;
+                }
+                else if ((e = e.next) == null)
+                    break;
+            }
+        }
+        return v;
+    }
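+
+    // (Expected callers, not shown in this hunk: get(key) returns
+    // internalGet(key), and containsKey(key) tests internalGet(key) != null.)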
+
+    /**
+     * Implementation for the four public remove/replace methods:
+     * Replaces node value with v, conditional upon match of cv if
+     * non-null.  If resulting value is null, delete.
+     */
+    private final V internalReplace(Object k, V v, Object cv) {
+        int h = spread(k.hashCode());
+        V oldVal = null;
+        for (Node<K,V>[] tab = table;;) {
+            Node<K,V> f; int i, fh; Object fk;
+            if (tab == null ||
+                (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
+                break;
+            else if ((fh = f.hash) < 0) {
+                if ((fk = f.key) instanceof TreeBin) {
+                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
+                    long stamp = t.writeLock();
+                    boolean validated = false;
+                    boolean deleted = false;
+                    try {
+                        if (tabAt(tab, i) == f) {
+                            validated = true;
+                            Class<?> cc = comparableClassFor(k.getClass());
+                            TreeNode<K,V> p = t.getTreeNode(h, k, t.root, cc);
+                            if (p != null) {
+                                V pv = p.val;
+                                if (cv == null || cv == pv || cv.equals(pv)) {
+                                    oldVal = pv;
+                                    if (v != null)
+                                        p.val = v;
+                                    else {
+                                        deleted = true;
+                                        t.deleteTreeNode(p);
+                                    }
+                                }
+                            }
+                        }
+                    } finally {
+                        t.unlockWrite(stamp);
+                    }
+                    if (validated) {
+                        if (deleted)
+                            addCount(-1L, -1);
+                        break;
+                    }
+                }
+                else
+                    tab = (Node<K,V>[])fk;
+            }
+            else {
+                boolean validated = false;
+                boolean deleted = false;
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        validated = true;
+                        for (Node<K,V> e = f, pred = null;;) {
+                            Object ek;
+                            if (e.hash == h &&
+                                ((ek = e.key) == k || k.equals(ek))) {
+                                V ev = e.val;
+                                if (cv == null || cv == ev || cv.equals(ev)) {
+                                    oldVal = ev;
+                                    if (v != null)
+                                        e.val = v;
+                                    else {
+                                        deleted = true;
+                                        Node<K,V> en = e.next;
+                                        if (pred != null)
+                                            pred.next = en;
+                                        else
+                                            setTabAt(tab, i, en);
+                                    }
+                                }
+                                break;
+                            }
+                            pred = e;
+                            if ((e = e.next) == null)
+                                break;
+                        }
+                    }
+                }
+                if (validated) {
+                    if (deleted)
+                        addCount(-1L, -1);
+                    break;
+                }
+            }
+        }
+        return oldVal;
+    }
+
+    /*
+     * Internal versions of insertion methods
+     * All have the same basic structure as the first (internalPut):
+     *  1. If table uninitialized, create
+     *  2. If bin empty, try to CAS new node
+     *  3. If bin stale, use new table
+     *  4. if bin converted to TreeBin, validate and relay to TreeBin methods
+     *  5. Lock and validate; if valid, scan and add or update
+     *
+     * The putAll method differs mainly in attempting to pre-allocate
+     * enough table space, and in performing count updates and checks
+     * more lazily.
+     *
+     * Most of the function-accepting methods can't be factored nicely
+     * because they require different functional forms, so instead
+     * sprawl out similar mechanics.
+     */
+
+    /** Implementation for put and putIfAbsent */
+    private final V internalPut(K k, V v, boolean onlyIfAbsent) {
+        if (k == null || v == null) throw new NullPointerException();
+        int h = spread(k.hashCode());
+        int len = 0;
+        for (Node<K,V>[] tab = table;;) {
+            int i, fh; Node<K,V> f; Object fk;
+            if (tab == null)
+                tab = initTable();
+            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
+                if (casTabAt(tab, i, null, new Node<K,V>(h, k, v, null)))
+                    break;                   // no lock when adding to empty bin
+            }
+            else if ((fh = f.hash) < 0) {
+                if ((fk = f.key) instanceof TreeBin) {
+                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
+                    long stamp = t.writeLock();
+                    V oldVal = null;
+                    try {
+                        if (tabAt(tab, i) == f) {
+                            len = 2;
+                            TreeNode<K,V> p = t.putTreeNode(h, k, v);
+                            if (p != null) {
+                                oldVal = p.val;
+                                if (!onlyIfAbsent)
+                                    p.val = v;
+                            }
+                        }
+                    } finally {
+                        t.unlockWrite(stamp);
+                    }
+                    if (len != 0) {
+                        if (oldVal != null)
+                            return oldVal;
+                        break;
+                    }
+                }
+                else
+                    tab = (Node<K,V>[])fk;
+            }
+            else {
+                V oldVal = null;
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        len = 1;
+                        for (Node<K,V> e = f;; ++len) {
+                            Object ek;
+                            if (e.hash == h &&
+                                ((ek = e.key) == k || k.equals(ek))) {
+                                oldVal = e.val;
+                                if (!onlyIfAbsent)
+                                    e.val = v;
+                                break;
+                            }
+                            Node<K,V> last = e;
+                            if ((e = e.next) == null) {
+                                last.next = new Node<K,V>(h, k, v, null);
+                                if (len > TREE_THRESHOLD)
+                                    replaceWithTreeBin(tab, i, k);
+                                break;
+                            }
+                        }
+                    }
+                }
+                if (len != 0) {
+                    if (oldVal != null)
+                        return oldVal;
+                    break;
+                }
+            }
+        }
+        addCount(1L, len);
+        return null;
+    }
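+
+    // (Sketch of the public wrappers, not shown in this hunk: put(key, value)
+    // delegates here with onlyIfAbsent == false, and putIfAbsent(key, value)
+    // with onlyIfAbsent == true.)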
+
+    /** Implementation for computeIfAbsent */
+    private final V internalComputeIfAbsent(K k, Function<? super K, ? extends V> mf) {
+        if (k == null || mf == null)
+            throw new NullPointerException();
+        int h = spread(k.hashCode());
+        V val = null;
+        int len = 0;
+        for (Node<K,V>[] tab = table;;) {
+            Node<K,V> f; int i; Object fk;
+            if (tab == null)
+                tab = initTable();
+            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
+                Node<K,V> node = new Node<K,V>(h, k, null, null);
+                synchronized (node) {
+                    if (casTabAt(tab, i, null, node)) {
+                        len = 1;
+                        try {
+                            if ((val = mf.apply(k)) != null)
+                                node.val = val;
+                        } finally {
+                            if (val == null)
+                                setTabAt(tab, i, null);
+                        }
+                    }
+                }
+                if (len != 0)
+                    break;
+            }
+            else if (f.hash < 0) {
+                if ((fk = f.key) instanceof TreeBin) {
+                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
+                    long stamp = t.writeLock();
+                    boolean added = false;
+                    try {
+                        if (tabAt(tab, i) == f) {
+                            len = 2;
+                            Class<?> cc = comparableClassFor(k.getClass());
+                            TreeNode<K,V> p = t.getTreeNode(h, k, t.root, cc);
+                            if (p != null)
+                                val = p.val;
+                            else if ((val = mf.apply(k)) != null) {
+                                added = true;
+                                t.putTreeNode(h, k, val);
+                            }
+                        }
+                    } finally {
+                        t.unlockWrite(stamp);
+                    }
+                    if (len != 0) {
+                        if (!added)
+                            return val;
+                        break;
+                    }
+                }
+                else
+                    tab = (Node<K,V>[])fk;
+            }
+            else {
+                boolean added = false;
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        len = 1;
+                        for (Node<K,V> e = f;; ++len) {
+                            Object ek; V ev;
+                            if (e.hash == h &&
+                                ((ek = e.key) == k || k.equals(ek))) {
+                                val = e.val;
+                                break;
+                            }
+                            Node<K,V> last = e;
+                            if ((e = e.next) == null) {
+                                if ((val = mf.apply(k)) != null) {
+                                    added = true;
+                                    last.next = new Node<K,V>(h, k, val, null);
+                                    if (len > TREE_THRESHOLD)
+                                        replaceWithTreeBin(tab, i, k);
+                                }
+                                break;
+                            }
+                        }
+                    }
+                }
+                if (len != 0) {
+                    if (!added)
+                        return val;
+                    break;
+                }
+            }
         }
-
-        // Unsafe mechanics
-        static final sun.misc.Unsafe UNSAFE;
-        static final long nextOffset;
-        static {
-            try {
-                UNSAFE = sun.misc.Unsafe.getUnsafe();
-                Class<?> k = HashEntry.class;
-                nextOffset = UNSAFE.objectFieldOffset
-                    (k.getDeclaredField("next"));
-            } catch (Exception e) {
-                throw new Error(e);
+        if (val != null)
+            addCount(1L, len);
+        return val;
+    }
+
+    /** Implementation for compute */
+    private final V internalCompute(K k, boolean onlyIfPresent,
+                                    BiFunction<? super K, ? super V, ? extends V> mf) {
+        if (k == null || mf == null)
+            throw new NullPointerException();
+        int h = spread(k.hashCode());
+        V val = null;
+        int delta = 0;
+        int len = 0;
+        for (Node<K,V>[] tab = table;;) {
+            Node<K,V> f; int i, fh; Object fk;
+            if (tab == null)
+                tab = initTable();
+            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
+                if (onlyIfPresent)
+                    break;
+                Node<K,V> node = new Node<K,V>(h, k, null, null);
+                synchronized (node) {
+                    if (casTabAt(tab, i, null, node)) {
+                        try {
+                            len = 1;
+                            if ((val = mf.apply(k, null)) != null) {
+                                node.val = val;
+                                delta = 1;
+                            }
+                        } finally {
+                            if (delta == 0)
+                                setTabAt(tab, i, null);
+                        }
+                    }
+                }
+                if (len != 0)
+                    break;
+            }
+            else if ((fh = f.hash) < 0) {
+                if ((fk = f.key) instanceof TreeBin) {
+                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
+                    long stamp = t.writeLock();
+                    try {
+                        if (tabAt(tab, i) == f) {
+                            len = 2;
+                            Class<?> cc = comparableClassFor(k.getClass());
+                            TreeNode<K,V> p = t.getTreeNode(h, k, t.root, cc);
+                            if (p != null || !onlyIfPresent) {
+                                V pv = (p == null) ? null : p.val;
+                                if ((val = mf.apply(k, pv)) != null) {
+                                    if (p != null)
+                                        p.val = val;
+                                    else {
+                                        delta = 1;
+                                        t.putTreeNode(h, k, val);
+                                    }
+                                }
+                                else if (p != null) {
+                                    delta = -1;
+                                    t.deleteTreeNode(p);
+                                }
+                            }
+                        }
+                    } finally {
+                        t.unlockWrite(stamp);
+                    }
+                    if (len != 0)
+                        break;
+                }
+                else
+                    tab = (Node<K,V>[])fk;
+            }
+            else {
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        len = 1;
+                        for (Node<K,V> e = f, pred = null;; ++len) {
+                            Object ek;
+                            if (e.hash == h &&
+                                ((ek = e.key) == k || k.equals(ek))) {
+                                val = mf.apply(k, e.val);
+                                if (val != null)
+                                    e.val = val;
+                                else {
+                                    delta = -1;
+                                    Node<K,V> en = e.next;
+                                    if (pred != null)
+                                        pred.next = en;
+                                    else
+                                        setTabAt(tab, i, en);
+                                }
+                                break;
+                            }
+                            pred = e;
+                            if ((e = e.next) == null) {
+                                if (!onlyIfPresent &&
+                                    (val = mf.apply(k, null)) != null) {
+                                    pred.next = new Node<K,V>(h, k, val, null);
+                                    delta = 1;
+                                    if (len > TREE_THRESHOLD)
+                                        replaceWithTreeBin(tab, i, k);
+                                }
+                                break;
+                            }
+                        }
+                    }
+                }
+                if (len != 0)
+                    break;
+            }
+        }
+        if (delta != 0)
+            addCount((long)delta, len);
+        return val;
+    }
+
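// Illustrative usage sketch of the public compute() this method backs, assuming
// a hypothetical reference-count map: a null result from the remapping function
// removes the entry, which is what the delta = -1 branch above accounts for.
import java.util.concurrent.ConcurrentHashMap;

class ComputeSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> refs = new ConcurrentHashMap<>();
        refs.compute("res", (k, v) -> (v == null) ? 1 : v + 1);              // insert: 1
        refs.compute("res", (k, v) -> (v == null) ? 1 : v + 1);              // update: 2
        refs.compute("res", (k, v) -> (v == null || v == 1) ? null : v - 1); // decrement: 1
        refs.compute("res", (k, v) -> (v == null || v == 1) ? null : v - 1); // removed
        System.out.println(refs.containsKey("res"));                         // false
    }
}
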
+    /** Implementation for merge */
+    private final V internalMerge(K k, V v,
+                                  BiFunction<? super V, ? super V, ? extends V> mf) {
+        if (k == null || v == null || mf == null)
+            throw new NullPointerException();
+        int h = spread(k.hashCode());
+        V val = null;
+        int delta = 0;
+        int len = 0;
+        for (Node<K,V>[] tab = table;;) {
+            int i; Node<K,V> f; Object fk;
+            if (tab == null)
+                tab = initTable();
+            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
+                if (casTabAt(tab, i, null, new Node<K,V>(h, k, v, null))) {
+                    delta = 1;
+                    val = v;
+                    break;
+                }
+            }
+            else if (f.hash < 0) {
+                if ((fk = f.key) instanceof TreeBin) {
+                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
+                    long stamp = t.writeLock();
+                    try {
+                        if (tabAt(tab, i) == f) {
+                            len = 2;
+                            Class<?> cc = comparableClassFor(k.getClass());
+                            TreeNode<K,V> p = t.getTreeNode(h, k, t.root, cc);
+                            val = (p == null) ? v : mf.apply(p.val, v);
+                            if (val != null) {
+                                if (p != null)
+                                    p.val = val;
+                                else {
+                                    delta = 1;
+                                    t.putTreeNode(h, k, val);
+                                }
+                            }
+                            else if (p != null) {
+                                delta = -1;
+                                t.deleteTreeNode(p);
+                            }
+                        }
+                    } finally {
+                        t.unlockWrite(stamp);
+                    }
+                    if (len != 0)
+                        break;
+                }
+                else
+                    tab = (Node<K,V>[])fk;
+            }
+            else {
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        len = 1;
+                        for (Node<K,V> e = f, pred = null;; ++len) {
+                            Object ek;
+                            if (e.hash == h &&
+                                ((ek = e.key) == k || k.equals(ek))) {
+                                val = mf.apply(e.val, v);
+                                if (val != null)
+                                    e.val = val;
+                                else {
+                                    delta = -1;
+                                    Node<K,V> en = e.next;
+                                    if (pred != null)
+                                        pred.next = en;
+                                    else
+                                        setTabAt(tab, i, en);
+                                }
+                                break;
+                            }
+                            pred = e;
+                            if ((e = e.next) == null) {
+                                delta = 1;
+                                val = v;
+                                pred.next = new Node<K,V>(h, k, val, null);
+                                if (len > TREE_THRESHOLD)
+                                    replaceWithTreeBin(tab, i, k);
+                                break;
+                            }
+                        }
+                    }
+                }
+                if (len != 0)
+                    break;
+            }
+        }
+        if (delta != 0)
+            addCount((long)delta, len);
+        return val;
+    }
+
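// Illustrative usage sketch of the public merge() this method backs: a word
// count where an absent key takes the given value and a present key is combined
// with the remapping function (Integer::sum here); a null result would remove.
import java.util.concurrent.ConcurrentHashMap;

class MergeSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();
        for (String w : "to be or not to be".split(" "))
            counts.merge(w, 1, Integer::sum);
        System.out.println(counts.get("to")); // 2
        System.out.println(counts.get("or")); // 1
    }
}
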
+    /** Implementation for putAll */
+    private final void internalPutAll(Map<? extends K, ? extends V> m) {
+        tryPresize(m.size());
+        long delta = 0L;     // number of uncommitted additions
+        boolean npe = false; // to throw exception on exit for nulls
+        try {                // to clean up counts on other exceptions
+            for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
+                Object k; V v;
+                if (entry == null || (k = entry.getKey()) == null ||
+                    (v = entry.getValue()) == null) {
+                    npe = true;
+                    break;
+                }
+                int h = spread(k.hashCode());
+                for (Node<K,V>[] tab = table;;) {
+                    int i; Node<K,V> f; int fh; Object fk;
+                    if (tab == null)
+                        tab = initTable();
+                    else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
+                        if (casTabAt(tab, i, null, new Node<K,V>(h, k, v, null))) {
+                            ++delta;
+                            break;
+                        }
+                    }
+                    else if ((fh = f.hash) < 0) {
+                        if ((fk = f.key) instanceof TreeBin) {
+                            TreeBin<K,V> t = (TreeBin<K,V>)fk;
+                            long stamp = t.writeLock();
+                            boolean validated = false;
+                            try {
+                                if (tabAt(tab, i) == f) {
+                                    validated = true;
+                                    Class<?> cc = comparableClassFor(k.getClass());
+                                    TreeNode<K,V> p = t.getTreeNode(h, k,
+                                                                    t.root, cc);
+                                    if (p != null)
+                                        p.val = v;
+                                    else {
+                                        ++delta;
+                                        t.putTreeNode(h, k, v);
+                                    }
+                                }
+                            } finally {
+                                t.unlockWrite(stamp);
+                            }
+                            if (validated)
+                                break;
+                        }
+                        else
+                            tab = (Node<K,V>[])fk;
+                    }
+                    else {
+                        int len = 0;
+                        synchronized (f) {
+                            if (tabAt(tab, i) == f) {
+                                len = 1;
+                                for (Node<K,V> e = f;; ++len) {
+                                    Object ek;
+                                    if (e.hash == h &&
+                                        ((ek = e.key) == k || k.equals(ek))) {
+                                        e.val = v;
+                                        break;
+                                    }
+                                    Node<K,V> last = e;
+                                    if ((e = e.next) == null) {
+                                        ++delta;
+                                        last.next = new Node<K,V>(h, k, v, null);
+                                        if (len > TREE_THRESHOLD)
+                                            replaceWithTreeBin(tab, i, k);
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                        if (len != 0) {
+                            if (len > 1) {
+                                addCount(delta, len);
+                                delta = 0L;
+                            }
+                            break;
+                        }
+                    }
+                }
+            }
+        } finally {
+            if (delta != 0L)
+                addCount(delta, 2);
+        }
+        if (npe)
+            throw new NullPointerException();
+    }
+
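// Illustrative usage sketch: putAll presizes once via tryPresize(m.size())
// before copying, so bulk-loading a large source map avoids repeated
// incremental resizes during the per-bin inserts.
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class PutAllSketch {
    public static void main(String[] args) {
        Map<Integer, String> src = new HashMap<>();
        for (int i = 0; i < 10_000; i++) src.put(i, "v" + i);
        ConcurrentHashMap<Integer, String> dst = new ConcurrentHashMap<>();
        dst.putAll(src);                    // single presize, then per-bin inserts
        System.out.println(dst.size());     // 10000
    }
}
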
+    /**
+     * Implementation for clear. Steps through each bin, removing all
+     * nodes.
+     */
+    private final void internalClear() {
+        long delta = 0L; // negative number of deletions
+        int i = 0;
+        Node<K,V>[] tab = table;
+        while (tab != null && i < tab.length) {
+            Node<K,V> f = tabAt(tab, i);
+            if (f == null)
+                ++i;
+            else if (f.hash < 0) {
+                Object fk;
+                if ((fk = f.key) instanceof TreeBin) {
+                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
+                    long stamp = t.writeLock();
+                    try {
+                        if (tabAt(tab, i) == f) {
+                            for (Node<K,V> p = t.first; p != null; p = p.next)
+                                --delta;
+                            t.first = null;
+                            t.root = null;
+                            ++i;
+                        }
+                    } finally {
+                        t.unlockWrite(stamp);
+                    }
+                }
+                else
+                    tab = (Node<K,V>[])fk;
+            }
+            else {
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        for (Node<K,V> e = f; e != null; e = e.next)
+                            --delta;
+                        setTabAt(tab, i, null);
+                        ++i;
+                    }
+                }
+            }
+        }
+        if (delta != 0L)
+            addCount(delta, -1);
+    }
+
+    /* ---------------- Table Initialization and Resizing -------------- */
+
+    /**
+     * Returns a power of two table size for the given desired capacity.
+     * See Hacker's Delight, sec. 3.2
+     */
+    private static final int tableSizeFor(int c) {
+        int n = c - 1;
+        n |= n >>> 1;
+        n |= n >>> 2;
+        n |= n >>> 4;
+        n |= n >>> 8;
+        n |= n >>> 16;
+        return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
+    }
+
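// Worked example of the bit-smearing above, traced for c = 17: each shift-or
// copies the highest set bit into every lower position, so n + 1 is the next
// power of two (16 is smeared to 31, returning 32). The MAXIMUM_CAPACITY clamp
// is omitted in this standalone sketch.
class TableSizeForSketch {
    static int tableSizeFor(int c) {          // mirrors the private method above
        int n = c - 1;
        n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
        return (n < 0) ? 1 : n + 1;
    }
    public static void main(String[] args) {
        System.out.println(tableSizeFor(17)); // 32
        System.out.println(tableSizeFor(32)); // 32 (already a power of two)
        System.out.println(tableSizeFor(1));  // 1
    }
}
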
+    /**
+     * Initializes table, using the size recorded in sizeCtl.
+     */
+    private final Node<K,V>[] initTable() {
+        Node<K,V>[] tab; int sc;
+        while ((tab = table) == null) {
+            if ((sc = sizeCtl) < 0)
+                Thread.yield(); // lost initialization race; just spin
+            else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
+                try {
+                    if ((tab = table) == null) {
+                        int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
+                        table = tab = (Node<K,V>[])new Node[n];
+                        sc = n - (n >>> 2);
+                    }
+                } finally {
+                    sizeCtl = sc;
+                }
+                break;
+            }
+        }
+        return tab;
+    }
+
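// Minimal sketch (not the map's own code) of the idiom initTable uses: a single
// volatile control word, claimed by CAS, guards one-time lazy initialization;
// losers yield until the winner publishes the table and the ~0.75*capacity
// threshold. AtomicInteger/AtomicReference stand in for the Unsafe-based
// sizeCtl and table fields.
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;

class LazyInitSketch {
    static final AtomicInteger ctl = new AtomicInteger(0);      // 0 = uninitialized, -1 = initializing
    static final AtomicReference<int[]> table = new AtomicReference<>();

    static int[] getTable() {
        int[] t;
        while ((t = table.get()) == null) {
            if (ctl.get() < 0)
                Thread.yield();                 // lost the initialization race; spin
            else if (ctl.compareAndSet(0, -1)) {
                try {
                    if (table.get() == null)
                        table.set(new int[16]);
                } finally {
                    ctl.set(16 - (16 >>> 2));   // publish threshold (~0.75 * capacity)
                }
            }
        }
        return t;
    }

    public static void main(String[] args) {
        System.out.println(getTable().length); // 16
    }
}
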
+    /**
+     * Adds to count, and if table is too small and not already
+     * resizing, initiates transfer. If already resizing, helps
+     * perform transfer if work is available.  Rechecks occupancy
+     * after a transfer to see if another resize is already needed
+     * because resizings are lagging additions.
+     *
+     * @param x the count to add
+     * @param check if < 0, don't check resize; if <= 1, only check if uncontended
+     */
+    private final void addCount(long x, int check) {
+        Cell[] as; long b, s;
+        if ((as = counterCells) != null ||
+            !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
+            Cell a; long v; int m;
+            boolean uncontended = true;
+            if (as == null || (m = as.length - 1) < 0 ||
+                (a = as[ThreadLocalRandom.getProbe() & m]) == null ||
+                !(uncontended =
+                  U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
+                fullAddCount(x, uncontended);
+                return;
+            }
+            if (check <= 1)
+                return;
+            s = sumCount();
+        }
+        if (check >= 0) {
+            Node<K,V>[] tab, nt; int sc;
+            while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
+                   tab.length < MAXIMUM_CAPACITY) {
+                if (sc < 0) {
+                    if (sc == -1 || transferIndex <= transferOrigin ||
+                        (nt = nextTable) == null)
+                        break;
+                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
+                        transfer(tab, nt);
+                }
+                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
+                    transfer(tab, null);
+                s = sumCount();
             }
         }
     }
 
     /**
-     * Gets the ith element of given table (if nonnull) with volatile
-     * read semantics. Note: This is manually integrated into a few
-     * performance-sensitive methods to reduce call overhead.
-     */
-    @SuppressWarnings("unchecked")
-    static final <K,V> HashEntry<K,V> entryAt(HashEntry<K,V>[] tab, int i) {
-        return (tab == null) ? null :
-            (HashEntry<K,V>) UNSAFE.getObjectVolatile
-            (tab, ((long)i << TSHIFT) + TBASE);
-    }
-
-    /**
-     * Sets the ith element of given table, with volatile write
-     * semantics. (See above about use of putOrderedObject.)
+     * Tries to presize table to accommodate the given number of elements.
+     *
+     * @param size number of elements (doesn't need to be perfectly accurate)
      */
-    static final <K,V> void setEntryAt(HashEntry<K,V>[] tab, int i,
-                                       HashEntry<K,V> e) {
-        UNSAFE.putOrderedObject(tab, ((long)i << TSHIFT) + TBASE, e);
-    }
-
-    /**
-     * Applies a supplemental hash function to a given hashCode, which
-     * defends against poor quality hash functions.  This is critical
-     * because ConcurrentHashMap uses power-of-two length hash tables,
-     * that otherwise encounter collisions for hashCodes that do not
-     * differ in lower or upper bits.
-     */
-    private int hash(Object k) {
-        if (k instanceof String) {
-            return ((String) k).hash32();
+    private final void tryPresize(int size) {
+        int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
+            tableSizeFor(size + (size >>> 1) + 1);
+        int sc;
+        while ((sc = sizeCtl) >= 0) {
+            Node<K,V>[] tab = table; int n;
+            if (tab == null || (n = tab.length) == 0) {
+                n = (sc > c) ? sc : c;
+                if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
+                    try {
+                        if (table == tab) {
+                            table = (Node<K,V>[])new Node[n];
+                            sc = n - (n >>> 2);
+                        }
+                    } finally {
+                        sizeCtl = sc;
+                    }
+                }
+            }
+            else if (c <= sc || n >= MAXIMUM_CAPACITY)
+                break;
+            else if (tab == table &&
+                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
+                transfer(tab, null);
         }
-
-        int h = hashSeed ^ k.hashCode();
-
-        // Spread bits to regularize both segment and index locations,
-        // using variant of single-word Wang/Jenkins hash.
-        h += (h <<  15) ^ 0xffffcd7d;
-        h ^= (h >>> 10);
-        h += (h <<   3);
-        h ^= (h >>>  6);
-        h += (h <<   2) + (h << 14);
-        return h ^ (h >>> 16);
     }
 
     /**
-     * Segments are specialized versions of hash tables.  This
-     * subclasses from ReentrantLock opportunistically, just to
-     * simplify some locking and avoid separate construction.
+     * Moves and/or copies the nodes in each bin to new table. See
+     * above for explanation.
      */
-    static final class Segment<K,V> extends ReentrantLock implements Serializable {
-        /*
-         * Segments maintain a table of entry lists that are always
-         * kept in a consistent state, so can be read (via volatile
-         * reads of segments and tables) without locking.  This
-         * requires replicating nodes when necessary during table
-         * resizing, so the old lists can be traversed by readers
-         * still using old version of table.
-         *
-         * This class defines only mutative methods requiring locking.
-         * Except as noted, the methods of this class perform the
-         * per-segment versions of ConcurrentHashMap methods.  (Other
-         * methods are integrated directly into ConcurrentHashMap
-         * methods.) These mutative methods use a form of controlled
-         * spinning on contention via methods scanAndLock and
-         * scanAndLockForPut. These intersperse tryLocks with
-         * traversals to locate nodes.  The main benefit is to absorb
-         * cache misses (which are very common for hash tables) while
-         * obtaining locks so that traversal is faster once
-         * acquired. We do not actually use the found nodes since they
-         * must be re-acquired under lock anyway to ensure sequential
-         * consistency of updates (and in any case may be undetectably
-         * stale), but they will normally be much faster to re-locate.
-         * Also, scanAndLockForPut speculatively creates a fresh node
-         * to use in put if no node is found.
-         */
-
-        private static final long serialVersionUID = 2249069246763182397L;
-
-        /**
-         * The maximum number of times to tryLock in a prescan before
-         * possibly blocking on acquire in preparation for a locked
-         * segment operation. On multiprocessors, using a bounded
-         * number of retries maintains cache acquired while locating
-         * nodes.
-         */
-        static final int MAX_SCAN_RETRIES =
-            Runtime.getRuntime().availableProcessors() > 1 ? 64 : 1;
-
-        /**
-         * The per-segment table. Elements are accessed via
-         * entryAt/setEntryAt providing volatile semantics.
-         */
-        transient volatile HashEntry<K,V>[] table;
-
-        /**
-         * The number of elements. Accessed only either within locks
-         * or among other volatile reads that maintain visibility.
-         */
-        transient int count;
-
-        /**
-         * The total number of mutative operations in this segment.
-         * Even though this may overflow 32 bits, it provides
-         * sufficient accuracy for stability checks in CHM isEmpty()
-         * and size() methods.  Accessed only either within locks or
-         * among other volatile reads that maintain visibility.
-         */
-        transient int modCount;
-
-        /**
-         * The table is rehashed when its size exceeds this threshold.
-         * (The value of this field is always {@code (int)(capacity *
-         * loadFactor)}.)
-         */
-        transient int threshold;
-
-        /**
-         * The load factor for the hash table.  Even though this value
-         * is same for all segments, it is replicated to avoid needing
-         * links to outer object.
-         * @serial
-         */
-        final float loadFactor;
-
-        Segment(float lf, int threshold, HashEntry<K,V>[] tab) {
-            this.loadFactor = lf;
-            this.threshold = threshold;
-            this.table = tab;
+    private final void transfer(Node<K,V>[] tab, Node<K,V>[] nextTab) {
+        int n = tab.length, stride;
+        if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
+            stride = MIN_TRANSFER_STRIDE; // subdivide range
+        if (nextTab == null) {            // initiating
+            try {
+                nextTab = (Node<K,V>[])new Node[n << 1];
+            } catch (Throwable ex) {      // try to cope with OOME
+                sizeCtl = Integer.MAX_VALUE;
+                return;
+            }
+            nextTable = nextTab;
+            transferOrigin = n;
+            transferIndex = n;
+            Node<K,V> rev = new Node<K,V>(MOVED, tab, null, null);
+            for (int k = n; k > 0;) {    // progressively reveal ready slots
+                int nextk = (k > stride) ? k - stride : 0;
+                for (int m = nextk; m < k; ++m)
+                    nextTab[m] = rev;
+                for (int m = n + nextk; m < n + k; ++m)
+                    nextTab[m] = rev;
+                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
+            }
         }
-
-        final V put(K key, int hash, V value, boolean onlyIfAbsent) {
-            HashEntry<K,V> node = tryLock() ? null :
-                scanAndLockForPut(key, hash, value);
-            V oldValue;
-            try {
-                HashEntry<K,V>[] tab = table;
-                int index = (tab.length - 1) & hash;
-                HashEntry<K,V> first = entryAt(tab, index);
-                for (HashEntry<K,V> e = first;;) {
-                    if (e != null) {
-                        K k;
-                        if ((k = e.key) == key ||
-                            (e.hash == hash && key.equals(k))) {
-                            oldValue = e.value;
-                            if (!onlyIfAbsent) {
-                                e.value = value;
-                                ++modCount;
-                            }
-                            break;
+        int nextn = nextTab.length;
+        Node<K,V> fwd = new Node<K,V>(MOVED, nextTab, null, null);
+        boolean advance = true;
+        for (int i = 0, bound = 0;;) {
+            int nextIndex, nextBound; Node<K,V> f; Object fk;
+            while (advance) {
+                if (--i >= bound)
+                    advance = false;
+                else if ((nextIndex = transferIndex) <= transferOrigin) {
+                    i = -1;
+                    advance = false;
+                }
+                else if (U.compareAndSwapInt
+                         (this, TRANSFERINDEX, nextIndex,
+                          nextBound = (nextIndex > stride ?
+                                       nextIndex - stride : 0))) {
+                    bound = nextBound;
+                    i = nextIndex - 1;
+                    advance = false;
+                }
+            }
+            if (i < 0 || i >= n || i + n >= nextn) {
+                for (int sc;;) {
+                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
+                        if (sc == -1) {
+                            nextTable = null;
+                            table = nextTab;
+                            sizeCtl = (n << 1) - (n >>> 1);
                         }
-                        e = e.next;
-                    }
-                    else {
-                        if (node != null)
-                            node.setNext(first);
-                        else
-                            node = new HashEntry<K,V>(hash, key, value, first);
-                        int c = count + 1;
-                        if (c > threshold && tab.length < MAXIMUM_CAPACITY)
-                            rehash(node);
-                        else
-                            setEntryAt(tab, index, node);
-                        ++modCount;
-                        count = c;
-                        oldValue = null;
-                        break;
+                        return;
                     }
                 }
-            } finally {
-                unlock();
+            }
+            else if ((f = tabAt(tab, i)) == null) {
+                if (casTabAt(tab, i, null, fwd)) {
+                    setTabAt(nextTab, i, null);
+                    setTabAt(nextTab, i + n, null);
+                    advance = true;
+                }
             }
-            return oldValue;
-        }
-
-        /**
-         * Doubles size of table and repacks entries, also adding the
-         * given node to new table
-         */
-        @SuppressWarnings("unchecked")
-        private void rehash(HashEntry<K,V> node) {
-            /*
-             * Reclassify nodes in each list to new table.  Because we
-             * are using power-of-two expansion, the elements from
-             * each bin must either stay at same index, or move with a
-             * power of two offset. We eliminate unnecessary node
-             * creation by catching cases where old nodes can be
-             * reused because their next fields won't change.
-             * Statistically, at the default threshold, only about
-             * one-sixth of them need cloning when a table
-             * doubles. The nodes they replace will be garbage
-             * collectable as soon as they are no longer referenced by
-             * any reader thread that may be in the midst of
-             * concurrently traversing table. Entry accesses use plain
-             * array indexing because they are followed by volatile
-             * table write.
-             */
-            HashEntry<K,V>[] oldTable = table;
-            int oldCapacity = oldTable.length;
-            int newCapacity = oldCapacity << 1;
-            threshold = (int)(newCapacity * loadFactor);
-            HashEntry<K,V>[] newTable =
-                (HashEntry<K,V>[]) new HashEntry<?,?>[newCapacity];
-            int sizeMask = newCapacity - 1;
-            for (int i = 0; i < oldCapacity ; i++) {
-                HashEntry<K,V> e = oldTable[i];
-                if (e != null) {
-                    HashEntry<K,V> next = e.next;
-                    int idx = e.hash & sizeMask;
-                    if (next == null)   //  Single node on list
-                        newTable[idx] = e;
-                    else { // Reuse consecutive sequence at same slot
-                        HashEntry<K,V> lastRun = e;
-                        int lastIdx = idx;
-                        for (HashEntry<K,V> last = next;
-                             last != null;
-                             last = last.next) {
-                            int k = last.hash & sizeMask;
-                            if (k != lastIdx) {
-                                lastIdx = k;
-                                lastRun = last;
+            else if (f.hash >= 0) {
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        int runBit = f.hash & n;
+                        Node<K,V> lastRun = f, lo = null, hi = null;
+                        for (Node<K,V> p = f.next; p != null; p = p.next) {
+                            int b = p.hash & n;
+                            if (b != runBit) {
+                                runBit = b;
+                                lastRun = p;
                             }
                         }
-                        newTable[lastIdx] = lastRun;
-                        // Clone remaining nodes
-                        for (HashEntry<K,V> p = e; p != lastRun; p = p.next) {
-                            V v = p.value;
-                            int h = p.hash;
-                            int k = h & sizeMask;
-                            HashEntry<K,V> n = newTable[k];
-                            newTable[k] = new HashEntry<K,V>(h, p.key, v, n);
+                        if (runBit == 0)
+                            lo = lastRun;
+                        else
+                            hi = lastRun;
+                        for (Node<K,V> p = f; p != lastRun; p = p.next) {
+                            int ph = p.hash; Object pk = p.key; V pv = p.val;
+                            if ((ph & n) == 0)
+                                lo = new Node<K,V>(ph, pk, pv, lo);
+                            else
+                                hi = new Node<K,V>(ph, pk, pv, hi);
                         }
+                        setTabAt(nextTab, i, lo);
+                        setTabAt(nextTab, i + n, hi);
+                        setTabAt(tab, i, fwd);
+                        advance = true;
                     }
                 }
             }
-            int nodeIndex = node.hash & sizeMask; // add the new node
-            node.setNext(newTable[nodeIndex]);
-            newTable[nodeIndex] = node;
-            table = newTable;
-        }
-
-        /**
-         * Scans for a node containing given key while trying to
-         * acquire lock, creating and returning one if not found. Upon
-         * return, guarantees that lock is held. Unlike in most
-         * methods, calls to method equals are not screened: Since
-         * traversal speed doesn't matter, we might as well help warm
-         * up the associated code and accesses as well.
-         *
-         * @return a new node if key not found, else null
-         */
-        private HashEntry<K,V> scanAndLockForPut(K key, int hash, V value) {
-            HashEntry<K,V> first = entryForHash(this, hash);
-            HashEntry<K,V> e = first;
-            HashEntry<K,V> node = null;
-            int retries = -1; // negative while locating node
-            while (!tryLock()) {
-                HashEntry<K,V> f; // to recheck first below
-                if (retries < 0) {
-                    if (e == null) {
-                        if (node == null) // speculatively create node
-                            node = new HashEntry<K,V>(hash, key, value, null);
-                        retries = 0;
+            else if ((fk = f.key) instanceof TreeBin) {
+                TreeBin<K,V> t = (TreeBin<K,V>)fk;
+                long stamp = t.writeLock();
+                try {
+                    if (tabAt(tab, i) == f) {
+                        TreeNode<K,V> root;
+                        Node<K,V> ln = null, hn = null;
+                        if ((root = t.root) != null) {
+                            Node<K,V> e, p; TreeNode<K,V> lr, rr; int lh;
+                            TreeBin<K,V> lt = null, ht = null;
+                            for (lr = root; lr.left != null; lr = lr.left);
+                            for (rr = root; rr.right != null; rr = rr.right);
+                            if ((lh = lr.hash) == rr.hash) { // move entire tree
+                                if ((lh & n) == 0)
+                                    lt = t;
+                                else
+                                    ht = t;
+                            }
+                            else {
+                                lt = new TreeBin<K,V>();
+                                ht = new TreeBin<K,V>();
+                                int lc = 0, hc = 0;
+                                for (e = t.first; e != null; e = e.next) {
+                                    int h = e.hash;
+                                    Object k = e.key; V v = e.val;
+                                    if ((h & n) == 0) {
+                                        ++lc;
+                                        lt.putTreeNode(h, k, v);
+                                    }
+                                    else {
+                                        ++hc;
+                                        ht.putTreeNode(h, k, v);
+                                    }
+                                }
+                                if (lc < TREE_THRESHOLD) { // throw away
+                                    for (p = lt.first; p != null; p = p.next)
+                                        ln = new Node<K,V>(p.hash, p.key,
+                                                           p.val, ln);
+                                    lt = null;
+                                }
+                                if (hc < TREE_THRESHOLD) {
+                                    for (p = ht.first; p != null; p = p.next)
+                                        hn = new Node<K,V>(p.hash, p.key,
+                                                           p.val, hn);
+                                    ht = null;
+                                }
+                            }
+                            if (ln == null && lt != null)
+                                ln = new Node<K,V>(MOVED, lt, null, null);
+                            if (hn == null && ht != null)
+                                hn = new Node<K,V>(MOVED, ht, null, null);
+                        }
+                        setTabAt(nextTab, i, ln);
+                        setTabAt(nextTab, i + n, hn);
+                        setTabAt(tab, i, fwd);
+                        advance = true;
                     }
-                    else if (key.equals(e.key))
-                        retries = 0;
-                    else
-                        e = e.next;
-                }
-                else if (++retries > MAX_SCAN_RETRIES) {
-                    lock();
-                    break;
-                }
-                else if ((retries & 1) == 0 &&
-                         (f = entryForHash(this, hash)) != first) {
-                    e = first = f; // re-traverse if entry changed
-                    retries = -1;
+                } finally {
+                    t.unlockWrite(stamp);
                 }
             }
-            return node;
+            else
+                advance = true; // already processed
+        }
+    }
+
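// Illustrative sketch of the split rule transfer relies on: when a table of
// power-of-two size n doubles, each node either keeps its index i or moves to
// i + n, decided solely by bit (hash & n). That is why each bin is rebuilt as
// just a "lo" list/tree and a "hi" list/tree above. The hash values here are
// arbitrary examples.
class TransferSplitSketch {
    public static void main(String[] args) {
        int n = 16;                               // old table length
        int[] hashes = { 0x23, 0x33, 0x47, 0x57 };
        for (int h : hashes) {
            int oldIndex = h & (n - 1);
            int newIndex = ((h & n) == 0) ? oldIndex : oldIndex + n;
            System.out.println(Integer.toHexString(h) + " -> " + newIndex);
        }
        // 0x23 (bit 16 clear) stays at 3; 0x33 (bit 16 set) moves to 3 + 16 = 19
    }
}
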
+    /* ---------------- Counter support -------------- */
+
+    final long sumCount() {
+        Cell[] as = counterCells; Cell a;
+        long sum = baseCount;
+        if (as != null) {
+            for (int i = 0; i < as.length; ++i) {
+                if ((a = as[i]) != null)
+                    sum += a.value;
+            }
+        }
+        return sum;
+    }
+
+    // See LongAdder version for explanation
+    private final void fullAddCount(long x, boolean wasUncontended) {
+        int h;
+        if ((h = ThreadLocalRandom.getProbe()) == 0) {
+            ThreadLocalRandom.localInit();      // force initialization
+            h = ThreadLocalRandom.getProbe();
+            wasUncontended = true;
         }
-
-        /**
-         * Scans for a node containing the given key while trying to
-         * acquire lock for a remove or replace operation. Upon
-         * return, guarantees that lock is held.  Note that we must
-         * lock even if the key is not found, to ensure sequential
-         * consistency of updates.
-         */
-        private void scanAndLock(Object key, int hash) {
-            // similar to but simpler than scanAndLockForPut
-            HashEntry<K,V> first = entryForHash(this, hash);
-            HashEntry<K,V> e = first;
-            int retries = -1;
-            while (!tryLock()) {
-                HashEntry<K,V> f;
-                if (retries < 0) {
-                    if (e == null || key.equals(e.key))
-                        retries = 0;
-                    else
-                        e = e.next;
+        boolean collide = false;                // True if last slot nonempty
+        for (;;) {
+            Cell[] as; Cell a; int n; long v;
+            if ((as = counterCells) != null && (n = as.length) > 0) {
+                if ((a = as[(n - 1) & h]) == null) {
+                    if (cellsBusy == 0) {            // Try to attach new Cell
+                        Cell r = new Cell(x); // Optimistic create
+                        if (cellsBusy == 0 &&
+                            U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
+                            boolean created = false;
+                            try {               // Recheck under lock
+                                Cell[] rs; int m, j;
+                                if ((rs = counterCells) != null &&
+                                    (m = rs.length) > 0 &&
+                                    rs[j = (m - 1) & h] == null) {
+                                    rs[j] = r;
+                                    created = true;
+                                }
+                            } finally {
+                                cellsBusy = 0;
+                            }
+                            if (created)
+                                break;
+                            continue;           // Slot is now non-empty
+                        }
+                    }
+                    collide = false;
+                }
+                else if (!wasUncontended)       // CAS already known to fail
+                    wasUncontended = true;      // Continue after rehash
+                else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
+                    break;
+                else if (counterCells != as || n >= NCPU)
+                    collide = false;            // At max size or stale
+                else if (!collide)
+                    collide = true;
+                else if (cellsBusy == 0 &&
+                         U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
+                    try {
+                        if (counterCells == as) {// Expand table unless stale
+                            Cell[] rs = new Cell[n << 1];
+                            for (int i = 0; i < n; ++i)
+                                rs[i] = as[i];
+                            counterCells = rs;
+                        }
+                    } finally {
+                        cellsBusy = 0;
+                    }
+                    collide = false;
+                    continue;                   // Retry with expanded table
                 }
-                else if (++retries > MAX_SCAN_RETRIES) {
-                    lock();
-                    break;
+                h = ThreadLocalRandom.advanceProbe(h);
+            }
+            else if (cellsBusy == 0 && counterCells == as &&
+                     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
+                boolean init = false;
+                try {                           // Initialize table
+                    if (counterCells == as) {
+                        Cell[] rs = new Cell[2];
+                        rs[h & 1] = new Cell(x);
+                        counterCells = rs;
+                        init = true;
+                    }
+                } finally {
+                    cellsBusy = 0;
                 }
-                else if ((retries & 1) == 0 &&
-                         (f = entryForHash(this, hash)) != first) {
-                    e = first = f;
-                    retries = -1;
-                }
+                if (init)
+                    break;
             }
+            else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
+                break;                          // Fall back on using base
+        }
+    }
+
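// Minimal sketch of the LongAdder-style striping that addCount/fullAddCount
// implement: an update first CASes a base value and, on contention, falls back
// to one of several cells; the total is base plus all cells, as in sumCount().
// This uses AtomicLong and a fixed stripe count instead of the map's
// Cell/Unsafe machinery.
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicLong;

class StripedCounterSketch {
    final AtomicLong base = new AtomicLong();
    final AtomicLong[] cells = new AtomicLong[4];   // fixed stripes for brevity
    { for (int i = 0; i < cells.length; i++) cells[i] = new AtomicLong(); }

    void add(long x) {
        long b = base.get();
        if (!base.compareAndSet(b, b + x)) {        // contended: pick a stripe
            int i = ThreadLocalRandom.current().nextInt(cells.length);
            cells[i].addAndGet(x);
        }
    }

    long sum() {                                    // like sumCount(): base + all cells
        long s = base.get();
        for (AtomicLong c : cells) s += c.get();
        return s;
    }

    public static void main(String[] args) {
        StripedCounterSketch c = new StripedCounterSketch();
        for (int i = 0; i < 1000; i++) c.add(1);
        System.out.println(c.sum()); // 1000
    }
}
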
+    /* ---------------- Table Traversal -------------- */
+
+    /**
+     * Encapsulates traversal for methods such as containsValue; also
+     * serves as a base class for other iterators and spliterators.
+     *
+     * Method advance visits once each still-valid node that was
+     * reachable upon iterator construction. It might miss some that
+     * were added to a bin after the bin was visited, which is OK wrt
+     * consistency guarantees. Maintaining this property in the face
+     * of possible ongoing resizes requires a fair amount of
+     * bookkeeping state that is difficult to optimize away amidst
+     * volatile accesses.  Even so, traversal maintains reasonable
+     * throughput.
+     *
+     * Normally, iteration proceeds bin-by-bin traversing lists.
+     * However, if the table has been resized, then all future steps
+     * must traverse both the bin at the current index and the bin at
+     * (index + baseSize); and so on for further resizings. To
+     * paranoically cope with potential sharing by users of iterators
+     * across threads, iteration terminates if a bounds check fails
+     * for a table read.
+     */
+    static class Traverser<K,V> {
+        Node<K,V>[] tab;        // current table; updated if resized
+        Node<K,V> next;         // the next entry to use
+        int index;              // index of bin to use next
+        int baseIndex;          // current index of initial table
+        int baseLimit;          // index bound for initial table
+        final int baseSize;     // initial table size
+
+        Traverser(Node<K,V>[] tab, int size, int index, int limit) {
+            this.tab = tab;
+            this.baseSize = size;
+            this.baseIndex = this.index = index;
+            this.baseLimit = limit;
+            this.next = null;
         }
 
         /**
-         * Remove; match on key only if value null, else match both.
+         * Advances if possible, returning next valid node, or null if none.
          */
-        final V remove(Object key, int hash, Object value) {
-            if (!tryLock())
-                scanAndLock(key, hash);
-            V oldValue = null;
-            try {
-                HashEntry<K,V>[] tab = table;
-                int index = (tab.length - 1) & hash;
-                HashEntry<K,V> e = entryAt(tab, index);
-                HashEntry<K,V> pred = null;
-                while (e != null) {
-                    K k;
-                    HashEntry<K,V> next = e.next;
-                    if ((k = e.key) == key ||
-                        (e.hash == hash && key.equals(k))) {
-                        V v = e.value;
-                        if (value == null || value == v || value.equals(v)) {
-                            if (pred == null)
-                                setEntryAt(tab, index, next);
-                            else
-                                pred.setNext(next);
-                            ++modCount;
-                            --count;
-                            oldValue = v;
-                        }
-                        break;
-                    }
-                    pred = e;
-                    e = next;
-                }
-            } finally {
-                unlock();
-            }
-            return oldValue;
-        }
-
-        final boolean replace(K key, int hash, V oldValue, V newValue) {
-            if (!tryLock())
-                scanAndLock(key, hash);
-            boolean replaced = false;
-            try {
-                HashEntry<K,V> e;
-                for (e = entryForHash(this, hash); e != null; e = e.next) {
-                    K k;
-                    if ((k = e.key) == key ||
-                        (e.hash == hash && key.equals(k))) {
-                        if (oldValue.equals(e.value)) {
-                            e.value = newValue;
-                            ++modCount;
-                            replaced = true;
-                        }
-                        break;
+        final Node<K,V> advance() {
+            Node<K,V> e;
+            if ((e = next) != null)
+                e = e.next;
+            for (;;) {
+                Node<K,V>[] t; int i, n; Object ek;  // must use locals in checks
+                if (e != null)
+                    return next = e;
+                if (baseIndex >= baseLimit || (t = tab) == null ||
+                    (n = t.length) <= (i = index) || i < 0)
+                    return next = null;
+                if ((e = tabAt(t, index)) != null && e.hash < 0) {
+                    if ((ek = e.key) instanceof TreeBin)
+                        e = ((TreeBin<K,V>)ek).first;
+                    else {
+                        tab = (Node<K,V>[])ek;
+                        e = null;
+                        continue;
                     }
                 }
-            } finally {
-                unlock();
-            }
-            return replaced;
-        }
-
-        final V replace(K key, int hash, V value) {
-            if (!tryLock())
-                scanAndLock(key, hash);
-            V oldValue = null;
-            try {
-                HashEntry<K,V> e;
-                for (e = entryForHash(this, hash); e != null; e = e.next) {
-                    K k;
-                    if ((k = e.key) == key ||
-                        (e.hash == hash && key.equals(k))) {
-                        oldValue = e.value;
-                        e.value = value;
-                        ++modCount;
-                        break;
-                    }
-                }
-            } finally {
-                unlock();
-            }
-            return oldValue;
-        }
-
-        final void clear() {
-            lock();
-            try {
-                HashEntry<K,V>[] tab = table;
-                for (int i = 0; i < tab.length ; i++)
-                    setEntryAt(tab, i, null);
-                ++modCount;
-                count = 0;
-            } finally {
-                unlock();
+                if ((index += baseSize) >= n)
+                    index = ++baseIndex;    // visit upper slots if present
             }
         }
     }
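// Illustrative sketch of the weak-consistency contract this Traverser gives the
// iterator views built on it: an iterator never throws
// ConcurrentModificationException and reports each key at most once, but an
// entry added after construction may or may not be seen.
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;

class WeaklyConsistentIterationSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<Integer, String> map = new ConcurrentHashMap<>();
        for (int i = 0; i < 4; i++) map.put(i, "v" + i);
        Iterator<Integer> it = map.keySet().iterator();
        map.put(100, "late");                 // update while iteration is in progress
        int seen = 0;
        while (it.hasNext()) { it.next(); seen++; }
        System.out.println(seen >= 4);        // true; key 100 may or may not be seen
    }
}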
 
-    // Accessing segments
-
     /**
-     * Gets the jth element of given segment array (if nonnull) with
-     * volatile element access semantics via Unsafe. (The null check
-     * can trigger harmlessly only during deserialization.) Note:
-     * because each element of segments array is set only once (using
-     * fully ordered writes), some performance-sensitive methods rely
-     * on this method only as a recheck upon null reads.
-     */
-    @SuppressWarnings("unchecked")
-    static final <K,V> Segment<K,V> segmentAt(Segment<K,V>[] ss, int j) {
-        long u = (j << SSHIFT) + SBASE;
-        return ss == null ? null :
-            (Segment<K,V>) UNSAFE.getObjectVolatile(ss, u);
-    }
-
-    /**
-     * Returns the segment for the given index, creating it and
-     * recording in segment table (via CAS) if not already present.
-     *
-     * @param k the index
-     * @return the segment
+     * Base of key, value, and entry Iterators. Adds fields to
+     * Traverser to support iterator.remove
      */
-    @SuppressWarnings("unchecked")
-    private Segment<K,V> ensureSegment(int k) {
-        final Segment<K,V>[] ss = this.segments;
-        long u = (k << SSHIFT) + SBASE; // raw offset
-        Segment<K,V> seg;
-        if ((seg = (Segment<K,V>)UNSAFE.getObjectVolatile(ss, u)) == null) {
-            Segment<K,V> proto = ss[0]; // use segment 0 as prototype
-            int cap = proto.table.length;
-            float lf = proto.loadFactor;
-            int threshold = (int)(cap * lf);
-            HashEntry<K,V>[] tab = (HashEntry<K,V>[])new HashEntry<?,?>[cap];
-            if ((seg = (Segment<K,V>)UNSAFE.getObjectVolatile(ss, u))
-                == null) { // recheck
-                Segment<K,V> s = new Segment<K,V>(lf, threshold, tab);
-                while ((seg = (Segment<K,V>)UNSAFE.getObjectVolatile(ss, u))
-                       == null) {
-                    if (UNSAFE.compareAndSwapObject(ss, u, null, seg = s))
-                        break;
-                }
-            }
+    static class BaseIterator<K,V> extends Traverser<K,V> {
+        final ConcurrentHashMap<K,V> map;
+        Node<K,V> lastReturned;
+        BaseIterator(Node<K,V>[] tab, int size, int index, int limit,
+                    ConcurrentHashMap<K,V> map) {
+            super(tab, size, index, limit);
+            this.map = map;
+            advance();
+        }
+
+        public final boolean hasNext() { return next != null; }
+        public final boolean hasMoreElements() { return next != null; }
+
+        public final void remove() {
+            Node<K,V> p;
+            if ((p = lastReturned) == null)
+                throw new IllegalStateException();
+            lastReturned = null;
+            map.internalReplace((K)p.key, null, null);
+        }
+    }
+
+    static final class KeyIterator<K,V> extends BaseIterator<K,V>
+        implements Iterator<K>, Enumeration<K> {
+        KeyIterator(Node<K,V>[] tab, int index, int size, int limit,
+                    ConcurrentHashMap<K,V> map) {
+            super(tab, index, size, limit, map);
+        }
+
+        public final K next() {
+            Node<K,V> p;
+            if ((p = next) == null)
+                throw new NoSuchElementException();
+            K k = (K)p.key;
+            lastReturned = p;
+            advance();
+            return k;
         }
-        return seg;
+
+        public final K nextElement() { return next(); }
+    }
+
+    static final class ValueIterator<K,V> extends BaseIterator<K,V>
+        implements Iterator<V>, Enumeration<V> {
+        ValueIterator(Node<K,V>[] tab, int index, int size, int limit,
+                      ConcurrentHashMap<K,V> map) {
+            super(tab, index, size, limit, map);
+        }
+
+        public final V next() {
+            Node<K,V> p;
+            if ((p = next) == null)
+                throw new NoSuchElementException();
+            V v = p.val;
+            lastReturned = p;
+            advance();
+            return v;
+        }
+
+        public final V nextElement() { return next(); }
+    }
+
+    static final class EntryIterator<K,V> extends BaseIterator<K,V>
+        implements Iterator<Map.Entry<K,V>> {
+        EntryIterator(Node<K,V>[] tab, int index, int size, int limit,
+                      ConcurrentHashMap<K,V> map) {
+            super(tab, index, size, limit, map);
+        }
+
+        public final Map.Entry<K,V> next() {
+            Node<K,V> p;
+            if ((p = next) == null)
+                throw new NoSuchElementException();
+            K k = (K)p.key;
+            V v = p.val;
+            lastReturned = p;
+            advance();
+            return new MapEntry<K,V>(k, v, map);
+        }
     }
 
-    // Hash-based segment and entry accesses
-
-    /**
-     * Gets the segment for the given hash code.
-     */
-    @SuppressWarnings("unchecked")
-    private Segment<K,V> segmentForHash(int h) {
-        long u = (((h >>> segmentShift) & segmentMask) << SSHIFT) + SBASE;
-        return (Segment<K,V>) UNSAFE.getObjectVolatile(segments, u);
+    static final class KeySpliterator<K,V> extends Traverser<K,V>
+        implements Spliterator<K> {
+        long est;               // size estimate
+        KeySpliterator(Node<K,V>[] tab, int size, int index, int limit,
+                       long est) {
+            super(tab, size, index, limit);
+            this.est = est;
+        }
+
+        public Spliterator<K> trySplit() {
+            int i, f, h;
+            return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
+                new KeySpliterator<K,V>(tab, baseSize, baseLimit = h,
+                                        f, est >>>= 1);
+        }
+
+        public void forEachRemaining(Consumer<? super K> action) {
+            if (action == null) throw new NullPointerException();
+            for (Node<K,V> p; (p = advance()) != null;)
+                action.accept((K)p.key);
+        }
+
+        public boolean tryAdvance(Consumer<? super K> action) {
+            if (action == null) throw new NullPointerException();
+            Node<K,V> p;
+            if ((p = advance()) == null)
+                return false;
+            action.accept((K)p.key);
+            return true;
+        }
+
+        public long estimateSize() { return est; }
+
+        public int characteristics() {
+            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
+                Spliterator.NONNULL;
+        }
     }
 
-    /**
-     * Gets the table entry for the given segment and hash code.
-     */
-    @SuppressWarnings("unchecked")
-    static final <K,V> HashEntry<K,V> entryForHash(Segment<K,V> seg, int h) {
-        HashEntry<K,V>[] tab;
-        return (seg == null || (tab = seg.table) == null) ? null :
-            (HashEntry<K,V>) UNSAFE.getObjectVolatile
-            (tab, ((long)(((tab.length - 1) & h)) << TSHIFT) + TBASE);
+    static final class ValueSpliterator<K,V> extends Traverser<K,V>
+        implements Spliterator<V> {
+        long est;               // size estimate
+        ValueSpliterator(Node<K,V>[] tab, int size, int index, int limit,
+                         long est) {
+            super(tab, size, index, limit);
+            this.est = est;
+        }
+
+        public Spliterator<V> trySplit() {
+            int i, f, h;
+            return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
+                new ValueSpliterator<K,V>(tab, baseSize, baseLimit = h,
+                                          f, est >>>= 1);
+        }
+
+        public void forEachRemaining(Consumer<? super V> action) {
+            if (action == null) throw new NullPointerException();
+            for (Node<K,V> p; (p = advance()) != null;)
+                action.accept(p.val);
+        }
+
+        public boolean tryAdvance(Consumer<? super V> action) {
+            if (action == null) throw new NullPointerException();
+            Node<K,V> p;
+            if ((p = advance()) == null)
+                return false;
+            action.accept(p.val);
+            return true;
+        }
+
+        public long estimateSize() { return est; }
+
+        public int characteristics() {
+            return Spliterator.CONCURRENT | Spliterator.NONNULL;
+        }
     }
 
+    static final class EntrySpliterator<K,V> extends Traverser<K,V>
+        implements Spliterator<Map.Entry<K,V>> {
+        final ConcurrentHashMap<K,V> map; // To export MapEntry
+        long est;               // size estimate
+        EntrySpliterator(Node<K,V>[] tab, int size, int index, int limit,
+                         long est, ConcurrentHashMap<K,V> map) {
+            super(tab, size, index, limit);
+            this.map = map;
+            this.est = est;
+        }
+
+        public Spliterator<Map.Entry<K,V>> trySplit() {
+            int i, f, h;
+            return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
+                new EntrySpliterator<K,V>(tab, baseSize, baseLimit = h,
+                                          f, est >>>= 1, map);
+        }
+
+        public void forEachRemaining(Consumer<? super Map.Entry<K,V>> action) {
+            if (action == null) throw new NullPointerException();
+            for (Node<K,V> p; (p = advance()) != null; )
+                action.accept(new MapEntry<K,V>((K)p.key, p.val, map));
+        }
+
+        public boolean tryAdvance(Consumer<? super Map.Entry<K,V>> action) {
+            if (action == null) throw new NullPointerException();
+            Node<K,V> p;
+            if ((p = advance()) == null)
+                return false;
+            action.accept(new MapEntry<K,V>((K)p.key, p.val, map));
+            return true;
+        }
+
+        public long estimateSize() { return est; }
+
+        public int characteristics() {
+            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
+                Spliterator.NONNULL;
+        }
+    }
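
The three spliterators above back the map's view collections: trySplit halves the remaining table index range (and the size estimate), so parallel traversal can hand half of the bins to another task. A usage sketch, assuming the KeySetView defined elsewhere in this changeset exposes them through the standard Collection.spliterator() plumbing:

    // Requires: java.util.Spliterator, java.util.concurrent.ConcurrentHashMap
    ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
    for (int i = 0; i < 1_000; i++) map.put("k" + i, i);
    Spliterator<String> keys = map.keySet().spliterator();
    Spliterator<String> firstHalf = keys.trySplit();       // covers roughly half the bins
    if (firstHalf != null)
        firstHalf.forEachRemaining(k -> { /* process in another task */ });
    keys.forEachRemaining(k -> { /* process the remainder */ });

Because the spliterators report CONCURRENT and NONNULL (plus DISTINCT for keys and entries), traversal is weakly consistent and tolerates concurrent updates without ever throwing ConcurrentModificationException.
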
+
+
     /* ---------------- Public operations -------------- */
 
     /**
-     * Creates a new, empty map with the specified initial
-     * capacity, load factor and concurrency level.
-     *
-     * @param initialCapacity the initial capacity. The implementation
-     * performs internal sizing to accommodate this many elements.
-     * @param loadFactor  the load factor threshold, used to control resizing.
-     * Resizing may be performed when the average number of elements per
-     * bin exceeds this threshold.
-     * @param concurrencyLevel the estimated number of concurrently
-     * updating threads. The implementation performs internal sizing
-     * to try to accommodate this many threads.
-     * @throws IllegalArgumentException if the initial capacity is
-     * negative or the load factor or concurrencyLevel are
-     * nonpositive.
+     * Creates a new, empty map with the default initial table size (16).
      */
-    @SuppressWarnings("unchecked")
-    public ConcurrentHashMap(int initialCapacity,
-                             float loadFactor, int concurrencyLevel) {
-        if (!(loadFactor > 0) || initialCapacity < 0 || concurrencyLevel <= 0)
-            throw new IllegalArgumentException();
-        if (concurrencyLevel > MAX_SEGMENTS)
-            concurrencyLevel = MAX_SEGMENTS;
-        // Find power-of-two sizes best matching arguments
-        int sshift = 0;
-        int ssize = 1;
-        while (ssize < concurrencyLevel) {
-            ++sshift;
-            ssize <<= 1;
-        }
-        this.segmentShift = 32 - sshift;
-        this.segmentMask = ssize - 1;
-        if (initialCapacity > MAXIMUM_CAPACITY)
-            initialCapacity = MAXIMUM_CAPACITY;
-        int c = initialCapacity / ssize;
-        if (c * ssize < initialCapacity)
-            ++c;
-        int cap = MIN_SEGMENT_TABLE_CAPACITY;
-        while (cap < c)
-            cap <<= 1;
-        // create segments and segments[0]
-        Segment<K,V> s0 =
-            new Segment<K,V>(loadFactor, (int)(cap * loadFactor),
-                             (HashEntry<K,V>[])new HashEntry<?,?>[cap]);
-        Segment<K,V>[] ss = (Segment<K,V>[])new Segment<?,?>[ssize];
-        UNSAFE.putOrderedObject(ss, SBASE, s0); // ordered write of segments[0]
-        this.segments = ss;
+    public ConcurrentHashMap() {
     }
 
     /**
-     * Creates a new, empty map with the specified initial capacity
-     * and load factor and with the default concurrencyLevel (16).
+     * Creates a new, empty map with an initial table size
+     * accommodating the specified number of elements without the need
+     * to dynamically resize.
      *
      * @param initialCapacity The implementation performs internal
      * sizing to accommodate this many elements.
-     * @param loadFactor  the load factor threshold, used to control resizing.
-     * Resizing may be performed when the average number of elements per
-     * bin exceeds this threshold.
+     * @throws IllegalArgumentException if the initial capacity of
+     * elements is negative
+     */
+    public ConcurrentHashMap(int initialCapacity) {
+        if (initialCapacity < 0)
+            throw new IllegalArgumentException();
+        int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
+                   MAXIMUM_CAPACITY :
+                   tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
+        this.sizeCtl = cap;
+    }
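
Instead of pre-allocating segments, this constructor only records the target table size in sizeCtl: the requested capacity plus half again plus one, rounded up to a power of two by tableSizeFor (defined earlier in the file, outside this hunk). A minimal sketch of that rounding, for reference; the real method also clamps to MAXIMUM_CAPACITY:

    // Rounds c up to the next power of two (sketch of tableSizeFor's bit trick).
    static int nextPowerOfTwo(int c) {
        int n = c - 1;
        n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
        return (n < 0) ? 1 : n + 1;
    }
    // Example: new ConcurrentHashMap<>(16) stores nextPowerOfTwo(16 + 8 + 1) == 32 in sizeCtl.
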
+
+    /**
+     * Creates a new map with the same mappings as the given map.
+     *
+     * @param m the map
+     */
+    public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
+        this.sizeCtl = DEFAULT_CAPACITY;
+        internalPutAll(m);
+    }
+
+    /**
+     * Creates a new, empty map with an initial table size based on
+     * the given number of elements ({@code initialCapacity}) and
+     * initial table density ({@code loadFactor}).
+     *
+     * @param initialCapacity the initial capacity. The implementation
+     * performs internal sizing to accommodate this many elements,
+     * given the specified load factor.
+     * @param loadFactor the load factor (table density) for
+     * establishing the initial table size
      * @throws IllegalArgumentException if the initial capacity of
      * elements is negative or the load factor is nonpositive
      *
      * @since 1.6
      */
     public ConcurrentHashMap(int initialCapacity, float loadFactor) {
-        this(initialCapacity, loadFactor, DEFAULT_CONCURRENCY_LEVEL);
-    }
-
-    /**
-     * Creates a new, empty map with the specified initial capacity,
-     * and with default load factor (0.75) and concurrencyLevel (16).
-     *
-     * @param initialCapacity the initial capacity. The implementation
-     * performs internal sizing to accommodate this many elements.
-     * @throws IllegalArgumentException if the initial capacity of
-     * elements is negative.
-     */
-    public ConcurrentHashMap(int initialCapacity) {
-        this(initialCapacity, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
+        this(initialCapacity, loadFactor, 1);
     }
 
     /**
-     * Creates a new, empty map with a default initial capacity (16),
-     * load factor (0.75) and concurrencyLevel (16).
+     * Creates a new, empty map with an initial table size based on
+     * the given number of elements ({@code initialCapacity}), table
+     * density ({@code loadFactor}), and number of concurrently
+     * updating threads ({@code concurrencyLevel}).
+     *
+     * @param initialCapacity the initial capacity. The implementation
+     * performs internal sizing to accommodate this many elements,
+     * given the specified load factor.
+     * @param loadFactor the load factor (table density) for
+     * establishing the initial table size
+     * @param concurrencyLevel the estimated number of concurrently
+     * updating threads. The implementation may use this value as
+     * a sizing hint.
+     * @throws IllegalArgumentException if the initial capacity is
+     * negative or the load factor or concurrencyLevel are
+     * nonpositive
      */
-    public ConcurrentHashMap() {
-        this(DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
+    public ConcurrentHashMap(int initialCapacity,
+                             float loadFactor, int concurrencyLevel) {
+        if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
+            throw new IllegalArgumentException();
+        if (initialCapacity < concurrencyLevel)   // Use at least as many bins
+            initialCapacity = concurrencyLevel;   // as estimated threads
+        long size = (long)(1.0 + (long)initialCapacity / loadFactor);
+        int cap = (size >= (long)MAXIMUM_CAPACITY) ?
+            MAXIMUM_CAPACITY : tableSizeFor((int)size);
+        this.sizeCtl = cap;
     }
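
For the legacy three-argument form, loadFactor and concurrencyLevel now affect only the initial sizing; a worked example with the historical defaults:

    // new ConcurrentHashMap<String,String>(16, 0.75f, 16):
    //   initialCapacity (16) is not below concurrencyLevel (16), so it is kept;
    //   size = (long)(1.0 + 16 / 0.75f) = 22;
    //   sizeCtl = tableSizeFor(22) = 32, the next power of two.
    ConcurrentHashMap<String,String> legacySized =
        new ConcurrentHashMap<String,String>(16, 0.75f, 16);
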
 
     /**
-     * Creates a new map with the same mappings as the given map.
-     * The map is created with a capacity of 1.5 times the number
-     * of mappings in the given map or 16 (whichever is greater),
-     * and a default load factor (0.75) and concurrencyLevel (16).
+     * Creates a new {@link Set} backed by a ConcurrentHashMap
+     * from the given type to {@code Boolean.TRUE}.
      *
-     * @param m the map
+     * @return the new set
+     * @since 1.8
      */
-    public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
-        this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1,
-                      DEFAULT_INITIAL_CAPACITY),
-             DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
-        putAll(m);
+    public static <K> KeySetView<K,Boolean> newKeySet() {
+        return new KeySetView<K,Boolean>
+            (new ConcurrentHashMap<K,Boolean>(), Boolean.TRUE);
+    }
+
+    /**
+     * Creates a new {@link Set} backed by a ConcurrentHashMap
+     * from the given type to {@code Boolean.TRUE}.
+     *
+     * @param initialCapacity The implementation performs internal
+     * sizing to accommodate this many elements.
+     * @throws IllegalArgumentException if the initial capacity of
+     * elements is negative
+     * @return the new set
+     * @since 1.8
+     */
+    public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
+        return new KeySetView<K,Boolean>
+            (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
     }
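
The two newKeySet factories give a concurrent Set without the Collections.newSetFromMap detour; every element added through the view is mapped to Boolean.TRUE. A brief usage sketch:

    // Thread-safe "seen" set backed by a ConcurrentHashMap<String,Boolean>.
    ConcurrentHashMap.KeySetView<String,Boolean> seen =
        ConcurrentHashMap.newKeySet(1024);
    if (seen.add("job-42")) {
        // first observation of this id; add() stored job-42 -> Boolean.TRUE
    }
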
 
     /**
@@ -834,38 +2639,7 @@
      * @return {@code true} if this map contains no key-value mappings
      */
     public boolean isEmpty() {
-        /*
-         * Sum per-segment modCounts to avoid mis-reporting when
-         * elements are concurrently added and removed in one segment
-         * while checking another, in which case the table was never
-         * actually empty at any point. (The sum ensures accuracy up
-         * through at least 1<<31 per-segment modifications before
-         * recheck.)  Methods size() and containsValue() use similar
-         * constructions for stability checks.
-         */
-        long sum = 0L;
-        final Segment<K,V>[] segments = this.segments;
-        for (int j = 0; j < segments.length; ++j) {
-            Segment<K,V> seg = segmentAt(segments, j);
-            if (seg != null) {
-                if (seg.count != 0)
-                    return false;
-                sum += seg.modCount;
-            }
-        }
-        if (sum != 0L) { // recheck unless no modifications
-            for (int j = 0; j < segments.length; ++j) {
-                Segment<K,V> seg = segmentAt(segments, j);
-                if (seg != null) {
-                    if (seg.count != 0)
-                        return false;
-                    sum -= seg.modCount;
-                }
-            }
-            if (sum != 0L)
-                return false;
-        }
-        return true;
+        return sumCount() <= 0L; // ignore transient negative values
     }
 
     /**
@@ -876,43 +2650,25 @@
      * @return the number of key-value mappings in this map
      */
     public int size() {
-        // Try a few times to get accurate count. On failure due to
-        // continuous async changes in table, resort to locking.
-        final Segment<K,V>[] segments = this.segments;
-        int size;
-        boolean overflow; // true if size overflows 32 bits
-        long sum;         // sum of modCounts
-        long last = 0L;   // previous sum
-        int retries = -1; // first iteration isn't retry
-        try {
-            for (;;) {
-                if (retries++ == RETRIES_BEFORE_LOCK) {
-                    for (int j = 0; j < segments.length; ++j)
-                        ensureSegment(j).lock(); // force creation
-                }
-                sum = 0L;
-                size = 0;
-                overflow = false;
-                for (int j = 0; j < segments.length; ++j) {
-                    Segment<K,V> seg = segmentAt(segments, j);
-                    if (seg != null) {
-                        sum += seg.modCount;
-                        int c = seg.count;
-                        if (c < 0 || (size += c) < 0)
-                            overflow = true;
-                    }
-                }
-                if (sum == last)
-                    break;
-                last = sum;
-            }
-        } finally {
-            if (retries > RETRIES_BEFORE_LOCK) {
-                for (int j = 0; j < segments.length; ++j)
-                    segmentAt(segments, j).unlock();
-            }
-        }
-        return overflow ? Integer.MAX_VALUE : size;
+        long n = sumCount();
+        return ((n < 0L) ? 0 :
+                (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
+                (int)n);
+    }
+
+    /**
+     * Returns the number of mappings. This method should be used
+     * instead of {@link #size} because a ConcurrentHashMap may
+     * contain more mappings than can be represented as an int. The
+     * value returned is an estimate; the actual count may differ if
+     * there are concurrent insertions or removals.
+     *
+     * @return the number of mappings
+     * @since 1.8
+     */
+    public long mappingCount() {
+        long n = sumCount();
+        return (n < 0L) ? 0L : n; // ignore transient negative values
     }
 
     /**
@@ -926,23 +2682,24 @@
      *
      * @throws NullPointerException if the specified key is null
      */
-    @SuppressWarnings("unchecked")
     public V get(Object key) {
-        Segment<K,V> s; // manually integrate access methods to reduce overhead
-        HashEntry<K,V>[] tab;
-        int h = hash(key);
-        long u = (((h >>> segmentShift) & segmentMask) << SSHIFT) + SBASE;
-        if ((s = (Segment<K,V>)UNSAFE.getObjectVolatile(segments, u)) != null &&
-            (tab = s.table) != null) {
-            for (HashEntry<K,V> e = (HashEntry<K,V>) UNSAFE.getObjectVolatile
-                     (tab, ((long)(((tab.length - 1) & h)) << TSHIFT) + TBASE);
-                 e != null; e = e.next) {
-                K k;
-                if ((k = e.key) == key || (e.hash == h && key.equals(k)))
-                    return e.value;
-            }
-        }
-        return null;
+        return internalGet(key);
+    }
+
+    /**
+     * Returns the value to which the specified key is mapped, or the
+     * given default value if this map contains no mapping for the
+     * key.
+     *
+     * @param key the key whose associated value is to be returned
+     * @param defaultValue the value to return if this map contains
+     * no mapping for the given key
+     * @return the mapping for the key, if present; else the default value
+     * @throws NullPointerException if the specified key is null
+     */
+    public V getOrDefault(Object key, V defaultValue) {
+        V v;
+        return (v = internalGet(key)) == null ? defaultValue : v;
     }
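
getOrDefault folds the common get-then-null-check pattern into one call; the default is returned only when no mapping exists (null values are never stored in this map):

    ConcurrentHashMap<String,Integer> scores = new ConcurrentHashMap<>();
    scores.put("alice", 3);
    int alice = scores.getOrDefault("alice", 0);   // 3
    int bob   = scores.getOrDefault("bob", 0);     // 0, no mapping present
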
 
     /**
@@ -954,29 +2711,14 @@
      *         {@code equals} method; {@code false} otherwise
      * @throws NullPointerException if the specified key is null
      */
-    @SuppressWarnings("unchecked")
     public boolean containsKey(Object key) {
-        Segment<K,V> s; // same as get() except no need for volatile value read
-        HashEntry<K,V>[] tab;
-        int h = hash(key);
-        long u = (((h >>> segmentShift) & segmentMask) << SSHIFT) + SBASE;
-        if ((s = (Segment<K,V>)UNSAFE.getObjectVolatile(segments, u)) != null &&
-            (tab = s.table) != null) {
-            for (HashEntry<K,V> e = (HashEntry<K,V>) UNSAFE.getObjectVolatile
-                     (tab, ((long)(((tab.length - 1) & h)) << TSHIFT) + TBASE);
-                 e != null; e = e.next) {
-                K k;
-                if ((k = e.key) == key || (e.hash == h && key.equals(k)))
-                    return true;
-            }
-        }
-        return false;
+        return internalGet(key) != null;
     }
 
     /**
      * Returns {@code true} if this map maps one or more keys to the
-     * specified value. Note: This method requires a full traversal
-     * of the map, and so is much slower than method {@code containsKey}.
+     * specified value. Note: This method may require a full traversal
+     * of the map, and is much slower than method {@code containsKey}.
      *
      * @param value value whose presence in this map is to be tested
      * @return {@code true} if this map maps one or more keys to the
@@ -984,49 +2726,18 @@
      * @throws NullPointerException if the specified value is null
      */
     public boolean containsValue(Object value) {
-        // Same idea as size()
         if (value == null)
             throw new NullPointerException();
-        final Segment<K,V>[] segments = this.segments;
-        boolean found = false;
-        long last = 0;
-        int retries = -1;
-        try {
-            outer: for (;;) {
-                if (retries++ == RETRIES_BEFORE_LOCK) {
-                    for (int j = 0; j < segments.length; ++j)
-                        ensureSegment(j).lock(); // force creation
-                }
-                long hashSum = 0L;
-                int sum = 0;
-                for (int j = 0; j < segments.length; ++j) {
-                    HashEntry<K,V>[] tab;
-                    Segment<K,V> seg = segmentAt(segments, j);
-                    if (seg != null && (tab = seg.table) != null) {
-                        for (int i = 0 ; i < tab.length; i++) {
-                            HashEntry<K,V> e;
-                            for (e = entryAt(tab, i); e != null; e = e.next) {
-                                V v = e.value;
-                                if (v != null && value.equals(v)) {
-                                    found = true;
-                                    break outer;
-                                }
-                            }
-                        }
-                        sum += seg.modCount;
-                    }
-                }
-                if (retries > 0 && sum == last)
-                    break;
-                last = sum;
-            }
-        } finally {
-            if (retries > RETRIES_BEFORE_LOCK) {
-                for (int j = 0; j < segments.length; ++j)
-                    segmentAt(segments, j).unlock();
+        Node<K,V>[] t;
+        if ((t = table) != null) {
+            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+            for (Node<K,V> p; (p = it.advance()) != null; ) {
+                V v;
+                if ((v = p.val) == value || value.equals(v))
+                    return true;
             }
         }
-        return found;
+        return false;
     }
 
     /**
@@ -1061,17 +2772,8 @@
      *         {@code null} if there was no mapping for {@code key}
      * @throws NullPointerException if the specified key or value is null
      */
-    @SuppressWarnings("unchecked")
     public V put(K key, V value) {
-        Segment<K,V> s;
-        if (value == null)
-            throw new NullPointerException();
-        int hash = hash(key);
-        int j = (hash >>> segmentShift) & segmentMask;
-        if ((s = (Segment<K,V>)UNSAFE.getObject          // nonvolatile; recheck
-             (segments, (j << SSHIFT) + SBASE)) == null) //  in ensureSegment
-            s = ensureSegment(j);
-        return s.put(key, hash, value, false);
+        return internalPut(key, value, false);
     }
 
     /**
@@ -1081,17 +2783,8 @@
      *         or {@code null} if there was no mapping for the key
      * @throws NullPointerException if the specified key or value is null
      */
-    @SuppressWarnings("unchecked")
     public V putIfAbsent(K key, V value) {
-        Segment<K,V> s;
-        if (value == null)
-            throw new NullPointerException();
-        int hash = hash(key);
-        int j = (hash >>> segmentShift) & segmentMask;
-        if ((s = (Segment<K,V>)UNSAFE.getObject
-             (segments, (j << SSHIFT) + SBASE)) == null)
-            s = ensureSegment(j);
-        return s.put(key, hash, value, true);
+        return internalPut(key, value, true);
     }
 
     /**
@@ -1102,8 +2795,105 @@
      * @param m mappings to be stored in this map
      */
     public void putAll(Map<? extends K, ? extends V> m) {
-        for (Map.Entry<? extends K, ? extends V> e : m.entrySet())
-            put(e.getKey(), e.getValue());
+        internalPutAll(m);
+    }
+
+    /**
+     * If the specified key is not already associated with a value,
+     * attempts to compute its value using the given mapping function
+     * and enters it into this map unless {@code null}.  The entire
+     * method invocation is performed atomically, so the function is
+     * applied at most once per key.  Some attempted update operations
+     * on this map by other threads may be blocked while computation
+     * is in progress, so the computation should be short and simple,
+     * and must not attempt to update any other mappings of this map.
+     *
+     * @param key key with which the specified value is to be associated
+     * @param mappingFunction the function to compute a value
+     * @return the current (existing or computed) value associated with
+     *         the specified key, or null if the computed value is null
+     * @throws NullPointerException if the specified key or mappingFunction
+     *         is null
+     * @throws IllegalStateException if the computation detectably
+     *         attempts a recursive update to this map that would
+     *         otherwise never complete
+     * @throws RuntimeException or Error if the mappingFunction does so,
+     *         in which case the mapping is left unestablished
+     */
+    public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
+        return internalComputeIfAbsent(key, mappingFunction);
+    }
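
Since the whole invocation is atomic, computeIfAbsent replaces the racy get/putIfAbsent idiom; the mapping function runs at most once per key, which makes it the natural way to build a per-key container. A sketch of that "multimap" idiom:

    // Requires: java.util.Queue, java.util.concurrent.ConcurrentLinkedQueue
    ConcurrentHashMap<String, Queue<Integer>> index = new ConcurrentHashMap<>();
    // The queue is created at most once per key, even when threads race on it.
    index.computeIfAbsent("bucket", k -> new ConcurrentLinkedQueue<>()).add(42);

Keep the function short and side-effect free: as documented above, it may block other updates and must not touch other mappings of the map.
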
+
+    /**
+     * If the value for the specified key is present, attempts to
+     * compute a new mapping given the key and its current mapped
+     * value.  The entire method invocation is performed atomically.
+     * Some attempted update operations on this map by other threads
+     * may be blocked while computation is in progress, so the
+     * computation should be short and simple, and must not attempt to
+     * update any other mappings of this map.
+     *
+     * @param key key with which a value may be associated
+     * @param remappingFunction the function to compute a value
+     * @return the new value associated with the specified key, or null if none
+     * @throws NullPointerException if the specified key or remappingFunction
+     *         is null
+     * @throws IllegalStateException if the computation detectably
+     *         attempts a recursive update to this map that would
+     *         otherwise never complete
+     * @throws RuntimeException or Error if the remappingFunction does so,
+     *         in which case the mapping is unchanged
+     */
+    public V computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
+        return internalCompute(key, true, remappingFunction);
+    }
+
+    /**
+     * Attempts to compute a mapping for the specified key and its
+     * current mapped value (or {@code null} if there is no current
+     * mapping). The entire method invocation is performed atomically.
+     * Some attempted update operations on this map by other threads
+     * may be blocked while computation is in progress, so the
+     * computation should be short and simple, and must not attempt to
+     * update any other mappings of this Map.
+     *
+     * @param key key with which the specified value is to be associated
+     * @param remappingFunction the function to compute a value
+     * @return the new value associated with the specified key, or null if none
+     * @throws NullPointerException if the specified key or remappingFunction
+     *         is null
+     * @throws IllegalStateException if the computation detectably
+     *         attempts a recursive update to this map that would
+     *         otherwise never complete
+     * @throws RuntimeException or Error if the remappingFunction does so,
+     *         in which case the mapping is unchanged
+     */
+    public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
+        return internalCompute(key, false, remappingFunction);
+    }
+
+    /**
+     * If the specified key is not already associated with a
+     * (non-null) value, associates it with the given value.
+     * Otherwise, replaces the value with the results of the given
+     * remapping function, or removes if {@code null}. The entire
+     * method invocation is performed atomically.  Some attempted
+     * update operations on this map by other threads may be blocked
+     * while computation is in progress, so the computation should be
+     * short and simple, and must not attempt to update any other
+     * mappings of this Map.
+     *
+     * @param key key with which the specified value is to be associated
+     * @param value the value to use if absent
+     * @param remappingFunction the function to recompute a value if present
+     * @return the new value associated with the specified key, or null if none
+     * @throws NullPointerException if the specified key or the
+     *         remappingFunction is null
+     * @throws RuntimeException or Error if the remappingFunction does so,
+     *         in which case the mapping is unchanged
+     */
+    public V merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
+        return internalMerge(key, value, remappingFunction);
     }
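
merge covers the read-modify-write updates that previously needed a replace() retry loop; compute handles the same update when the absent case needs explicit treatment. A counting sketch:

    ConcurrentHashMap<String,Long> wordCounts = new ConcurrentHashMap<>();
    for (String w : new String[] { "a", "b", "a" })
        wordCounts.merge(w, 1L, Long::sum);                    // {a=2, b=1}
    // Equivalent per-key update via compute(), handling the null (absent) case:
    wordCounts.compute("c", (k, v) -> (v == null) ? 1L : v + 1L);
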
 
     /**
@@ -1116,9 +2906,7 @@
      * @throws NullPointerException if the specified key is null
      */
     public V remove(Object key) {
-        int hash = hash(key);
-        Segment<K,V> s = segmentForHash(hash);
-        return s == null ? null : s.remove(key, hash, null);
+        return internalReplace(key, null, null);
     }
 
     /**
@@ -1127,10 +2915,9 @@
      * @throws NullPointerException if the specified key is null
      */
     public boolean remove(Object key, Object value) {
-        int hash = hash(key);
-        Segment<K,V> s;
-        return value != null && (s = segmentForHash(hash)) != null &&
-            s.remove(key, hash, value) != null;
+        if (key == null)
+            throw new NullPointerException();
+        return value != null && internalReplace(key, null, value) != null;
     }
 
     /**
@@ -1139,11 +2926,9 @@
      * @throws NullPointerException if any of the arguments are null
      */
     public boolean replace(K key, V oldValue, V newValue) {
-        int hash = hash(key);
-        if (oldValue == null || newValue == null)
+        if (key == null || oldValue == null || newValue == null)
             throw new NullPointerException();
-        Segment<K,V> s = segmentForHash(hash);
-        return s != null && s.replace(key, hash, oldValue, newValue);
+        return internalReplace(key, newValue, oldValue) != null;
     }
 
     /**
@@ -1154,23 +2939,16 @@
      * @throws NullPointerException if the specified key or value is null
      */
     public V replace(K key, V value) {
-        int hash = hash(key);
-        if (value == null)
+        if (key == null || value == null)
             throw new NullPointerException();
-        Segment<K,V> s = segmentForHash(hash);
-        return s == null ? null : s.replace(key, hash, value);
+        return internalReplace(key, value, null);
     }
 
     /**
      * Removes all of the mappings from this map.
      */
     public void clear() {
-        final Segment<K,V>[] segments = this.segments;
-        for (int j = 0; j < segments.length; ++j) {
-            Segment<K,V> s = segmentAt(segments, j);
-            if (s != null)
-                s.clear();
-        }
+        internalClear();
     }
 
     /**
@@ -1188,10 +2966,29 @@
      * and guarantees to traverse elements as they existed upon
      * construction of the iterator, and may (but is not guaranteed to)
      * reflect any modifications subsequent to construction.
+     *
+     * @return the set view
      */
-    public Set<K> keySet() {
-        Set<K> ks = keySet;
-        return (ks != null) ? ks : (keySet = new KeySet());
+    public KeySetView<K,V> keySet() {
+        KeySetView<K,V> ks = keySet;
+        return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
+    }
+
+    /**
+     * Returns a {@link Set} view of the keys in this map, using the
+     * given common mapped value for any additions (i.e., {@link
+     * Collection#add} and {@link Collection#addAll(Collection)}).
+     * This is of course only appropriate if it is acceptable to use
+     * the same value for all additions from this view.
+     *
+     * @param mappedValue the mapped value to use for any additions
+     * @return the set view
+     * @throws NullPointerException if the mappedValue is null
+     */
+    public KeySetView<K,V> keySet(V mappedValue) {
+        if (mappedValue == null)
+            throw new NullPointerException();
+        return new KeySetView<K,V>(this, mappedValue);
     }
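
keySet(mappedValue) returns an add-capable key view: additions made through the set insert the supplied common value into the backing map. A usage sketch:

    ConcurrentHashMap<String,String> owners = new ConcurrentHashMap<>();
    ConcurrentHashMap.KeySetView<String,String> tickets = owners.keySet("unassigned");
    tickets.add("ticket-1");              // owners now maps ticket-1 -> "unassigned"
    owners.put("ticket-1", "alice");      // later reassigned through the map itself
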
 
     /**
@@ -1209,10 +3006,12 @@
      * and guarantees to traverse elements as they existed upon
      * construction of the iterator, and may (but is not guaranteed to)
      * reflect any modifications subsequent to construction.
+     *
+     * @return the collection view
      */
     public Collection<V> values() {
-        Collection<V> vs = values;
-        return (vs != null) ? vs : (values = new Values());
+        ValuesView<K,V> vs = values;
+        return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
     }
 
     /**
@@ -1222,18 +3021,19 @@
      * removal, which removes the corresponding mapping from the map,
      * via the {@code Iterator.remove}, {@code Set.remove},
      * {@code removeAll}, {@code retainAll}, and {@code clear}
-     * operations.  It does not support the {@code add} or
-     * {@code addAll} operations.
+     * operations.
      *
      * <p>The view's {@code iterator} is a "weakly consistent" iterator
      * that will never throw {@link ConcurrentModificationException},
      * and guarantees to traverse elements as they existed upon
      * construction of the iterator, and may (but is not guaranteed to)
      * reflect any modifications subsequent to construction.
+     *
+     * @return the set view
      */
     public Set<Map.Entry<K,V>> entrySet() {
-        Set<Map.Entry<K,V>> es = entrySet;
-        return (es != null) ? es : (entrySet = new EntrySet());
+        EntrySetView<K,V> es = entrySet;
+        return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
     }
 
     /**
@@ -1243,7 +3043,9 @@
      * @see #keySet()
      */
     public Enumeration<K> keys() {
-        return new KeyIterator();
+        Node<K,V>[] t;
+        int f = (t = table) == null ? 0 : t.length;
+        return new KeyIterator<K,V>(t, f, 0, f, this);
     }
 
     /**
@@ -1253,192 +3055,111 @@
      * @see #values()
      */
     public Enumeration<V> elements() {
-        return new ValueIterator();
+        Node<K,V>[] t;
+        int f = (t = table) == null ? 0 : t.length;
+        return new ValueIterator<K,V>(t, f, 0, f, this);
     }
 
-    /* ---------------- Iterator Support -------------- */
-
-    abstract class HashIterator {
-        int nextSegmentIndex;
-        int nextTableIndex;
-        HashEntry<K,V>[] currentTable;
-        HashEntry<K, V> nextEntry;
-        HashEntry<K, V> lastReturned;
-
-        HashIterator() {
-            nextSegmentIndex = segments.length - 1;
-            nextTableIndex = -1;
-            advance();
+    /**
+     * Returns the hash code value for this {@link Map}, i.e.,
+     * the sum of, for each key-value pair in the map,
+     * {@code key.hashCode() ^ value.hashCode()}.
+     *
+     * @return the hash code value for this map
+     */
+    public int hashCode() {
+        int h = 0;
+        Node<K,V>[] t;
+        if ((t = table) != null) {
+            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+            for (Node<K,V> p; (p = it.advance()) != null; )
+                h += p.key.hashCode() ^ p.val.hashCode();
         }
-
-        /**
-         * Sets nextEntry to first node of next non-empty table
-         * (in backwards order, to simplify checks).
-         */
-        final void advance() {
-            for (;;) {
-                if (nextTableIndex >= 0) {
-                    if ((nextEntry = entryAt(currentTable,
-                                             nextTableIndex--)) != null)
-                        break;
-                }
-                else if (nextSegmentIndex >= 0) {
-                    Segment<K,V> seg = segmentAt(segments, nextSegmentIndex--);
-                    if (seg != null && (currentTable = seg.table) != null)
-                        nextTableIndex = currentTable.length - 1;
-                }
-                else
-                    break;
-            }
-        }
-
-        final HashEntry<K,V> nextEntry() {
-            HashEntry<K,V> e = nextEntry;
-            if (e == null)
-                throw new NoSuchElementException();
-            lastReturned = e; // cannot assign until after null check
-            if ((nextEntry = e.next) == null)
-                advance();
-            return e;
-        }
-
-        public final boolean hasNext() { return nextEntry != null; }
-        public final boolean hasMoreElements() { return nextEntry != null; }
-
-        public final void remove() {
-            if (lastReturned == null)
-                throw new IllegalStateException();
-            ConcurrentHashMap.this.remove(lastReturned.key);
-            lastReturned = null;
-        }
-    }
-
-    final class KeyIterator
-        extends HashIterator
-        implements Iterator<K>, Enumeration<K>
-    {
-        public final K next()        { return super.nextEntry().key; }
-        public final K nextElement() { return super.nextEntry().key; }
-    }
-
-    final class ValueIterator
-        extends HashIterator
-        implements Iterator<V>, Enumeration<V>
-    {
-        public final V next()        { return super.nextEntry().value; }
-        public final V nextElement() { return super.nextEntry().value; }
+        return h;
     }
 
     /**
-     * Custom Entry class used by EntryIterator.next(), that relays
-     * setValue changes to the underlying map.
+     * Returns a string representation of this map.  The string
+     * representation consists of a list of key-value mappings (in no
+     * particular order) enclosed in braces ("{@code {}}").  Adjacent
+     * mappings are separated by the characters {@code ", "} (comma
+     * and space).  Each key-value mapping is rendered as the key
+     * followed by an equals sign ("{@code =}") followed by the
+     * associated value.
+     *
+     * @return a string representation of this map
      */
-    final class WriteThroughEntry
-        extends AbstractMap.SimpleEntry<K,V>
-    {
-        static final long serialVersionUID = 7249069246763182397L;
-
-        WriteThroughEntry(K k, V v) {
-            super(k,v);
-        }
-
-        /**
-         * Sets our entry's value and writes through to the map. The
-         * value to return is somewhat arbitrary here. Since a
-         * WriteThroughEntry does not necessarily track asynchronous
-         * changes, the most recent "previous" value could be
-         * different from what we return (or could even have been
-         * removed in which case the put will re-establish). We do not
-         * and cannot guarantee more.
-         */
-        public V setValue(V value) {
-            if (value == null) throw new NullPointerException();
-            V v = super.setValue(value);
-            ConcurrentHashMap.this.put(getKey(), value);
-            return v;
-        }
-    }
-
-    final class EntryIterator
-        extends HashIterator
-        implements Iterator<Entry<K,V>>
-    {
-        public Map.Entry<K,V> next() {
-            HashEntry<K,V> e = super.nextEntry();
-            return new WriteThroughEntry(e.key, e.value);
-        }
-    }
-
-    final class KeySet extends AbstractSet<K> {
-        public Iterator<K> iterator() {
-            return new KeyIterator();
-        }
-        public int size() {
-            return ConcurrentHashMap.this.size();
-        }
-        public boolean isEmpty() {
-            return ConcurrentHashMap.this.isEmpty();
-        }
-        public boolean contains(Object o) {
-            return ConcurrentHashMap.this.containsKey(o);
+    public String toString() {
+        Node<K,V>[] t;
+        int f = (t = table) == null ? 0 : t.length;
+        Traverser<K,V> it = new Traverser<K,V>(t, f, 0, f);
+        StringBuilder sb = new StringBuilder();
+        sb.append('{');
+        Node<K,V> p;
+        if ((p = it.advance()) != null) {
+            for (;;) {
+                K k = (K)p.key;
+                V v = p.val;
+                sb.append(k == this ? "(this Map)" : k);
+                sb.append('=');
+                sb.append(v == this ? "(this Map)" : v);
+                if ((p = it.advance()) == null)
+                    break;
+                sb.append(',').append(' ');
+            }
         }
-        public boolean remove(Object o) {
-            return ConcurrentHashMap.this.remove(o) != null;
-        }
-        public void clear() {
-            ConcurrentHashMap.this.clear();
-        }
-    }
-
-    final class Values extends AbstractCollection<V> {
-        public Iterator<V> iterator() {
-            return new ValueIterator();
-        }
-        public int size() {
-            return ConcurrentHashMap.this.size();
-        }
-        public boolean isEmpty() {
-            return ConcurrentHashMap.this.isEmpty();
-        }
-        public boolean contains(Object o) {
-            return ConcurrentHashMap.this.containsValue(o);
-        }
-        public void clear() {
-            ConcurrentHashMap.this.clear();
-        }
+        return sb.append('}').toString();
     }
 
-    final class EntrySet extends AbstractSet<Map.Entry<K,V>> {
-        public Iterator<Map.Entry<K,V>> iterator() {
-            return new EntryIterator();
-        }
-        public boolean contains(Object o) {
-            if (!(o instanceof Map.Entry))
+    /**
+     * Compares the specified object with this map for equality.
+     * Returns {@code true} if the given object is a map with the same
+     * mappings as this map.  This operation may return misleading
+     * results if either map is concurrently modified during execution
+     * of this method.
+     *
+     * @param o object to be compared for equality with this map
+     * @return {@code true} if the specified object is equal to this map
+     */
+    public boolean equals(Object o) {
+        if (o != this) {
+            if (!(o instanceof Map))
                 return false;
-            Map.Entry<?,?> e = (Map.Entry<?,?>)o;
-            V v = ConcurrentHashMap.this.get(e.getKey());
-            return v != null && v.equals(e.getValue());
+            Map<?,?> m = (Map<?,?>) o;
+            Node<K,V>[] t;
+            int f = (t = table) == null ? 0 : t.length;
+            Traverser<K,V> it = new Traverser<K,V>(t, f, 0, f);
+            for (Node<K,V> p; (p = it.advance()) != null; ) {
+                V val = p.val;
+                Object v = m.get(p.key);
+                if (v == null || (v != val && !v.equals(val)))
+                    return false;
+            }
+            for (Map.Entry<?,?> e : m.entrySet()) {
+                Object mk, mv, v;
+                if ((mk = e.getKey()) == null ||
+                    (mv = e.getValue()) == null ||
+                    (v = internalGet(mk)) == null ||
+                    (mv != v && !mv.equals(v)))
+                    return false;
+            }
         }
-        public boolean remove(Object o) {
-            if (!(o instanceof Map.Entry))
-                return false;
-            Map.Entry<?,?> e = (Map.Entry<?,?>)o;
-            return ConcurrentHashMap.this.remove(e.getKey(), e.getValue());
-        }
-        public int size() {
-            return ConcurrentHashMap.this.size();
-        }
-        public boolean isEmpty() {
-            return ConcurrentHashMap.this.isEmpty();
-        }
-        public void clear() {
-            ConcurrentHashMap.this.clear();
-        }
+        return true;
     }
 
     /* ---------------- Serialization Support -------------- */
 
     /**
+     * Stripped-down version of helper class used in previous version,
+     * declared for the sake of serialization compatibility
+     */
+    static class Segment<K,V> extends ReentrantLock implements Serializable {
+        private static final long serialVersionUID = 2249069246763182397L;
+        final float loadFactor;
+        Segment(float lf) { this.loadFactor = lf; }
+    }
+
+    /**
      * Saves the state of the {@code ConcurrentHashMap} instance to a
      * stream (i.e., serializes it).
      * @param s the stream
@@ -1448,119 +3169,2733 @@
      * The key-value mappings are emitted in no particular order.
      */
     private void writeObject(java.io.ObjectOutputStream s)
-            throws java.io.IOException {
-        // force all segments for serialization compatibility
-        for (int k = 0; k < segments.length; ++k)
-            ensureSegment(k);
-        s.defaultWriteObject();
-
-        final Segment<K,V>[] segments = this.segments;
-        for (int k = 0; k < segments.length; ++k) {
-            Segment<K,V> seg = segmentAt(segments, k);
-            seg.lock();
-            try {
-                HashEntry<K,V>[] tab = seg.table;
-                for (int i = 0; i < tab.length; ++i) {
-                    HashEntry<K,V> e;
-                    for (e = entryAt(tab, i); e != null; e = e.next) {
-                        s.writeObject(e.key);
-                        s.writeObject(e.value);
-                    }
-                }
-            } finally {
-                seg.unlock();
+        throws java.io.IOException {
+        // For serialization compatibility
+        // Emulate segment calculation from previous version of this class
+        int sshift = 0;
+        int ssize = 1;
+        while (ssize < DEFAULT_CONCURRENCY_LEVEL) {
+            ++sshift;
+            ssize <<= 1;
+        }
+        int segmentShift = 32 - sshift;
+        int segmentMask = ssize - 1;
+        Segment<K,V>[] segments = (Segment<K,V>[])
+            new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
+        for (int i = 0; i < segments.length; ++i)
+            segments[i] = new Segment<K,V>(LOAD_FACTOR);
+        s.putFields().put("segments", segments);
+        s.putFields().put("segmentShift", segmentShift);
+        s.putFields().put("segmentMask", segmentMask);
+        s.writeFields();
+
+        Node<K,V>[] t;
+        if ((t = table) != null) {
+            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+            for (Node<K,V> p; (p = it.advance()) != null; ) {
+                s.writeObject(p.key);
+                s.writeObject(p.val);
             }
         }
         s.writeObject(null);
         s.writeObject(null);
+        segments = null; // throw away
     }
 
     /**
      * Reconstitutes the instance from a stream (that is, deserializes it).
      * @param s the stream
      */
-    @SuppressWarnings("unchecked")
     private void readObject(java.io.ObjectInputStream s)
-            throws java.io.IOException, ClassNotFoundException {
-        // Don't call defaultReadObject()
-        ObjectInputStream.GetField oisFields = s.readFields();
-        final Segment<K,V>[] oisSegments = (Segment<K,V>[])oisFields.get("segments", null);
-
-        final int ssize = oisSegments.length;
-        if (ssize < 1 || ssize > MAX_SEGMENTS
-            || (ssize & (ssize-1)) != 0 )  // ssize not power of two
-            throw new java.io.InvalidObjectException("Bad number of segments:"
-                                                     + ssize);
-        int sshift = 0, ssizeTmp = ssize;
-        while (ssizeTmp > 1) {
-            ++sshift;
-            ssizeTmp >>>= 1;
+        throws java.io.IOException, ClassNotFoundException {
+        s.defaultReadObject();
+
+        // Create all nodes, then place in table once size is known
+        long size = 0L;
+        Node<K,V> p = null;
+        for (;;) {
+            K k = (K) s.readObject();
+            V v = (V) s.readObject();
+            if (k != null && v != null) {
+                int h = spread(k.hashCode());
+                p = new Node<K,V>(h, k, v, p);
+                ++size;
+            }
+            else
+                break;
+        }
+        if (p != null) {
+            boolean init = false;
+            int n;
+            if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
+                n = MAXIMUM_CAPACITY;
+            else {
+                int sz = (int)size;
+                n = tableSizeFor(sz + (sz >>> 1) + 1);
+            }
+            int sc = sizeCtl;
+            boolean collide = false;
+            if (n > sc &&
+                U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
+                try {
+                    if (table == null) {
+                        init = true;
+                        Node<K,V>[] tab = (Node<K,V>[])new Node[n];
+                        int mask = n - 1;
+                        while (p != null) {
+                            int j = p.hash & mask;
+                            Node<K,V> next = p.next;
+                            Node<K,V> q = p.next = tabAt(tab, j);
+                            setTabAt(tab, j, p);
+                            if (!collide && q != null && q.hash == p.hash)
+                                collide = true;
+                            p = next;
+                        }
+                        table = tab;
+                        addCount(size, -1);
+                        sc = n - (n >>> 2);
+                    }
+                } finally {
+                    sizeCtl = sc;
+                }
+                if (collide) { // rescan and convert to TreeBins
+                    Node<K,V>[] tab = table;
+                    for (int i = 0; i < tab.length; ++i) {
+                        int c = 0;
+                        for (Node<K,V> e = tabAt(tab, i); e != null; e = e.next) {
+                            if (++c > TREE_THRESHOLD &&
+                                (e.key instanceof Comparable)) {
+                                replaceWithTreeBin(tab, i, e.key);
+                                break;
+                            }
+                        }
+                    }
+                }
+            }
+            if (!init) { // Can only happen if unsafely published.
+                while (p != null) {
+                    internalPut((K)p.key, p.val, false);
+                    p = p.next;
+                }
+            }
+        }
+    }
+
+    // -------------------------------------------------------
+
+    // Overrides of other default Map methods
+
+    public void forEach(BiConsumer<? super K, ? super V> action) {
+        if (action == null) throw new NullPointerException();
+        Node<K,V>[] t;
+        if ((t = table) != null) {
+            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+            for (Node<K,V> p; (p = it.advance()) != null; ) {
+                action.accept((K)p.key, p.val);
+            }
+        }
+    }
+
+    public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
+        if (function == null) throw new NullPointerException();
+        Node<K,V>[] t;
+        if ((t = table) != null) {
+            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+            for (Node<K,V> p; (p = it.advance()) != null; ) {
+                K k = (K)p.key;
+                internalPut(k, function.apply(k, p.val), false);
+            }
         }
-        UNSAFE.putIntVolatile(this, SEGSHIFT_OFFSET, 32 - sshift);
-        UNSAFE.putIntVolatile(this, SEGMASK_OFFSET, ssize - 1);
-        UNSAFE.putObjectVolatile(this, SEGMENTS_OFFSET, oisSegments);
-
-        // set hashMask
-        UNSAFE.putIntVolatile(this, HASHSEED_OFFSET,
-                 sun.misc.Hashing.randomHashSeed(this));
-
-        // Re-initialize segments to be minimally sized, and let grow.
-        int cap = MIN_SEGMENT_TABLE_CAPACITY;
-        final Segment<K,V>[] segments = this.segments;
-        for (int k = 0; k < segments.length; ++k) {
-            Segment<K,V> seg = segments[k];
-            if (seg != null) {
-                seg.threshold = (int)(cap * seg.loadFactor);
-                seg.table = (HashEntry<K,V>[]) new HashEntry<?,?>[cap];
+    }
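
These overrides of the java.util.Map default methods traverse the table once and, for replaceAll, simply re-put each recomputed value. A brief usage sketch:

    ConcurrentHashMap<String,Integer> prices = new ConcurrentHashMap<>();
    prices.put("widget", 100);
    prices.forEach((k, v) -> System.out.println(k + " costs " + v));
    prices.replaceAll((k, v) -> v + 5);   // weakly consistent: each entry re-put in turn
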
+
+    // -------------------------------------------------------
+
+    // Parallel bulk operations
+
+    /**
+     * Computes initial batch value for bulk tasks. The returned value
+     * is approximately exp2 of the number of times (minus one) to
+     * split task by two before executing leaf action. This value is
+     * faster to compute and more convenient to use as a guide to
+     * splitting than is the depth, since it is used while dividing by
+     * two anyway.
+     */
+    final int batchFor(long b) {
+        long n;
+        if (b == Long.MAX_VALUE || (n = sumCount()) <= 1L || n < b)
+            return 0;
+        int sp = ForkJoinPool.getCommonPoolParallelism() << 2; // slack of 4
+        return (b <= 0L || (n /= b) >= sp) ? sp : (int)n;
+    }
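
A worked example of the batch computation, assuming ForkJoinPool.getCommonPoolParallelism() returns 8 (so sp == 32):

    // sumCount() ~= 1_000_000, parallelismThreshold = 10_000   -> n/b = 100 >= 32 -> batch = 32
    // sumCount() ~= 1_000_000, parallelismThreshold = 200_000  -> n/b = 5   <  32 -> batch = 5
    // parallelismThreshold  = Long.MAX_VALUE                   -> batch = 0 (run sequentially)
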
+
+    /**
+     * Performs the given action for each (key, value).
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param action the action
+     * @since 1.8
+     */
+    public void forEach(long parallelismThreshold,
+                        BiConsumer<? super K,? super V> action) {
+        if (action == null) throw new NullPointerException();
+        new ForEachMappingTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             action).invoke();
+    }
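
A usage sketch of the parallel bulk forEach: the threshold is in elements, so Long.MAX_VALUE forces in-caller sequential execution and 1 asks for maximal splitting:

    ConcurrentHashMap<String,Integer> hits = new ConcurrentHashMap<>();
    // Splits into common-pool subtasks once the map holds on the order of 10_000 entries.
    hits.forEach(10_000L, (k, v) -> System.out.println(k + " -> " + v));
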
+
+    /**
+     * Performs the given action for each non-null transformation
+     * of each (key, value).
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element, or null if there is no transformation (in
+     * which case the action is not applied)
+     * @param action the action
+     * @since 1.8
+     */
+    public <U> void forEach(long parallelismThreshold,
+                            BiFunction<? super K, ? super V, ? extends U> transformer,
+                            Consumer<? super U> action) {
+        if (transformer == null || action == null)
+            throw new NullPointerException();
+        new ForEachTransformedMappingTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             transformer, action).invoke();
+    }
+
+    /**
+     * Returns a non-null result from applying the given search
+     * function on each (key, value), or null if none.  Upon
+     * success, further element processing is suppressed and the
+     * results of any other parallel invocations of the search
+     * function are ignored.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param searchFunction a function returning a non-null
+     * result on success, else null
+     * @return a non-null result from applying the given search
+     * function on each (key, value), or null if none
+     * @since 1.8
+     */
+    public <U> U search(long parallelismThreshold,
+                        BiFunction<? super K, ? super V, ? extends U> searchFunction) {
+        if (searchFunction == null) throw new NullPointerException();
+        return new SearchMappingsTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             searchFunction, new AtomicReference<U>()).invoke();
+    }
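
A search sketch: the function returns null to keep looking and any non-null result to stop further processing and return it:

    ConcurrentHashMap<String,Integer> hits = new ConcurrentHashMap<>();
    // Some key (in no particular order) whose count exceeds 1_000, or null if none.
    String hot = hits.search(10_000L, (k, v) -> v > 1_000 ? k : null);
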
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all (key, value) pairs using the given reducer to
+     * combine values, or null if none.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element, or null if there is no transformation (in
+     * which case it is not combined)
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all (key, value) pairs
+     * @since 1.8
+     */
+    public <U> U reduce(long parallelismThreshold,
+                        BiFunction<? super K, ? super V, ? extends U> transformer,
+                        BiFunction<? super U, ? super U, ? extends U> reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceMappingsTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, reducer).invoke();
+    }
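
A reduce sketch over boxed values; the result is null when the map is empty, and the reducer should be commutative and associative because subtask results combine in no fixed order:

    ConcurrentHashMap<String,Integer> sales = new ConcurrentHashMap<>();
    // Sum of all values: the transformer selects the value, the reducer combines pairs.
    Integer total = sales.reduce(1_000L, (k, v) -> v, Integer::sum);
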
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all (key, value) pairs using the given reducer to
+     * combine values, and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all (key, value) pairs
+     * @since 1.8
+     */
+    public double reduceToDouble(long parallelismThreshold,
+                                 ToDoubleBiFunction<? super K, ? super V> transformer,
+                                 double basis,
+                                 DoubleBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceMappingsToDoubleTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all (key, value) pairs using the given reducer to
+     * combine values, and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all (key, value) pairs
+     * @since 1.8
+     */
+    public long reduceToLong(long parallelismThreshold,
+                             ToLongBiFunction<? super K, ? super V> transformer,
+                             long basis,
+                             LongBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceMappingsToLongTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
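+
+    // Usage sketch (illustrative; "counts" is the assumed ConcurrentHashMap<String,Long>
+    // from the sketch above): the basis is the identity for the reduction and is
+    // returned unchanged when the map is empty:
+    //
+    //   long total = counts.reduceToLong(1000L,
+    //       (k, v) -> v,     // ToLongBiFunction: unboxes each value
+    //       0L,              // basis (identity for addition)
+    //       Long::sum);      // LongBinaryOperator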
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all (key, value) pairs using the given reducer to
+     * combine values, and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all (key, value) pairs
+     * @since 1.8
+     */
+    public int reduceToInt(long parallelismThreshold,
+                           ToIntBiFunction<? super K, ? super V> transformer,
+                           int basis,
+                           IntBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceMappingsToIntTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Performs the given action for each key.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param action the action
+     * @since 1.8
+     */
+    public void forEachKey(long parallelismThreshold,
+                           Consumer<? super K> action) {
+        if (action == null) throw new NullPointerException();
+        new ForEachKeyTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             action).invoke();
+    }
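+
+    // Usage sketch (illustrative): with the assumed "counts" map, print every key.
+    // A threshold of Long.MAX_VALUE keeps the traversal in the calling thread,
+    // while 1 requests maximal parallelism.
+    //
+    //   counts.forEachKey(Long.MAX_VALUE, System.out::println);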
+
+    /**
+     * Performs the given action for each non-null transformation
+     * of each key.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element, or null if there is no transformation (in
+     * which case the action is not applied)
+     * @param action the action
+     * @since 1.8
+     */
+    public <U> void forEachKey(long parallelismThreshold,
+                               Function<? super K, ? extends U> transformer,
+                               Consumer<? super U> action) {
+        if (transformer == null || action == null)
+            throw new NullPointerException();
+        new ForEachTransformedKeyTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             transformer, action).invoke();
+    }
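+
+    // Usage sketch (illustrative; the "tmp:" prefix is an assumption): a null
+    // transformation acts as a filter, so this prints upper-cased keys while
+    // skipping temporary ones:
+    //
+    //   counts.forEachKey(1L,
+    //       k -> k.startsWith("tmp:") ? null : k.toUpperCase(),
+    //       System.out::println);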
+
+    /**
+     * Returns a non-null result from applying the given search
+     * function on each key, or null if none. Upon success,
+     * further element processing is suppressed and the results of
+     * any other parallel invocations of the search function are
+     * ignored.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param searchFunction a function returning a non-null
+     * result on success, else null
+     * @return a non-null result from applying the given search
+     * function on each key, or null if none
+     * @since 1.8
+     */
+    public <U> U searchKeys(long parallelismThreshold,
+                            Function<? super K, ? extends U> searchFunction) {
+        if (searchFunction == null) throw new NullPointerException();
+        return new SearchKeysTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             searchFunction, new AtomicReference<U>()).invoke();
+    }
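+
+    // Usage sketch (illustrative, on the assumed "counts" map): returns some key
+    // longer than 32 characters, or null if none; which match wins among parallel
+    // invocations is unspecified.
+    //
+    //   String hit = counts.searchKeys(100L,
+    //       k -> k.length() > 32 ? k : null);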
+
+    /**
+     * Returns the result of accumulating all keys using the given
+     * reducer to combine values, or null if none.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating all keys using the given
+     * reducer to combine values, or null if none
+     * @since 1.8
+     */
+    public K reduceKeys(long parallelismThreshold,
+                        BiFunction<? super K, ? super K, ? extends K> reducer) {
+        if (reducer == null) throw new NullPointerException();
+        return new ReduceKeysTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all keys using the given reducer to combine values, or
+     * null if none.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element, or null if there is no transformation (in
+     * which case it is not combined)
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all keys
+     * @since 1.8
+     */
+    public <U> U reduceKeys(long parallelismThreshold,
+                            Function<? super K, ? extends U> transformer,
+                            BiFunction<? super U, ? super U, ? extends U> reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceKeysTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, reducer).invoke();
+    }
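+
+    // Usage sketch (illustrative): length of the longest key in the assumed
+    // "counts" map, or null if the map is empty:
+    //
+    //   Integer longest = counts.reduceKeys(1L, String::length, Integer::max);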
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all keys using the given reducer to combine values, and
+     * the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all keys
+     * @since 1.8
+     */
+    public double reduceKeysToDouble(long parallelismThreshold,
+                                     ToDoubleFunction<? super K> transformer,
+                                     double basis,
+                                     DoubleBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceKeysToDoubleTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all keys using the given reducer to combine values, and
+     * the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all keys
+     * @since 1.8
+     */
+    public long reduceKeysToLong(long parallelismThreshold,
+                                 ToLongFunction<? super K> transformer,
+                                 long basis,
+                                 LongBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceKeysToLongTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all keys using the given reducer to combine values, and
+     * the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all keys
+     * @since 1.8
+     */
+    public int reduceKeysToInt(long parallelismThreshold,
+                               ToIntFunction<? super K> transformer,
+                               int basis,
+                               IntBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceKeysToIntTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Performs the given action for each value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param action the action
+     * @since 1.8
+     */
+    public void forEachValue(long parallelismThreshold,
+                             Consumer<? super V> action) {
+        if (action == null)
+            throw new NullPointerException();
+        new ForEachValueTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             action).invoke();
+    }
+
+    /**
+     * Performs the given action for each non-null transformation
+     * of each value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element, or null if there is no transformation (in
+     * which case the action is not applied)
+     * @param action the action
+     * @since 1.8
+     */
+    public <U> void forEachValue(long parallelismThreshold,
+                                 Function<? super V, ? extends U> transformer,
+                                 Consumer<? super U> action) {
+        if (transformer == null || action == null)
+            throw new NullPointerException();
+        new ForEachTransformedValueTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             transformer, action).invoke();
+    }
+
+    /**
+     * Returns a non-null result from applying the given search
+     * function on each value, or null if none.  Upon success,
+     * further element processing is suppressed and the results of
+     * any other parallel invocations of the search function are
+     * ignored.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param searchFunction a function returning a non-null
+     * result on success, else null
+     * @return a non-null result from applying the given search
+     * function on each value, or null if none
+     * @since 1.8
+     */
+    public <U> U searchValues(long parallelismThreshold,
+                              Function<? super V, ? extends U> searchFunction) {
+        if (searchFunction == null) throw new NullPointerException();
+        return new SearchValuesTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             searchFunction, new AtomicReference<U>()).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating all values using the
+     * given reducer to combine values, or null if none.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating all values
+     * @since 1.8
+     */
+    public V reduceValues(long parallelismThreshold,
+                          BiFunction<? super V, ? super V, ? extends V> reducer) {
+        if (reducer == null) throw new NullPointerException();
+        return new ReduceValuesTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, reducer).invoke();
+    }
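+
+    // Usage sketch (illustrative): largest value in the assumed "counts" map, or
+    // null if the map is empty:
+    //
+    //   Long max = counts.reduceValues(2L, Long::max);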
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all values using the given reducer to combine values, or
+     * null if none.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element, or null if there is no transformation (in
+     * which case it is not combined)
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all values
+     * @since 1.8
+     */
+    public <U> U reduceValues(long parallelismThreshold,
+                              Function<? super V, ? extends U> transformer,
+                              BiFunction<? super U, ? super U, ? extends U> reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceValuesTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all values using the given reducer to combine values,
+     * and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all values
+     * @since 1.8
+     */
+    public double reduceValuesToDouble(long parallelismThreshold,
+                                       ToDoubleFunction<? super V> transformer,
+                                       double basis,
+                                       DoubleBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceValuesToDoubleTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all values using the given reducer to combine values,
+     * and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all values
+     * @since 1.8
+     */
+    public long reduceValuesToLong(long parallelismThreshold,
+                                   ToLongFunction<? super V> transformer,
+                                   long basis,
+                                   LongBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceValuesToLongTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all values using the given reducer to combine values,
+     * and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all values
+     * @since 1.8
+     */
+    public int reduceValuesToInt(long parallelismThreshold,
+                                 ToIntFunction<? super V> transformer,
+                                 int basis,
+                                 IntBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceValuesToIntTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Performs the given action for each entry.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param action the action
+     * @since 1.8
+     */
+    public void forEachEntry(long parallelismThreshold,
+                             Consumer<? super Map.Entry<K,V>> action) {
+        if (action == null) throw new NullPointerException();
+        new ForEachEntryTask<K,V>(null, batchFor(parallelismThreshold), 0, 0, table,
+                                  action).invoke();
+    }
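+
+    // Usage sketch (illustrative): dump the assumed "counts" map sequentially:
+    //
+    //   counts.forEachEntry(Long.MAX_VALUE,
+    //       e -> System.out.println(e.getKey() + "=" + e.getValue()));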
+
+    /**
+     * Performs the given action for each non-null transformation
+     * of each entry.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element, or null if there is no transformation (in
+     * which case the action is not applied)
+     * @param action the action
+     * @since 1.8
+     */
+    public <U> void forEachEntry(long parallelismThreshold,
+                                 Function<Map.Entry<K,V>, ? extends U> transformer,
+                                 Consumer<? super U> action) {
+        if (transformer == null || action == null)
+            throw new NullPointerException();
+        new ForEachTransformedEntryTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             transformer, action).invoke();
+    }
+
+    /**
+     * Returns a non-null result from applying the given search
+     * function on each entry, or null if none.  Upon success,
+     * further element processing is suppressed and the results of
+     * any other parallel invocations of the search function are
+     * ignored.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param searchFunction a function returning a non-null
+     * result on success, else null
+     * @return a non-null result from applying the given search
+     * function on each entry, or null if none
+     * @since 1.8
+     */
+    public <U> U searchEntries(long parallelismThreshold,
+                               Function<Map.Entry<K,V>, ? extends U> searchFunction) {
+        if (searchFunction == null) throw new NullPointerException();
+        return new SearchEntriesTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             searchFunction, new AtomicReference<U>()).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating all entries using the
+     * given reducer to combine values, or null if none.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating all entries
+     * @since 1.8
+     */
+    public Map.Entry<K,V> reduceEntries(long parallelismThreshold,
+                                        BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
+        if (reducer == null) throw new NullPointerException();
+        return new ReduceEntriesTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all entries using the given reducer to combine values,
+     * or null if none.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element, or null if there is no transformation (in
+     * which case it is not combined)
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all entries
+     * @since 1.8
+     */
+    public <U> U reduceEntries(long parallelismThreshold,
+                               Function<Map.Entry<K,V>, ? extends U> transformer,
+                               BiFunction<? super U, ? super U, ? extends U> reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceEntriesTask<K,V,U>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all entries using the given reducer to combine values,
+     * and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all entries
+     * @since 1.8
+     */
+    public double reduceEntriesToDouble(long parallelismThreshold,
+                                        ToDoubleFunction<Map.Entry<K,V>> transformer,
+                                        double basis,
+                                        DoubleBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceEntriesToDoubleTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all entries using the given reducer to combine values,
+     * and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all entries
+     * @since 1.8
+     */
+    public long reduceEntriesToLong(long parallelismThreshold,
+                                    ToLongFunction<Map.Entry<K,V>> transformer,
+                                    long basis,
+                                    LongBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceEntriesToLongTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+    /**
+     * Returns the result of accumulating the given transformation
+     * of all entries using the given reducer to combine values,
+     * and the given basis as an identity value.
+     *
+     * @param parallelismThreshold the (estimated) number of elements
+     * needed for this operation to be executed in parallel
+     * @param transformer a function returning the transformation
+     * for an element
+     * @param basis the identity (initial default value) for the reduction
+     * @param reducer a commutative associative combining function
+     * @return the result of accumulating the given transformation
+     * of all entries
+     * @since 1.8
+     */
+    public int reduceEntriesToInt(long parallelismThreshold,
+                                  ToIntFunction<Map.Entry<K,V>> transformer,
+                                  int basis,
+                                  IntBinaryOperator reducer) {
+        if (transformer == null || reducer == null)
+            throw new NullPointerException();
+        return new MapReduceEntriesToIntTask<K,V>
+            (null, batchFor(parallelismThreshold), 0, 0, table,
+             null, transformer, basis, reducer).invoke();
+    }
+
+
+    /* ---------------- Views ---------------- */
+
+    /**
+     * Base class for views.
+     */
+    abstract static class CollectionView<K,V,E>
+        implements Collection<E>, java.io.Serializable {
+        private static final long serialVersionUID = 7249069246763182397L;
+        final ConcurrentHashMap<K,V> map;
+        CollectionView(ConcurrentHashMap<K,V> map)  { this.map = map; }
+
+        /**
+         * Returns the map backing this view.
+         *
+         * @return the map backing this view
+         */
+        public ConcurrentHashMap<K,V> getMap() { return map; }
+
+        /**
+         * Removes all of the elements from this view, by removing all
+         * the mappings from the map backing this view.
+         */
+        public final void clear()      { map.clear(); }
+        public final int size()        { return map.size(); }
+        public final boolean isEmpty() { return map.isEmpty(); }
+
+        // implementations below rely on concrete classes supplying these
+        // abstract methods
+        /**
+         * Returns a "weakly consistent" iterator that will never
+         * throw {@link ConcurrentModificationException}, and
+         * guarantees to traverse elements as they existed upon
+         * construction of the iterator, and may (but is not
+         * guaranteed to) reflect any modifications subsequent to
+         * construction.
+         */
+        public abstract Iterator<E> iterator();
+        public abstract boolean contains(Object o);
+        public abstract boolean remove(Object o);
+
+        private static final String oomeMsg = "Required array size too large";
+
+        public final Object[] toArray() {
+            long sz = map.mappingCount();
+            if (sz > MAX_ARRAY_SIZE)
+                throw new OutOfMemoryError(oomeMsg);
+            int n = (int)sz;
+            Object[] r = new Object[n];
+            int i = 0;
+            for (E e : this) {
+                if (i == n) {
+                    if (n >= MAX_ARRAY_SIZE)
+                        throw new OutOfMemoryError(oomeMsg);
+                    if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
+                        n = MAX_ARRAY_SIZE;
+                    else
+                        n += (n >>> 1) + 1;
+                    r = Arrays.copyOf(r, n);
+                }
+                r[i++] = e;
+            }
+            return (i == n) ? r : Arrays.copyOf(r, i);
+        }
+
+        public final <T> T[] toArray(T[] a) {
+            long sz = map.mappingCount();
+            if (sz > MAX_ARRAY_SIZE)
+                throw new OutOfMemoryError(oomeMsg);
+            int m = (int)sz;
+            T[] r = (a.length >= m) ? a :
+                (T[])java.lang.reflect.Array
+                .newInstance(a.getClass().getComponentType(), m);
+            int n = r.length;
+            int i = 0;
+            for (E e : this) {
+                if (i == n) {
+                    if (n >= MAX_ARRAY_SIZE)
+                        throw new OutOfMemoryError(oomeMsg);
+                    if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
+                        n = MAX_ARRAY_SIZE;
+                    else
+                        n += (n >>> 1) + 1;
+                    r = Arrays.copyOf(r, n);
+                }
+                r[i++] = (T)e;
+            }
+            if (a == r && i < n) {
+                r[i] = null; // null-terminate
+                return r;
+            }
+            return (i == n) ? r : Arrays.copyOf(r, i);
+        }
+
+        /**
+         * Returns a string representation of this collection.
+         * The string representation consists of the string representations
+         * of the collection's elements in the order they are returned by
+         * its iterator, enclosed in square brackets ({@code "[]"}).
+         * Adjacent elements are separated by the characters {@code ", "}
+         * (comma and space).  Elements are converted to strings as by
+         * {@link String#valueOf(Object)}.
+         *
+         * @return a string representation of this collection
+         */
+        public final String toString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append('[');
+            Iterator<E> it = iterator();
+            if (it.hasNext()) {
+                for (;;) {
+                    Object e = it.next();
+                    sb.append(e == this ? "(this Collection)" : e);
+                    if (!it.hasNext())
+                        break;
+                    sb.append(',').append(' ');
+                }
+            }
+            return sb.append(']').toString();
+        }
+
+        public final boolean containsAll(Collection<?> c) {
+            if (c != this) {
+                for (Object e : c) {
+                    if (e == null || !contains(e))
+                        return false;
+                }
+            }
+            return true;
+        }
+
+        public final boolean removeAll(Collection<?> c) {
+            boolean modified = false;
+            for (Iterator<E> it = iterator(); it.hasNext();) {
+                if (c.contains(it.next())) {
+                    it.remove();
+                    modified = true;
+                }
+            }
+            return modified;
+        }
+
+        public final boolean retainAll(Collection<?> c) {
+            boolean modified = false;
+            for (Iterator<E> it = iterator(); it.hasNext();) {
+                if (!c.contains(it.next())) {
+                    it.remove();
+                    modified = true;
+                }
+            }
+            return modified;
+        }
+
+    }
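+
+    // Usage sketch (illustrative): the views write through to the backing map and
+    // iterate weakly consistently, so on the assumed "counts" map this drops the
+    // corresponding mappings without risking ConcurrentModificationException:
+    //
+    //   counts.values().removeIf(v -> v == 0L);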
+
+    /**
+     * A view of a ConcurrentHashMap as a {@link Set} of keys, in
+     * which additions may optionally be enabled by mapping to a
+     * common value.  This class cannot be directly instantiated.
+     * See {@link #keySet() keySet()},
+     * {@link #keySet(Object) keySet(V)},
+     * {@link #newKeySet() newKeySet()},
+     * {@link #newKeySet(int) newKeySet(int)}.
+     * @since 1.8
+     */
+    public static class KeySetView<K,V> extends CollectionView<K,V,K>
+        implements Set<K>, java.io.Serializable {
+        private static final long serialVersionUID = 7249069246763182397L;
+        private final V value;
+        KeySetView(ConcurrentHashMap<K,V> map, V value) {  // non-public
+            super(map);
+            this.value = value;
+        }
+
+        /**
+         * Returns the default mapped value for additions,
+         * or {@code null} if additions are not supported.
+         *
+         * @return the default mapped value for additions, or {@code null}
+         * if not supported
+         */
+        public V getMappedValue() { return value; }
+
+        /**
+         * {@inheritDoc}
+         * @throws NullPointerException if the specified key is null
+         */
+        public boolean contains(Object o) { return map.containsKey(o); }
+
+        /**
+         * Removes the key from this map view, by removing the key (and its
+         * corresponding value) from the backing map.  This method does
+         * nothing if the key is not in the map.
+         *
+         * @param  o the key to be removed from the backing map
+         * @return {@code true} if the backing map contained the specified key
+         * @throws NullPointerException if the specified key is null
+         */
+        public boolean remove(Object o) { return map.remove(o) != null; }
+
+        /**
+         * @return an iterator over the keys of the backing map
+         */
+        public Iterator<K> iterator() {
+            Node<K,V>[] t;
+            ConcurrentHashMap<K,V> m = map;
+            int f = (t = m.table) == null ? 0 : t.length;
+            return new KeyIterator<K,V>(t, f, 0, f, m);
+        }
+
+        /**
+         * Adds the specified key to this set view by mapping the key to
+         * the default mapped value in the backing map, if defined.
+         *
+         * @param e key to be added
+         * @return {@code true} if this set changed as a result of the call
+         * @throws NullPointerException if the specified key is null
+         * @throws UnsupportedOperationException if no default mapped value
+         * for additions was provided
+         */
+        public boolean add(K e) {
+            V v;
+            if ((v = value) == null)
+                throw new UnsupportedOperationException();
+            return map.internalPut(e, v, true) == null;
+        }
+
+        /**
+         * Adds all of the elements in the specified collection to this set,
+         * as if by calling {@link #add} on each one.
+         *
+         * @param c the elements to be inserted into this set
+         * @return {@code true} if this set changed as a result of the call
+         * @throws NullPointerException if the collection or any of its
+         * elements are {@code null}
+         * @throws UnsupportedOperationException if no default mapped value
+         * for additions was provided
+         */
+        public boolean addAll(Collection<? extends K> c) {
+            boolean added = false;
+            V v;
+            if ((v = value) == null)
+                throw new UnsupportedOperationException();
+            for (K e : c) {
+                if (map.internalPut(e, v, true) == null)
+                    added = true;
+            }
+            return added;
+        }
+
+        public int hashCode() {
+            int h = 0;
+            for (K e : this)
+                h += e.hashCode();
+            return h;
+        }
+
+        public boolean equals(Object o) {
+            Set<?> c;
+            return ((o instanceof Set) &&
+                    ((c = (Set<?>)o) == this ||
+                     (containsAll(c) && c.containsAll(this))));
+        }
+
+        public Spliterator<K> spliterator() {
+            Node<K,V>[] t;
+            ConcurrentHashMap<K,V> m = map;
+            long n = m.sumCount();
+            int f = (t = m.table) == null ? 0 : t.length;
+            return new KeySpliterator<K,V>(t, f, 0, f, n < 0L ? 0L : n);
+        }
+
+        public void forEach(Consumer<? super K> action) {
+            if (action == null) throw new NullPointerException();
+            Node<K,V>[] t;
+            if ((t = map.table) != null) {
+                Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+                for (Node<K,V> p; (p = it.advance()) != null; )
+                    action.accept((K)p.key);
+            }
+        }
+    }
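+
+    // Usage sketch (illustrative; the names are assumptions): newKeySet() yields a
+    // Set backed by a fresh map, while keySet(V) adapts an existing map and maps
+    // added keys to the given default value:
+    //
+    //   Set<String> seen = ConcurrentHashMap.newKeySet();
+    //   seen.add("alpha");
+    //
+    //   ConcurrentHashMap<String,Long> counts = new ConcurrentHashMap<>();
+    //   Set<String> keys = counts.keySet(0L);
+    //   keys.add("beta");   // roughly counts.putIfAbsent("beta", 0L)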
+
+    /**
+     * A view of a ConcurrentHashMap as a {@link Collection} of
+     * values, in which additions are disabled. This class cannot be
+     * directly instantiated. See {@link #values()}.
+     */
+    static final class ValuesView<K,V> extends CollectionView<K,V,V>
+        implements Collection<V>, java.io.Serializable {
+        private static final long serialVersionUID = 2249069246763182397L;
+        ValuesView(ConcurrentHashMap<K,V> map) { super(map); }
+        public final boolean contains(Object o) {
+            return map.containsValue(o);
+        }
+
+        public final boolean remove(Object o) {
+            if (o != null) {
+                for (Iterator<V> it = iterator(); it.hasNext();) {
+                    if (o.equals(it.next())) {
+                        it.remove();
+                        return true;
+                    }
+                }
+            }
+            return false;
+        }
+
+        public final Iterator<V> iterator() {
+            ConcurrentHashMap<K,V> m = map;
+            Node<K,V>[] t;
+            int f = (t = m.table) == null ? 0 : t.length;
+            return new ValueIterator<K,V>(t, f, 0, f, m);
+        }
+
+        public final boolean add(V e) {
+            throw new UnsupportedOperationException();
+        }
+        public final boolean addAll(Collection<? extends V> c) {
+            throw new UnsupportedOperationException();
+        }
+
+        public Spliterator<V> spliterator() {
+            Node<K,V>[] t;
+            ConcurrentHashMap<K,V> m = map;
+            long n = m.sumCount();
+            int f = (t = m.table) == null ? 0 : t.length;
+            return new ValueSpliterator<K,V>(t, f, 0, f, n < 0L ? 0L : n);
+        }
+
+        public void forEach(Consumer<? super V> action) {
+            if (action == null) throw new NullPointerException();
+            Node<K,V>[] t;
+            if ((t = map.table) != null) {
+                Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+                for (Node<K,V> p; (p = it.advance()) != null; )
+                    action.accept(p.val);
+            }
+        }
+    }
+
+    /**
+     * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
+     * entries.  This class cannot be directly instantiated. See
+     * {@link #entrySet()}.
+     */
+    static final class EntrySetView<K,V> extends CollectionView<K,V,Map.Entry<K,V>>
+        implements Set<Map.Entry<K,V>>, java.io.Serializable {
+        private static final long serialVersionUID = 2249069246763182397L;
+        EntrySetView(ConcurrentHashMap<K,V> map) { super(map); }
+
+        public boolean contains(Object o) {
+            Object k, v, r; Map.Entry<?,?> e;
+            return ((o instanceof Map.Entry) &&
+                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
+                    (r = map.get(k)) != null &&
+                    (v = e.getValue()) != null &&
+                    (v == r || v.equals(r)));
+        }
+
+        public boolean remove(Object o) {
+            Object k, v; Map.Entry<?,?> e;
+            return ((o instanceof Map.Entry) &&
+                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
+                    (v = e.getValue()) != null &&
+                    map.remove(k, v));
+        }
+
+        /**
+         * @return an iterator over the entries of the backing map
+         */
+        public Iterator<Map.Entry<K,V>> iterator() {
+            ConcurrentHashMap<K,V> m = map;
+            Node<K,V>[] t;
+            int f = (t = m.table) == null ? 0 : t.length;
+            return new EntryIterator<K,V>(t, f, 0, f, m);
+        }
+
+        public boolean add(Entry<K,V> e) {
+            return map.internalPut(e.getKey(), e.getValue(), false) == null;
+        }
+
+        public boolean addAll(Collection<? extends Entry<K,V>> c) {
+            boolean added = false;
+            for (Entry<K,V> e : c) {
+                if (add(e))
+                    added = true;
+            }
+            return added;
+        }
+
+        public final int hashCode() {
+            int h = 0;
+            Node<K,V>[] t;
+            if ((t = map.table) != null) {
+                Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+                for (Node<K,V> p; (p = it.advance()) != null; ) {
+                    h += p.hashCode();
+                }
+            }
+            return h;
+        }
+
+        public final boolean equals(Object o) {
+            Set<?> c;
+            return ((o instanceof Set) &&
+                    ((c = (Set<?>)o) == this ||
+                     (containsAll(c) && c.containsAll(this))));
+        }
+
+        public Spliterator<Map.Entry<K,V>> spliterator() {
+            Node<K,V>[] t;
+            ConcurrentHashMap<K,V> m = map;
+            long n = m.sumCount();
+            int f = (t = m.table) == null ? 0 : t.length;
+            return new EntrySpliterator<K,V>(t, f, 0, f, n < 0L ? 0L : n, m);
+        }
+
+        public void forEach(Consumer<? super Map.Entry<K,V>> action) {
+            if (action == null) throw new NullPointerException();
+            Node<K,V>[] t;
+            if ((t = map.table) != null) {
+                Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
+                for (Node<K,V> p; (p = it.advance()) != null; )
+                    action.accept(new MapEntry<K,V>((K)p.key, p.val, map));
+            }
+        }
+
+    }
+
+    // -------------------------------------------------------
+
+    /**
+     * Base class for bulk tasks. Repeats some fields and code from
+     * class Traverser, because we need to subclass CountedCompleter.
+     */
+    abstract static class BulkTask<K,V,R> extends CountedCompleter<R> {
+        Node<K,V>[] tab;        // same as Traverser
+        Node<K,V> next;
+        int index;
+        int baseIndex;
+        int baseLimit;
+        final int baseSize;
+        int batch;              // split control
+
+        BulkTask(BulkTask<K,V,?> par, int b, int i, int f, Node<K,V>[] t) {
+            super(par);
+            this.batch = b;
+            this.index = this.baseIndex = i;
+            if ((this.tab = t) == null)
+                this.baseSize = this.baseLimit = 0;
+            else if (par == null)
+                this.baseSize = this.baseLimit = t.length;
+            else {
+                this.baseLimit = f;
+                this.baseSize = par.baseSize;
+            }
+        }
+
+        /**
+         * Same as Traverser version
+         */
+        final Node<K,V> advance() {
+            Node<K,V> e;
+            if ((e = next) != null)
+                e = e.next;
+            for (;;) {
+                Node<K,V>[] t; int i, n; Object ek;
+                if (e != null)
+                    return next = e;
+                if (baseIndex >= baseLimit || (t = tab) == null ||
+                    (n = t.length) <= (i = index) || i < 0)
+                    return next = null;
+                if ((e = tabAt(t, index)) != null && e.hash < 0) {
+                    if ((ek = e.key) instanceof TreeBin)
+                        e = ((TreeBin<K,V>)ek).first;
+                    else {
+                        tab = (Node<K,V>[])ek;
+                        e = null;
+                        continue;
+                    }
+                }
+                if ((index += baseSize) >= n)
+                    index = ++baseIndex;
             }
         }
-
-        // Read the keys and values, and put the mappings in the table
-        for (;;) {
-            K key = (K) s.readObject();
-            V value = (V) s.readObject();
-            if (key == null)
-                break;
-            put(key, value);
+    }
+
+    /*
+     * Task classes. Coded in a regular but ugly format/style to
+     * simplify checks that each variant differs in the right way from
+     * others. The null screenings exist because compilers cannot tell
+     * that we've already null-checked task arguments, so we hoist the
+     * simplest possible null checks to the top of each compute(), which
+     * helps avoid convoluted traps.
+     */
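+
+    // How the splitting works (descriptive note): each compute() repeatedly halves
+    // its [baseIndex, baseLimit) range, forking a subtask for the upper half and
+    // halving the remaining batch allowance, until batch reaches zero; it then
+    // drains its own slice via advance() and reports completion, so the root task
+    // finishes once every leaf has completed (or a search has short-circuited).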
+
+    static final class ForEachKeyTask<K,V>
+        extends BulkTask<K,V,Void> {
+        final Consumer<? super K> action;
+        ForEachKeyTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Consumer<? super K> action) {
+            super(p, b, i, f, t);
+            this.action = action;
+        }
+        public final void compute() {
+            final Consumer<? super K> action;
+            if ((action = this.action) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    new ForEachKeyTask<K,V>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         action).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null;)
+                    action.accept((K)p.key);
+                propagateCompletion();
+            }
+        }
+    }
+
+    static final class ForEachValueTask<K,V>
+        extends BulkTask<K,V,Void> {
+        final Consumer<? super V> action;
+        ForEachValueTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Consumer<? super V> action) {
+            super(p, b, i, f, t);
+            this.action = action;
+        }
+        public final void compute() {
+            final Consumer<? super V> action;
+            if ((action = this.action) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    new ForEachValueTask<K,V>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         action).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null;)
+                    action.accept(p.val);
+                propagateCompletion();
+            }
+        }
+    }
+
+    static final class ForEachEntryTask<K,V>
+        extends BulkTask<K,V,Void> {
+        final Consumer<? super Entry<K,V>> action;
+        ForEachEntryTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Consumer<? super Entry<K,V>> action) {
+            super(p, b, i, f, t);
+            this.action = action;
+        }
+        public final void compute() {
+            final Consumer<? super Entry<K,V>> action;
+            if ((action = this.action) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    new ForEachEntryTask<K,V>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         action).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    action.accept(p);
+                propagateCompletion();
+            }
+        }
+    }
+
+    static final class ForEachMappingTask<K,V>
+        extends BulkTask<K,V,Void> {
+        final BiConsumer<? super K, ? super V> action;
+        ForEachMappingTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             BiConsumer<? super K,? super V> action) {
+            super(p, b, i, f, t);
+            this.action = action;
+        }
+        public final void compute() {
+            final BiConsumer<? super K, ? super V> action;
+            if ((action = this.action) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    new ForEachMappingTask<K,V>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         action).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    action.accept((K)p.key, p.val);
+                propagateCompletion();
+            }
+        }
+    }
+
+    static final class ForEachTransformedKeyTask<K,V,U>
+        extends BulkTask<K,V,Void> {
+        final Function<? super K, ? extends U> transformer;
+        final Consumer<? super U> action;
+        ForEachTransformedKeyTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Function<? super K, ? extends U> transformer, Consumer<? super U> action) {
+            super(p, b, i, f, t);
+            this.transformer = transformer; this.action = action;
+        }
+        public final void compute() {
+            final Function<? super K, ? extends U> transformer;
+            final Consumer<? super U> action;
+            if ((transformer = this.transformer) != null &&
+                (action = this.action) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    new ForEachTransformedKeyTask<K,V,U>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         transformer, action).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    U u;
+                    if ((u = transformer.apply((K)p.key)) != null)
+                        action.accept(u);
+                }
+                propagateCompletion();
+            }
+        }
+    }
+
+    static final class ForEachTransformedValueTask<K,V,U>
+        extends BulkTask<K,V,Void> {
+        final Function<? super V, ? extends U> transformer;
+        final Consumer<? super U> action;
+        ForEachTransformedValueTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Function<? super V, ? extends U> transformer, Consumer<? super U> action) {
+            super(p, b, i, f, t);
+            this.transformer = transformer; this.action = action;
+        }
+        public final void compute() {
+            final Function<? super V, ? extends U> transformer;
+            final Consumer<? super U> action;
+            if ((transformer = this.transformer) != null &&
+                (action = this.action) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    new ForEachTransformedValueTask<K,V,U>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         transformer, action).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    U u;
+                    if ((u = transformer.apply(p.val)) != null)
+                        action.accept(u);
+                }
+                propagateCompletion();
+            }
+        }
+    }
+
+    static final class ForEachTransformedEntryTask<K,V,U>
+        extends BulkTask<K,V,Void> {
+        final Function<Map.Entry<K,V>, ? extends U> transformer;
+        final Consumer<? super U> action;
+        ForEachTransformedEntryTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Function<Map.Entry<K,V>, ? extends U> transformer, Consumer<? super U> action) {
+            super(p, b, i, f, t);
+            this.transformer = transformer; this.action = action;
+        }
+        public final void compute() {
+            final Function<Map.Entry<K,V>, ? extends U> transformer;
+            final Consumer<? super U> action;
+            if ((transformer = this.transformer) != null &&
+                (action = this.action) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    new ForEachTransformedEntryTask<K,V,U>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         transformer, action).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    U u;
+                    if ((u = transformer.apply(p)) != null)
+                        action.accept(u);
+                }
+                propagateCompletion();
+            }
+        }
+    }
+
+    static final class ForEachTransformedMappingTask<K,V,U>
+        extends BulkTask<K,V,Void> {
+        final BiFunction<? super K, ? super V, ? extends U> transformer;
+        final Consumer<? super U> action;
+        ForEachTransformedMappingTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             BiFunction<? super K, ? super V, ? extends U> transformer,
+             Consumer<? super U> action) {
+            super(p, b, i, f, t);
+            this.transformer = transformer; this.action = action;
+        }
+        public final void compute() {
+            final BiFunction<? super K, ? super V, ? extends U> transformer;
+            final Consumer<? super U> action;
+            if ((transformer = this.transformer) != null &&
+                (action = this.action) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    new ForEachTransformedMappingTask<K,V,U>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         transformer, action).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    U u;
+                    if ((u = transformer.apply((K)p.key, p.val)) != null)
+                        action.accept(u);
+                }
+                propagateCompletion();
+            }
+        }
+    }
+
+    static final class SearchKeysTask<K,V,U>
+        extends BulkTask<K,V,U> {
+        final Function<? super K, ? extends U> searchFunction;
+        final AtomicReference<U> result;
+        SearchKeysTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Function<? super K, ? extends U> searchFunction,
+             AtomicReference<U> result) {
+            super(p, b, i, f, t);
+            this.searchFunction = searchFunction; this.result = result;
+        }
+        public final U getRawResult() { return result.get(); }
+        public final void compute() {
+            final Function<? super K, ? extends U> searchFunction;
+            final AtomicReference<U> result;
+            if ((searchFunction = this.searchFunction) != null &&
+                (result = this.result) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    if (result.get() != null)
+                        return;
+                    addToPendingCount(1);
+                    new SearchKeysTask<K,V,U>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         searchFunction, result).fork();
+                }
+                while (result.get() == null) {
+                    U u;
+                    Node<K,V> p;
+                    if ((p = advance()) == null) {
+                        propagateCompletion();
+                        break;
+                    }
+                    if ((u = searchFunction.apply((K)p.key)) != null) {
+                        if (result.compareAndSet(null, u))
+                            quietlyCompleteRoot();
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class SearchValuesTask<K,V,U>
+        extends BulkTask<K,V,U> {
+        final Function<? super V, ? extends U> searchFunction;
+        final AtomicReference<U> result;
+        SearchValuesTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Function<? super V, ? extends U> searchFunction,
+             AtomicReference<U> result) {
+            super(p, b, i, f, t);
+            this.searchFunction = searchFunction; this.result = result;
+        }
+        public final U getRawResult() { return result.get(); }
+        public final void compute() {
+            final Function<? super V, ? extends U> searchFunction;
+            final AtomicReference<U> result;
+            if ((searchFunction = this.searchFunction) != null &&
+                (result = this.result) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    if (result.get() != null)
+                        return;
+                    addToPendingCount(1);
+                    new SearchValuesTask<K,V,U>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         searchFunction, result).fork();
+                }
+                while (result.get() == null) {
+                    U u;
+                    Node<K,V> p;
+                    if ((p = advance()) == null) {
+                        propagateCompletion();
+                        break;
+                    }
+                    if ((u = searchFunction.apply(p.val)) != null) {
+                        if (result.compareAndSet(null, u))
+                            quietlyCompleteRoot();
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class SearchEntriesTask<K,V,U>
+        extends BulkTask<K,V,U> {
+        final Function<Entry<K,V>, ? extends U> searchFunction;
+        final AtomicReference<U> result;
+        SearchEntriesTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             Function<Entry<K,V>, ? extends U> searchFunction,
+             AtomicReference<U> result) {
+            super(p, b, i, f, t);
+            this.searchFunction = searchFunction; this.result = result;
+        }
+        public final U getRawResult() { return result.get(); }
+        public final void compute() {
+            final Function<Entry<K,V>, ? extends U> searchFunction;
+            final AtomicReference<U> result;
+            if ((searchFunction = this.searchFunction) != null &&
+                (result = this.result) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    if (result.get() != null)
+                        return;
+                    addToPendingCount(1);
+                    new SearchEntriesTask<K,V,U>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         searchFunction, result).fork();
+                }
+                while (result.get() == null) {
+                    U u;
+                    Node<K,V> p;
+                    if ((p = advance()) == null) {
+                        propagateCompletion();
+                        break;
+                    }
+                    if ((u = searchFunction.apply(p)) != null) {
+                        if (result.compareAndSet(null, u))
+                            quietlyCompleteRoot();
+                        return;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class SearchMappingsTask<K,V,U>
+        extends BulkTask<K,V,U> {
+        final BiFunction<? super K, ? super V, ? extends U> searchFunction;
+        final AtomicReference<U> result;
+        SearchMappingsTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             BiFunction<? super K, ? super V, ? extends U> searchFunction,
+             AtomicReference<U> result) {
+            super(p, b, i, f, t);
+            this.searchFunction = searchFunction; this.result = result;
+        }
+        public final U getRawResult() { return result.get(); }
+        public final void compute() {
+            final BiFunction<? super K, ? super V, ? extends U> searchFunction;
+            final AtomicReference<U> result;
+            if ((searchFunction = this.searchFunction) != null &&
+                (result = this.result) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    if (result.get() != null)
+                        return;
+                    addToPendingCount(1);
+                    new SearchMappingsTask<K,V,U>
+                        (this, batch >>>= 1, baseLimit = h, f, tab,
+                         searchFunction, result).fork();
+                }
+                while (result.get() == null) {
+                    U u;
+                    Node<K,V> p;
+                    if ((p = advance()) == null) {
+                        propagateCompletion();
+                        break;
+                    }
+                    if ((u = searchFunction.apply((K)p.key, p.val)) != null) {
+                        if (result.compareAndSet(null, u))
+                            quietlyCompleteRoot();
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class ReduceKeysTask<K,V>
+        extends BulkTask<K,V,K> {
+        final BiFunction<? super K, ? super K, ? extends K> reducer;
+        K result;
+        ReduceKeysTask<K,V> rights, nextRight;
+        ReduceKeysTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             ReduceKeysTask<K,V> nextRight,
+             BiFunction<? super K, ? super K, ? extends K> reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.reducer = reducer;
+        }
+        public final K getRawResult() { return result; }
+        public final void compute() {
+            final BiFunction<? super K, ? super K, ? extends K> reducer;
+            if ((reducer = this.reducer) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new ReduceKeysTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, reducer)).fork();
+                }
+                K r = null;
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    K u = (K)p.key;
+                    r = (r == null) ? u : u == null ? r : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    ReduceKeysTask<K,V>
+                        t = (ReduceKeysTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        K tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class ReduceValuesTask<K,V>
+        extends BulkTask<K,V,V> {
+        final BiFunction<? super V, ? super V, ? extends V> reducer;
+        V result;
+        ReduceValuesTask<K,V> rights, nextRight;
+        ReduceValuesTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             ReduceValuesTask<K,V> nextRight,
+             BiFunction<? super V, ? super V, ? extends V> reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.reducer = reducer;
+        }
+        public final V getRawResult() { return result; }
+        public final void compute() {
+            final BiFunction<? super V, ? super V, ? extends V> reducer;
+            if ((reducer = this.reducer) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new ReduceValuesTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, reducer)).fork();
+                }
+                V r = null;
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    V v = p.val;
+                    r = (r == null) ? v : reducer.apply(r, v);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    ReduceValuesTask<K,V>
+                        t = (ReduceValuesTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        V tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class ReduceEntriesTask<K,V>
+        extends BulkTask<K,V,Map.Entry<K,V>> {
+        final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
+        Map.Entry<K,V> result;
+        ReduceEntriesTask<K,V> rights, nextRight;
+        ReduceEntriesTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             ReduceEntriesTask<K,V> nextRight,
+             BiFunction<Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.reducer = reducer;
+        }
+        public final Map.Entry<K,V> getRawResult() { return result; }
+        public final void compute() {
+            final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
+            if ((reducer = this.reducer) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new ReduceEntriesTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, reducer)).fork();
+                }
+                Map.Entry<K,V> r = null;
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = (r == null) ? p : reducer.apply(r, p);
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    ReduceEntriesTask<K,V>
+                        t = (ReduceEntriesTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        Map.Entry<K,V> tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceKeysTask<K,V,U>
+        extends BulkTask<K,V,U> {
+        final Function<? super K, ? extends U> transformer;
+        final BiFunction<? super U, ? super U, ? extends U> reducer;
+        U result;
+        MapReduceKeysTask<K,V,U> rights, nextRight;
+        MapReduceKeysTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceKeysTask<K,V,U> nextRight,
+             Function<? super K, ? extends U> transformer,
+             BiFunction<? super U, ? super U, ? extends U> reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.reducer = reducer;
+        }
+        public final U getRawResult() { return result; }
+        public final void compute() {
+            final Function<? super K, ? extends U> transformer;
+            final BiFunction<? super U, ? super U, ? extends U> reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceKeysTask<K,V,U>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, reducer)).fork();
+                }
+                U r = null;
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    U u;
+                    if ((u = transformer.apply((K)p.key)) != null)
+                        r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceKeysTask<K,V,U>
+                        t = (MapReduceKeysTask<K,V,U>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        U tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceValuesTask<K,V,U>
+        extends BulkTask<K,V,U> {
+        final Function<? super V, ? extends U> transformer;
+        final BiFunction<? super U, ? super U, ? extends U> reducer;
+        U result;
+        MapReduceValuesTask<K,V,U> rights, nextRight;
+        MapReduceValuesTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceValuesTask<K,V,U> nextRight,
+             Function<? super V, ? extends U> transformer,
+             BiFunction<? super U, ? super U, ? extends U> reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.reducer = reducer;
+        }
+        public final U getRawResult() { return result; }
+        public final void compute() {
+            final Function<? super V, ? extends U> transformer;
+            final BiFunction<? super U, ? super U, ? extends U> reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceValuesTask<K,V,U>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, reducer)).fork();
+                }
+                U r = null;
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    U u;
+                    if ((u = transformer.apply(p.val)) != null)
+                        r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceValuesTask<K,V,U>
+                        t = (MapReduceValuesTask<K,V,U>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        U tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceEntriesTask<K,V,U>
+        extends BulkTask<K,V,U> {
+        final Function<Map.Entry<K,V>, ? extends U> transformer;
+        final BiFunction<? super U, ? super U, ? extends U> reducer;
+        U result;
+        MapReduceEntriesTask<K,V,U> rights, nextRight;
+        MapReduceEntriesTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceEntriesTask<K,V,U> nextRight,
+             Function<Map.Entry<K,V>, ? extends U> transformer,
+             BiFunction<? super U, ? super U, ? extends U> reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.reducer = reducer;
+        }
+        public final U getRawResult() { return result; }
+        public final void compute() {
+            final Function<Map.Entry<K,V>, ? extends U> transformer;
+            final BiFunction<? super U, ? super U, ? extends U> reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceEntriesTask<K,V,U>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, reducer)).fork();
+                }
+                U r = null;
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    U u;
+                    if ((u = transformer.apply(p)) != null)
+                        r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceEntriesTask<K,V,U>
+                        t = (MapReduceEntriesTask<K,V,U>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        U tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceMappingsTask<K,V,U>
+        extends BulkTask<K,V,U> {
+        final BiFunction<? super K, ? super V, ? extends U> transformer;
+        final BiFunction<? super U, ? super U, ? extends U> reducer;
+        U result;
+        MapReduceMappingsTask<K,V,U> rights, nextRight;
+        MapReduceMappingsTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceMappingsTask<K,V,U> nextRight,
+             BiFunction<? super K, ? super V, ? extends U> transformer,
+             BiFunction<? super U, ? super U, ? extends U> reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.reducer = reducer;
+        }
+        public final U getRawResult() { return result; }
+        public final void compute() {
+            final BiFunction<? super K, ? super V, ? extends U> transformer;
+            final BiFunction<? super U, ? super U, ? extends U> reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceMappingsTask<K,V,U>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, reducer)).fork();
+                }
+                U r = null;
+                for (Node<K,V> p; (p = advance()) != null; ) {
+                    U u;
+                    if ((u = transformer.apply((K)p.key, p.val)) != null)
+                        r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceMappingsTask<K,V,U>
+                        t = (MapReduceMappingsTask<K,V,U>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        U tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceKeysToDoubleTask<K,V>
+        extends BulkTask<K,V,Double> {
+        final ToDoubleFunction<? super K> transformer;
+        final DoubleBinaryOperator reducer;
+        final double basis;
+        double result;
+        MapReduceKeysToDoubleTask<K,V> rights, nextRight;
+        MapReduceKeysToDoubleTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceKeysToDoubleTask<K,V> nextRight,
+             ToDoubleFunction<? super K> transformer,
+             double basis,
+             DoubleBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Double getRawResult() { return result; }
+        public final void compute() {
+            final ToDoubleFunction<? super K> transformer;
+            final DoubleBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                double r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceKeysToDoubleTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)p.key));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceKeysToDoubleTask<K,V>
+                        t = (MapReduceKeysToDoubleTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsDouble(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceValuesToDoubleTask<K,V>
+        extends BulkTask<K,V,Double> {
+        final ToDoubleFunction<? super V> transformer;
+        final DoubleBinaryOperator reducer;
+        final double basis;
+        double result;
+        MapReduceValuesToDoubleTask<K,V> rights, nextRight;
+        MapReduceValuesToDoubleTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceValuesToDoubleTask<K,V> nextRight,
+             ToDoubleFunction<? super V> transformer,
+             double basis,
+             DoubleBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Double getRawResult() { return result; }
+        public final void compute() {
+            final ToDoubleFunction<? super V> transformer;
+            final DoubleBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                double r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceValuesToDoubleTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(p.val));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceValuesToDoubleTask<K,V>
+                        t = (MapReduceValuesToDoubleTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsDouble(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceEntriesToDoubleTask<K,V>
+        extends BulkTask<K,V,Double> {
+        final ToDoubleFunction<Map.Entry<K,V>> transformer;
+        final DoubleBinaryOperator reducer;
+        final double basis;
+        double result;
+        MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
+        MapReduceEntriesToDoubleTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceEntriesToDoubleTask<K,V> nextRight,
+             ToDoubleFunction<Map.Entry<K,V>> transformer,
+             double basis,
+             DoubleBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Double getRawResult() { return result; }
+        public final void compute() {
+            final ToDoubleFunction<Map.Entry<K,V>> transformer;
+            final DoubleBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                double r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceEntriesToDoubleTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(p));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceEntriesToDoubleTask<K,V>
+                        t = (MapReduceEntriesToDoubleTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsDouble(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceMappingsToDoubleTask<K,V>
+        extends BulkTask<K,V,Double> {
+        final ToDoubleBiFunction<? super K, ? super V> transformer;
+        final DoubleBinaryOperator reducer;
+        final double basis;
+        double result;
+        MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
+        MapReduceMappingsToDoubleTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceMappingsToDoubleTask<K,V> nextRight,
+             ToDoubleBiFunction<? super K, ? super V> transformer,
+             double basis,
+             DoubleBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Double getRawResult() { return result; }
+        public final void compute() {
+            final ToDoubleBiFunction<? super K, ? super V> transformer;
+            final DoubleBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                double r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceMappingsToDoubleTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)p.key, p.val));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceMappingsToDoubleTask<K,V>
+                        t = (MapReduceMappingsToDoubleTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsDouble(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceKeysToLongTask<K,V>
+        extends BulkTask<K,V,Long> {
+        final ToLongFunction<? super K> transformer;
+        final LongBinaryOperator reducer;
+        final long basis;
+        long result;
+        MapReduceKeysToLongTask<K,V> rights, nextRight;
+        MapReduceKeysToLongTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceKeysToLongTask<K,V> nextRight,
+             ToLongFunction<? super K> transformer,
+             long basis,
+             LongBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Long getRawResult() { return result; }
+        public final void compute() {
+            final ToLongFunction<? super K> transformer;
+            final LongBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                long r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceKeysToLongTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsLong(r, transformer.applyAsLong((K)p.key));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceKeysToLongTask<K,V>
+                        t = (MapReduceKeysToLongTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsLong(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceValuesToLongTask<K,V>
+        extends BulkTask<K,V,Long> {
+        final ToLongFunction<? super V> transformer;
+        final LongBinaryOperator reducer;
+        final long basis;
+        long result;
+        MapReduceValuesToLongTask<K,V> rights, nextRight;
+        MapReduceValuesToLongTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceValuesToLongTask<K,V> nextRight,
+             ToLongFunction<? super V> transformer,
+             long basis,
+             LongBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Long getRawResult() { return result; }
+        public final void compute() {
+            final ToLongFunction<? super V> transformer;
+            final LongBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                long r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceValuesToLongTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsLong(r, transformer.applyAsLong(p.val));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceValuesToLongTask<K,V>
+                        t = (MapReduceValuesToLongTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsLong(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceEntriesToLongTask<K,V>
+        extends BulkTask<K,V,Long> {
+        final ToLongFunction<Map.Entry<K,V>> transformer;
+        final LongBinaryOperator reducer;
+        final long basis;
+        long result;
+        MapReduceEntriesToLongTask<K,V> rights, nextRight;
+        MapReduceEntriesToLongTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceEntriesToLongTask<K,V> nextRight,
+             ToLongFunction<Map.Entry<K,V>> transformer,
+             long basis,
+             LongBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Long getRawResult() { return result; }
+        public final void compute() {
+            final ToLongFunction<Map.Entry<K,V>> transformer;
+            final LongBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                long r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceEntriesToLongTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsLong(r, transformer.applyAsLong(p));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceEntriesToLongTask<K,V>
+                        t = (MapReduceEntriesToLongTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsLong(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceMappingsToLongTask<K,V>
+        extends BulkTask<K,V,Long> {
+        final ToLongBiFunction<? super K, ? super V> transformer;
+        final LongBinaryOperator reducer;
+        final long basis;
+        long result;
+        MapReduceMappingsToLongTask<K,V> rights, nextRight;
+        MapReduceMappingsToLongTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceMappingsToLongTask<K,V> nextRight,
+             ToLongBiFunction<? super K, ? super V> transformer,
+             long basis,
+             LongBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Long getRawResult() { return result; }
+        public final void compute() {
+            final ToLongBiFunction<? super K, ? super V> transformer;
+            final LongBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                long r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceMappingsToLongTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsLong(r, transformer.applyAsLong((K)p.key, p.val));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceMappingsToLongTask<K,V>
+                        t = (MapReduceMappingsToLongTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsLong(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceKeysToIntTask<K,V>
+        extends BulkTask<K,V,Integer> {
+        final ToIntFunction<? super K> transformer;
+        final IntBinaryOperator reducer;
+        final int basis;
+        int result;
+        MapReduceKeysToIntTask<K,V> rights, nextRight;
+        MapReduceKeysToIntTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceKeysToIntTask<K,V> nextRight,
+             ToIntFunction<? super K> transformer,
+             int basis,
+             IntBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Integer getRawResult() { return result; }
+        public final void compute() {
+            final ToIntFunction<? super K> transformer;
+            final IntBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                int r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceKeysToIntTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsInt(r, transformer.applyAsInt((K)p.key));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceKeysToIntTask<K,V>
+                        t = (MapReduceKeysToIntTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsInt(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceValuesToIntTask<K,V>
+        extends BulkTask<K,V,Integer> {
+        final ToIntFunction<? super V> transformer;
+        final IntBinaryOperator reducer;
+        final int basis;
+        int result;
+        MapReduceValuesToIntTask<K,V> rights, nextRight;
+        MapReduceValuesToIntTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceValuesToIntTask<K,V> nextRight,
+             ToIntFunction<? super V> transformer,
+             int basis,
+             IntBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Integer getRawResult() { return result; }
+        public final void compute() {
+            final ToIntFunction<? super V> transformer;
+            final IntBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                int r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceValuesToIntTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsInt(r, transformer.applyAsInt(p.val));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceValuesToIntTask<K,V>
+                        t = (MapReduceValuesToIntTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsInt(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceEntriesToIntTask<K,V>
+        extends BulkTask<K,V,Integer> {
+        final ToIntFunction<Map.Entry<K,V>> transformer;
+        final IntBinaryOperator reducer;
+        final int basis;
+        int result;
+        MapReduceEntriesToIntTask<K,V> rights, nextRight;
+        MapReduceEntriesToIntTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceEntriesToIntTask<K,V> nextRight,
+             ToIntFunction<Map.Entry<K,V>> transformer,
+             int basis,
+             IntBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Integer getRawResult() { return result; }
+        public final void compute() {
+            final ToIntFunction<Map.Entry<K,V>> transformer;
+            final IntBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                int r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceEntriesToIntTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsInt(r, transformer.applyAsInt(p));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceEntriesToIntTask<K,V>
+                        t = (MapReduceEntriesToIntTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsInt(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    static final class MapReduceMappingsToIntTask<K,V>
+        extends BulkTask<K,V,Integer> {
+        final ToIntBiFunction<? super K, ? super V> transformer;
+        final IntBinaryOperator reducer;
+        final int basis;
+        int result;
+        MapReduceMappingsToIntTask<K,V> rights, nextRight;
+        MapReduceMappingsToIntTask
+            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
+             MapReduceMappingsToIntTask<K,V> nextRight,
+             ToIntBiFunction<? super K, ? super V> transformer,
+             int basis,
+             IntBinaryOperator reducer) {
+            super(p, b, i, f, t); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Integer getRawResult() { return result; }
+        public final void compute() {
+            final ToIntBiFunction<? super K, ? super V> transformer;
+            final IntBinaryOperator reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                int r = this.basis;
+                for (int i = baseIndex, f, h; batch > 0 &&
+                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
+                    addToPendingCount(1);
+                    (rights = new MapReduceMappingsToIntTask<K,V>
+                     (this, batch >>>= 1, baseLimit = h, f, tab,
+                      rights, transformer, r, reducer)).fork();
+                }
+                for (Node<K,V> p; (p = advance()) != null; )
+                    r = reducer.applyAsInt(r, transformer.applyAsInt((K)p.key, p.val));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceMappingsToIntTask<K,V>
+                        t = (MapReduceMappingsToIntTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.applyAsInt(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
         }
     }
 
     // Unsafe mechanics
-    private static final sun.misc.Unsafe UNSAFE;
-    private static final long SBASE;
-    private static final int SSHIFT;
-    private static final long TBASE;
-    private static final int TSHIFT;
-    private static final long HASHSEED_OFFSET;
-    private static final long SEGSHIFT_OFFSET;
-    private static final long SEGMASK_OFFSET;
-    private static final long SEGMENTS_OFFSET;
+    private static final sun.misc.Unsafe U;
+    private static final long SIZECTL;
+    private static final long TRANSFERINDEX;
+    private static final long TRANSFERORIGIN;
+    private static final long BASECOUNT;
+    private static final long CELLSBUSY;
+    private static final long CELLVALUE;
+    private static final long ABASE;
+    private static final int ASHIFT;
 
     static {
-        int ss, ts;
         try {
-            UNSAFE = sun.misc.Unsafe.getUnsafe();
-            Class<?> tc = HashEntry[].class;
-            Class<?> sc = Segment[].class;
-            TBASE = UNSAFE.arrayBaseOffset(tc);
-            SBASE = UNSAFE.arrayBaseOffset(sc);
-            ts = UNSAFE.arrayIndexScale(tc);
-            ss = UNSAFE.arrayIndexScale(sc);
-            HASHSEED_OFFSET = UNSAFE.objectFieldOffset(
-                ConcurrentHashMap.class.getDeclaredField("hashSeed"));
-            SEGSHIFT_OFFSET = UNSAFE.objectFieldOffset(
-                ConcurrentHashMap.class.getDeclaredField("segmentShift"));
-            SEGMASK_OFFSET = UNSAFE.objectFieldOffset(
-                ConcurrentHashMap.class.getDeclaredField("segmentMask"));
-            SEGMENTS_OFFSET = UNSAFE.objectFieldOffset(
-                ConcurrentHashMap.class.getDeclaredField("segments"));
+            U = sun.misc.Unsafe.getUnsafe();
+            Class<?> k = ConcurrentHashMap.class;
+            SIZECTL = U.objectFieldOffset
+                (k.getDeclaredField("sizeCtl"));
+            TRANSFERINDEX = U.objectFieldOffset
+                (k.getDeclaredField("transferIndex"));
+            TRANSFERORIGIN = U.objectFieldOffset
+                (k.getDeclaredField("transferOrigin"));
+            BASECOUNT = U.objectFieldOffset
+                (k.getDeclaredField("baseCount"));
+            CELLSBUSY = U.objectFieldOffset
+                (k.getDeclaredField("cellsBusy"));
+            Class<?> ck = Cell.class;
+            CELLVALUE = U.objectFieldOffset
+                (ck.getDeclaredField("value"));
+            Class<?> sc = Node[].class;
+            ABASE = U.arrayBaseOffset(sc);
+            int scale = U.arrayIndexScale(sc);
+            if ((scale & (scale - 1)) != 0)
+                throw new Error("data type scale not a power of two");
+            ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
         } catch (Exception e) {
             throw new Error(e);
         }
-        if ((ss & (ss-1)) != 0 || (ts & (ts-1)) != 0)
-            throw new Error("data type scale not a power of two");
-        SSHIFT = 31 - Integer.numberOfLeadingZeros(ss);
-        TSHIFT = 31 - Integer.numberOfLeadingZeros(ts);
     }
-
 }
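
The task classes above back ConcurrentHashMap's parallel bulk operations. A minimal
usage sketch follows, assuming the parallelismThreshold-based forEach/searchValues/
reduceValuesToLong methods that these tasks implement in the JDK 8 API; the class
name and sample data below are illustrative only, not part of this changeset.

    import java.util.concurrent.ConcurrentHashMap;

    public class ChmBulkOpsDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);
            map.put("c", 3);

            // ForEachTransformedMappingTask: transform each (key, value), then consume.
            map.forEach(1L, (k, v) -> k + "=" + v, System.out::println);

            // SearchValuesTask: the first non-null result wins and stops the search.
            Integer firstEven = map.searchValues(1L, v -> (v % 2 == 0) ? v : null);

            // MapReduceValuesToLongTask: map values to long, then reduce from a basis of 0.
            long sum = map.reduceValuesToLong(1L, v -> (long) v, 0L, Long::sum);

            System.out.println(firstEven + " " + sum);
        }
    }

A parallelismThreshold of 1 requests maximal parallelism; passing Long.MAX_VALUE
runs the same operations sequentially in the caller's thread.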
--- a/jdk/src/share/classes/java/util/spi/LocaleServiceProvider.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/spi/LocaleServiceProvider.java	Wed Jul 05 18:59:05 2017 +0200
@@ -128,6 +128,14 @@
  * installed SPI providers, and "JRE" represents the locale sensitive services
  * in the Java Runtime Environment, the locale sensitive services in the SPI
  * providers are looked up first.
+ * <p>
+ * There are two other possible locale sensitive service providers, namely "CLDR",
+ * which is a provider based on the Unicode Consortium's
+ * <a href="http://cldr.unicode.org/">CLDR Project</a>, and "HOST", which is a
+ * provider that reflects the user's custom settings in the underlying operating
+ * system. These two providers may not be available, depending on the Java Runtime
+ * Environment implementation. Specifying "JRE,SPI" is identical to the default
+ * behavior, which is compatible with prior releases.
  *
  * @since        1.6
  */
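
The added paragraph above concerns locale provider lookup; a brief hedged sketch of how
the provider chain is typically selected via the java.locale.providers system property
(the DateDemo class below is illustrative, and "HOST" and "CLDR" may be unavailable on
a given Java Runtime Environment, as the paragraph notes).

    // Launch with, for example:  java -Djava.locale.providers=HOST,JRE DateDemo
    import java.text.DateFormat;
    import java.util.Date;
    import java.util.Locale;

    public class DateDemo {
        public static void main(String[] args) {
            DateFormat df = DateFormat.getDateInstance(DateFormat.LONG, Locale.getDefault());
            // Output depends on which provider ("HOST", "CLDR", "SPI" or "JRE")
            // wins the lookup for the default locale.
            System.out.println(df.format(new Date()));
        }
    }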
--- a/jdk/src/share/classes/java/util/stream/DoubleStream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/stream/DoubleStream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -603,7 +603,7 @@
     /**
      * Returns an {@link OptionalDouble} describing the first element of this
      * stream (in the encounter order), or an empty {@code OptionalDouble} if
-     * the stream is empty.  If the stream has no encounter order, than any
+     * the stream is empty.  If the stream has no encounter order, then any
      * element may be returned.
      *
      * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting
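
The corrected sentence above says that without an encounter order any element may be
returned. A small sketch (illustrative class name; values chosen arbitrarily):

    import java.util.OptionalDouble;
    import java.util.stream.DoubleStream;

    public class FindFirstDoubleDemo {
        public static void main(String[] args) {
            // Ordered source: the first element in encounter order is returned.
            OptionalDouble first = DoubleStream.of(3.0, 1.0, 2.0).findFirst();
            System.out.println(first.getAsDouble());   // 3.0

            // Unordered parallel source: any of the three elements may come back.
            OptionalDouble any = DoubleStream.of(3.0, 1.0, 2.0)
                                             .parallel()
                                             .unordered()
                                             .findFirst();
            System.out.println(any.getAsDouble());
        }
    }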
--- a/jdk/src/share/classes/java/util/stream/IntStream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/stream/IntStream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -588,7 +588,7 @@
     /**
      * Returns an {@link OptionalInt} describing the first element of this
      * stream (in the encounter order), or an empty {@code OptionalInt} if the
-     * stream is empty.  If the stream has no encounter order, than any element
+     * stream is empty.  If the stream has no encounter order, then any element
      * may be returned.
      *
      * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting
--- a/jdk/src/share/classes/java/util/stream/LongStream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/stream/LongStream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -588,7 +588,7 @@
     /**
      * Returns an {@link OptionalLong} describing the first element of this
      * stream (in the encounter order), or an empty {@code OptionalLong} if the
-     * stream is empty.  If the stream has no encounter order, than any element
+     * stream is empty.  If the stream has no encounter order, then any element
      * may be returned.
      *
      * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting
--- a/jdk/src/share/classes/java/util/stream/Stream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/stream/Stream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -754,7 +754,7 @@
     /**
      * Returns an {@link Optional} describing the first element of this stream
      * (in the encounter order), or an empty {@code Optional} if the stream is
-     * empty.  If the stream has no encounter order, than any element may be
+     * empty.  If the stream has no encounter order, then any element may be
      * returned.
      *
      * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting
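
The same wording fix applies to the reference-stream variant; a sketch contrasting
findFirst with findAny (class name and data are illustrative):

    import java.util.Arrays;
    import java.util.List;
    import java.util.Optional;

    public class FindFirstDemo {
        public static void main(String[] args) {
            List<String> names = Arrays.asList("ada", "bob", "cid");

            // Ordered stream: findFirst honours the encounter order.
            Optional<String> first = names.stream().findFirst();   // "ada"

            // When the encounter order does not matter, findAny lets a parallel
            // pipeline return whichever element a worker finds first.
            Optional<String> any = names.parallelStream().findAny();

            System.out.println(first.orElse("none") + " / " + any.orElse("none"));
        }
    }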
--- a/jdk/src/share/classes/java/util/stream/StreamBuilder.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/stream/StreamBuilder.java	Wed Jul 05 18:59:05 2017 +0200
@@ -38,7 +38,7 @@
  * <p>A {@code StreamBuilder} has a lifecycle, where it starts in a building
  * phase, during which elements can be added, and then transitions to a built
  * phase, after which elements may not be added.  The built phase begins
- * when the {@link #build()}} method is called, which creates an ordered
+ * when the {@link #build()} method is called, which creates an ordered
  * {@code Stream} whose elements are the elements that were added to the stream
  * builder, in the order they were added.
  *
@@ -98,7 +98,7 @@
      * <p>A stream builder has a lifecycle, where it starts in a building
      * phase, during which elements can be added, and then transitions to a
      * built phase, after which elements may not be added.  The built phase
-     * begins when the {@link #build()}} method is called, which creates an
+     * begins when the {@link #build()} method is called, which creates an
      * ordered stream whose elements are the elements that were added to the
      * stream builder, in the order they were added.
      *
@@ -155,7 +155,7 @@
      * <p>A stream builder has a lifecycle, where it starts in a building
      * phase, during which elements can be added, and then transitions to a
      * built phase, after which elements may not be added.  The built phase
-     * begins when the {@link #build()}} method is called, which creates an
+     * begins when the {@link #build()} method is called, which creates an
      * ordered stream whose elements are the elements that were added to the
      * stream builder, in the order they were added.
      *
@@ -209,6 +209,13 @@
     /**
      * A mutable builder for a {@code DoubleStream}.
      *
+     * <p>A stream builder has a lifecycle, where it starts in a building
+     * phase, during which elements can be added, and then transitions to a
+     * built phase, after which elements may not be added.  The built phase
+     * begins when the {@link #build()} method is called, which creates an
+     * ordered stream whose elements are the elements that were added to the
+     * stream builder, in the order they were added.
+     *
      * @see LongStream#builder()
      * @since 1.8
      */
@@ -217,13 +224,6 @@
         /**
          * Adds an element to the stream being built.
          *
-         * <p>A stream builder  has a lifecycle, where it starts in a building
-         * phase, during which elements can be added, and then transitions to a
-         * built phase, after which elements may not be added.  The built phase
-         * begins when the {@link #build()}} method is called, which creates an
-         * ordered stream whose elements are the elements that were added to the
-         * stream builder, in the order they were added.
-         *
          * @throws IllegalStateException if the builder has already transitioned
          * to the built state
          */
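
For illustration, a minimal sketch of the builder lifecycle described above. In the released JDK 8 API the type surfaced as Stream.Builder (obtained via Stream.builder()) rather than the StreamBuilder name used in this snapshot, so the sketch assumes the final names:

    import java.util.stream.Stream;

    public class StreamBuilderExample {
        public static void main(String[] args) {
            Stream.Builder<String> builder = Stream.builder();
            builder.add("one").add("two").add("three");   // building phase

            Stream<String> stream = builder.build();      // transition to the built phase
            stream.forEach(System.out::println);

            try {
                builder.add("four");                      // adding after build() is rejected
            } catch (IllegalStateException expected) {
                System.out.println("builder already built");
            }
        }
    }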
--- a/jdk/src/share/classes/java/util/stream/StreamSupport.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/stream/StreamSupport.java	Wed Jul 05 18:59:05 2017 +0200
@@ -41,7 +41,11 @@
  *
  * @since 1.8
  */
-public class StreamSupport {
+public final class StreamSupport {
+
+    // Suppresses default constructor, ensuring non-instantiability.
+    private StreamSupport() {}
+
     /**
      * Creates a new sequential {@code Stream} from a {@code Spliterator}.
      *
@@ -50,7 +54,7 @@
      *
      * <p>It is strongly recommended the spliterator report a characteristic of
      * {@code IMMUTABLE} or {@code CONCURRENT}, or be
-     * <a href="Spliterator.html#binding">late-binding</a>.  Otherwise,
+     * <a href="../Spliterator.html#binding">late-binding</a>.  Otherwise,
      * {@link #stream(Supplier, int)} should be used to
      * reduce the scope of potential interference with the source.  See
      * <a href="package-summary.html#Non-Interference">Non-Interference</a> for
@@ -75,7 +79,7 @@
      *
      * <p>It is strongly recommended the spliterator report a characteristic of
      * {@code IMMUTABLE} or {@code CONCURRENT}, or be
-     * <a href="Spliterator.html#binding">late-binding</a>.  Otherwise,
+     * <a href="../Spliterator.html#binding">late-binding</a>.  Otherwise,
      * {@link #stream(Supplier, int)} should be used to
      * reduce the scope of potential interference with the source.  See
      * <a href="package-summary.html#Non-Interference">Non-Interference</a> for
@@ -102,7 +106,7 @@
      *
      * <p>For spliterators that report a characteristic of {@code IMMUTABLE}
      * or {@code CONCURRENT}, or that are
-     * <a href="Spliterator.html#binding">late-binding</a>, it is likely
+     * <a href="../Spliterator.html#binding">late-binding</a>, it is likely
      * more efficient to use {@link #stream(java.util.Spliterator)} instead.
      * The use of a {@code Supplier} in this form provides a level of
      * indirection that reduces the scope of potential interference with the
@@ -138,7 +142,7 @@
      *
      * <p>For spliterators that report a characteristic of {@code IMMUTABLE}
      * or {@code CONCURRENT}, or that are
-     * <a href="Spliterator.html#binding">late-binding</a>, it is likely
+     * <a href="../Spliterator.html#binding">late-binding</a>, it is likely
      * more efficient to use {@link #stream(Spliterator)} instead.
      * The use of a {@code Supplier} in this form provides a level of
      * indirection that reduces the scope of potential interference with the
@@ -172,7 +176,7 @@
      *
      * <p>It is strongly recommended the spliterator report a characteristic of
      * {@code IMMUTABLE} or {@code CONCURRENT}, or be
-     * <a href="Spliterator.html#binding">late-binding</a>.  Otherwise,
+     * <a href="../Spliterator.html#binding">late-binding</a>.  Otherwise,
      * {@link #stream(Supplier, int)}} should be used to
      * reduce the scope of potential interference with the source.  See
      * <a href="package-summary.html#Non-Interference">Non-Interference</a> for
@@ -195,7 +199,7 @@
      *
      * <p>It is strongly recommended the spliterator report a characteristic of
      * {@code IMMUTABLE} or {@code CONCURRENT}, or be
-     * <a href="Spliterator.html#binding">late-binding</a>.  Otherwise,
+     * <a href="../Spliterator.html#binding">late-binding</a>.  Otherwise,
      * {@link #stream(Supplier, int)}} should be used to
      * reduce the scope of potential interference with the source.  See
      * <a href="package-summary.html#Non-Interference">Non-Interference</a> for
@@ -220,7 +224,7 @@
      *
      * <p>For spliterators that report a characteristic of {@code IMMUTABLE}
      * or {@code CONCURRENT}, or that are
-     * <a href="Spliterator.html#binding">late-binding</a>, it is likely
+     * <a href="../Spliterator.html#binding">late-binding</a>, it is likely
      * more efficient to use {@link #intStream(Spliterator.OfInt)} instead.
      * The use of a {@code Supplier} in this form provides a level of
      * indirection that reduces the scope of potential interference with the
@@ -254,7 +258,7 @@
      *
      * <p>For spliterators that report a characteristic of {@code IMMUTABLE}
      * or {@code CONCURRENT}, or that are
-     * <a href="Spliterator.html#binding">late-binding</a>, it is likely
+     * <a href="../Spliterator.html#binding">late-binding</a>, it is likely
      * more efficient to use {@link #intStream(Spliterator.OfInt)} instead.
      * The use of a {@code Supplier} in this form provides a level of
      * indirection that reduces the scope of potential interference with the
@@ -286,7 +290,7 @@
      *
      * <p>It is strongly recommended the spliterator report a characteristic of
      * {@code IMMUTABLE} or {@code CONCURRENT}, or be
-     * <a href="Spliterator.html#binding">late-binding</a>.  Otherwise,
+     * <a href="../Spliterator.html#binding">late-binding</a>.  Otherwise,
      * {@link #stream(Supplier, int)} should be used to
      * reduce the scope of potential interference with the source.  See
      * <a href="package-summary.html#Non-Interference">Non-Interference</a> for
@@ -310,7 +314,7 @@
      *
      * <p>It is strongly recommended the spliterator report a characteristic of
      * {@code IMMUTABLE} or {@code CONCURRENT}, or be
-     * <a href="Spliterator.html#binding">late-binding</a>.  Otherwise,
+     * <a href="../Spliterator.html#binding">late-binding</a>.  Otherwise,
      * {@link #stream(Supplier, int)} should be used to
      * reduce the scope of potential interference with the source.  See
      * <a href="package-summary.html#Non-Interference">Non-Interference</a> for
@@ -335,7 +339,7 @@
      *
      * <p>For spliterators that report a characteristic of {@code IMMUTABLE}
      * or {@code CONCURRENT}, or that are
-     * <a href="Spliterator.html#binding">late-binding</a>, it is likely
+     * <a href="../Spliterator.html#binding">late-binding</a>, it is likely
      * more efficient to use {@link #longStream(Spliterator.OfLong)} instead.
      * The use of a {@code Supplier} in this form provides a level of
      * indirection that reduces the scope of potential interference with the
@@ -369,7 +373,7 @@
      *
      * <p>For spliterators that report a characteristic of {@code IMMUTABLE}
      * or {@code CONCURRENT}, or that are
-     * <a href="Spliterator.html#binding">late-binding</a>, it is likely
+     * <a href="../Spliterator.html#binding">late-binding</a>, it is likely
      * more efficient to use {@link #longStream(Spliterator.OfLong)} instead.
      * The use of a {@code Supplier} in this form provides a level of
      * indirection that reduces the scope of potential interference with the
@@ -402,7 +406,7 @@
      *
      * <p>It is strongly recommended the spliterator report a characteristic of
      * {@code IMMUTABLE} or {@code CONCURRENT}, or be
-     * <a href="Spliterator.html#binding">late-binding</a>.  Otherwise,
+     * <a href="../Spliterator.html#binding">late-binding</a>.  Otherwise,
      * {@link #stream(Supplier, int)} should be used to
      * reduce the scope of potential interference with the source.  See
      * <a href="package-summary.html#Non-Interference">Non-Interference</a> for
@@ -426,7 +430,7 @@
      *
      * <p>It is strongly recommended the spliterator report a characteristic of
      * {@code IMMUTABLE} or {@code CONCURRENT}, or be
-     * <a href="Spliterator.html#binding">late-binding</a>.  Otherwise,
+     * <a href="../Spliterator.html#binding">late-binding</a>.  Otherwise,
      * {@link #stream(Supplier, int)} should be used to
      * reduce the scope of potential interference with the source.  See
      * <a href="package-summary.html#Non-Interference">Non-Interference</a> for
@@ -451,7 +455,7 @@
      * <p>
      * For spliterators that report a characteristic of {@code IMMUTABLE}
      * or {@code CONCURRENT}, or that are
-     * <a href="Spliterator.html#binding">late-binding</a>, it is likely
+     * <a href="../Spliterator.html#binding">late-binding</a>, it is likely
      * more efficient to use {@link #doubleStream(Spliterator.OfDouble)} instead.
      * The use of a {@code Supplier} in this form provides a level of
      * indirection that reduces the scope of potential interference with the
@@ -485,7 +489,7 @@
      *
      * <p>For spliterators that report a characteristic of {@code IMMUTABLE}
      * or {@code CONCURRENT}, or that are
-     * <a href="Spliterator.html#binding">late-binding</a>, it is likely
+     * <a href="../Spliterator.html#binding">late-binding</a>, it is likely
      * more efficient to use {@link #doubleStream(Spliterator.OfDouble)} instead.
      * The use of a {@code Supplier} in this form provides a level of
      * indirection that reduces the scope of potential interference with the
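
The final class plus private constructor added above is the standard non-instantiability idiom for a static utility holder. A minimal sketch of the same idiom applied to a hypothetical utility class:

    public final class MathUtils {            // final: no subclasses

        // Suppresses the default constructor, ensuring non-instantiability.
        private MathUtils() {}

        public static int clamp(int value, int lo, int hi) {
            return Math.max(lo, Math.min(hi, value));
        }
    }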
--- a/jdk/src/share/classes/java/util/zip/ZipConstants.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/zip/ZipConstants.java	Wed Jul 05 18:59:05 2017 +0200
@@ -69,6 +69,14 @@
     static final int EXTLEN = 12;       // uncompressed size
 
     /*
+     * Extra field header ID
+     */
+    static final int  EXTID_ZIP64 = 0x0001;      // Zip64
+    static final int  EXTID_NTFS  = 0x000a;      // NTFS
+    static final int  EXTID_UNIX  = 0x000d;      // UNIX
+    static final int  EXTID_EXTT  = 0x5455;      // Info-ZIP Extended Timestamp
+
+    /*
      * Central directory (CEN) header field offsets
      */
     static final int CENVEM = 4;        // version made by
--- a/jdk/src/share/classes/java/util/zip/ZipEntry.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/zip/ZipEntry.java	Wed Jul 05 18:59:05 2017 +0200
@@ -25,8 +25,6 @@
 
 package java.util.zip;
 
-import java.util.Date;
-
 /**
  * This class is used to represent a ZIP file entry.
  *
@@ -35,7 +33,7 @@
 public
 class ZipEntry implements ZipConstants, Cloneable {
     String name;        // entry name
-    long time = -1;     // modification time (in DOS time)
+    long mtime = -1;    // last modification time
     long crc = -1;      // crc-32 of entry data
     long size = -1;     // uncompressed size of entry data
     long csize = -1;    // compressed size of entry data
@@ -79,7 +77,7 @@
      */
     public ZipEntry(ZipEntry e) {
         name = e.name;
-        time = e.time;
+        mtime = e.mtime;
         crc = e.crc;
         size = e.size;
         csize = e.csize;
@@ -89,7 +87,7 @@
         comment = e.comment;
     }
 
-    /*
+    /**
      * Creates a new un-initialized zip entry
      */
     ZipEntry() {}
@@ -103,22 +101,26 @@
     }
 
     /**
-     * Sets the modification time of the entry.
-     * @param time the entry modification time in number of milliseconds
-     *             since the epoch
+     * Sets the last modification time of the entry.
+     *
+     * @param time the last modification time of the entry in milliseconds since the epoch
      * @see #getTime()
      */
     public void setTime(long time) {
-        this.time = javaToDosTime(time);
+        this.mtime = time;
     }
 
     /**
-     * Returns the modification time of the entry, or -1 if not specified.
-     * @return the modification time of the entry, or -1 if not specified
+     * Returns the last modification time of the entry.
+     * <p> The last modification time may come from the entry's extensible
+     * data field ({@code NTFS} or {@code Info-ZIP Extended Timestamp}) if
+     * the entry is read from {@link ZipInputStream} or {@link ZipFile}.
+     *
+     * @return the last modification time of the entry, or -1 if not specified
      * @see #setTime(long)
      */
     public long getTime() {
-        return time != -1 ? dosToJavaTime(time) : -1;
+        return mtime;
     }
 
     /**
@@ -277,35 +279,6 @@
         return getName();
     }
 
-    /*
-     * Converts DOS time to Java time (number of milliseconds since epoch).
-     */
-    private static long dosToJavaTime(long dtime) {
-        @SuppressWarnings("deprecation") // Use of date constructor.
-        Date d = new Date((int)(((dtime >> 25) & 0x7f) + 80),
-                          (int)(((dtime >> 21) & 0x0f) - 1),
-                          (int)((dtime >> 16) & 0x1f),
-                          (int)((dtime >> 11) & 0x1f),
-                          (int)((dtime >> 5) & 0x3f),
-                          (int)((dtime << 1) & 0x3e));
-        return d.getTime();
-    }
-
-    /*
-     * Converts Java time to DOS time.
-     */
-    @SuppressWarnings("deprecation") // Use of date methods
-    private static long javaToDosTime(long time) {
-        Date d = new Date(time);
-        int year = d.getYear() + 1900;
-        if (year < 1980) {
-            return (1 << 21) | (1 << 16);
-        }
-        return (year - 1980) << 25 | (d.getMonth() + 1) << 21 |
-               d.getDate() << 16 | d.getHours() << 11 | d.getMinutes() << 5 |
-               d.getSeconds() >> 1;
-    }
-
     /**
      * Returns the hash code value for this entry.
      */
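
For illustration, a minimal sketch of the setTime()/getTime() pair on the public ZipEntry API; the exact storage of the value (and therefore its granularity) depends on the implementation snapshot, so the sketch only prints the reported time:

    import java.util.Date;
    import java.util.zip.ZipEntry;

    public class ZipEntryTimeExample {
        public static void main(String[] args) {
            ZipEntry entry = new ZipEntry("readme.txt");

            // A freshly created entry reports -1 until a time is set.
            System.out.println(entry.getTime());          // -1

            // setTime() takes milliseconds since the epoch; getTime() reports it back.
            entry.setTime(System.currentTimeMillis());
            System.out.println(new Date(entry.getTime()));
        }
    }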
--- a/jdk/src/share/classes/java/util/zip/ZipFile.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/zip/ZipFile.java	Wed Jul 05 18:59:05 2017 +0200
@@ -46,6 +46,7 @@
 import java.util.stream.StreamSupport;
 
 import static java.util.zip.ZipConstants64.*;
+import static java.util.zip.ZipUtils.*;
 
 /**
  * This class is used to read entries from a zip file.
@@ -564,12 +565,44 @@
                 e.name = zc.toString(bname, bname.length);
             }
         }
-        e.time = getEntryTime(jzentry);
         e.crc = getEntryCrc(jzentry);
         e.size = getEntrySize(jzentry);
         e. csize = getEntryCSize(jzentry);
         e.method = getEntryMethod(jzentry);
         e.extra = getEntryBytes(jzentry, JZENTRY_EXTRA);
+        if (e.extra != null) {
+            byte[] extra = e.extra;
+            int len = e.extra.length;
+            int off = 0;
+            while (off + 4 < len) {
+                int pos = off;
+                int tag = get16(extra, pos);
+                int sz = get16(extra, pos + 2);
+                pos += 4;
+                if (pos + sz > len)         // invalid data
+                    break;
+                switch (tag) {
+                case EXTID_NTFS:
+                    pos += 4;    // reserved 4 bytes
+                    if (get16(extra, pos) !=  0x0001 || get16(extra, pos + 2) != 24)
+                        break;
+                    e.mtime  = winToJavaTime(get64(extra, pos + 4));
+                    break;
+                case EXTID_EXTT:
+                    int flag = Byte.toUnsignedInt(extra[pos++]);
+                    if ((flag & 0x1) != 0) {
+                        e.mtime = unixToJavaTime(get32(extra, pos));
+                        pos += 4;
+                    }
+                    break;
+                default:    // unknown tag
+                }
+                off += (sz + 4);
+            }
+        }
+        if (e.mtime == -1) {
+            e.mtime = dosToJavaTime(getEntryTime(jzentry));
+        }
         byte[] bcomm = getEntryBytes(jzentry, JZENTRY_COMMENT);
         if (bcomm == null) {
             e.comment = null;
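
The loop added above walks the entry's extra data, which is a sequence of little-endian records of the form HeaderID(2) + DataSize(2) + data. A standalone sketch of the same record walk, written against java.nio rather than the package-private helpers (class and method names here are hypothetical):

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    public class ExtraFieldWalker {
        // Prints the header ID and data size of each record in a zip "extra" block.
        public static void dump(byte[] extra) {
            ByteBuffer buf = ByteBuffer.wrap(extra).order(ByteOrder.LITTLE_ENDIAN);
            while (buf.remaining() >= 4) {
                int tag = buf.getShort() & 0xffff;    // HeaderID(2)
                int sz  = buf.getShort() & 0xffff;    // DataSize(2)
                if (sz > buf.remaining()) {           // truncated record: stop
                    break;
                }
                System.out.printf("tag=0x%04x size=%d%n", tag, sz);
                buf.position(buf.position() + sz);    // skip the data block
            }
        }

        public static void main(String[] args) {
            // An Info-ZIP extended timestamp record (tag 0x5455) with 5 data bytes.
            byte[] extra = { 0x55, 0x54, 5, 0, 1, 0, 0, 0, 0 };
            dump(extra);
        }
    }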
--- a/jdk/src/share/classes/java/util/zip/ZipInputStream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/zip/ZipInputStream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -32,6 +32,7 @@
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import static java.util.zip.ZipConstants64.*;
+import static java.util.zip.ZipUtils.*;
 
 /**
  * This class implements an input stream filter for reading files in the
@@ -302,7 +303,7 @@
             throw new ZipException("encrypted ZIP entry not supported");
         }
         e.method = get16(tmpbuf, LOCHOW);
-        e.time = get32(tmpbuf, LOCTIM);
+        e.mtime = dosToJavaTime(get32(tmpbuf, LOCTIM));
         if ((flag & 8) == 8) {
             /* "Data Descriptor" present */
             if (e.method != DEFLATED) {
@@ -316,32 +317,51 @@
         }
         len = get16(tmpbuf, LOCEXT);
         if (len > 0) {
-            byte[] bb = new byte[len];
-            readFully(bb, 0, len);
-            e.setExtra(bb);
+            byte[] extra = new byte[len];
+            readFully(extra, 0, len);
+            e.setExtra(extra);
             // extra fields are in "HeaderID(2)DataSize(2)Data... format
-            if (e.csize == ZIP64_MAGICVAL || e.size == ZIP64_MAGICVAL) {
-                int off = 0;
-                while (off + 4 < len) {
-                    int sz = get16(bb, off + 2);
-                    if (get16(bb, off) == ZIP64_EXTID) {
-                        off += 4;
-                        // LOC extra zip64 entry MUST include BOTH original and
-                        // compressed file size fields
-                        if (sz < 16 || (off + sz) > len ) {
-                            // Invalid zip64 extra fields, simply skip. Even it's
-                            // rare, it's possible the entry size happens to be
-                            // the magic value and it "accidnetly" has some bytes
-                            // in extra match the id.
-                            return e;
-                        }
-                        e.size = get64(bb, off);
-                        e.csize = get64(bb, off + 8);
+            int off = 0;
+            while (off + 4 < len) {
+                int pos = off;
+                int tag = get16(extra, pos);
+                int sz = get16(extra, pos + 2);
+                pos += 4;
+                if (pos + sz > len)         // invalid data
+                    break;
+                switch (tag) {
+                case EXTID_ZIP64 :
+                    // LOC extra zip64 entry MUST include BOTH original and
+                    // compressed file size fields.
+                    //
+                    // If the zip64 extra fields are invalid, simply skip. Even
+                    // though it's rare, it's possible the entry size happens to
+                    // be the magic value and the extra data "accidentally"
+                    // contains bytes that match the id.
+                    if (sz >= 16 && (pos + sz) <= len ) {
+                        e.size = get64(extra, pos);
+                        e.csize = get64(extra, pos + 8);
+                    }
+                    break;
+                case EXTID_NTFS:
+                    pos += 4;    // reserved 4 bytes
+                    if (get16(extra, pos) !=  0x0001 || get16(extra, pos + 2) != 24)
                         break;
+                    // override the loc field, NTFS time has 'microsecond' granularity
+                    e.mtime  = winToJavaTime(get64(extra, pos + 4));
+                    break;
+                case EXTID_EXTT:
+                    int flag = Byte.toUnsignedInt(extra[pos++]);
+                    if ((flag & 0x1) != 0) {
+                        e.mtime = unixToJavaTime(get32(extra, pos));
+                        pos += 4;
                     }
-                    off += (sz + 4);
+                    break;
+                default:    // unknown tag
                 }
+                off += (sz + 4);
             }
+
         }
         return e;
     }
@@ -430,27 +450,4 @@
         }
     }
 
-    /*
-     * Fetches unsigned 16-bit value from byte array at specified offset.
-     * The bytes are assumed to be in Intel (little-endian) byte order.
-     */
-    private static final int get16(byte b[], int off) {
-        return Byte.toUnsignedInt(b[off]) | (Byte.toUnsignedInt(b[off+1]) << 8);
-    }
-
-    /*
-     * Fetches unsigned 32-bit value from byte array at specified offset.
-     * The bytes are assumed to be in Intel (little-endian) byte order.
-     */
-    private static final long get32(byte b[], int off) {
-        return (get16(b, off) | ((long)get16(b, off+2) << 16)) & 0xffffffffL;
-    }
-
-    /*
-     * Fetches signed 64-bit value from byte array at specified offset.
-     * The bytes are assumed to be in Intel (little-endian) byte order.
-     */
-    private static final long get64(byte b[], int off) {
-        return get32(b, off) | (get32(b, off+4) << 32);
-    }
 }
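
For illustration, a small reader sketch using the public ZipInputStream API: with the change above, getTime() reflects an NTFS or Info-ZIP extended timestamp when the entry carries one, and otherwise reports the DOS time from the local header.

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Date;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipInputStream;

    public class ListZipTimes {
        public static void main(String[] args) throws IOException {
            try (ZipInputStream zis = new ZipInputStream(new FileInputStream(args[0]))) {
                ZipEntry e;
                while ((e = zis.getNextEntry()) != null) {
                    System.out.println(e.getName() + "  " + new Date(e.getTime()));
                    zis.closeEntry();
                }
            }
        }
    }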
--- a/jdk/src/share/classes/java/util/zip/ZipOutputStream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/java/util/zip/ZipOutputStream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -32,6 +32,7 @@
 import java.util.Vector;
 import java.util.HashSet;
 import static java.util.zip.ZipConstants64.*;
+import static java.util.zip.ZipUtils.*;
 
 /**
  * This class implements an output stream filter for writing files in the
@@ -190,7 +191,7 @@
         if (current != null) {
             closeEntry();       // close previous entry
         }
-        if (e.time == -1) {
+        if (e.mtime == -1) {
             e.setTime(System.currentTimeMillis());
         }
         if (e.method == -1) {
@@ -382,16 +383,25 @@
     private void writeLOC(XEntry xentry) throws IOException {
         ZipEntry e = xentry.entry;
         int flag = e.flag;
+        boolean hasZip64 = false;
         int elen = (e.extra != null) ? e.extra.length : 0;
-        boolean hasZip64 = false;
-
+        int eoff = 0;
+        boolean foundEXTT = false;      // if EXTT already present
+                                        // do nothing.
+        while (eoff + 4 < elen) {
+            int tag = get16(e.extra, eoff);
+            int sz = get16(e.extra, eoff + 2);
+            if (tag == EXTID_EXTT) {
+                foundEXTT = true;
+            }
+            eoff += (4 + sz);
+        }
         writeInt(LOCSIG);               // LOC header signature
-
         if ((flag & 8) == 8) {
             writeShort(version(e));     // version needed to extract
             writeShort(flag);           // general purpose bit flag
             writeShort(e.method);       // compression method
-            writeInt(e.time);           // last modification time
+            writeInt(javaToDosTime(e.mtime)); // last modification time
 
             // store size, uncompressed size, and crc-32 in data descriptor
             // immediately following compressed entry data
@@ -407,7 +417,7 @@
             }
             writeShort(flag);           // general purpose bit flag
             writeShort(e.method);       // compression method
-            writeInt(e.time);           // last modification time
+            writeInt(javaToDosTime(e.mtime)); // last modification time
             writeInt(e.crc);            // crc-32
             if (hasZip64) {
                 writeInt(ZIP64_MAGICVAL);
@@ -420,6 +430,8 @@
         }
         byte[] nameBytes = zc.getBytes(e.name);
         writeShort(nameBytes.length);
+        if (!foundEXTT)
+            elen += 9;              // use Info-ZIP's ext time in extra
         writeShort(elen);
         writeBytes(nameBytes, 0, nameBytes.length);
         if (hasZip64) {
@@ -428,6 +440,12 @@
             writeLong(e.size);
             writeLong(e.csize);
         }
+        if (!foundEXTT) {
+            writeShort(EXTID_EXTT);
+            writeShort(5);          // size for the following data block
+            writeByte(0x1);         // flags byte, mtime only
+            writeInt(javaToUnixTime(e.mtime));
+        }
         if (e.extra != null) {
             writeBytes(e.extra, 0, e.extra.length);
         }
@@ -457,25 +475,25 @@
         ZipEntry e  = xentry.entry;
         int flag = e.flag;
         int version = version(e);
-
         long csize = e.csize;
         long size = e.size;
         long offset = xentry.offset;
-        int e64len = 0;
+        int elenZIP64 = 0;
         boolean hasZip64 = false;
+
         if (e.csize >= ZIP64_MAGICVAL) {
             csize = ZIP64_MAGICVAL;
-            e64len += 8;              // csize(8)
+            elenZIP64 += 8;              // csize(8)
             hasZip64 = true;
         }
         if (e.size >= ZIP64_MAGICVAL) {
             size = ZIP64_MAGICVAL;    // size(8)
-            e64len += 8;
+            elenZIP64 += 8;
             hasZip64 = true;
         }
         if (xentry.offset >= ZIP64_MAGICVAL) {
             offset = ZIP64_MAGICVAL;
-            e64len += 8;              // offset(8)
+            elenZIP64 += 8;              // offset(8)
             hasZip64 = true;
         }
         writeInt(CENSIG);           // CEN header signature
@@ -488,18 +506,32 @@
         }
         writeShort(flag);           // general purpose bit flag
         writeShort(e.method);       // compression method
-        writeInt(e.time);           // last modification time
+        writeInt(javaToDosTime(e.mtime)); // last modification time
         writeInt(e.crc);            // crc-32
         writeInt(csize);            // compressed size
         writeInt(size);             // uncompressed size
         byte[] nameBytes = zc.getBytes(e.name);
         writeShort(nameBytes.length);
+
+        int elen = (e.extra != null) ? e.extra.length : 0;
+        int eoff = 0;
+        boolean foundEXTT = false;  // if EXTT already present
+                                    // do nothing.
+        while (eoff + 4 < elen) {
+            int tag = get16(e.extra, eoff);
+            int sz = get16(e.extra, eoff + 2);
+            if (tag == EXTID_EXTT) {
+                foundEXTT = true;
+            }
+            eoff += (4 + sz);
+        }
         if (hasZip64) {
             // + headid(2) + datasize(2)
-            writeShort(e64len + 4 + (e.extra != null ? e.extra.length : 0));
-        } else {
-            writeShort(e.extra != null ? e.extra.length : 0);
+            elen += (elenZIP64 + 4);
         }
+        if (!foundEXTT)
+            elen += 9;              // Info-ZIP's Extended Timestamp
+        writeShort(elen);
         byte[] commentBytes;
         if (e.comment != null) {
             commentBytes = zc.getBytes(e.comment);
@@ -515,7 +547,7 @@
         writeBytes(nameBytes, 0, nameBytes.length);
         if (hasZip64) {
             writeShort(ZIP64_EXTID);// Zip64 extra
-            writeShort(e64len);
+            writeShort(elenZIP64);
             if (size == ZIP64_MAGICVAL)
                 writeLong(e.size);
             if (csize == ZIP64_MAGICVAL)
@@ -523,6 +555,12 @@
             if (offset == ZIP64_MAGICVAL)
                 writeLong(xentry.offset);
         }
+        if (!foundEXTT) {
+            writeShort(EXTID_EXTT);
+            writeShort(5);
+            writeByte(0x1);            // flags byte
+            writeInt(javaToUnixTime(e.mtime));
+        }
         if (e.extra != null) {
             writeBytes(e.extra, 0, e.extra.length);
         }
@@ -589,6 +627,15 @@
     }
 
     /*
+     * Writes an 8-bit byte to the output stream.
+     */
+    private void writeByte(int v) throws IOException {
+        OutputStream out = this.out;
+        out.write(v & 0xff);
+        written += 1;
+    }
+
+    /*
      * Writes a 16-bit short to the output stream in little-endian byte order.
      */
     private void writeShort(int v) throws IOException {
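
For illustration, a minimal writer sketch using the public ZipOutputStream API. With the change above, the time passed to setTime() is written as a DOS timestamp in the LOC/CEN headers and, unless the caller already supplied one, also as an Info-ZIP extended timestamp record in the extra field:

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipOutputStream;

    public class WriteZipWithTime {
        public static void main(String[] args) throws IOException {
            try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("out.zip"))) {
                ZipEntry entry = new ZipEntry("hello.txt");
                entry.setTime(System.currentTimeMillis());   // recorded in the entry headers
                zos.putNextEntry(entry);
                zos.write("hello".getBytes(StandardCharsets.UTF_8));
                zos.closeEntry();
            }
        }
    }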
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/src/share/classes/java/util/zip/ZipUtils.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package java.util.zip;
+
+import java.util.Date;
+import java.util.concurrent.TimeUnit;
+
+class ZipUtils {
+
+    // used to adjust values between Windows and java epoch
+    private static final long WINDOWS_EPOCH_IN_MICROSECONDS = -11644473600000000L;
+
+    /**
+     * Converts Windows time (in microseconds, UTC/GMT) to Java time.
+     */
+    public static final long winToJavaTime(long wtime) {
+        return TimeUnit.MILLISECONDS.convert(
+               wtime / 10 + WINDOWS_EPOCH_IN_MICROSECONDS, TimeUnit.MICROSECONDS);
+    }
+
+    /**
+     * Converts Java time to Windows time.
+     */
+    public static final long javaToWinTime(long time) {
+        return (TimeUnit.MICROSECONDS.convert(time, TimeUnit.MILLISECONDS)
+               - WINDOWS_EPOCH_IN_MICROSECONDS) * 10;
+    }
+
+    /**
+     * Converts "standard Unix time"(in seconds, UTC/GMT) to Java time
+     */
+    public static final long unixToJavaTime(long utime) {
+        return TimeUnit.MILLISECONDS.convert(utime, TimeUnit.SECONDS);
+    }
+
+    /**
+     * Converts Java time to "standard Unix time".
+     */
+    public static final long javaToUnixTime(long time) {
+        return TimeUnit.SECONDS.convert(time, TimeUnit.MILLISECONDS);
+    }
+
+    /**
+     * Converts DOS time to Java time (number of milliseconds since epoch).
+     */
+    public static long dosToJavaTime(long dtime) {
+        @SuppressWarnings("deprecation") // Use of date constructor.
+        Date d = new Date((int)(((dtime >> 25) & 0x7f) + 80),
+                          (int)(((dtime >> 21) & 0x0f) - 1),
+                          (int)((dtime >> 16) & 0x1f),
+                          (int)((dtime >> 11) & 0x1f),
+                          (int)((dtime >> 5) & 0x3f),
+                          (int)((dtime << 1) & 0x3e));
+        return d.getTime();
+    }
+
+    /**
+     * Converts Java time to DOS time.
+     */
+    @SuppressWarnings("deprecation") // Use of date methods
+    public static long javaToDosTime(long time) {
+        Date d = new Date(time);
+        int year = d.getYear() + 1900;
+        if (year < 1980) {
+            return (1 << 21) | (1 << 16);
+        }
+        return (year - 1980) << 25 | (d.getMonth() + 1) << 21 |
+               d.getDate() << 16 | d.getHours() << 11 | d.getMinutes() << 5 |
+               d.getSeconds() >> 1;
+    }
+
+
+    /**
+     * Fetches unsigned 16-bit value from byte array at specified offset.
+     * The bytes are assumed to be in Intel (little-endian) byte order.
+     */
+    public static final int get16(byte b[], int off) {
+        return Byte.toUnsignedInt(b[off]) | (Byte.toUnsignedInt(b[off+1]) << 8);
+    }
+
+    /**
+     * Fetches unsigned 32-bit value from byte array at specified offset.
+     * The bytes are assumed to be in Intel (little-endian) byte order.
+     */
+    public static final long get32(byte b[], int off) {
+        return (get16(b, off) | ((long)get16(b, off+2) << 16)) & 0xffffffffL;
+    }
+
+    /**
+     * Fetches signed 64-bit value from byte array at specified offset.
+     * The bytes are assumed to be in Intel (little-endian) byte order.
+     */
+    public static final long get64(byte b[], int off) {
+        return get32(b, off) | (get32(b, off+4) << 32);
+    }
+
+}
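
The Unix/Java conversions above are plain unit conversions. Since ZipUtils is package-private, here is a sketch reproducing the same math with java.util.concurrent.TimeUnit in ordinary user code (the method names mirror the helpers only for readability):

    import java.util.concurrent.TimeUnit;

    public class TimeConversionSketch {
        static long unixToJavaTime(long unixSeconds) {
            return TimeUnit.MILLISECONDS.convert(unixSeconds, TimeUnit.SECONDS);
        }

        static long javaToUnixTime(long javaMillis) {
            return TimeUnit.SECONDS.convert(javaMillis, TimeUnit.MILLISECONDS);
        }

        public static void main(String[] args) {
            long unix = 1_000_000_000L;                          // seconds since the epoch
            long millis = unixToJavaTime(unix);                  // 1_000_000_000_000
            System.out.println(millis);
            System.out.println(javaToUnixTime(millis) == unix);  // true
        }
    }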
--- a/jdk/src/share/classes/javax/crypto/Cipher.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/javax/crypto/Cipher.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1158,6 +1158,9 @@
      * determined from the given key, or if the given key has a keysize that
      * exceeds the maximum allowable keysize (as determined from the
      * configured jurisdiction policy files).
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the underlying {@code CipherSpi}.
      */
     public final void init(int opmode, Key key) throws InvalidKeyException {
         init(opmode, key, JceSecurity.RANDOM);
@@ -1208,6 +1211,9 @@
      * determined from the given key, or if the given key has a keysize that
      * exceeds the maximum allowable keysize (as determined from the
      * configured jurisdiction policy files).
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the underlying {@code CipherSpi}.
      */
     public final void init(int opmode, Key key, SecureRandom random)
             throws InvalidKeyException
@@ -1285,6 +1291,9 @@
      * algorithm parameters imply a cryptographic strength that would exceed
      * the legal limits (as determined from the configured jurisdiction
      * policy files).
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the underlying {@code CipherSpi}.
      */
     public final void init(int opmode, Key key, AlgorithmParameterSpec params)
             throws InvalidKeyException, InvalidAlgorithmParameterException
@@ -1343,6 +1352,9 @@
      * algorithm parameters imply a cryptographic strength that would exceed
      * the legal limits (as determined from the configured jurisdiction
      * policy files).
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the underlying {@code CipherSpi}.
      */
     public final void init(int opmode, Key key, AlgorithmParameterSpec params,
                            SecureRandom random)
@@ -1416,6 +1428,9 @@
      * algorithm parameters imply a cryptographic strength that would exceed
      * the legal limits (as determined from the configured jurisdiction
      * policy files).
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the underlying {@code CipherSpi}.
      */
     public final void init(int opmode, Key key, AlgorithmParameters params)
             throws InvalidKeyException, InvalidAlgorithmParameterException
@@ -1474,6 +1489,9 @@
      * algorithm parameters imply a cryptographic strength that would exceed
      * the legal limits (as determined from the configured jurisdiction
      * policy files).
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the underlying {@code CipherSpi}.
      */
     public final void init(int opmode, Key key, AlgorithmParameters params,
                            SecureRandom random)
@@ -1552,6 +1570,9 @@
      * in the given certificate has a keysize that exceeds the maximum
      * allowable keysize (as determined by the configured jurisdiction policy
      * files).
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the underlying {@code CipherSpi}.
      */
     public final void init(int opmode, Certificate certificate)
             throws InvalidKeyException
@@ -1619,6 +1640,9 @@
      * in the given certificate has a keysize that exceeds the maximum
      * allowable keysize (as determined by the configured jurisdiction policy
      * files).
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the underlying {@code CipherSpi}.
      */
     public final void init(int opmode, Certificate certificate,
                            SecureRandom random)
@@ -2410,6 +2434,9 @@
      * @exception InvalidKeyException if it is impossible or unsafe to
      * wrap the key with this cipher (e.g., a hardware protected key is
      * being passed to a software-only cipher).
+     *
+     * @throws UnsupportedOperationException if the corresponding method in the
+     * {@code CipherSpi} is not supported.
      */
     public final byte[] wrap(Key key)
             throws IllegalBlockSizeException, InvalidKeyException {
@@ -2451,6 +2478,9 @@
      * @exception InvalidKeyException if <code>wrappedKey</code> does not
      * represent a wrapped key of type <code>wrappedKeyType</code> for
      * the <code>wrappedKeyAlgorithm</code>.
+     *
+     * @throws UnsupportedOperationException if the corresponding method in the
+     * {@code CipherSpi} is not supported.
      */
     public final Key unwrap(byte[] wrappedKey,
                             String wrappedKeyAlgorithm,
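
For illustration, a hedged sketch of key wrapping with the public Cipher API, showing where the newly documented UnsupportedOperationException can surface if the provider's CipherSpi does not implement wrap/unwrap (the algorithm choice here is only an example):

    import java.security.Key;
    import javax.crypto.Cipher;
    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;

    public class KeyWrapExample {
        public static void main(String[] args) throws Exception {
            SecretKey kek = KeyGenerator.getInstance("AES").generateKey();  // key-encryption key
            SecretKey data = KeyGenerator.getInstance("AES").generateKey(); // key to protect

            Cipher cipher = Cipher.getInstance("AES");
            try {
                cipher.init(Cipher.WRAP_MODE, kek);
                byte[] wrapped = cipher.wrap(data);

                cipher.init(Cipher.UNWRAP_MODE, kek);
                Key unwrapped = cipher.unwrap(wrapped, "AES", Cipher.SECRET_KEY);
                System.out.println(unwrapped.getAlgorithm());
            } catch (UnsupportedOperationException e) {
                // The underlying CipherSpi does not implement wrap/unwrap.
                System.out.println("wrap/unwrap not supported by this provider");
            }
        }
    }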
--- a/jdk/src/share/classes/javax/crypto/CipherInputStream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/javax/crypto/CipherInputStream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2007, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -86,6 +86,8 @@
     private int ostart = 0;
     // the offset pointing to the last "new" byte
     private int ofinish = 0;
+    // stream status
+    private boolean closed = false;
 
     /**
      * private convenience function.
@@ -293,14 +295,17 @@
      * @since JCE1.2
      */
     public void close() throws IOException {
+        if (closed) {
+            return;
+        }
+
+        closed = true;
         input.close();
         try {
             // throw away the unprocessed data
             cipher.doFinal();
         }
-        catch (BadPaddingException ex) {
-        }
-        catch (IllegalBlockSizeException ex) {
+        catch (BadPaddingException | IllegalBlockSizeException ex) {
         }
         ostart = 0;
         ofinish = 0;
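
With the new closed flag, close() is idempotent, so an explicit close() followed by the implicit close of try-with-resources no longer reprocesses buffered data. A small decrypt-and-read sketch using the public API (the transformation is chosen only for brevity):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import javax.crypto.Cipher;
    import javax.crypto.CipherInputStream;
    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;

    public class CipherStreamExample {
        public static void main(String[] args) throws Exception {
            SecretKey key = KeyGenerator.getInstance("AES").generateKey();

            Cipher enc = Cipher.getInstance("AES");
            enc.init(Cipher.ENCRYPT_MODE, key);
            byte[] ciphertext = enc.doFinal("hello".getBytes("UTF-8"));

            Cipher dec = Cipher.getInstance("AES");
            dec.init(Cipher.DECRYPT_MODE, key);

            ByteArrayOutputStream plain = new ByteArrayOutputStream();
            try (CipherInputStream cis =
                     new CipherInputStream(new ByteArrayInputStream(ciphertext), dec)) {
                byte[] buf = new byte[64];
                int n;
                while ((n = cis.read(buf)) != -1) {
                    plain.write(buf, 0, n);
                }
                cis.close();   // redundant close is now a no-op thanks to the closed flag
            }
            System.out.println(plain.toString("UTF-8"));
        }
    }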
--- a/jdk/src/share/classes/javax/crypto/CipherOutputStream.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/javax/crypto/CipherOutputStream.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2007, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -74,6 +74,9 @@
     // the buffer holding data ready to be written out
     private byte[] obuffer;
 
+    // stream status
+    private boolean closed = false;
+
     /**
      *
      * Constructs a CipherOutputStream from an OutputStream and a
@@ -198,11 +201,14 @@
      * @since      JCE1.2
      */
     public void close() throws IOException {
+        if (closed) {
+            return;
+        }
+
+        closed = true;
         try {
             obuffer = cipher.doFinal();
-        } catch (IllegalBlockSizeException e) {
-            obuffer = null;
-        } catch (BadPaddingException e) {
+        } catch (IllegalBlockSizeException | BadPaddingException e) {
             obuffer = null;
         }
         try {
--- a/jdk/src/share/classes/javax/crypto/CipherSpi.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/javax/crypto/CipherSpi.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -347,6 +347,9 @@
      * initializing this cipher, or requires
      * algorithm parameters that cannot be
      * determined from the given key.
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the cipher.
      */
     protected abstract void engineInit(int opmode, Key key,
                                        SecureRandom random)
@@ -399,6 +402,9 @@
      * parameters are inappropriate for this cipher,
      * or if this cipher requires
      * algorithm parameters and <code>params</code> is null.
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the cipher.
      */
     protected abstract void engineInit(int opmode, Key key,
                                        AlgorithmParameterSpec params,
@@ -452,6 +458,9 @@
      * parameters are inappropriate for this cipher,
      * or if this cipher requires
      * algorithm parameters and <code>params</code> is null.
+     * @throws UnsupportedOperationException if {@code opmode} is
+     * {@code WRAP_MODE} or {@code UNWRAP_MODE} but the mode is not implemented
+     * by the cipher.
      */
     protected abstract void engineInit(int opmode, Key key,
                                        AlgorithmParameters params,
@@ -863,6 +872,8 @@
      * @exception InvalidKeyException if it is impossible or unsafe to
      * wrap the key with this cipher (e.g., a hardware protected key is
      * being passed to a software-only cipher).
+     *
+     * @throws UnsupportedOperationException if this method is not supported.
      */
     protected byte[] engineWrap(Key key)
         throws IllegalBlockSizeException, InvalidKeyException
@@ -899,6 +910,8 @@
      * @exception InvalidKeyException if <code>wrappedKey</code> does not
      * represent a wrapped key of type <code>wrappedKeyType</code> for
      * the <code>wrappedKeyAlgorithm</code>.
+     *
+     * @throws UnsupportedOperationException if this method is not supported.
      */
     protected Key engineUnwrap(byte[] wrappedKey,
                                String wrappedKeyAlgorithm,
--- a/jdk/src/share/classes/javax/swing/DefaultComboBoxModel.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/javax/swing/DefaultComboBoxModel.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1998, 2004, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -55,8 +55,7 @@
      * @param items  an array of Object objects
      */
     public DefaultComboBoxModel(final E items[]) {
-        objects = new Vector<E>();
-        objects.ensureCapacity( items.length );
+        objects = new Vector<E>(items.length);
 
         int i,c;
         for ( i=0,c=items.length;i<c;i++ )
--- a/jdk/src/share/classes/javax/swing/KeyboardManager.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/javax/swing/KeyboardManager.java	Wed Jul 05 18:59:05 2017 +0200
@@ -285,10 +285,11 @@
                  while (iter.hasMoreElements()) {
                      JMenuBar mb = (JMenuBar)iter.nextElement();
                      if ( mb.isShowing() && mb.isEnabled() ) { // don't want to give these out
-                         if( !(ks.equals(ksE)) ) {
+                         boolean extended = (ksE != null) && !ksE.equals(ks);
+                         if (extended) {
                              fireBinding(mb, ksE, e, pressed);
                          }
-                         if(ks.equals(ksE) || !e.isConsumed()) {
+                         if (!extended || !e.isConsumed()) {
                              fireBinding(mb, ks, e, pressed);
                          }
                          if (e.isConsumed()) {
--- a/jdk/src/share/classes/javax/swing/plaf/basic/BasicComboBoxUI.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/javax/swing/plaf/basic/BasicComboBoxUI.java	Wed Jul 05 18:59:05 2017 +0200
@@ -692,9 +692,9 @@
      */
     protected void installComponents() {
         arrowButton = createArrowButton();
-        comboBox.add( arrowButton );
 
         if (arrowButton != null)  {
+            comboBox.add(arrowButton);
             configureArrowButton();
         }
 
--- a/jdk/src/share/classes/javax/swing/plaf/basic/BasicTableUI.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/javax/swing/plaf/basic/BasicTableUI.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2007, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -419,7 +419,7 @@
                     }
                 }
                 else {
-                    if (!(table.getParent().getParent() instanceof
+                    if (!(SwingUtilities.getUnwrappedParent(table).getParent() instanceof
                             JScrollPane)) {
                         return;
                     }
@@ -1431,7 +1431,7 @@
         }
 
         // install the scrollpane border
-        Container parent = table.getParent();  // should be viewport
+        Container parent = SwingUtilities.getUnwrappedParent(table);  // should be viewport
         if (parent != null) {
             parent = parent.getParent();  // should be the scrollpane
             if (parent != null && parent instanceof JScrollPane) {
--- a/jdk/src/share/classes/sun/awt/AWTAccessor.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/awt/AWTAccessor.java	Wed Jul 05 18:59:05 2017 +0200
@@ -484,6 +484,11 @@
          */
         void invokeAndWait(Object source, Runnable r)
             throws InterruptedException, InvocationTargetException;
+
+        /**
+         * Sets the delegate for the EventQueue used by the FX/AWT single-threaded mode.
+         */
+        public void setFwDispatcher(EventQueue eventQueue, FwDispatcher dispatcher);
     }
 
     /*
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/src/share/classes/sun/awt/FwDispatcher.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package sun.awt;
+
+import java.awt.*;
+
+/**
+ * An interface for the EventQueue delegate.
+ * This interface is added to support the JavaFX/AWT interop single-threaded mode.
+ * The delegate should be set in EventQueue by {@link EventQueue#setFwDispatcher(FwDispatcher)}.
+ * If the delegate is not null, then it handles supported methods instead of the
+ * event queue. If it is null, then the behaviour of the event queue does not change.
+ *
+ * @see EventQueue
+ *
+ * @author Petr Pchelko
+ *
+ * @since 1.8
+ */
+public interface FwDispatcher {
+    /**
+     * Delegates the {@link EventQueue#isDispatchThread()} method
+     */
+    boolean isDispatchThread();
+
+    /**
+     * Forwards a runnable to the delegate, which executes it on an appropriate thread.
+     * @param r - a runnable calling {@link EventQueue#dispatchEventImpl(java.awt.AWTEvent, Object)}
+     */
+    void scheduleDispatch(Runnable r);
+
+    /**
+     * Delegates the {@link java.awt.EventQueue#createSecondaryLoop()} method
+     */
+    SecondaryLoop createSecondaryLoop();
+}
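
For illustration, a hypothetical FwDispatcher implementation that dispatches on one dedicated worker thread; this is only a sketch of the contract, not how JavaFX wires it up, and the secondary loop simply falls back to the system EventQueue:

    import java.awt.EventQueue;
    import java.awt.SecondaryLoop;
    import java.awt.Toolkit;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import sun.awt.FwDispatcher;

    // Hypothetical single-threaded dispatcher, for illustration only.
    public class SingleThreadFwDispatcher implements FwDispatcher {

        private final ExecutorService executor = Executors.newSingleThreadExecutor();
        private volatile Thread dispatchThread;

        public SingleThreadFwDispatcher() {
            // Capture the worker thread so isDispatchThread() can compare against it.
            executor.submit(() -> { dispatchThread = Thread.currentThread(); });
        }

        @Override
        public boolean isDispatchThread() {
            return Thread.currentThread() == dispatchThread;
        }

        @Override
        public void scheduleDispatch(Runnable r) {
            executor.submit(r);            // run the forwarded dispatch on the worker thread
        }

        @Override
        public SecondaryLoop createSecondaryLoop() {
            // Fall back to the system event queue's secondary loop support.
            EventQueue eq = Toolkit.getDefaultToolkit().getSystemEventQueue();
            return eq.createSecondaryLoop();
        }
    }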
--- a/jdk/src/share/classes/sun/java2d/loops/MaskFill.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/java2d/loops/MaskFill.java	Wed Jul 05 18:59:05 2017 +0200
@@ -36,6 +36,7 @@
 import sun.java2d.loops.GraphicsPrimitive;
 import sun.java2d.SunGraphics2D;
 import sun.java2d.SurfaceData;
+import sun.java2d.pipe.Region;
 
 /**
  * MaskFill
@@ -194,10 +195,13 @@
             // REMIND: This is not pretty.  It would be nicer if we
             // passed a "FillData" object to the Pixel loops, instead
             // of a SunGraphics2D parameter...
+            Region clip = sg2d.clipRegion;
+            sg2d.clipRegion = null;
             int pixel = sg2d.pixel;
             sg2d.pixel = tmpData.pixelFor(sg2d.getColor());
             fillop.FillRect(sg2d, tmpData, 0, 0, w, h);
             sg2d.pixel = pixel;
+            sg2d.clipRegion = clip;
 
             maskop.MaskBlit(tmpData, sData, comp, null,
                             0, 0, x, y, w, h,
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/src/share/classes/sun/management/DiagnosticCommandArgumentInfo.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,159 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package sun.management;
+
+/**
+ * Diagnostic Command Argument information. It contains the description
+ * of one parameter of the diagnostic command. A parameter can either be an
+ * option or an argument. Options are identified by the option name while
+ * arguments are identified by their position in the command line. The generic
+ * syntax of a diagnostic command is:
+ *  <blockquote>
+ *    &lt;command name&gt; [&lt;option&gt;=&lt;value&gt;] [&lt;argument_value&gt;]
+ * </blockquote>
+ * Example:
+ * <blockquote>
+ * command_name option1=value1 option2=value argumentA argumentB argumentC
+ * </blockquote>
+ * In this command line, the diagnostic command receives five parameters, two
+ * options named {@code option1} and {@code option2}, and three arguments.
+ * argumentA's position is 0, argumentB's position is 1 and argumentC's
+ * position is 2.
+ *
+ * @since 8
+ */
+
+class DiagnosticCommandArgumentInfo {
+    private final String name;
+    private final String description;
+    private final String type;
+    private final String defaultValue;
+    private final boolean mandatory;
+    private final boolean option;
+    private final boolean multiple;
+    private final int position;
+
+    /**
+     * Returns the argument name.
+     *
+     * @return the argument name
+     */
+    String getName() {
+        return name;
+    }
+
+    /**
+     * Returns the argument description.
+     *
+     * @return the argument description
+     */
+    String getDescription() {
+        return description;
+    }
+
+    /**
+     * Returns the argument type.
+     *
+     * @return the argument type
+     */
+    String getType() {
+        return type;
+    }
+
+    /**
+     * Returns the default value as a String if a default value
+     * is defined, null otherwise.
+     *
+     * @return the default value as a String if a default value
+     * is defined, null otherwise.
+     */
+    String getDefault() {
+        return defaultValue;
+    }
+
+    /**
+     * Returns {@code true} if the argument is mandatory,
+     *         {@code false} otherwise.
+     *
+     * @return {@code true} if the argument is mandatory,
+     *         {@code false} otherwise
+     */
+    boolean isMandatory() {
+        return mandatory;
+    }
+
+    /**
+     * Returns {@code true} if the argument is an option,
+     *         {@code false} otherwise. Options have to be specified using the
+     *         &lt;key&gt;=&lt;value&gt; syntax on the command line, while other
+     *         arguments are specified with a single &lt;value&gt; field and are
+     *         identified by their position on command line.
+     *
+     * @return {@code true} if the argument is an option,
+     *         {@code false} otherwise
+     */
+    boolean isOption() {
+        return option;
+    }
+
+    /**
+     * Returns {@code true} if the argument can be specified multiple times,
+     *         {@code false} otherwise.
+     *
+     * @return {@code true} if the argument can be specified multiple times,
+     *         {@code false} otherwise
+     */
+    boolean isMultiple() {
+        return multiple;
+    }
+
+    /**
+     * Returns the expected position of this argument if it is not an option,
+     *         -1 otherwise. Argument position if defined from left to right,
+     *         starting at zero and ignoring the diagnostic command name and
+     *         options.
+     *
+     * @return the expected position of this argument if it is not an option,
+     *         -1 otherwise.
+     */
+    int getPosition() {
+        return position;
+    }
+
+    DiagnosticCommandArgumentInfo(String name, String description,
+                                         String type, String defaultValue,
+                                         boolean mandatory, boolean option,
+                                         boolean multiple, int position) {
+        this.name = name;
+        this.description = description;
+        this.type = type;
+        this.defaultValue = defaultValue;
+        this.mandatory = mandatory;
+        this.option = option;
+        this.multiple = multiple;
+        this.position = position;
+    }
+}
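
For illustration, a sketch of how this metadata might be populated for one option and one positional argument. The class is package-private, so this would only compile inside sun.management, and the field values are made up for the example:

    package sun.management;

    class DiagnosticCommandArgumentInfoExample {

        static DiagnosticCommandArgumentInfo sampleOption() {
            return new DiagnosticCommandArgumentInfo(
                    "output",                          // name
                    "File to which the output is written",
                    "STRING",                          // type
                    null,                              // no default value
                    false,                             // not mandatory
                    true,                              // an option (name=value syntax)
                    false,                             // cannot be repeated
                    -1);                               // options carry no position
        }

        static DiagnosticCommandArgumentInfo sampleArgument() {
            return new DiagnosticCommandArgumentInfo(
                    "filename", "Name of the dump file", "STRING", null,
                    true,                              // mandatory
                    false,                             // positional argument
                    false,                             // single-valued
                    0);                                // position 0 (first argument)
        }
    }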
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/src/share/classes/sun/management/DiagnosticCommandImpl.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,380 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package sun.management;
+
+import com.sun.management.DiagnosticCommandMBean;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.security.Permission;
+import java.util.*;
+import javax.management.*;
+
+/**
+ * Implementation class for the diagnostic commands subsystem.
+ *
+ * @since 8
+ */
+class DiagnosticCommandImpl extends NotificationEmitterSupport
+    implements DiagnosticCommandMBean {
+
+    private final VMManagement jvm;
+    private volatile Map<String, Wrapper> wrappers = null;
+    private static final String strClassName = "".getClass().getName();
+    private static final String strArrayClassName = String[].class.getName();
+    private final boolean isSupported;
+
+    @Override
+    public Object getAttribute(String attribute) throws AttributeNotFoundException,
+        MBeanException, ReflectionException {
+        throw new AttributeNotFoundException(attribute);
+    }
+
+    @Override
+    public void setAttribute(Attribute attribute) throws AttributeNotFoundException,
+        InvalidAttributeValueException, MBeanException, ReflectionException {
+        throw new AttributeNotFoundException(attribute.getName());
+    }
+
+    @Override
+    public AttributeList getAttributes(String[] attributes) {
+        return new AttributeList();
+    }
+
+    @Override
+    public AttributeList setAttributes(AttributeList attributes) {
+        return new AttributeList();
+    }
+
+    private class Wrapper {
+
+        String name;
+        String cmd;
+        DiagnosticCommandInfo info;
+        Permission permission;
+
+        Wrapper(String name, String cmd, DiagnosticCommandInfo info)
+                throws InstantiationException {
+            this.name = name;
+            this.cmd = cmd;
+            this.info = info;
+            this.permission = null;
+            Exception cause = null;
+            if (info.getPermissionClass() != null) {
+                try {
+                    Class c = Class.forName(info.getPermissionClass());
+                    if (info.getPermissionAction() == null) {
+                        try {
+                            Constructor constructor = c.getConstructor(String.class);
+                            permission = (Permission) constructor.newInstance(info.getPermissionName());
+
+                        } catch (InstantiationException | IllegalAccessException
+                                | IllegalArgumentException | InvocationTargetException
+                                | NoSuchMethodException | SecurityException ex) {
+                            cause = ex;
+                        }
+                    }
+                    if (permission == null) {
+                        try {
+                            Constructor constructor = c.getConstructor(String.class, String.class);
+                            permission = (Permission) constructor.newInstance(
+                                    info.getPermissionName(),
+                                    info.getPermissionAction());
+                        } catch (InstantiationException | IllegalAccessException
+                                | IllegalArgumentException | InvocationTargetException
+                                | NoSuchMethodException | SecurityException ex) {
+                            cause = ex;
+                        }
+                    }
+                } catch (ClassNotFoundException ex) { }
+                if (permission == null) {
+                    InstantiationException iex =
+                            new InstantiationException("Unable to instantiate required permission");
+                    iex.initCause(cause);
+                    throw iex;
+                }
+            }
+        }
+
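+        // For example, on the wrapper for "GC.class_histogram",
+        // execute(new String[] {"-all=true"}) submits the command line
+        // "GC.class_histogram -all=true" to the diagnostic command framework;
+        // a null array element results in an IllegalArgumentException.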
+        public String execute(String[] args) {
+            if (permission != null) {
+                SecurityManager sm = System.getSecurityManager();
+                if (sm != null) {
+                    sm.checkPermission(permission);
+                }
+            }
+            if(args == null) {
+                return executeDiagnosticCommand(cmd);
+            } else {
+                StringBuilder sb = new StringBuilder();
+                sb.append(cmd);
+                for(int i=0; i<args.length; i++) {
+                    if(args[i] == null) {
+                        throw new IllegalArgumentException("Invalid null argument");
+                    }
+                    sb.append(" ");
+                    sb.append(args[i]);
+                }
+                return executeDiagnosticCommand(sb.toString());
+            }
+        }
+    }
+
+    DiagnosticCommandImpl(VMManagement jvm) {
+        this.jvm = jvm;
+        isSupported = jvm.isRemoteDiagnosticCommandsSupported();
+    }
+
+    private static class OperationInfoComparator implements Comparator<MBeanOperationInfo> {
+        @Override
+        public int compare(MBeanOperationInfo o1, MBeanOperationInfo o2) {
+            return o1.getName().compareTo(o2.getName());
+        }
+    }
+
+    @Override
+    public MBeanInfo getMBeanInfo() {
+        SortedSet<MBeanOperationInfo> operations = new TreeSet<>(new OperationInfoComparator());
+        Map<String, Wrapper> wrappersmap;
+        if (!isSupported) {
+            wrappersmap = (Map<String, Wrapper>) Collections.EMPTY_MAP;
+        } else {
+            try {
+                String[] command = getDiagnosticCommands();
+                DiagnosticCommandInfo[] info = getDiagnosticCommandInfo(command);
+                MBeanParameterInfo stringArgInfo[] = new MBeanParameterInfo[]{
+                    new MBeanParameterInfo("arguments", strArrayClassName,
+                    "Array of Diagnostic Commands Arguments and Options")
+                };
+                wrappersmap = new HashMap<>();
+                for (int i = 0; i < command.length; i++) {
+                    String name = transform(command[i]);
+                    try {
+                        Wrapper w = new Wrapper(name, command[i], info[i]);
+                        wrappersmap.put(name, w);
+                        operations.add(new MBeanOperationInfo(
+                                w.name,
+                                w.info.getDescription(),
+                                (w.info.getArgumentsInfo() == null
+                                    || w.info.getArgumentsInfo().isEmpty())
+                                    ? null : stringArgInfo,
+                                strClassName,
+                                MBeanOperationInfo.ACTION_INFO,
+                                commandDescriptor(w)));
+                    } catch (InstantiationException ex) {
+                        // If for some reason the creation of a diagnostic command
+                        // wrapper fails, the diagnostic command is just ignored
+                        // and won't appear in the DynamicMBean
+                    }
+                }
+            } catch (IllegalArgumentException | UnsupportedOperationException e) {
+                wrappersmap = (Map<String, Wrapper>) Collections.EMPTY_MAP;
+            }
+        }
+        wrappers =  Collections.unmodifiableMap(wrappersmap);
+        HashMap<String, Object> map = new HashMap<>();
+        map.put("immutableInfo", "false");
+        map.put("interfaceClassName","com.sun.management.DiagnosticCommandMBean");
+        map.put("mxbean", "false");
+        Descriptor desc = new ImmutableDescriptor(map);
+        return new MBeanInfo(
+                this.getClass().getName(),
+                "Diagnostic Commands",
+                null, // attributes
+                null, // constructors
+                operations.toArray(new MBeanOperationInfo[operations.size()]), // operations
+                getNotificationInfo(), // notifications
+                desc);
+    }
+
+    @Override
+    public Object invoke(String actionName, Object[] params, String[] signature)
+            throws MBeanException, ReflectionException {
+        if (!isSupported) {
+            throw new UnsupportedOperationException();
+        }
+        if (wrappers == null) {
+            getMBeanInfo();
+        }
+        Wrapper w = wrappers.get(actionName);
+        if (w != null) {
+            if (w.info.getArgumentsInfo().isEmpty()
+                    && (params == null || params.length == 0)
+                    && (signature == null || signature.length == 0)) {
+                return w.execute(null);
+            } else if((params != null && params.length == 1)
+                    && (signature != null && signature.length == 1
+                    && signature[0] != null
+                    && signature[0].compareTo(strArrayClassName) == 0)) {
+                return w.execute((String[]) params[0]);
+            }
+        }
+        throw new ReflectionException(new NoSuchMethodException(actionName));
+    }
+
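+    // Maps a diagnostic command name to an MBean operation name: characters
+    // before the first '.' or '_' are lower-cased, the separators themselves
+    // are dropped, and the character following each separator is upper-cased;
+    // for example, "GC.class_histogram" becomes "gcClassHistogram".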
+    private static String transform(String name) {
+        StringBuilder sb = new StringBuilder();
+        boolean toLower = true;
+        boolean toUpper = false;
+        for (int i = 0; i < name.length(); i++) {
+            char c = name.charAt(i);
+            if (c == '.' || c == '_') {
+                toLower = false;
+                toUpper = true;
+            } else {
+                if (toUpper) {
+                    toUpper = false;
+                    sb.append(Character.toUpperCase(c));
+                } else if(toLower) {
+                    sb.append(Character.toLowerCase(c));
+                } else {
+                    sb.append(c);
+                }
+            }
+        }
+        return sb.toString();
+    }
+
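+    // Builds the per-operation descriptor: command metadata is exposed under
+    // "dcmd.*" keys and, when the command takes arguments, "dcmd.arguments"
+    // holds a nested ImmutableDescriptor keyed by argument name, each value
+    // being an ImmutableDescriptor of "dcmd.arg.*" fields.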
+    private Descriptor commandDescriptor(Wrapper w) throws IllegalArgumentException {
+        HashMap<String, Object> map = new HashMap<>();
+        map.put("dcmd.name", w.info.getName());
+        map.put("dcmd.description", w.info.getDescription());
+        map.put("dcmd.vmImpact", w.info.getImpact());
+        map.put("dcmd.permissionClass", w.info.getPermissionClass());
+        map.put("dcmd.permissionName", w.info.getPermissionName());
+        map.put("dcmd.permissionAction", w.info.getPermissionAction());
+        map.put("dcmd.enabled", w.info.isEnabled());
+        StringBuilder sb = new StringBuilder();
+        sb.append("help ");
+        sb.append(w.info.getName());
+        map.put("dcmd.help", executeDiagnosticCommand(sb.toString()));
+        if (w.info.getArgumentsInfo() != null && !w.info.getArgumentsInfo().isEmpty()) {
+            HashMap<String, Object> allargmap = new HashMap<>();
+            for (DiagnosticCommandArgumentInfo arginfo : w.info.getArgumentsInfo()) {
+                HashMap<String, Object> argmap = new HashMap<>();
+                argmap.put("dcmd.arg.name", arginfo.getName());
+                argmap.put("dcmd.arg.type", arginfo.getType());
+                argmap.put("dcmd.arg.description", arginfo.getDescription());
+                argmap.put("dcmd.arg.isMandatory", arginfo.isMandatory());
+                argmap.put("dcmd.arg.isMultiple", arginfo.isMultiple());
+                boolean isOption = arginfo.isOption();
+                argmap.put("dcmd.arg.isOption", isOption);
+                if(!isOption) {
+                    argmap.put("dcmd.arg.position", arginfo.getPosition());
+                } else {
+                    argmap.put("dcmd.arg.position", -1);
+                }
+                allargmap.put(arginfo.getName(), new ImmutableDescriptor(argmap));
+            }
+            map.put("dcmd.arguments", new ImmutableDescriptor(allargmap));
+        }
+        return new ImmutableDescriptor(map);
+    }
+
+    private final static String notifName =
+        "javax.management.Notification";
+
+    private final static String[] diagFramNotifTypes = {
+        "jmx.mbean.info.changed"
+    };
+
+    private MBeanNotificationInfo[] notifInfo = null;
+
+    @Override
+    public MBeanNotificationInfo[] getNotificationInfo() {
+        synchronized (this) {
+            if (notifInfo == null) {
+                 notifInfo = new MBeanNotificationInfo[1];
+                 notifInfo[0] =
+                         new MBeanNotificationInfo(diagFramNotifTypes,
+                                                   notifName,
+                                                   "Diagnostic Framework Notification");
+            }
+        }
+        return notifInfo;
+    }
+
+    private static long seqNumber = 0;
+    private static long getNextSeqNumber() {
+        return ++seqNumber;
+    }
+
+    private void createDiagnosticFrameworkNotification() {
+
+        if (!hasListeners()) {
+            return;
+        }
+        ObjectName on = null;
+        try {
+            on = ObjectName.getInstance(ManagementFactoryHelper.HOTSPOT_DIAGNOSTIC_COMMAND_MBEAN_NAME);
+        } catch (MalformedObjectNameException e) { }
+        Notification notif = new Notification("jmx.mbean.info.changed",
+                                              on,
+                                              getNextSeqNumber());
+        notif.setUserData(getMBeanInfo());
+        sendNotification(notif);
+    }
+
+    @Override
+    public synchronized void addNotificationListener(NotificationListener listener,
+            NotificationFilter filter,
+            Object handback) {
+        boolean before = hasListeners();
+        super.addNotificationListener(listener, filter, handback);
+        boolean after = hasListeners();
+        if (!before && after) {
+            setNotificationEnabled(true);
+        }
+    }
+
+    @Override
+    public synchronized void removeNotificationListener(NotificationListener listener)
+            throws ListenerNotFoundException {
+        boolean before = hasListeners();
+        super.removeNotificationListener(listener);
+        boolean after = hasListeners();
+        if (before && !after) {
+            setNotificationEnabled(false);
+        }
+    }
+
+    @Override
+    public synchronized void removeNotificationListener(NotificationListener listener,
+            NotificationFilter filter,
+            Object handback)
+            throws ListenerNotFoundException {
+        boolean before = hasListeners();
+        super.removeNotificationListener(listener, filter, handback);
+        boolean after = hasListeners();
+        if (before && !after) {
+            setNotificationEnabled(false);
+        }
+    }
+
+    private native void setNotificationEnabled(boolean enabled);
+    private native String[] getDiagnosticCommands();
+    private native DiagnosticCommandInfo[] getDiagnosticCommandInfo(String[] commands);
+    private native String executeDiagnosticCommand(String command);
+
+}
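Once registered under com.sun.management:type=DiagnosticCommand (see the ManagementFactoryHelper change below), each diagnostic command surfaces as an MBean operation taking an optional String[] of arguments. A minimal client-side sketch, assuming the target VM supports remote diagnostic commands; the operation name and option are illustrative:

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class DcmdInvokeSketch {
        public static void main(String[] args) throws Exception {
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            ObjectName name = new ObjectName("com.sun.management:type=DiagnosticCommand");
            // Single parameter: the String[] of command arguments/options.
            String histogram = (String) mbs.invoke(name, "gcClassHistogram",
                    new Object[] { new String[] { "-all=true" } },
                    new String[] { String[].class.getName() });
            System.out.println(histogram);
        }
    }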
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/src/share/classes/sun/management/DiagnosticCommandInfo.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package sun.management;
+
+import java.util.List;
+
+/**
+ * Diagnostic command information. It contains the description of a
+ * diagnostic command.
+ *
+ * @since 8
+ */
+
+class DiagnosticCommandInfo {
+    private final String name;
+    private final String description;
+    private final String impact;
+    private final String permissionClass;
+    private final String permissionName;
+    private final String permissionAction;
+    private final boolean enabled;
+    private final List<DiagnosticCommandArgumentInfo> arguments;
+
+    /**
+     * Returns the diagnostic command name.
+     *
+     * @return the diagnostic command name
+     */
+    String getName() {
+        return name;
+    }
+
+    /**
+     * Returns the diagnostic command description.
+     *
+     * @return the diagnostic command description
+     */
+    String getDescription() {
+        return description;
+    }
+
+    /**
+     * Returns the potential impact of the diagnostic command execution
+     *         on the Java virtual machine behavior.
+     *
+     * @return the potential impact of the diagnostic command execution
+     *         on the Java virtual machine behavior
+     */
+    String getImpact() {
+        return impact;
+    }
+
+    /**
+     * Returns the name of the permission class required to be allowed
+     *         to invoke the diagnostic command, or null if no permission
+     *         is required.
+     *
+     * @return the name of the permission class required to be allowed
+     *         to invoke the diagnostic command, or null if no permission
+     *         is required
+     */
+    String getPermissionClass() {
+        return permissionClass;
+    }
+
+    /**
+     * Returns the permission name required to be allowed to invoke the
+     *         diagnostic command, or null if no permission is required.
+     *
+     * @return the permission name required to be allowed to invoke the
+     *         diagnostic command, or null if no permission is required
+     */
+    String getPermissionName() {
+        return permissionName;
+    }
+
+    /**
+     * Returns the permission action required to be allowed to invoke the
+     *         diagnostic command, or null if no permission is required or
+     *         if the permission has no action specified.
+     *
+     * @return the permission action required to be allowed to invoke the
+     *         diagnostic command, or null if no permission is required or
+     *         if the permission has no action specified
+     */
+    String getPermissionAction() {
+        return permissionAction;
+    }
+
+    /**
+     * Returns {@code true} if the diagnostic command is enabled,
+     *         {@code false} otherwise. The enabled/disabled
+     *         status of a diagnostic command can evolve during
+     *         the lifetime of the Java virtual machine.
+     *
+     * @return {@code true} if the diagnostic command is enabled,
+     *         {@code false} otherwise
+     */
+    boolean isEnabled() {
+        return enabled;
+    }
+
+    /**
+     * Returns the list of the diagnostic command argument descriptions.
+     * If the diagnostic command has no arguments, it returns an empty list.
+     *
+     * @return the list of the diagnostic command argument descriptions
+     */
+    List<DiagnosticCommandArgumentInfo> getArgumentsInfo() {
+        return arguments;
+    }
+
+    DiagnosticCommandInfo(String name, String description,
+                                    String impact, String permissionClass,
+                                    String permissionName, String permissionAction,
+                                    boolean enabled,
+                                    List<DiagnosticCommandArgumentInfo> arguments)
+    {
+        this.name = name;
+        this.description = description;
+        this.impact = impact;
+        this.permissionClass = permissionClass;
+        this.permissionName = permissionName;
+        this.permissionAction = permissionAction;
+        this.enabled = enabled;
+        this.arguments = arguments;
+    }
+}
--- a/jdk/src/share/classes/sun/management/ManagementFactoryHelper.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/management/ManagementFactoryHelper.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -27,6 +27,7 @@
 
 import java.lang.management.*;
 
+import javax.management.DynamicMBean;
 import javax.management.InstanceAlreadyExistsException;
 import javax.management.InstanceNotFoundException;
 import javax.management.MBeanServer;
@@ -42,7 +43,9 @@
 
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import com.sun.management.DiagnosticCommandMBean;
 import com.sun.management.OSMBeanFactory;
 import com.sun.management.HotSpotDiagnosticMXBean;
 
@@ -263,6 +266,7 @@
     private static HotspotThread hsThreadMBean = null;
     private static HotspotCompilation hsCompileMBean = null;
     private static HotspotMemory hsMemoryMBean = null;
+    private static DiagnosticCommandImpl hsDiagCommandMBean = null;
 
     public static synchronized HotSpotDiagnosticMXBean getDiagnosticMXBean() {
         if (hsDiagMBean == null) {
@@ -311,6 +315,14 @@
         return hsMemoryMBean;
     }
 
+    public static synchronized DiagnosticCommandMBean getDiagnosticCommandMBean() {
+        // Remote Diagnostic Commands may not be supported
+        if (hsDiagCommandMBean == null && jvm.isRemoteDiagnosticCommandsSupported()) {
+            hsDiagCommandMBean = new DiagnosticCommandImpl(jvm);
+        }
+        return hsDiagCommandMBean;
+    }
+
     /**
      * This method is for testing only.
      */
@@ -365,6 +377,18 @@
     private final static String HOTSPOT_THREAD_MBEAN_NAME =
         "sun.management:type=HotspotThreading";
 
+    final static String HOTSPOT_DIAGNOSTIC_COMMAND_MBEAN_NAME =
+        "com.sun.management:type=DiagnosticCommand";
+
+    public static HashMap<ObjectName, DynamicMBean> getPlatformDynamicMBeans() {
+        HashMap<ObjectName, DynamicMBean> map = new HashMap<>();
+        DiagnosticCommandMBean diagMBean = getDiagnosticCommandMBean();
+        if (diagMBean != null) {
+            map.put(Util.newObjectName(HOTSPOT_DIAGNOSTIC_COMMAND_MBEAN_NAME), diagMBean);
+        }
+        return map;
+    }
+
     static void registerInternalMBeans(MBeanServer mbs) {
         // register all internal MBeans if not registered
         // No exception is thrown if a MBean with that object name
--- a/jdk/src/share/classes/sun/management/VMManagement.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/management/VMManagement.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -46,6 +46,7 @@
     public boolean isThreadAllocatedMemorySupported();
     public boolean isThreadAllocatedMemoryEnabled();
     public boolean isGcNotificationSupported();
+    public boolean isRemoteDiagnosticCommandsSupported();
 
     // Class Loading Subsystem
     public long    getTotalClassCount();
--- a/jdk/src/share/classes/sun/management/VMManagementImpl.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/management/VMManagementImpl.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -57,6 +57,7 @@
     private static boolean synchronizerUsageSupport;
     private static boolean threadAllocatedMemorySupport;
     private static boolean gcNotificationSupport;
+    private static boolean remoteDiagnosticCommandsSupport;
 
 
     static {
@@ -106,6 +107,10 @@
         return gcNotificationSupport;
     }
 
+    public boolean isRemoteDiagnosticCommandsSupported() {
+        return remoteDiagnosticCommandsSupport;
+    }
+
     public native boolean isThreadContentionMonitoringEnabled();
     public native boolean isThreadCpuTimeEnabled();
     public native boolean isThreadAllocatedMemoryEnabled();
--- a/jdk/src/share/classes/sun/management/jdp/JdpPacketWriter.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/management/jdp/JdpPacketWriter.java	Wed Jul 05 18:59:05 2017 +0200
@@ -60,9 +60,12 @@
      */
     public void addEntry(String entry)
             throws IOException {
-        pkt.writeShort(entry.length());
-        byte[] b = entry.getBytes("UTF-8");
-        pkt.write(b);
+        /* DataOutputStream.writeUTF() does essentially
+         *  the same as:
+         *    pkt.writeShort(entry.getBytes("UTF-8").length);
+         *    pkt.write(entry.getBytes("UTF-8"));
+         */
+        pkt.writeUTF(entry);
     }
 
     /**
--- a/jdk/src/share/classes/sun/misc/Contended.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/misc/Contended.java	Wed Jul 05 18:59:05 2017 +0200
@@ -31,7 +31,42 @@
 import java.lang.annotation.Target;
 
 /**
- * This annotation marks classes and fields as considered to be contended.
+ * <p>An annotation expressing that objects and/or their fields are
+ * expected to encounter memory contention, generally in the form of
+ * "false sharing". This annotation serves as a hint that such objects
+ * and fields should reside in locations isolated from those of other
+ * objects or fields. Susceptibility to memory contention is a
+ * property of the intended usages of objects and fields, not their
+ * types or qualifiers. The effects of this annotation will nearly
+ * always add significant space overhead to objects. The use of
+ * {@code @Contended} is warranted only when the performance impact of
+ * this time/space tradeoff is intrinsically worthwhile; for example,
+ * in concurrent contexts in which each instance of the annotated
+ * class is often accessed by a different thread.
+ *
+ * <p>A {@code @Contended} field annotation may optionally include a
+ * <i>contention group</i> tag. A contention group defines a set of one
+ * or more fields that collectively must be isolated from all other
+ * contention groups. The fields in the same contention group need not be
+ * pairwise isolated. With no contention group tag (or with the default
+ * empty tag: "") each {@code @Contended} field resides in its own
+ * <i>distinct</i> and <i>anonymous</i> contention group.
+ *
+ * <p>When the annotation is used at the class level, the effect is
+ * equivalent to grouping all the declared fields not already having the
+ * {@code @Contended} annotation into the same anonymous group.
+ * With the class level annotation, implementations may choose different
+ * isolation techniques, such as isolating the entire object, rather than
+ * isolating distinct fields. A contention group tag has no meaning
+ * in a class level {@code @Contended} annotation, and is ignored.
+ *
+ * <p>The class level {@code @Contended} annotation is not inherited and has
+ * no effect on the fields declared in any sub-classes. The effects of all
+ * {@code @Contended} annotations, however, remain in force for all
+ * subclass instances, providing isolation of all the defined contention
+ * groups. Contention group tags are not inherited, and the same tag used
+ * in a superclass and a subclass represents distinct contention groups.
+ *
  * @since 1.8
  */
 @Retention(RetentionPolicy.RUNTIME)
@@ -39,7 +74,10 @@
 public @interface Contended {
 
     /**
-      Defines the contention group tag.
+     * The (optional) contention group tag.
+     * This tag is only meaningful for field level annotations.
+     *
+     * @return contention group tag.
      */
     String value() default "";
 }
--- a/jdk/src/share/classes/sun/misc/Hashing.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/misc/Hashing.java	Wed Jul 05 18:59:05 2017 +0200
@@ -24,7 +24,7 @@
  */
 package sun.misc;
 
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
 
 /**
  * Hashing utilities.
@@ -207,28 +207,16 @@
     }
 
     /**
-     * Holds references to things that can't be initialized until after VM
-     * is fully booted.
+     * Returns a non-zero 32-bit pseudo-random value. The {@code instance} object
+     * may be used as part of the value.
+     *
+     * @param instance an object that may be used in choosing the value.
+     * @return a non-zero 32-bit pseudo-random value.
      */
-    private static class Holder {
-
-        /**
-         * Used for generating per-instance hash seeds.
-         *
-         * We try to improve upon the default seeding.
-         */
-        static final Random SEED_MAKER = new Random(
-                Double.doubleToRawLongBits(Math.random())
-                ^ System.identityHashCode(Hashing.class)
-                ^ System.currentTimeMillis()
-                ^ System.nanoTime()
-                ^ Runtime.getRuntime().freeMemory());
-    }
-
     public static int randomHashSeed(Object instance) {
         int seed;
         if (sun.misc.VM.isBooted()) {
-            seed = Holder.SEED_MAKER.nextInt();
+            seed = ThreadLocalRandom.current().nextInt();
         } else {
             // lower quality "random" seed value--still better than zero and not
             // not practically reversible.
--- a/jdk/src/share/classes/sun/net/www/protocol/http/HttpURLConnection.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/net/www/protocol/http/HttpURLConnection.java	Wed Jul 05 18:59:05 2017 +0200
@@ -3158,6 +3158,7 @@
         private boolean marked = false;
         private int inCache = 0;
         private int markCount = 0;
+        private boolean closed;  // false
 
         public HttpInputStream (InputStream is) {
             super (is);
@@ -3233,8 +3234,14 @@
             }
         }
 
+        private void ensureOpen() throws IOException {
+            if (closed)
+                throw new IOException("stream is closed");
+        }
+
         @Override
         public int read() throws IOException {
+            ensureOpen();
             try {
                 byte[] b = new byte[1];
                 int ret = read(b);
@@ -3254,6 +3261,7 @@
 
         @Override
         public int read(byte[] b, int off, int len) throws IOException {
+            ensureOpen();
             try {
                 int newLen = super.read(b, off, len);
                 int nWrite;
@@ -3291,7 +3299,7 @@
 
         @Override
         public long skip (long n) throws IOException {
-
+            ensureOpen();
             long remaining = n;
             int nr;
             if (skipBuffer == null)
@@ -3317,6 +3325,9 @@
 
         @Override
         public void close () throws IOException {
+            if (closed)
+                return;
+
             try {
                 if (outputStream != null) {
                     if (read() != -1) {
@@ -3332,6 +3343,7 @@
                 }
                 throw ioex;
             } finally {
+                closed = true;
                 HttpURLConnection.this.http = null;
                 checkResponseCredentials (true);
             }
--- a/jdk/src/share/classes/sun/security/pkcs11/P11KeyAgreement.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/security/pkcs11/P11KeyAgreement.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -330,7 +330,7 @@
                 // as here we always retrieve the CKA_VALUE even for tokens
                 // that do not have that bug.
                 byte[] keyBytes = key.getEncoded();
-                byte[] newBytes = P11Util.trimZeroes(keyBytes);
+                byte[] newBytes = KeyUtil.trimZeroes(keyBytes);
                 if (keyBytes != newBytes) {
                     key = new SecretKeySpec(newBytes, algorithm);
                 }
--- a/jdk/src/share/classes/sun/security/pkcs11/P11Signature.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/security/pkcs11/P11Signature.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -41,6 +41,7 @@
 
 import sun.security.pkcs11.wrapper.*;
 import static sun.security.pkcs11.wrapper.PKCS11Constants.*;
+import sun.security.util.KeyUtil;
 
 /**
  * Signature implementation class. This class currently supports the
@@ -697,8 +698,8 @@
             BigInteger r = values[0].getPositiveBigInteger();
             BigInteger s = values[1].getPositiveBigInteger();
             // trim leading zeroes
-            byte[] br = P11Util.trimZeroes(r.toByteArray());
-            byte[] bs = P11Util.trimZeroes(s.toByteArray());
+            byte[] br = KeyUtil.trimZeroes(r.toByteArray());
+            byte[] bs = KeyUtil.trimZeroes(s.toByteArray());
             int k = Math.max(br.length, bs.length);
             // r and s each occupy half the array
             byte[] res = new byte[k << 1];
--- a/jdk/src/share/classes/sun/security/pkcs11/P11Util.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/security/pkcs11/P11Util.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -131,20 +131,6 @@
         return b;
     }
 
-    // trim leading (most significant) zeroes from the result
-    static byte[] trimZeroes(byte[] b) {
-        int i = 0;
-        while ((i < b.length - 1) && (b[i] == 0)) {
-            i++;
-        }
-        if (i == 0) {
-            return b;
-        }
-        byte[] t = new byte[b.length - i];
-        System.arraycopy(b, i, t, 0, t.length);
-        return t;
-    }
-
     public static byte[] getMagnitude(BigInteger bi) {
         byte[] b = bi.toByteArray();
         if ((b.length > 1) && (b[0] == 0)) {
--- a/jdk/src/share/classes/sun/security/util/KeyUtil.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/security/util/KeyUtil.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -200,5 +200,24 @@
 
         // Don't bother to check against the y^q mod p if safe primes are used.
     }
+
+    /**
+     * Trim leading (most significant) zeroes from the result.
+     *
+     * @throws NullPointerException if {@code b} is null
+     */
+    public static byte[] trimZeroes(byte[] b) {
+        int i = 0;
+        while ((i < b.length - 1) && (b[i] == 0)) {
+            i++;
+        }
+        if (i == 0) {
+            return b;
+        }
+        byte[] t = new byte[b.length - i];
+        System.arraycopy(b, i, t, 0, t.length);
+        return t;
+    }
+
 }
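A minimal sketch of the relocated helper: trimZeroes() drops the leading zero byte that BigInteger.toByteArray() prepends when the most significant bit of the magnitude is set (values are illustrative):

    import java.math.BigInteger;
    import sun.security.util.KeyUtil;

    public class TrimZeroesSketch {
        public static void main(String[] args) {
            byte[] raw = new BigInteger("255").toByteArray();  // {0x00, (byte) 0xFF}
            byte[] trimmed = KeyUtil.trimZeroes(raw);          // {(byte) 0xFF}
            System.out.println(raw.length + " -> " + trimmed.length);  // prints "2 -> 1"
        }
    }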
 
--- a/jdk/src/share/classes/sun/tools/jconsole/SummaryTab.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/tools/jconsole/SummaryTab.java	Wed Jul 05 18:59:05 2017 +0200
@@ -360,6 +360,8 @@
                     Math.min(99F,
                              elapsedCpu / (elapsedTime * 10000F * result.nCPUs));
 
+                cpuUsage = Math.max(0F, cpuUsage);
+
                 getPlotter().addValues(result.timeStamp,
                                 Math.round(cpuUsage * Math.pow(10.0, CPU_DECIMALS)));
                 getInfoLabel().setText(Resources.format(Messages.CPU_USAGE_FORMAT,
--- a/jdk/src/share/classes/sun/util/locale/provider/FallbackLocaleProviderAdapter.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/util/locale/provider/FallbackLocaleProviderAdapter.java	Wed Jul 05 18:59:05 2017 +0200
@@ -25,6 +25,11 @@
 
 package sun.util.locale.provider;
 
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Set;
+
 /**
  * FallbackProviderAdapter implementation.
  *
@@ -33,10 +38,32 @@
 public class FallbackLocaleProviderAdapter extends JRELocaleProviderAdapter {
 
     /**
+     * Supported language tag set.
+     */
+    private static final Set<String> rootTagSet =
+        Collections.singleton(Locale.ROOT.toLanguageTag());
+
+    /**
+     * Fallback provider only provides the ROOT locale data.
+     */
+    private final LocaleResources rootLocaleResources =
+        new LocaleResources(this, Locale.ROOT);
+
+    /**
      * Returns the type of this LocaleProviderAdapter
      */
     @Override
     public LocaleProviderAdapter.Type getAdapterType() {
         return Type.FALLBACK;
     }
+
+    @Override
+    public LocaleResources getLocaleResources(Locale locale) {
+        return rootLocaleResources;
+    }
+
+    @Override
+    protected Set<String> createLanguageTagSet(String category) {
+        return rootTagSet;
+    }
 }
--- a/jdk/src/share/classes/sun/util/locale/provider/JRELocaleProviderAdapter.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/util/locale/provider/JRELocaleProviderAdapter.java	Wed Jul 05 18:59:05 2017 +0200
@@ -34,12 +34,10 @@
 import java.text.spi.DateFormatSymbolsProvider;
 import java.text.spi.DecimalFormatSymbolsProvider;
 import java.text.spi.NumberFormatProvider;
-import java.util.Calendar;
 import java.util.HashSet;
 import java.util.Locale;
 import java.util.Set;
 import java.util.StringTokenizer;
-import java.util.TimeZone;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.spi.CalendarDataProvider;
--- a/jdk/src/share/classes/sun/util/locale/provider/LocaleProviderAdapter.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/util/locale/provider/LocaleProviderAdapter.java	Wed Jul 05 18:59:05 2017 +0200
@@ -120,6 +120,12 @@
     private static LocaleProviderAdapter fallbackLocaleProviderAdapter = null;
 
     /**
+     * Default fallback adapter type, which should return something meaningful in any case.
+     * This is either JRE or FALLBACK.
+     */
+    static LocaleProviderAdapter.Type defaultLocaleProviderAdapter = null;
+
+    /**
      * Adapter lookup cache.
      */
     private static ConcurrentMap<Class<? extends LocaleServiceProvider>, ConcurrentMap<Locale, LocaleProviderAdapter>>
@@ -140,13 +146,19 @@
                     // load adapter if necessary
                     switch (aType) {
                         case CLDR:
-                            cldrLocaleProviderAdapter = new CLDRLocaleProviderAdapter();
+                            if (cldrLocaleProviderAdapter == null) {
+                                cldrLocaleProviderAdapter = new CLDRLocaleProviderAdapter();
+                            }
                             break;
                         case HOST:
-                            hostLocaleProviderAdapter = new HostLocaleProviderAdapter();
+                            if (hostLocaleProviderAdapter == null) {
+                                hostLocaleProviderAdapter = new HostLocaleProviderAdapter();
+                            }
                             break;
                     }
-                    typeList.add(aType);
+                    if (!typeList.contains(aType)) {
+                        typeList.add(aType);
+                    }
                 } catch (IllegalArgumentException | UnsupportedOperationException e) {
                     // could be caused by the user specifying wrong
                     // provider name or format in the system property
@@ -160,11 +172,15 @@
                 // Append FALLBACK as the last resort.
                 fallbackLocaleProviderAdapter = new FallbackLocaleProviderAdapter();
                 typeList.add(Type.FALLBACK);
+                defaultLocaleProviderAdapter = Type.FALLBACK;
+            } else {
+                defaultLocaleProviderAdapter = Type.JRE;
             }
         } else {
             // Default preference list
             typeList.add(Type.JRE);
             typeList.add(Type.SPI);
+            defaultLocaleProviderAdapter = Type.JRE;
         }
 
         adapterPreference = Collections.unmodifiableList(typeList);
--- a/jdk/src/share/classes/sun/util/locale/provider/LocaleServiceProviderPool.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/classes/sun/util/locale/provider/LocaleServiceProviderPool.java	Wed Jul 05 18:59:05 2017 +0200
@@ -127,32 +127,13 @@
     private LocaleServiceProviderPool (final Class<? extends LocaleServiceProvider> c) {
         providerClass = c;
 
-        // Add the JRE Locale Data Adapter implementation.
-        providers.putIfAbsent(LocaleProviderAdapter.Type.JRE,
-            LocaleProviderAdapter.forJRE().getLocaleServiceProvider(c));
-
-        // Add the SPI Locale Data Adapter implementation.
-        LocaleProviderAdapter lda = LocaleProviderAdapter.forType(LocaleProviderAdapter.Type.SPI);
-        LocaleServiceProvider provider = lda.getLocaleServiceProvider(c);
-        if (provider != null) {
-            providers.putIfAbsent(LocaleProviderAdapter.Type.SPI, provider);
-        }
-
-        // Add the CLDR Locale Data Adapter implementation, if needed.
-        lda =  LocaleProviderAdapter.forType(LocaleProviderAdapter.Type.CLDR);
-        if (lda != null) {
-            provider = lda.getLocaleServiceProvider(c);
-            if (provider != null) {
-                providers.putIfAbsent(LocaleProviderAdapter.Type.CLDR, provider);
-            }
-        }
-
-        // Add the Host Locale Data Adapter implementation, if needed.
-        lda =  LocaleProviderAdapter.forType(LocaleProviderAdapter.Type.HOST);
-        if (lda != null) {
-            provider = lda.getLocaleServiceProvider(c);
-            if (provider != null) {
-                providers.putIfAbsent(LocaleProviderAdapter.Type.HOST, provider);
+        for (LocaleProviderAdapter.Type type : LocaleProviderAdapter.getAdapterPreference()) {
+            LocaleProviderAdapter lda = LocaleProviderAdapter.forType(type);
+            if (lda != null) {
+                LocaleServiceProvider provider = lda.getLocaleServiceProvider(c);
+                if (provider != null) {
+                    providers.putIfAbsent(type, provider);
+                }
             }
         }
     }
@@ -246,7 +227,8 @@
      */
     boolean hasProviders() {
         return providers.size() != 1 ||
-               providers.get(LocaleProviderAdapter.Type.JRE) == null;
+               (providers.get(LocaleProviderAdapter.Type.JRE) == null &&
+                providers.get(LocaleProviderAdapter.Type.FALLBACK) == null);
     }
 
     /**
@@ -296,9 +278,8 @@
         // Check whether JRE is the sole locale data provider or not,
         // and directly call it if it is.
         if (!hasProviders()) {
-            return getter.getObject(
-                (P)providers.get(LocaleProviderAdapter.Type.JRE),
-                locale, key, params);
+            return getter.getObject((P)providers.get(LocaleProviderAdapter.defaultLocaleProviderAdapter),
+                                    locale, key, params);
         }
 
         List<Locale> lookupLocales = getLookupLocales(locale);
--- a/jdk/src/share/demo/nio/zipfs/src/com/sun/nio/zipfs/ZipFileSystem.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/demo/nio/zipfs/src/com/sun/nio/zipfs/ZipFileSystem.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1818,7 +1818,7 @@
 
         Entry(byte[] name) {
             name(name);
-            this.mtime  = System.currentTimeMillis();
+            this.mtime  = this.ctime = this.atime = System.currentTimeMillis();
             this.crc    = 0;
             this.size   = 0;
             this.csize  = 0;
@@ -1912,17 +1912,18 @@
         {
             int written  = CENHDR;
             int version0 = version();
-
             long csize0  = csize;
             long size0   = size;
             long locoff0 = locoff;
             int elen64   = 0;                // extra for ZIP64
             int elenNTFS = 0;                // extra for NTFS (a/c/mtime)
             int elenEXTT = 0;                // extra for Extended Timestamp
+            boolean foundExtraTime = false;  // true if an NTFS or EXTT timestamp extra field is present
 
             // confirm size/length
             int nlen = (name != null) ? name.length : 0;
             int elen = (extra != null) ? extra.length : 0;
+            int eoff = 0;
             int clen = (comment != null) ? comment.length : 0;
             if (csize >= ZIP64_MINVAL) {
                 csize0 = ZIP64_MINVAL;
@@ -1936,14 +1937,24 @@
                 locoff0 = ZIP64_MINVAL;
                 elen64 += 8;                 // offset(8)
             }
-            if (elen64 != 0)
+            if (elen64 != 0) {
                 elen64 += 4;                 // header and data sz 4 bytes
+            }
 
-            if (atime != -1) {
-                if (isWindows)               // use NTFS
+            while (eoff + 4 < elen) {
+                int tag = SH(extra, eoff);
+                int sz = SH(extra, eoff + 2);
+                if (tag == EXTID_EXTT || tag == EXTID_NTFS) {
+                    foundExtraTime = true;
+                }
+                eoff += (4 + sz);
+            }
+            if (!foundExtraTime) {
+                if (isWindows) {             // use NTFS
                     elenNTFS = 36;           // total 36 bytes
-                else                         // Extended Timestamp otherwise
+                } else {                     // Extended Timestamp otherwise
                     elenEXTT = 9;            // only mtime in cen
+                }
             }
             writeInt(os, CENSIG);            // CEN header signature
             if (elen64 != 0) {
@@ -2092,11 +2103,13 @@
         {
             writeInt(os, LOCSIG);               // LOC header signature
             int version = version();
-
             int nlen = (name != null) ? name.length : 0;
             int elen = (extra != null) ? extra.length : 0;
+            boolean foundExtraTime = false;     // true if a timestamp extra field is present
+            int eoff = 0;
             int elen64 = 0;
             int elenEXTT = 0;
+            int elenNTFS = 0;
             if ((flag & FLAG_DATADESCR) != 0) {
                 writeShort(os, version());      // version needed to extract
                 writeShort(os, flag);           // general purpose bit flag
@@ -2128,14 +2141,27 @@
                     writeInt(os, size);         // uncompressed size
                 }
             }
-            if (atime != -1 && !isWindows) {    // on unix use "ext time"
-                if (ctime == -1)
-                    elenEXTT = 13;
-                else
-                    elenEXTT = 17;
+            while (eoff + 4 < elen) {
+                int tag = SH(extra, eoff);
+                int sz = SH(extra, eoff + 2);
+                if (tag == EXTID_EXTT || tag == EXTID_NTFS) {
+                    foundExtraTime = true;
+                }
+                eoff += (4 + sz);
+            }
+            if (!foundExtraTime) {
+                if (isWindows) {
+                    elenNTFS = 36;              // NTFS, total 36 bytes
+                } else {                        // on unix use "ext time"
+                    elenEXTT = 9;
+                    if (atime != -1)
+                        elenEXTT += 4;
+                    if (ctime != -1)
+                        elenEXTT += 4;
+                }
             }
             writeShort(os, name.length);
-            writeShort(os, elen + elen64 + elenEXTT);
+            writeShort(os, elen + elen64 + elenNTFS + elenEXTT);
             writeBytes(os, name);
             if (elen64 != 0) {
                 writeShort(os, EXTID_ZIP64);
@@ -2143,15 +2169,28 @@
                 writeLong(os, size);
                 writeLong(os, csize);
             }
+            if (elenNTFS != 0) {
+                writeShort(os, EXTID_NTFS);
+                writeShort(os, elenNTFS - 4);
+                writeInt(os, 0);            // reserved
+                writeShort(os, 0x0001);     // NTFS attr tag
+                writeShort(os, 24);
+                writeLong(os, javaToWinTime(mtime));
+                writeLong(os, javaToWinTime(atime));
+                writeLong(os, javaToWinTime(ctime));
+            }
             if (elenEXTT != 0) {
                 writeShort(os, EXTID_EXTT);
                 writeShort(os, elenEXTT - 4);// size for the following data block
-                if (ctime == -1)
-                    os.write(0x3);           // mtime and atime
-                else
-                    os.write(0x7);           // mtime, atime and ctime
+                int fbyte = 0x1;           // mtime is always present
+                if (atime != -1)
+                    fbyte |= 0x2;          // atime present
+                if (ctime != -1)
+                    fbyte |= 0x4;          // ctime present
+                os.write(fbyte);           // flags byte
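+                // The EXTT block written below is this flags byte followed by a
+                // 4-byte Unix time for each flagged timestamp, in
+                // mtime/atime/ctime order.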
                 writeInt(os, javaToUnixTime(mtime));
-                writeInt(os, javaToUnixTime(atime));
+                if (atime != -1)
+                    writeInt(os, javaToUnixTime(atime));
                 if (ctime != -1)
                     writeInt(os, javaToUnixTime(ctime));
             }
--- a/jdk/src/share/demo/nio/zipfs/src/com/sun/nio/zipfs/ZipInfo.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/demo/nio/zipfs/src/com/sun/nio/zipfs/ZipInfo.java	Wed Jul 05 18:59:05 2017 +0200
@@ -214,7 +214,7 @@
                       winToJavaTime(LL(extra, off + 24)));
                 break;
             case EXTID_EXTT:
-                print("         ->Inof-ZIP Extended Timestamp: flag=%x%n",extra[off]);
+                print("         ->Info-ZIP Extended Timestamp: flag=%x%n",extra[off]);
                 pos = off + 1 ;
                 while (pos + 4 <= off + sz) {
                     print("            *%tc%n",
@@ -223,6 +223,7 @@
                 }
                 break;
             default:
+                print("         ->[tag=%x, size=%d]%n", tag, sz);
             }
             off += sz;
         }
--- a/jdk/src/share/javavm/export/jmm.h	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/javavm/export/jmm.h	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -49,7 +49,8 @@
   JMM_VERSION_1_1 = 0x20010100, // JDK 6
   JMM_VERSION_1_2 = 0x20010200, // JDK 7
   JMM_VERSION_1_2_1 = 0x20010201, // JDK 7 GA
-  JMM_VERSION     = 0x20010202
+  JMM_VERSION_1_2_2 = 0x20010202,
+  JMM_VERSION     = 0x20010203
 };
 
 typedef struct {
@@ -62,7 +63,8 @@
   unsigned int isObjectMonitorUsageSupported : 1;
   unsigned int isSynchronizerUsageSupported : 1;
   unsigned int isThreadAllocatedMemorySupported : 1;
-  unsigned int : 23;
+  unsigned int isRemoteDiagnosticCommandsSupported : 1;
+  unsigned int : 22;
 } jmmOptionalSupport;
 
 typedef enum {
@@ -190,21 +192,27 @@
 } jmmGCStat;
 
 typedef struct {
-  const char* name;
-  const char* description;
-  const char* impact;
-  int         num_arguments;
-  jboolean    enabled;
+  const char* name;                /* Name of the diagnostic command */
+  const char* description;         /* Short description */
+  const char* impact;              /* Impact on the JVM */
+  const char* permission_class;    /* Class name of the required permission if any */
+  const char* permission_name;     /* Permission name of the required permission if any */
+  const char* permission_action;   /* Action name of the required permission if any */
+  int         num_arguments;       /* Number of supported options or arguments */
+  jboolean    enabled;             /* True if the diagnostic command can be invoked, false otherwise */
 } dcmdInfo;
 
 typedef struct {
-  const char* name;
-  const char* description;
-  const char* type;
-  const char* default_string;
-  jboolean    mandatory;
-  jboolean    option;
-  int         position;
+  const char* name;                /* Option/Argument name */
+  const char* description;         /* Short description */
+  const char* type;                /* Type: STRING, BOOLEAN, etc. */
+  const char* default_string;      /* Default value in a parsable string */
+  jboolean    mandatory;           /* True if the option/argument is mandatory */
+  jboolean    option;              /* True if it is an option, false if it is an argument */
+                                   /* (see diagnosticFramework.hpp for option/argument definitions) */
+  jboolean    multiple;            /* True if the option can be specified several times */
+  int         position;            /* Expected position for this argument (this field is */
+                                   /* meaningless for options) */
 } dcmdArgInfo;
 
 typedef struct jmmInterface_1_ {
@@ -327,6 +335,9 @@
   jstring      (JNICALL *ExecuteDiagnosticCommand)
                                                  (JNIEnv *env,
                                                   jstring command);
+  void         (JNICALL *SetDiagnosticFrameworkNotificationEnabled)
+                                                 (JNIEnv *env,
+                                                  jboolean enabled);
 } JmmInterface;
 
 #ifdef __cplusplus
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/src/share/native/sun/management/DiagnosticCommandImpl.c	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,169 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+#include <jni.h>
+#include "management.h"
+#include "sun_management_DiagnosticCommandImpl.h"
+
+JNIEXPORT void JNICALL Java_sun_management_DiagnosticCommandImpl_setNotificationEnabled
+(JNIEnv *env, jobject dummy, jboolean enabled) {
+    if(jmm_version > JMM_VERSION_1_2_2) {
+        jmm_interface->SetDiagnosticFrameworkNotificationEnabled(env, enabled);
+    } else {
+        JNU_ThrowByName(env, "java/lang/UnsupportedOperationException",
+                        "JMX interface to diagnostic framework notifications is not supported by this VM");
+    }
+}
+
+JNIEXPORT jobjectArray JNICALL
+Java_sun_management_DiagnosticCommandImpl_getDiagnosticCommands
+  (JNIEnv *env, jobject dummy)
+{
+  return jmm_interface->GetDiagnosticCommands(env);
+}
+
+jobject getDiagnosticCommandArgumentInfoArray(JNIEnv *env, jstring command,
+                                              int num_arg) {
+  int i;
+  jobject obj;
+  jobjectArray result;
+  dcmdArgInfo* dcmd_arg_info_array;
+  jclass dcmdArgInfoCls;
+  jclass arraysCls;
+  jmethodID mid;
+  jobject resultList;
+
+  dcmd_arg_info_array = (dcmdArgInfo*) malloc(num_arg * sizeof(dcmdArgInfo));
+  if (dcmd_arg_info_array == NULL) {
+    return NULL;
+  }
+  jmm_interface->GetDiagnosticCommandArgumentsInfo(env, command,
+                                                   dcmd_arg_info_array);
+  dcmdArgInfoCls = (*env)->FindClass(env,
+                                     "sun/management/DiagnosticCommandArgumentInfo");
+  result = (*env)->NewObjectArray(env, num_arg, dcmdArgInfoCls, NULL);
+  if (result == NULL) {
+    free(dcmd_arg_info_array);
+    return NULL;
+  }
+  for (i=0; i<num_arg; i++) {
+    obj = JNU_NewObjectByName(env,
+                              "sun/management/DiagnosticCommandArgumentInfo",
+                              "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ZZZI)V",
+                              (*env)->NewStringUTF(env,dcmd_arg_info_array[i].name),
+                              (*env)->NewStringUTF(env,dcmd_arg_info_array[i].description),
+                              (*env)->NewStringUTF(env,dcmd_arg_info_array[i].type),
+                              dcmd_arg_info_array[i].default_string == NULL ? NULL:
+                              (*env)->NewStringUTF(env, dcmd_arg_info_array[i].default_string),
+                              dcmd_arg_info_array[i].mandatory,
+                              dcmd_arg_info_array[i].option,
+                              dcmd_arg_info_array[i].multiple,
+                              dcmd_arg_info_array[i].position);
+    if (obj == NULL) {
+      free(dcmd_arg_info_array);
+      return NULL;
+    }
+    (*env)->SetObjectArrayElement(env, result, i, obj);
+  }
+  free(dcmd_arg_info_array);
+  arraysCls = (*env)->FindClass(env, "java/util/Arrays");
+  mid = (*env)->GetStaticMethodID(env, arraysCls,
+                                  "asList", "([Ljava/lang/Object;)Ljava/util/List;");
+  resultList = (*env)->CallStaticObjectMethod(env, arraysCls, mid, result);
+  return resultList;
+}
+
+/* Throws IllegalArgumentException if at least one of the diagnostic commands
+ * passed as arguments is not supported by the JVM
+ */
+JNIEXPORT jobjectArray JNICALL
+Java_sun_management_DiagnosticCommandImpl_getDiagnosticCommandInfo
+(JNIEnv *env, jobject dummy, jobjectArray commands)
+{
+  int i;
+  jclass dcmdInfoCls;
+  jobject result;
+  jobjectArray args;
+  jobject obj;
+  jmmOptionalSupport mos;
+  jint ret = jmm_interface->GetOptionalSupport(env, &mos);
+  jsize num_commands;
+  dcmdInfo* dcmd_info_array;
+
+  if (commands == NULL) {
+      JNU_ThrowNullPointerException(env, "Invalid String Array");
+      return NULL;
+  }
+  num_commands = (*env)->GetArrayLength(env, commands);
+  dcmd_info_array = (dcmdInfo*) malloc(num_commands *
+                                       sizeof(dcmdInfo));
+  if (dcmd_info_array == NULL) {
+      JNU_ThrowOutOfMemoryError(env, NULL);
+  }
+  jmm_interface->GetDiagnosticCommandInfo(env, commands, dcmd_info_array);
+  dcmdInfoCls = (*env)->FindClass(env,
+                                  "sun/management/DiagnosticCommandInfo");
+  result = (*env)->NewObjectArray(env, num_commands, dcmdInfoCls, NULL);
+  if (result == NULL) {
+      free(dcmd_info_array);
+      JNU_ThrowOutOfMemoryError(env, 0);
+  }
+  for (i=0; i<num_commands; i++) {
+      args = getDiagnosticCommandArgumentInfoArray(env,
+                                                   (*env)->GetObjectArrayElement(env,commands,i),
+                                                   dcmd_info_array[i].num_arguments);
+      if (args == NULL) {
+          free(dcmd_info_array);
+          JNU_ThrowOutOfMemoryError(env, 0);
+      }
+      obj = JNU_NewObjectByName(env,
+                                "sun/management/DiagnosticCommandInfo",
+                                "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;ZLjava/util/List;)V",
+                                (*env)->NewStringUTF(env,dcmd_info_array[i].name),
+                                (*env)->NewStringUTF(env,dcmd_info_array[i].description),
+                                (*env)->NewStringUTF(env,dcmd_info_array[i].impact),
+                                dcmd_info_array[i].permission_class==NULL?NULL:(*env)->NewStringUTF(env,dcmd_info_array[i].permission_class),
+                                dcmd_info_array[i].permission_name==NULL?NULL:(*env)->NewStringUTF(env,dcmd_info_array[i].permission_name),
+                                dcmd_info_array[i].permission_action==NULL?NULL:(*env)->NewStringUTF(env,dcmd_info_array[i].permission_action),
+                                dcmd_info_array[i].enabled,
+                                args);
+      if (obj == NULL) {
+          free(dcmd_info_array);
+          JNU_ThrowOutOfMemoryError(env, 0);
+      }
+      (*env)->SetObjectArrayElement(env, result, i, obj);
+  }
+  free(dcmd_info_array);
+  return result;
+}
+
+/* Throws IllegalArgumentException if the diagnostic command
+ * passed as argument is not supported by the JVM
+ */
+JNIEXPORT jstring JNICALL
+Java_sun_management_DiagnosticCommandImpl_executeDiagnosticCommand
+(JNIEnv *env, jobject dummy, jstring command) {
+  return jmm_interface->ExecuteDiagnosticCommand(env, command);
+}
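
Most of the new file above is JNI glue, but getDiagnosticCommandArgumentInfoArray shows the discipline it relies on: one malloc'ed scratch array, freed on every exit path, early failures included. A standalone sketch of that pattern under hypothetical names (build_item and process_all are not JDK functions):

/* Standalone sketch of the cleanup pattern above: one heap buffer,
 * freed on every exit path, including early failures. */
#include <stdlib.h>
#include <stdio.h>

static int build_item(int i, int *out) {
    if (i == 3) return -1;      /* simulate a failure partway through */
    *out = i * i;
    return 0;
}

static int process_all(int n) {
    int i;
    int *scratch = malloc(n * sizeof(int));
    if (scratch == NULL) {
        return -1;              /* nothing allocated yet, nothing to free */
    }
    for (i = 0; i < n; i++) {
        if (build_item(i, &scratch[i]) != 0) {
            free(scratch);      /* same buffer freed on the failure path */
            return -1;
        }
    }
    for (i = 0; i < n; i++) {
        printf("%d ", scratch[i]);
    }
    printf("\n");
    free(scratch);              /* and on the success path */
    return 0;
}

int main(void) {
    process_all(3);             /* succeeds */
    process_all(5);             /* fails at i == 3, still leak-free */
    return 0;
}
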
--- a/jdk/src/share/native/sun/management/VMManagementImpl.c	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/share/native/sun/management/VMManagementImpl.c	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -24,6 +24,7 @@
  */
 
 #include <jni.h>
+#include <stdlib.h>
 #include "jvm.h"
 #include "management.h"
 #include "sun_management_VMManagementImpl.h"
@@ -96,6 +97,9 @@
     value = mos.isThreadAllocatedMemorySupported;
     setStaticBooleanField(env, cls, "threadAllocatedMemorySupport", value);
 
+    value = mos.isRemoteDiagnosticCommandsSupported;
+    setStaticBooleanField(env, cls, "remoteDiagnosticCommandsSupport", value);
+
     if ((jmm_version > JMM_VERSION_1_2) ||
         (jmm_version == JMM_VERSION_1_2 && ((jmm_version&0xFF) >= 1))) {
         setStaticBooleanField(env, cls, "gcNotificationSupport", JNI_TRUE);
--- a/jdk/src/solaris/bin/java_md_solinux.c	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/solaris/bin/java_md_solinux.c	Wed Jul 05 18:59:05 2017 +0200
@@ -649,9 +649,9 @@
                         && (dmpath == NULL) /* data model specific variables not set  */
 #endif /* __solaris__ */
                         ) {
-
+                    JLI_MemFree(newargv);
+                    JLI_MemFree(new_runpath);
                     return;
-
                 }
             }
 
@@ -935,7 +935,7 @@
         char buf[PATH_MAX+1];
         int len = readlink(self, buf, PATH_MAX);
         if (len >= 0) {
-            buf[len] = '\0';            /* readlink doesn't nul terminate */
+            buf[len] = '\0';            /* readlink(2) doesn't NUL terminate */
             exec_path = JLI_StringDup(buf);
         }
     }
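
The first java_md_solinux.c hunk frees newargv and new_runpath before the early return, and the second only sharpens a comment, but the idiom behind that comment is worth spelling out: readlink(2) fills the buffer without a terminating NUL, so the caller must add one before using it as a C string. A standalone sketch, using /proc/self/exe purely as an example target (Linux-specific):

/* Standalone sketch of the readlink(2) idiom referenced above. */
#include <stdio.h>
#include <limits.h>
#include <unistd.h>

int main(void) {
    char buf[PATH_MAX + 1];
    ssize_t len = readlink("/proc/self/exe", buf, PATH_MAX);
    if (len >= 0) {
        buf[len] = '\0';         /* readlink(2) does not NUL-terminate the result */
        printf("executable: %s\n", buf);
    } else {
        perror("readlink");
    }
    return 0;
}
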
--- a/jdk/src/solaris/classes/sun/awt/X11/XClipboard.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/solaris/classes/sun/awt/X11/XClipboard.java	Wed Jul 05 18:59:05 2017 +0200
@@ -179,6 +179,7 @@
             }
             synchronized (XClipboard.classLock) {
                 if (targetsAtom2Clipboard != null && !targetsAtom2Clipboard.isEmpty()) {
+                    // The viewer is still registered, schedule next poll.
                     XToolkit.schedule(this, XClipboard.getPollInterval());
                 }
             }
@@ -191,7 +192,8 @@
                 final XSelectionEvent xse = ev.get_xselection();
                 XClipboard clipboard = null;
                 synchronized (XClipboard.classLock) {
-                    if (targetsAtom2Clipboard != null && !targetsAtom2Clipboard.isEmpty()) {
+                    if (targetsAtom2Clipboard != null && targetsAtom2Clipboard.isEmpty()) {
+                        // The viewer was unregistered, remove the dispatcher.
                         XToolkit.removeEventDispatcher(XWindow.getXAWTRootWindow().getWindow(), this);
                         return;
                     }
--- a/jdk/src/solaris/classes/sun/awt/X11/XTextAreaPeer.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/solaris/classes/sun/awt/X11/XTextAreaPeer.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1143,6 +1143,19 @@
             addNotify();
         }
 
+        @Override
+        public void invalidate() {
+            synchronized (getTreeLock()) {
+                final Container parent = getParent();
+                AWTAccessor.getComponentAccessor().setParent(this, null);
+                try {
+                    super.invalidate();
+                } finally {
+                    AWTAccessor.getComponentAccessor().setParent(this, parent);
+                }
+            }
+        }
+
         public void focusGained(FocusEvent e) {
             Graphics g = getGraphics();
             Rectangle r = getViewportBorderBounds();
--- a/jdk/src/solaris/classes/sun/awt/fontconfigs/linux.fontconfig.Fedora.properties	Mon Jun 10 17:04:18 2013 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,377 +0,0 @@
-#
-# 
-# Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
-#
-# This code is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License version 2 only, as
-# published by the Free Software Foundation.  Oracle designates this
-# particular file as subject to the "Classpath" exception as provided
-# by Oracle in the LICENSE file that accompanied this code.
-#
-# This code is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-# version 2 for more details (a copy is included in the LICENSE file that
-# accompanied this code).
-#
-# You should have received a copy of the GNU General Public License version
-# 2 along with this work; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
-# or visit www.oracle.com if you need additional information or have any
-# questions.
-#
-
-# Version
-
-# Uses Fedora 9 fonts and file paths.
-version=1
-
-# Component Font Mappings
-
-dialog.plain.latin-1=DejaVu Sans
-dialog.plain.japanese-x0208=Sazanami Gothic
-dialog.plain.korean=Baekmuk Gulim
-dialog.plain.chinese-big5=AR PL ShanHeiSun Uni
-dialog.plain.chinese-gb18030=AR PL ShanHeiSun Uni
-dialog.plain.bengali=Lohit Bengali
-dialog.plain.gujarati=Lohit Gujarati
-dialog.plain.hindi=Lohit Hindi
-dialog.plain.malayalam=Lohit Malayalam
-dialog.plain.oriya=Lohit Oriya
-dialog.plain.punjabi=Lohit Punjabi
-dialog.plain.tamil=Lohit Tamil
-dialog.plain.telugu=Lohit Telugu
-dialog.plain.sinhala=LKLUG
-
-dialog.bold.latin-1=DejaVu Sans Bold
-dialog.bold.japanese-x0208=Sazanami Gothic
-dialog.bold.korean=Baekmuk Gulim
-dialog.bold.chinese-big5=AR PL ShanHeiSun Uni
-dialog.bold.chinese-gb18030=AR PL ShanHeiSun Uni
-dialog.bold.bengali=Lohit Bengali
-dialog.bold.gujarati=Lohit Gujarati
-dialog.bold.hindi=Lohit Hindi
-dialog.bold.malayalam=Lohit Malayalam
-dialog.bold.oriya=Lohit Oriya
-dialog.bold.punjabi=Lohit Punjabi
-dialog.bold.tamil=Lohit Tamil
-dialog.bold.telugu=Lohit Telugu
-dialog.bold.sinhala=LKLUG
-
-dialog.italic.latin-1=DejaVu Sans Oblique
-dialog.italic.japanese-x0208=Sazanami Gothic
-dialog.italic.korean=Baekmuk Gulim
-dialog.italic.chinese-big5=AR PL ShanHeiSun Uni
-dialog.italic.chinese-gb18030=AR PL ShanHeiSun Uni
-dialog.italic.bengali=Lohit Bengali
-dialog.italic.gujarati=Lohit Gujarati
-dialog.italic.hindi=Lohit Hindi
-dialog.italic.malayalam=Lohit Malayalam
-dialog.italic.oriya=Lohit Oriya
-dialog.italic.punjabi=Lohit Punjabi
-dialog.italic.tamil=Lohit Tamil
-dialog.italic.telugu=Lohit Telugu
-dialog.italic.sinhala=LKLUG
-
-dialog.bolditalic.latin-1=DejaVu Sans Bold Oblique
-dialog.bolditalic.japanese-x0208=Sazanami Gothic
-dialog.bolditalic.korean=Baekmuk Gulim
-dialog.bolditalic.chinese-big5=AR PL ShanHeiSun Uni
-dialog.bolditalic.chinese-gb18030=AR PL ShanHeiSun Uni
-dialog.bolditalic.bengali=Lohit Bengali
-dialog.bolditalic.gujarati=Lohit Gujarati
-dialog.bolditalic.hindi=Lohit Hindi
-dialog.bolditalic.malayalam=Lohit Malayalam
-dialog.bolditalic.oriya=Lohit Oriya
-dialog.bolditalic.punjabi=Lohit Punjabi
-dialog.bolditalic.tamil=Lohit Tamil
-dialog.bolditalic.telugu=Lohit Telugu
-dialog.bolditalic.sinhala=LKLUG
-
-sansserif.plain.latin-1=DejaVu Sans
-sansserif.plain.japanese-x0208=Sazanami Gothic
-sansserif.plain.korean=Baekmuk Gulim
-sansserif.plain.chinese-big5=AR PL ShanHeiSun Uni
-sansserif.plain.chinese-gb18030=AR PL ShanHeiSun Uni
-sansserif.plain.bengali=Lohit Bengali
-sansserif.plain.gujarati=Lohit Gujarati
-sansserif.plain.hindi=Lohit Hindi
-sansserif.plain.malayalam=Lohit Malayalam
-sansserif.plain.oriya=Lohit Oriya
-sansserif.plain.punjabi=Lohit Punjabi
-sansserif.plain.tamil=Lohit Tamil
-sansserif.plain.telugu=Lohit Telugu
-sansserif.plain.sinhala=LKLUG
-
-sansserif.bold.latin-1=DejaVu Sans Bold
-sansserif.bold.japanese-x0208=Sazanami Gothic
-sansserif.bold.korean=Baekmuk Gulim
-sansserif.bold.chinese-big5=AR PL ShanHeiSun Uni
-sansserif.bold.chinese-gb18030=AR PL ShanHeiSun Uni
-sansserif.bold.bengali=Lohit Bengali
-sansserif.bold.gujarati=Lohit Gujarati
-sansserif.bold.hindi=Lohit Hindi
-sansserif.bold.malayalam=Lohit Malayalam
-sansserif.bold.oriya=Lohit Oriya
-sansserif.bold.punjabi=Lohit Punjabi
-sansserif.bold.tamil=Lohit Tamil
-sansserif.bold.telugu=Lohit Telugu
-sansserif.bold.sinhala=LKLUG
-
-sansserif.italic.latin-1=DejaVu Sans Oblique
-sansserif.italic.japanese-x0208=Sazanami Gothic
-sansserif.italic.korean=Baekmuk Gulim
-sansserif.italic.chinese-big5=AR PL ShanHeiSun Uni
-sansserif.italic.chinese-gb18030=AR PL ShanHeiSun Uni
-sansserif.italic.bengali=Lohit Bengali
-sansserif.italic.gujarati=Lohit Gujarati
-sansserif.italic.hindi=Lohit Hindi
-sansserif.italic.malayalam=Lohit Malayalam
-sansserif.italic.oriya=Lohit Oriya
-sansserif.italic.punjabi=Lohit Punjabi
-sansserif.italic.tamil=Lohit Tamil
-sansserif.italic.telugu=Lohit Telugu
-sansserif.italic.sinhala=LKLUG
-
-sansserif.bolditalic.latin-1=DejaVu Sans Bold Oblique
-sansserif.bolditalic.japanese-x0208=Sazanami Gothic
-sansserif.bolditalic.korean=Baekmuk Gulim
-sansserif.bolditalic.chinese-big5=AR PL ShanHeiSun Uni
-sansserif.bolditalic.chinese-gb18030=AR PL ShanHeiSun Uni
-sansserif.bolditalic.bengali=Lohit Bengali
-sansserif.bolditalic.gujarati=Lohit Gujarati
-sansserif.bolditalic.hindi=Lohit Hindi
-sansserif.bolditalic.malayalam=Lohit Malayalam
-sansserif.bolditalic.oriya=Lohit Oriya
-sansserif.bolditalic.punjabi=Lohit Punjabi
-sansserif.bolditalic.tamil=Lohit Tamil
-sansserif.bolditalic.telugu=Lohit Telugu
-sansserif.bolditalic.sinhala=LKLUG
-
-serif.plain.latin-1=DejaVu Serif
-serif.plain.japanese-x0208=Sazanami Mincho
-serif.plain.korean=Baekmuk Batang
-serif.plain.chinese-big5=AR PL ZenKai Uni
-serif.plain.chinese-gb18030=AR PL ZenKai Uni
-serif.plain.bengali=Lohit Bengali
-serif.plain.gujarati=Lohit Gujarati
-serif.plain.hindi=Lohit Hindi
-serif.plain.malayalam=Lohit Malayalam
-serif.plain.oriya=Lohit Oriya
-serif.plain.punjabi=Lohit Punjabi
-serif.plain.tamil=Lohit Tamil
-serif.plain.telugu=Lohit Telugu
-serif.plain.sinhala=LKLUG
-
-serif.bold.latin-1=DejaVu Serif Bold
-serif.bold.japanese-x0208=Sazanami Mincho
-serif.bold.korean=Baekmuk Batang
-serif.bold.chinese-big5=AR PL ZenKai Uni
-serif.bold.chinese-gb18030=AR PL ZenKai Uni
-serif.bold.bengali=Lohit Bengali
-serif.bold.gujarati=Lohit Gujarati
-serif.bold.hindi=Lohit Hindi
-serif.bold.malayalam=Lohit Malayalam
-serif.bold.oriya=Lohit Oriya
-serif.bold.punjabi=Lohit Punjabi
-serif.bold.tamil=Lohit Tamil
-serif.bold.telugu=Lohit Telugu
-serif.bold.sinhala=LKLUG
-
-serif.italic.latin-1=DejaVu Serif Oblique
-serif.italic.japanese-x0208=Sazanami Mincho
-serif.italic.korean=Baekmuk Batang
-serif.italic.chinese-big5=AR PL ZenKai Uni
-serif.italic.chinese-gb18030=AR PL ZenKai Uni
-serif.italic.bengali=Lohit Bengali
-serif.italic.gujarati=Lohit Gujarati
-serif.italic.hindi=Lohit Hindi
-serif.italic.malayalam=Lohit Malayalam
-serif.italic.oriya=Lohit Oriya
-serif.italic.punjabi=Lohit Punjabi
-serif.italic.tamil=Lohit Tamil
-serif.italic.telugu=Lohit Telugu
-serif.italic.sinhala=LKLUG
-
-serif.bolditalic.latin-1=DejaVu Serif Bold Oblique
-serif.bolditalic.japanese-x0208=Sazanami Mincho
-serif.bolditalic.korean=Baekmuk Batang
-serif.bolditalic.chinese-big5=AR PL ZenKai Uni
-serif.bolditalic.chinese-gb18030=AR PL ZenKai Uni
-serif.bolditalic.bengali=Lohit Bengali
-serif.bolditalic.gujarati=Lohit Gujarati
-serif.bolditalic.hindi=Lohit Hindi
-serif.bolditalic.malayalam=Lohit Malayalam
-serif.bolditalic.oriya=Lohit Oriya
-serif.bolditalic.punjabi=Lohit Punjabi
-serif.bolditalic.tamil=Lohit Tamil
-serif.bolditalic.telugu=Lohit Telugu
-serif.bolditalic.sinhala=LKLUG
-
-monospaced.plain.latin-1=DejaVu Sans Mono
-monospaced.plain.japanese-x0208=Sazanami Gothic
-monospaced.plain.korean=Baekmuk Gulim
-monospaced.plain.chinese-big5=AR PL ShanHeiSun Uni
-monospaced.plain.chinese-gb18030=AR PL ShanHeiSun Uni
-monospaced.plain.bengali=Lohit Bengali
-monospaced.plain.gujarati=Lohit Gujarati
-monospaced.plain.hindi=Lohit Hindi
-monospaced.plain.malayalam=Lohit Malayalam
-monospaced.plain.oriya=Lohit Oriya
-monospaced.plain.punjabi=Lohit Punjabi
-monospaced.plain.tamil=Lohit Tamil
-monospaced.plain.telugu=Lohit Telugu
-monospaced.plain.sinhala=LKLUG
-
-monospaced.bold.latin-1=DejaVu Sans Mono Bold
-monospaced.bold.japanese-x0208=Sazanami Gothic
-monospaced.bold.korean=Baekmuk Gulim
-monospaced.bold.chinese-big5=AR PL ShanHeiSun Uni
-monospaced.bold.chinese-gb18030=AR PL ShanHeiSun Uni
-monospaced.bold.bengali=Lohit Bengali
-monospaced.bold.gujarati=Lohit Gujarati
-monospaced.bold.hindi=Lohit Hindi
-monospaced.bold.malayalam=Lohit Malayalam
-monospaced.bold.oriya=Lohit Oriya
-monospaced.bold.punjabi=Lohit Punjabi
-monospaced.bold.tamil=Lohit Tamil
-monospaced.bold.telugu=Lohit Telugu
-monospaced.bold.sinhala=LKLUG
-
-monospaced.italic.latin-1=DejaVu Sans Mono Oblique
-monospaced.italic.japanese-x0208=Sazanami Gothic
-monospaced.italic.korean=Baekmuk Gulim
-monospaced.italic.chinese-big5=AR PL ShanHeiSun Uni
-monospaced.italic.chinese-gb18030=AR PL ShanHeiSun Uni
-monospaced.italic.bengali=Lohit Bengali
-monospaced.italic.gujarati=Lohit Gujarati
-monospaced.italic.hindi=Lohit Hindi
-monospaced.italic.malayalam=Lohit Malayalam
-monospaced.italic.oriya=Lohit Oriya
-monospaced.italic.punjabi=Lohit Punjabi
-monospaced.italic.tamil=Lohit Tamil
-monospaced.italic.telugu=Lohit Telugu
-monospaced.italic.sinhala=LKLUG
-
-monospaced.bolditalic.latin-1=DejaVu Sans Mono Bold Oblique
-monospaced.bolditalic.japanese-x0208=Sazanami Gothic
-monospaced.bolditalic.korean=Baekmuk Gulim
-monospaced.bolditalic.chinese-big5=AR PL ShanHeiSun Uni
-monospaced.bolditalic.chinese-gb18030=AR PL ShanHeiSun Uni
-monospaced.bolditalic.bengali=Lohit Bengali
-monospaced.bolditalic.gujarati=Lohit Gujarati
-monospaced.bolditalic.hindi=Lohit Hindi
-monospaced.bolditalic.malayalam=Lohit Malayalam
-monospaced.bolditalic.oriya=Lohit Oriya
-monospaced.bolditalic.punjabi=Lohit Punjabi
-monospaced.bolditalic.tamil=Lohit Tamil
-monospaced.bolditalic.telugu=Lohit Telugu
-monospaced.bolditalic.sinhala=LKLUG
-
-dialoginput.plain.latin-1=DejaVu Sans Mono
-dialoginput.plain.japanese-x0208=Sazanami Gothic
-dialoginput.plain.korean=Baekmuk Gulim
-dialoginput.plain.chinese-big5=AR PL ShanHeiSun Uni
-dialoginput.plain.chinese-gb18030=AR PL ShanHeiSun Uni
-dialoginput.plain.bengali=Lohit Bengali
-dialoginput.plain.gujarati=Lohit Gujarati
-dialoginput.plain.hindi=Lohit Hindi
-dialoginput.plain.malayalam=Lohit Malayalam
-dialoginput.plain.oriya=Lohit Oriya
-dialoginput.plain.punjabi=Lohit Punjabi
-dialoginput.plain.tamil=Lohit Tamil
-dialoginput.plain.telugu=Lohit Telugu
-dialoginput.plain.sinhala=LKLUG
-
-dialoginput.bold.latin-1=DejaVu Sans Mono Bold
-dialoginput.bold.japanese-x0208=Sazanami Gothic
-dialoginput.bold.korean=Baekmuk Gulim
-dialoginput.bold.chinese-big5=AR PL ShanHeiSun Uni
-dialoginput.bold.chinese-gb18030=AR PL ShanHeiSun Uni
-dialoginput.bold.bengali=Lohit Bengali
-dialoginput.bold.gujarati=Lohit Gujarati
-dialoginput.bold.hindi=Lohit Hindi
-dialoginput.bold.malayalam=Lohit Malayalam
-dialoginput.bold.oriya=Lohit Oriya
-dialoginput.bold.punjabi=Lohit Punjabi
-dialoginput.bold.tamil=Lohit Tamil
-dialoginput.bold.telugu=Lohit Telugu
-dialoginput.bold.sinhala=LKLUG
-
-dialoginput.italic.latin-1=DejaVu Sans Mono Oblique
-dialoginput.italic.japanese-x0208=Sazanami Gothic
-dialoginput.italic.korean=Baekmuk Gulim
-dialoginput.italic.chinese-big5=AR PL ShanHeiSun Uni
-dialoginput.italic.chinese-gb18030=AR PL ShanHeiSun Uni
-dialoginput.italic.bengali=Lohit Bengali
-dialoginput.italic.gujarati=Lohit Gujarati
-dialoginput.italic.hindi=Lohit Hindi
-dialoginput.italic.malayalam=Lohit Malayalam
-dialoginput.italic.oriya=Lohit Oriya
-dialoginput.italic.punjabi=Lohit Punjabi
-dialoginput.italic.tamil=Lohit Tamil
-dialoginput.italic.telugu=Lohit Telugu
-dialoginput.italic.sinhala=LKLUG
-
-dialoginput.bolditalic.latin-1=DejaVu Sans Mono Bold Oblique
-dialoginput.bolditalic.japanese-x0208=Sazanami Gothic
-dialoginput.bolditalic.korean=Baekmuk Gulim
-dialoginput.bolditalic.chinese-big5=AR PL ShanHeiSun Uni
-dialoginput.bolditalic.chinese-gb18030=AR PL ShanHeiSun Uni
-dialoginput.bolditalic.bengali=Lohit Bengali
-dialoginput.bolditalic.gujarati=Lohit Gujarati
-dialoginput.bolditalic.hindi=Lohit Hindi
-dialoginput.bolditalic.malayalam=Lohit Malayalam
-dialoginput.bolditalic.oriya=Lohit Oriya
-dialoginput.bolditalic.punjabi=Lohit Punjabi
-dialoginput.bolditalic.tamil=Lohit Tamil
-dialoginput.bolditalic.telugu=Lohit Telugu
-dialoginput.bolditalic.sinhala=LKLUG
-
-# Search Sequences
-
-sequence.allfonts=latin-1
-sequence.allfonts.Big5=chinese-big5,latin-1
-sequence.allfonts.x-euc-jp-linux=japanese-x0208,latin-1
-sequence.allfonts.EUC-KR=korean,latin-1
-sequence.allfonts.GB18030=chinese-gb18030,latin-1
-sequence.fallback=chinese-big5,chinese-gb18030,japanese-x0208,korean,bengali,gujarati,hindi,oriya,punjabi,malayalam,tamil,telugu,sinhala
-
-# Font File Names
-
-filename.DejaVu_Sans=/usr/share/fonts/dejavu/DejaVuSans.ttf
-filename.DejaVu_Sans_Bold=/usr/share/fonts/dejavu/DejaVuSans-Bold.ttf
-filename.DejaVu_Sans_Oblique=/usr/share/fonts/dejavu/DejaVuSans-Oblique.ttf
-filename.DejaVu_Sans_Bold_Oblique=/usr/share/fonts/dejavu/DejaVuSans-BoldOblique.ttf
-
-filename.DejaVu_Sans_Mono=/usr/share/fonts/dejavu/DejaVuSansMono.ttf
-filename.DejaVu_Sans_Mono_Bold=/usr/share/fonts/dejavu/DejaVuSansMono-Bold.ttf
-filename.DejaVu_Sans_Mono_Oblique=/usr/share/fonts/dejavu/DejaVuSansMono-Oblique.ttf
-filename.DejaVu_Sans_Mono_Bold_Oblique=/usr/share/fonts/dejavu/DejaVuSansMono-BoldOblique.ttf
-
-filename.DejaVu_Serif=/usr/share/fonts/dejavu/DejaVuSerif.ttf
-filename.DejaVu_Serif_Bold=/usr/share/fonts/dejavu/DejaVuSerif-Bold.ttf
-filename.DejaVu_Serif_Oblique=/usr/share/fonts/dejavu/DejaVuSerif-Oblique.ttf
-filename.DejaVu_Serif_Bold_Oblique=/usr/share/fonts/dejavu/DejaVuSerif-BoldOblique.ttf
-
-filename.Sazanami_Gothic=/usr/share/fonts/sazanami-fonts-gothic/sazanami-gothic.ttf
-filename.Sazanami_Mincho=/usr/share/fonts/sazanami-fonts-mincho/sazanami-mincho.ttf
-filename.AR_PL_ShanHeiSun_Uni=/usr/share/fonts/cjkunifonts-uming/uming.ttc
-filename.AR_PL_ZenKai_Uni=/usr/share/fonts/cjkunifonts-ukai/ukai.ttc
-filename.Baekmuk_Gulim=/usr/share/fonts/baekmuk-ttf-gulim/gulim.ttf
-filename.Baekmuk_Batang=/usr/share/fonts/baekmuk-ttf-batang/batang.ttf
-
-filename.Lohit_Bengali=/usr/share/fonts/lohit-bengali/lohit_bn.ttf
-filename.Lohit_Gujarati=/usr/share/fonts/lohit-gujarati/lohit_gu.ttf
-filename.Lohit_Hindi=/usr/share/fonts/lohit-hindi/lohit_hi.ttf
-filename.Lohit_Kannda=/usr/share/fonts/lohit-kannada/lohit_kn.ttf
-filename.Lohit_Malayalam=/usr/share/fonts/lohit-malayalam/lohit_ml.ttf
-filename.Lohit_Oriya=/usr/share/fonts/lohit-oriya/lohit_or.ttf
-filename.Lohit_Punjabi=/usr/share/fonts/lohit-punjabi/lohit_pa.ttf
-filename.Lohit_Tamil=/usr/share/fonts/lohit-tamil/lohit_ta.ttf
-filename.Lohit_Telugu=/usr/share/fonts/lohit-telugu/lohit_te.ttf
-filename.LKLUG=/usr/share/fonts/lklug/lklug.ttf
-
--- a/jdk/src/solaris/classes/sun/awt/fontconfigs/linux.fontconfig.SuSE.properties	Mon Jun 10 17:04:18 2013 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,154 +0,0 @@
-#
-# 
-# Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
-#
-# This code is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License version 2 only, as
-# published by the Free Software Foundation.  Oracle designates this
-# particular file as subject to the "Classpath" exception as provided
-# by Oracle in the LICENSE file that accompanied this code.
-#
-# This code is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-# version 2 for more details (a copy is included in the LICENSE file that
-# accompanied this code).
-#
-# You should have received a copy of the GNU General Public License version
-# 2 along with this work; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
-# or visit www.oracle.com if you need additional information or have any
-# questions.
-#
-
-# Version
-
-# Uses SuSE 10.2 fonts and file paths.
-version=1
-
-# Component Font Mappings
-
-dialog.plain.latin-1=Albany AMT
-dialog.plain.japanese-x0208=Sazanami Gothic
-dialog.plain.korean=UnDotum
-
-dialog.bold.latin-1=Albany AMT Bold
-dialog.bold.japanese-x0208=Sazanami Gothic
-dialog.bold.korean=UnDotum Bold
-
-dialog.italic.latin-1=Albany AMT Italic
-dialog.italic.japanese-x0208=Sazanami Gothic
-dialog.italic.korean=UnDotum
-
-dialog.bolditalic.latin-1=Albany AMT Bold Italic
-dialog.bolditalic.japanese-x0208=Sazanami Gothic
-dialog.bolditalic.korean=UnDotum Bold
-
-
-sansserif.plain.latin-1=Albany AMT
-sansserif.plain.japanese-x0208=Sazanami Gothic
-sansserif.plain.korean=UnDotum
-
-sansserif.bold.latin-1=Albany AMT Bold
-sansserif.bold.japanese-x0208=Sazanami Gothic
-sansserif.bold.korean=UnDotum Bold
-
-sansserif.italic.latin-1=Albany AMT Italic
-sansserif.italic.japanese-x0208=Sazanami Gothic
-sansserif.italic.korean=UnDotum
-
-sansserif.bolditalic.latin-1=Albany AMT Bold Italic
-sansserif.bolditalic.japanese-x0208=Sazanami Gothic
-sansserif.bolditalic.korean=UnDotum Bold
-
-
-serif.plain.latin-1=Thorndale AMT
-serif.plain.japanese-x0208=Sazanami Mincho
-serif.plain.korean=UnBatang
-
-serif.bold.latin-1=Thorndale AMT Bold
-serif.bold.japanese-x0208=Sazanami Mincho
-serif.bold.korean=UnBatang Bold
-
-serif.italic.latin-1=Thorndale AMT Italic
-serif.italic.japanese-x0208=Sazanami Mincho
-serif.italic.korean=UnBatang
-
-serif.bolditalic.latin-1=Thorndale AMT Bold Italic
-serif.bolditalic.japanese-x0208=Sazanami Mincho
-serif.bolditalic.korean=UnBatang Bold
-
-
-monospaced.plain.latin-1=Cumberland AMT
-monospaced.plain.japanese-x0208=Sazanami Gothic
-monospaced.plain.korean=UnDotum
-
-monospaced.bold.latin-1=Cumberland AMT Bold
-monospaced.bold.japanese-x0208=Sazanami Gothic
-monospaced.bold.korean=UnDotum Bold
-
-monospaced.italic.latin-1=Cumberland AMT Italic
-monospaced.italic.japanese-x0208=Sazanami Gothic
-monospaced.italic.korean=UnDotum
-
-monospaced.bolditalic.latin-1=Cumberland AMT Bold Italic
-monospaced.bolditalic.japanese-x0208=Sazanami Gothic
-monospaced.bolditalic.korean=UnDotum Bold
-
-
-dialoginput.plain.latin-1=Cumberland AMT
-dialoginput.plain.japanese-x0208=Sazanami Gothic
-dialoginput.plain.korean=UnDotum
-
-dialoginput.bold.latin-1=Cumberland AMT Bold
-dialoginput.bold.japanese-x0208=Sazanami Gothic
-dialoginput.bold.korean=UnDotum Bold
-
-dialoginput.italic.latin-1=Cumberland AMT Italic
-dialoginput.italic.japanese-x0208=Sazanami Gothic
-dialoginput.italic.korean=UnDotum
-
-dialoginput.bolditalic.latin-1=Cumberland AMT Bold Italic
-dialoginput.bolditalic.japanese-x0208=Sazanami Gothic
-dialoginput.bolditalic.korean=UnDotum Bold
-
-allfonts.chinese-big5=AR PL Mingti2L Big5
-allfonts.chinese-gb18030=AR PL SungtiL GB
-
-# Search Sequences
-
-sequence.allfonts=latin-1
-sequence.allfonts.Big5=chinese-big5,latin-1
-sequence.allfonts.x-euc-jp-linux=japanese-x0208,latin-1
-sequence.allfonts.EUC-KR=korean,latin-1
-sequence.allfonts.GB18030=chinese-gb18030,latin-1
-sequence.fallback=chinese-big5,chinese-gb18030,japanese-x0208,korean
-
-# Font File Names
-
-filename.Albany_AMT=/usr/share/fonts/truetype/albw.ttf
-filename.Albany_AMT_Bold=/usr/share/fonts/truetype/albwb.ttf
-filename.Albany_AMT_Italic=/usr/share/fonts/truetype/albwb.ttf
-filename.Albany_AMT_Bold_Italic=/usr/share/fonts/truetype/albwbi.ttf
-
-filename.Thorndale_AMT=/usr/share/fonts/truetype/thowr___.ttf
-filename.Thorndale_AMT_Bold=/usr/share/fonts/truetype/thowb___.ttf
-filename.Thorndale_AMT_Italic=/usr/share/fonts/truetype/thowi___.ttf
-filename.Thorndale_AMT_Bold_Italic=/usr/share/fonts/truetype/thowbi__.ttf
-
-filename.Cumberland_AMT=/usr/share/fonts/truetype/cumbwr__.ttf
-filename.Cumberland_AMT_Bold=/usr/share/fonts/truetype/cumbwb__.ttf
-filename.Cumberland_AMT_Italic=/usr/share/fonts/truetype/cumbwi__.ttf
-filename.Cumberland_AMT_Bold_Italic=/usr/share/fonts/truetype/cumbwbi_.ttf
-
-filename.Sazanami_Gothic=/usr/share/fonts/truetype/sazanami-gothic.ttf
-filename.Sazanami_Mincho=/usr/share/fonts/truetype/sazanami-mincho.ttf
-filename.AR_PL_SungtiL_GB=/usr/share/fonts/truetype/gbsn00lp.ttf
-filename.AR_PL_Mingti2L_Big5=/usr/share/fonts/truetype/bsmi00lp.ttf
-filename.UnDotum=/usr/share/fonts/truetype/UnDotum.ttf
-filename.UnDotum_Bold=/usr/share/fonts/truetype/UnDotumBold.ttf
-filename.UnBatang=/usr/share/fonts/truetype/UnBatang.ttf
-filename.UnBatang_Bold=/usr/share/fonts/truetype/UnBatangBold.ttf
--- a/jdk/src/solaris/classes/sun/awt/fontconfigs/linux.fontconfig.Ubuntu.properties	Mon Jun 10 17:04:18 2013 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,348 +0,0 @@
-#
-# 
-# Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
-#
-# This code is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License version 2 only, as
-# published by the Free Software Foundation.  Oracle designates this
-# particular file as subject to the "Classpath" exception as provided
-# by Oracle in the LICENSE file that accompanied this code.
-#
-# This code is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-# version 2 for more details (a copy is included in the LICENSE file that
-# accompanied this code).
-#
-# You should have received a copy of the GNU General Public License version
-# 2 along with this work; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
-# or visit www.oracle.com if you need additional information or have any
-# questions.
-#
-
-# Version
-
-# Uses Ubuntu 8.04 (hardy), Debian 6.0 (Squeeze) (and more recent releases) fonts and file paths.
-version=1
-
-# Component Font Mappings
-
-# Chinese fonts
-allfonts.umingcn=AR PL UMing CN
-#allfonts.umingcn.motif=AR PL UMing CN
-allfonts.uminghk=AR PL UMing HK
-#allfonts.uminghk.motif=AR PL UMing HK
-allfonts.umingtw=AR PL UMing TW
-#allfonts.umingtw.motif=AR PL UMing TW
-allfonts.wqy-zenhei=WenQuanYi Zen Hei
-#allfonts.wqy-zenhei.motif=WenQuanYi Zen Hei
-allfonts.shanheisun=AR PL ShanHeiSun Uni
-#allfonts.shanheisun.motif=AR PL ShanHeiSun Uni
-
-# Indic scripts
-allfonts.bengali=Lohit Bengali
-allfonts.gujarati=Lohit Gujarati
-allfonts.hindi=Lohit Hindi
-#allfonts.malayalam=Lohit Malayalam
-allfonts.oriya=Lohit Oriya
-allfonts.punjabi=Lohit Punjabi
-allfonts.tamil=Lohit Tamil
-allfonts.telugu=Lohit Telugu
-allfonts.sinhala=LKLUG
-
-
-serif.plain.latin-1=DejaVu Serif
-#serif.plain.latin-1.motif=LuxiSerif-Regular
-serif.plain.japanese-kochi=Kochi Mincho
-serif.plain.japanese-sazanami=Sazanami Mincho
-serif.plain.japanese-vlgothic=Sazanami Mincho
-serif.plain.korean-baekmuk=Baekmuk Batang
-#serif.plain.korean-baekmuk.motif=Baekmuk Batang
-serif.plain.korean-un=UnBatang
-#serif.plain.korean-un.motif=UnBatang
-
-serif.bold.latin-1=DejaVu Serif Bold
-#serif.bold.latin-1.motif=LuxiSerif-Bold
-serif.bold.japanese-kochi=Kochi Mincho
-serif.bold.japanese-sazanami=Sazanami Mincho
-serif.bold.japanese-vlgothic=Sazanami Mincho
-serif.bold.korean-baekmuk=Baekmuk Batang
-#serif.bold.korean-baekmuk.motif=Baekmuk Batang
-serif.bold.korean-un=UnBatang Bold
-#serif.bold.korean-un.motif=UnBatang Bold
-
-serif.italic.latin-1=DejaVu Serif Oblique
-#serif.italic.latin-1.motif=LuxiSerif-Oblique
-serif.italic.japanese-kochi=Kochi Mincho
-serif.italic.japanese-sazanami=Sazanami Mincho
-serif.italic.japanese-vlgothic=Sazanami Mincho
-serif.italic.korean-baekmuk=Baekmuk Batang
-#serif.italic.korean-baekmuk.motif=Baekmuk Batang
-serif.italic.korean-un=UnBatang
-#serif.italic.korean-un.motif=UnBatang
-
-serif.bolditalic.latin-1=DejaVu Serif Bold Oblique
-#serif.bolditalic.latin-1.motif=LuxiSerif-BoldOblique
-serif.bolditalic.japanese-kochi=Kochi Mincho
-serif.bolditalic.japanese-sazanami=Sazanami Mincho
-serif.bolditalic.japanese-vlgothic=Sazanami Mincho
-serif.bolditalic.korean-baekmuk=Baekmuk Batang
-#serif.bolditalic.korean-baekmuk.motif=Baekmuk Batang
-serif.bolditalic.korean-un=UnBatang Bold
-#serif.bolditalic.korean-un.motif=UnBatang Bold
-
-sansserif.plain.latin-1=DejaVu Sans
-#sansserif.plain.latin-1.motif=LuxiSans-Regular
-sansserif.plain.japanese-kochi=Kochi Gothic
-sansserif.plain.japanese-sazanami=Sazanami Gothic
-sansserif.plain.japanese-vlgothic=VL PGothic
-sansserif.plain.korean-baekmuk=Baekmuk Gulim
-#sansserif.plain.korean-baekmuk.motif=Baekmuk Gulim
-sansserif.plain.korean-un=UnDotum
-#sansserif.plain.korean-un.motif=UnDotum
-
-sansserif.bold.latin-1=DejaVu Sans Bold
-#sansserif.bold.latin-1.motif=LuxiSans-Bold
-sansserif.bold.japanese-kochi=Kochi Gothic
-sansserif.bold.japanese-sazanami=Sazanami Gothic
-sansserif.bold.japanese-vlgothic=VL PGothic
-sansserif.bold.korean-baekmuk=Baekmuk Gulim
-#sansserif.bold.korean-baekmuk.motif=Baekmuk Gulim
-sansserif.bold.korean-un=UnDotum Bold
-#sansserif.bold.korean-un.motif=UnDotum Bold
-
-sansserif.italic.latin-1=DejaVu Sans Oblique
-#sansserif.italic.latin-1.motif=LuxiSans-Oblique
-sansserif.italic.japanese-kochi=Kochi Gothic
-sansserif.italic.japanese-sazanami=Sazanami Gothic
-sansserif.italic.japanese-vlgothic=VL PGothic
-sansserif.italic.korean-baekmuk=Baekmuk Gulim
-#sansserif.italic.korean-baekmuk.motif=Baekmuk Gulim
-sansserif.italic.korean-un=UnDotum
-#sansserif.italic.korean-un.motif=UnDotum
-
-sansserif.bolditalic.latin-1=DejaVu Sans Bold Oblique
-#sansserif.bolditalic.latin-1.motif=LuxiSans-BoldOblique
-sansserif.bolditalic.japanese-kochi=Kochi Gothic
-sansserif.bolditalic.japanese-sazanami=Sazanami Gothic
-sansserif.bolditalic.japanese-vlgothic=VL PGothic
-sansserif.bolditalic.korean-baekmuk=Baekmuk Gulim
-#sansserif.bolditalic.korean-baekmuk.motif=Baekmuk Gulim
-sansserif.bolditalic.korean-un=UnDotum Bold
-#sansserif.bolditalic.korean-un.motif=UnDotum Bold
-
-monospaced.plain.latin-1=DejaVu Sans Mono
-#monospaced.plain.latin-1.motif=LuxiMono-Regular
-monospaced.plain.japanese-kochi=Kochi Gothic
-monospaced.plain.japanese-sazanami=Sazanami Gothic
-monospaced.plain.japanese-vlgothic=VL Gothic
-monospaced.plain.korean-baekmuk=Baekmuk Gulim
-#monospaced.plain.korean-baekmuk.motif=Baekmuk Gulim
-monospaced.plain.korean-un=UnDotum
-#monospaced.plain.korean-un.motif=UnDotum
-
-monospaced.bold.latin-1=DejaVu Sans Mono Bold
-#monospaced.bold.latin-1.motif=LuxiMono-Bold
-monospaced.bold.japanese-kochi=Kochi Gothic
-monospaced.bold.japanese-sazanami=Sazanami Gothic
-monospaced.bold.japanese-vlgothic=VL Gothic
-monospaced.bold.korean-baekmuk=Baekmuk Gulim
-#monospaced.bold.korean-baekmuk.motif=Baekmuk Gulim
-monospaced.bold.korean-un=UnDotum Bold
-#monospaced.bold.korean-un.motif=UnDotum Bold
-
-monospaced.italic.latin-1=DejaVu Sans Mono Oblique
-#monospaced.italic.latin-1.motif=LuxiMono-Oblique
-monospaced.italic.japanese-kochi=Kochi Gothic
-monospaced.italic.japanese-sazanami=Sazanami Gothic
-monospaced.italic.japanese-vlgothic=VL Gothic
-monospaced.italic.korean-baekmuk=Baekmuk Gulim
-#monospaced.italic.korean-baekmuk.motif=Baekmuk Gulim
-monospaced.italic.korean-un=UnDotum
-#monospaced.italic.korean-un.motif=UnDotum
-
-monospaced.bolditalic.latin-1=DejaVu Sans Mono Bold Oblique
-#monospaced.bolditalic.latin-1.motif=LuxiMono-BoldOblique
-monospaced.bolditalic.japanese-kochi=Kochi Gothic
-monospaced.bolditalic.japanese-sazanami=Sazanami Gothic
-monospaced.bolditalic.japanese-vlgothic=VL Gothic
-monospaced.bolditalic.korean-baekmuk=Baekmuk Gulim
-#monospaced.bolditalic.korean-baekmuk.motif=Baekmuk Gulim
-monospaced.bolditalic.korean-un=UnDotum Bold
-#monospaced.bolditalic.korean-un.motif=UnDotum Bold
-
-dialog.plain.latin-1=DejaVu Sans
-#dialog.plain.latin-1.motif=LuxiSans-Regular
-dialog.plain.japanese-kochi=Kochi Gothic
-dialog.plain.japanese-sazanami=Sazanami Gothic
-dialog.plain.japanese-vlgothic=VL PGothic
-dialog.plain.korean-baekmuk=Baekmuk Gulim
-#dialog.plain.korean-baekmuk.motif=Baekmuk Gulim
-dialog.plain.korean-un=UnDotum
-#dialog.plain.korean-un.motif=UnDotum
-
-dialog.bold.latin-1=DejaVu Sans Bold
-#dialog.bold.latin-1.motif=LuxiSans-Bold
-dialog.bold.japanese-kochi=Kochi Gothic
-dialog.bold.japanese-sazanami=Sazanami Gothic
-dialog.bold.japanese-vlgothic=VL PGothic
-dialog.bold.korean-baekmuk=Baekmuk Gulim
-#dialog.bold.korean-baekmuk.motif=Baekmuk Gulim
-dialog.bold.korean-un=UnDotum Bold
-#dialog.bold.korean-un.motif=UnDotum Bold
-
-dialog.italic.latin-1=DejaVu Sans Oblique
-#dialog.italic.latin-1.motif=LuxiSans-Oblique
-dialog.italic.japanese-kochi=Kochi Gothic
-dialog.italic.japanese-sazanami=Sazanami Gothic
-dialog.italic.japanese-vlgothic=VL PGothic
-dialog.italic.korean-baekmuk=Baekmuk Gulim
-#dialog.italic.korean-baekmuk.motif=Baekmuk Gulim
-dialog.italic.korean-un=UnDotum
-#dialog.italic.korean-un.motif=UnDotum
-
-dialog.bolditalic.latin-1=DejaVu Sans Bold Oblique
-#dialog.bolditalic.latin-1.motif=LuxiSans-BoldOblique
-dialog.bolditalic.japanese-kochi=Kochi Gothic
-dialog.bolditalic.japanese-sazanami=Sazanami Gothic
-dialog.bolditalic.japanese-vlgothic=VL PGothic
-dialog.bolditalic.korean-baekmuk=Baekmuk Gulim
-#dialog.bolditalic.korean-baekmuk.motif=Baekmuk Gulim
-dialog.bolditalic.korean-un=UnDotum Bold
-#dialog.bolditalic.korean-un.motif=UnDotum Bold
-
-dialoginput.plain.latin-1=DejaVu Sans Mono
-#dialoginput.plain.latin-1.motif=LuxiMono-Regular
-dialoginput.plain.japanese-kochi=Kochi Gothic
-dialoginput.plain.japanese-sazanami=Sazanami Gothic
-dialoginput.plain.japanese-vlgothic=VL Gothic
-dialoginput.plain.korean-baekmuk=Baekmuk Gulim
-#dialoginput.plain.korean-baekmuk.motif=Baekmuk Gulim
-dialoginput.plain.korean-un=UnDotum
-#dialoginput.plain.korean-un.motif=UnDotum
-
-dialoginput.bold.latin-1=DejaVu Sans Mono Bold
-#dialoginput.bold.latin-1.motif=LuxiMono-Bold
-dialoginput.bold.japanese-kochi=Kochi Gothic
-dialoginput.bold.japanese-sazanami=Sazanami Gothic
-dialoginput.bold.japanese-vlgothic=VL Gothic
-dialoginput.bold.korean-baekmuk=Baekmuk Gulim
-#dialoginput.bold.korean-baekmuk.motif=Baekmuk Gulim
-dialoginput.bold.korean-un=UnDotum Bold
-#dialoginput.bold.korean-un.motif=UnDotum Bold
-
-dialoginput.italic.latin-1=DejaVu Sans Mono Oblique
-#dialoginput.italic.latin-1.motif=LuxiMono-Oblique
-dialoginput.italic.japanese-kochi=Kochi Gothic
-dialoginput.italic.japanese-sazanami=Sazanami Gothic
-dialoginput.italic.japanese-vlgothic=VL Gothic
-dialoginput.italic.korean-baekmuk=Baekmuk Gulim
-#dialoginput.italic.korean-baekmuk.motif=Baekmuk Gulim
-dialoginput.italic.korean-un=UnDotum
-#dialoginput.italic.korean-un.motif=UnDotum
-
-dialoginput.bolditalic.latin-1=DejaVu Sans Mono Bold Oblique
-#dialoginput.bolditalic.latin-1.motif=LuxiMono-BoldOblique
-dialoginput.bolditalic.japanese-kochi=Kochi Gothic
-dialoginput.bolditalic.japanese-sazanami=Sazanami Gothic
-dialoginput.bolditalic.japanese-vlgothic=VL Gothic
-dialoginput.bolditalic.korean-baekmuk=Baekmuk Gulim
-#dialoginput.bolditalic.korean-baekmuk.motif=Baekmuk Gulim
-dialoginput.bolditalic.korean-un=UnDotum Bold
-#dialoginput.bolditalic.korean-un.motif=UnDotum Bold
-
-# Search Sequences
-
-sequence.allfonts=latin-1
-sequence.allfonts.GB18030=latin-1,umingcn,shanheisun,wqy-zenhei
-sequence.allfonts.GB2312=latin-1,umingcn,shanheisun,wqy-zenhei
-sequence.allfonts.GBK=latin-1,umingcn,shanheisun,wqy-zenhei
-sequence.allfonts.x-euc-jp-linux=latin-1,japanese-vlgothic,japanese-sazanami,japanese-kochi
-sequence.allfonts.EUC-KR=latin-1,korean-un,korean-baekmuk
-sequence.allfonts.Big5=latin-1,umingtw,shanheisun,wqy-zenhei
-sequence.allfonts.Big5-HKSCS=latin-1,uminghk,shanheisun,wqy-zenhei
-#sequence.fallback=uminghk,shanheisun,wqy-zenhei,japanese-vlgothic,japanese-kochi,japanese-sazanami,korean-un,korean-baekmuk,bengali,gujarati,hindi,oriya,punjabi,malayalam,tamil,telugu,sinhala
-sequence.fallback=uminghk,shanheisun,wqy-zenhei,japanese-vlgothic,japanese-sazanami,japanese-kochi,korean-un,korean-baekmuk,bengali,gujarati,hindi,oriya,punjabi,tamil,telugu
-
-# Exclusion Ranges
-
-exclusion.japanese-kochi=0390-03d6,2200-22ef,2701-27be
-exclusion.japanese-sazanami=0390-03d6,2200-22ef,2701-27be
-exclusion.japanese-vlgothic=0390-03d6,2200-22ef,2701-27be
-
-# Font File Names
-
-filename.DejaVu_Sans=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSans.ttf
-filename.DejaVu_Sans_Bold=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSans-Bold.ttf
-filename.DejaVu_Sans_Oblique=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSans-Oblique.ttf
-filename.DejaVu_Sans_Bold_Oblique=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSans-BoldOblique.ttf
-
-filename.DejaVu_Sans_Mono=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSansMono.ttf
-filename.DejaVu_Sans_Mono_Bold=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSansMono-Bold.ttf
-filename.DejaVu_Sans_Mono_Oblique=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSansMono-Oblique.ttf
-filename.DejaVu_Sans_Mono_Bold_Oblique=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSansMono-BoldOblique.ttf
-
-filename.DejaVu_Serif=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSerif.ttf
-filename.DejaVu_Serif_Bold=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSerif-Bold.ttf
-filename.DejaVu_Serif_Oblique=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSerif-Oblique.ttf
-filename.DejaVu_Serif_Bold_Oblique=/usr/share/fonts/truetype/ttf-dejavu/DejaVuSerif-BoldOblique.ttf
-
-filename.AR_PL_UMing_CN=/usr/share/fonts/truetype/arphic/uming.ttc
-filename.AR_PL_UMing_HK=/usr/share/fonts/truetype/arphic/uming.ttc
-filename.AR_PL_UMing_TW=/usr/share/fonts/truetype/arphic/uming.ttc
-filename.AR_PL_ShanHeiSun_Uni=/usr/share/fonts/truetype/arphic/uming.ttf
-
-filename.WenQuanYi_Zen_Hei=/usr/share/fonts/truetype/wqy/wqy-zenhei.ttf
-filename.Baekmuk_Batang=/usr/share/fonts/truetype/baekmuk/batang.ttf
-filename.UnBatang=/usr/share/fonts/truetype/unfonts/UnBatang.ttf
-filename.UnBatang_Bold=/usr/share/fonts/truetype/unfonts/UnBatangBold.ttf
-filename.Baekmuk_Gulim=/usr/share/fonts/truetype/baekmuk/gulim.ttf
-filename.UnDotum=/usr/share/fonts/truetype/unfonts/UnDotum.ttf
-filename.UnDotum_Bold=/usr/share/fonts/truetype/unfonts/UnDotumBold.ttf
-filename.Kochi_Gothic=/usr/share/fonts/truetype/kochi/kochi-gothic.ttf
-filename.Sazanami_Gothic=/usr/share/fonts/truetype/sazanami/sazanami-gothic.ttf
-filename.Kochi_Mincho=/usr/share/fonts/truetype/kochi/kochi-mincho.ttf
-filename.Sazanami_Mincho=/usr/share/fonts/truetype/sazanami/sazanami-mincho.ttf
-filename.VL_Gothic=/usr/share/fonts/truetype/vlgothic/VL-Gothic-Regular.ttf
-filename.VL_PGothic=/usr/share/fonts/truetype/vlgothic/VL-PGothic-Regular.ttf
-
-filename.Lohit_Bengali=/usr/share/fonts/truetype/ttf-bengali-fonts/lohit_bn.ttf
-filename.Lohit_Gujarati=/usr/share/fonts/truetype/ttf-indic-fonts-core/lohit_gu.ttf
-filename.Lohit_Hindi=/usr/share/fonts/truetype/ttf-indic-fonts-core/lohit_hi.ttf
-filename.Lohit_Kannda=/usr/share/fonts/truetype/ttf-kannada-fonts/lohit_kn.ttf
-#filename.Lohit_Malayalam=/usr/share/fonts/lohit-malayalam/lohit_ml.ttf
-filename.Lohit_Oriya=/usr/share/fonts/truetype/ttf-oriya-fonts/lohit_or.ttf
-filename.Lohit_Punjabi=/usr/share/fonts/truetype/ttf-indic-fonts-core/lohit_pa.ttf
-filename.Lohit_Tamil=/usr/share/fonts/truetype/ttf-indic-fonts-core/lohit_ta.ttf
-filename.Lohit_Telugu=/usr/share/fonts/truetype/ttf-telugu-fonts/lohit_te.ttf
-filename.LKLUG=/usr/share/fonts/truetype/ttf-sinhala-lklug/lklug.ttf
-
-filename.LuxiSans-Regular=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luxisr.ttf
-filename.LuxiSans-Bold=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luxisb.ttf
-filename.LuxiSans-Oblique=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luxisri.ttf
-filename.LuxiSans-BoldOblique=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luxisbi.ttf
-filename.LuxiMono-Regular=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luximr.ttf
-filename.LuxiMono-Bold=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luximb.ttf
-filename.LuxiMono-Oblique=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luximri.ttf
-filename.LuxiMono-BoldOblique=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luximbi.ttf
-filename.LuxiSerif-Regular=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luxirr.ttf
-filename.LuxiSerif-Bold=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luxirb.ttf
-filename.LuxiSerif-Oblique=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luxirri.ttf
-filename.LuxiSerif-BoldOblique=/usr/share/fonts/truetype/ttf-xfree86-nonfree/luxirbi.ttf
-
-# AWT X11 font paths
-awtfontpath.latin-1=/usr/share/fonts/X11/Type1
-awtfontpath.umingcn=/usr/share/fonts/truetype/arphic
-awtfontpath.uminghk=/usr/share/fonts/truetype/arphic
-awtfontpath.umingtw=/usr/share/fonts/truetype/arphic
-awtfontpath.shanheisun=/usr/share/fonts/truetype/arphic
-awtfontpath.wqy-zenhei=/usr/share/fonts/truetype/wqy
-awtfontpath.japanese-kochi=/usr/share/fonts/truetype/kochi
-awtfontpath.japanese-sazanami=/usr/share/fonts/truetype/sazanami
-awtfontpath.japanese-vlgothic=/usr/share/fonts/truetype/vlgothic
-awtfontpath.korean-baekmuk=/usr/share/fonts/truetype/baekmuk
-awtfontpath.korean-un=/usr/share/fonts/truetype/unfonts
--- a/jdk/src/solaris/classes/sun/awt/fontconfigs/linux.fontconfig.properties	Mon Jun 10 17:04:18 2013 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,189 +0,0 @@
-#
-# 
-# Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
-#
-# This code is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License version 2 only, as
-# published by the Free Software Foundation.  Oracle designates this
-# particular file as subject to the "Classpath" exception as provided
-# by Oracle in the LICENSE file that accompanied this code.
-#
-# This code is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-# version 2 for more details (a copy is included in the LICENSE file that
-# accompanied this code).
-#
-# You should have received a copy of the GNU General Public License version
-# 2 along with this work; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
-# or visit www.oracle.com if you need additional information or have any
-# questions.
-#
-
-# Version
-
-# Uses Fedora Core 6 fonts and file paths.
-version=1
-
-# Component Font Mappings
-
-dialog.plain.latin-1=DejaVu LGC Sans
-dialog.plain.japanese-x0208=Sazanami Gothic
-dialog.plain.korean=Baekmuk Gulim
-dialog.plain.chinese-big5=AR PL ShanHeiSun Uni
-dialog.plain.chinese-gb18030=AR PL ShanHeiSun Uni
-
-dialog.bold.latin-1=DejaVu LGC Sans Bold
-dialog.bold.japanese-x0208=Sazanami Gothic
-dialog.bold.korean=Baekmuk Gulim
-dialog.bold.chinese-big5=AR PL ShanHeiSun Uni
-dialog.bold.chinese-gb18030=AR PL ShanHeiSun Uni
-
-dialog.italic.latin-1=DejaVu LGC Sans Oblique
-dialog.italic.japanese-x0208=Sazanami Gothic
-dialog.italic.korean=Baekmuk Gulim
-dialog.italic.chinese-big5=AR PL ShanHeiSun Uni
-dialog.italic.chinese-gb18030=AR PL ShanHeiSun Uni
-
-dialog.bolditalic.latin-1=DejaVu LGC Sans Bold Oblique
-dialog.bolditalic.japanese-x0208=Sazanami Gothic
-dialog.bolditalic.korean=Baekmuk Gulim
-dialog.bolditalic.chinese-big5=AR PL ShanHeiSun Uni
-dialog.bolditalic.chinese-gb18030=AR PL ShanHeiSun Uni
-
-
-sansserif.plain.latin-1=DejaVu LGC Sans
-sansserif.plain.japanese-x0208=Sazanami Gothic
-sansserif.plain.korean=Baekmuk Gulim
-sansserif.plain.chinese-big5=AR PL ShanHeiSun Uni
-sansserif.plain.chinese-gb18030=AR PL ShanHeiSun Uni
-
-sansserif.bold.latin-1=DejaVu LGC Sans Bold
-sansserif.bold.japanese-x0208=Sazanami Gothic
-sansserif.bold.korean=Baekmuk Gulim
-sansserif.bold.chinese-big5=AR PL ShanHeiSun Uni
-sansserif.bold.chinese-gb18030=AR PL ShanHeiSun Uni
-
-sansserif.italic.latin-1=DejaVu LGC Sans Oblique
-sansserif.italic.japanese-x0208=Sazanami Gothic
-sansserif.italic.korean=Baekmuk Gulim
-sansserif.italic.chinese-big5=AR PL ShanHeiSun Uni
-sansserif.italic.chinese-gb18030=AR PL ShanHeiSun Uni
-
-sansserif.bolditalic.latin-1=DejaVu LGC Sans Bold Oblique
-sansserif.bolditalic.japanese-x0208=Sazanami Gothic
-sansserif.bolditalic.korean=Baekmuk Gulim
-sansserif.bolditalic.chinese-big5=AR PL ShanHeiSun Uni
-sansserif.bolditalic.chinese-gb18030=AR PL ShanHeiSun Uni
-
-
-serif.plain.latin-1=DejaVu LGC Serif
-serif.plain.japanese-x0208=Sazanami Mincho
-serif.plain.korean=Baekmuk Batang
-serif.plain.chinese-big5=AR PL ZenKai Uni
-serif.plain.chinese-gb18030=AR PL ZenKai Uni
-
-serif.bold.latin-1=DejaVu LGC Serif Bold
-serif.bold.japanese-x0208=Sazanami Mincho
-serif.bold.korean=Baekmuk Batang
-serif.bold.chinese-big5=AR PL ZenKai Uni
-serif.bold.chinese-gb18030=AR PL ZenKai Uni
-
-serif.italic.latin-1=DejaVu LGC Serif Oblique
-serif.italic.japanese-x0208=Sazanami Mincho
-serif.italic.korean=Baekmuk Batang
-serif.italic.chinese-big5=AR PL ZenKai Uni
-serif.italic.chinese-gb18030=AR PL ZenKai Uni
-
-serif.bolditalic.latin-1=DejaVu LGC Serif Bold Oblique
-serif.bolditalic.japanese-x0208=Sazanami Mincho
-serif.bolditalic.korean=Baekmuk Batang
-serif.bolditalic.chinese-big5=AR PL ZenKai Uni
-serif.bolditalic.chinese-gb18030=AR PL ZenKai Uni
-
-
-monospaced.plain.latin-1=DejaVu LGC Sans Mono
-monospaced.plain.japanese-x0208=Sazanami Gothic
-monospaced.plain.korean=Baekmuk Gulim
-monospaced.plain.chinese-big5=AR PL ShanHeiSun Uni
-monospaced.plain.chinese-gb18030=AR PL ShanHeiSun Uni
-
-monospaced.bold.latin-1=DejaVu LGC Sans Mono Bold
-monospaced.bold.japanese-x0208=Sazanami Gothic
-monospaced.bold.korean=Baekmuk Gulim
-monospaced.bold.chinese-big5=AR PL ShanHeiSun Uni
-monospaced.bold.chinese-gb18030=AR PL ShanHeiSun Uni
-
-monospaced.italic.latin-1=DejaVu LGC Sans Mono Oblique
-monospaced.italic.japanese-x0208=Sazanami Gothic
-monospaced.italic.korean=Baekmuk Gulim
-monospaced.italic.chinese-big5=AR PL ShanHeiSun Uni
-monospaced.italic.chinese-gb18030=AR PL ShanHeiSun Uni
-
-monospaced.bolditalic.latin-1=DejaVu LGC Sans Mono Bold Oblique
-monospaced.bolditalic.japanese-x0208=Sazanami Gothic
-monospaced.bolditalic.korean=Baekmuk Gulim
-monospaced.bolditalic.chinese-big5=AR PL ShanHeiSun Uni
-monospaced.bolditalic.chinese-gb18030=AR PL ShanHeiSun Uni
-
-
-dialoginput.plain.latin-1=DejaVu LGC Sans Mono
-dialoginput.plain.japanese-x0208=Sazanami Gothic
-dialoginput.plain.korean=Baekmuk Gulim
-dialoginput.plain.chinese-big5=AR PL ShanHeiSun Uni
-dialoginput.plain.chinese-gb18030=AR PL ShanHeiSun Uni
-
-dialoginput.bold.latin-1=DejaVu LGC Sans Mono Bold
-dialoginput.bold.japanese-x0208=Sazanami Gothic
-dialoginput.bold.korean=Baekmuk Gulim
-dialoginput.bold.chinese-big5=AR PL ShanHeiSun Uni
-dialoginput.bold.chinese-gb18030=AR PL ShanHeiSun Uni
-
-dialoginput.italic.latin-1=DejaVu LGC Sans Mono Oblique
-dialoginput.italic.japanese-x0208=Sazanami Gothic
-dialoginput.italic.korean=Baekmuk Gulim
-dialoginput.italic.chinese-big5=AR PL ShanHeiSun Uni
-dialoginput.italic.chinese-gb18030=AR PL ShanHeiSun Uni
-
-dialoginput.bolditalic.latin-1=DejaVu LGC Sans Mono Bold Oblique
-dialoginput.bolditalic.japanese-x0208=Sazanami Gothic
-dialoginput.bolditalic.korean=Baekmuk Gulim
-dialoginput.bolditalic.chinese-big5=AR PL ShanHeiSun Uni
-dialoginput.bolditalic.chinese-gb18030=AR PL ShanHeiSun Uni
-
-# Search Sequences
-
-sequence.allfonts=latin-1
-sequence.allfonts.Big5=chinese-big5,latin-1
-sequence.allfonts.x-euc-jp-linux=japanese-x0208,latin-1
-sequence.allfonts.EUC-KR=korean,latin-1
-sequence.allfonts.GB18030=chinese-gb18030,latin-1
-sequence.fallback=chinese-big5,chinese-gb18030,japanese-x0208,korean
-
-# Font File Names
-
-filename.DejaVu_LGC_Sans=/usr/share/fonts/dejavu-lgc/DejaVuLGCSans.ttf
-filename.DejaVu_LGC_Sans_Bold=/usr/share/fonts/dejavu-lgc/DejaVuLGCSans-Bold.ttf
-filename.DejaVu_LGC_Sans_Oblique=/usr/share/fonts/dejavu-lgc/DejaVuLGCSans-Oblique.ttf
-filename.DejaVu_LGC_Sans_Bold_Oblique=/usr/share/fonts/dejavu-lgc/DejaVuLGCSans-BoldOblique.ttf
-
-filename.DejaVu_LGC_Sans_Mono=/usr/share/fonts/dejavu-lgc/DejaVuLGCSansMono.ttf
-filename.DejaVu_LGC_Sans_Mono_Bold=/usr/share/fonts/dejavu-lgc/DejaVuLGCSansMono-Bold.ttf
-filename.DejaVu_LGC_Sans_Mono_Oblique=/usr/share/fonts/dejavu-lgc/DejaVuLGCSansMono-Oblique.ttf
-filename.DejaVu_LGC_Sans_Mono_Bold_Oblique=/usr/share/fonts/dejavu-lgc/DejaVuLGCSansMono-BoldOblique.ttf
-
-filename.DejaVu_LGC_Serif=/usr/share/fonts/dejavu-lgc/DejaVuLGCSerif.ttf
-filename.DejaVu_LGC_Serif_Bold=/usr/share/fonts/dejavu-lgc/DejaVuLGCSerif-Bold.ttf
-filename.DejaVu_LGC_Serif_Oblique=/usr/share/fonts/dejavu-lgc/DejaVuLGCSerif-Oblique.ttf
-filename.DejaVu_LGC_Serif_Bold_Oblique=/usr/share/fonts/dejavu-lgc/DejaVuLGCSerif-BoldOblique.ttf
-
-filename.Sazanami_Gothic=/usr/share/fonts/japanese/TrueType/sazanami-gothic.ttf
-filename.Sazanami_Mincho=/usr/share/fonts/japanese/TrueType/sazanami-mincho.ttf
-filename.AR_PL_ShanHeiSun_Uni=/usr/share/fonts/chinese/TrueType/uming.ttf
-filename.AR_PL_ZenKai_Uni=/usr/share/fonts/chinese/TrueType/ukai.ttf
-filename.Baekmuk_Gulim=/usr/share/fonts/korean/TrueType/gulim.ttf
-filename.Baekmuk_Batang=/usr/share/fonts/korean/TrueType/batang.ttf
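The removed fontconfig file follows the usual AWT fontconfig.properties layout: <logicalfont>.<style>.<charset> keys map logical fonts to physical family names, filename.<Family_With_Underscores> keys map those families to font files, and the sequence.* keys give the per-encoding search order. A minimal sketch of reading such a file with java.util.Properties; the file path and the printed keys are illustrative only, not part of this changeset.

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class FontConfigLookup {
        public static void main(String[] args) throws IOException {
            Properties fc = new Properties();
            // Hypothetical location of a fontconfig properties file.
            try (FileInputStream in =
                     new FileInputStream("fontconfig.Fedora.properties")) {
                fc.load(in);
            }
            // Physical family behind the logical monospaced.bold face for Korean.
            String family = fc.getProperty("monospaced.bold.korean");
            // Font file registered for that family (spaces become underscores).
            String file = fc.getProperty("filename." + family.replace(' ', '_'));
            // Per-encoding search order, falling back to the default sequence.
            String order = fc.getProperty("sequence.allfonts.EUC-KR",
                                          fc.getProperty("sequence.allfonts"));
            System.out.println(family + " -> " + file + " (search: " + order + ")");
        }
    }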
--- a/jdk/src/solaris/native/java/net/NetworkInterface.c	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/solaris/native/java/net/NetworkInterface.c	Wed Jul 05 18:59:05 2017 +0200
@@ -658,9 +658,9 @@
                     if (ia2Obj) {
                        setInetAddress_addr(env, ia2Obj, htonl(((struct sockaddr_in*)addrP->brdcast)->sin_addr.s_addr));
                        (*env)->SetObjectField(env, ibObj, ni_ib4broadcastID, ia2Obj);
-                       (*env)->SetShortField(env, ibObj, ni_ib4maskID, addrP->mask);
                     }
                  }
+                 (*env)->SetShortField(env, ibObj, ni_ib4maskID, addrP->mask);
                  (*env)->SetObjectArrayElement(env, bindArr, bind_index++, ibObj);
             }
         }
@@ -887,15 +887,12 @@
     addrP->mask = prefix;
     addrP->next = 0;
     if (family == AF_INET) {
-      /*
-       * Deal with broadcast addr & subnet mask
-       */
+       // Deal with broadcast addr & subnet mask
        struct sockaddr * brdcast_to = (struct sockaddr *) ((char *) addrP + sizeof(netaddr) + addr_size);
        addrP->brdcast = getBroadcast(env, sock, name,  brdcast_to );
 
-       if (addrP->brdcast && (mask = getSubnet(env, sock, name)) != -1) {
+       if ((mask = getSubnet(env, sock, name)) != -1)
            addrP->mask = mask;
-       }
      }
 
     /**
--- a/jdk/src/solaris/native/sun/awt/awt_InputMethod.c	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/solaris/native/sun/awt/awt_InputMethod.c	Wed Jul 05 18:59:05 2017 +0200
@@ -185,7 +185,6 @@
 );
 #endif
 
-#ifdef XAWT_HACK
 /*
  * This function is stolen from /src/solaris/hpi/src/system_md.c
  * It is used in setting the time in Java-level InputEvents
@@ -197,7 +196,6 @@
     gettimeofday(&t, NULL);
     return ((jlong)t.tv_sec) * 1000 + (jlong)(t.tv_usec/1000);
 }
-#endif /* XAWT_HACK */
 
 /*
  * Converts the wchar_t string to a multi-byte string calling wcstombs(). A
@@ -546,11 +544,7 @@
                                  "dispatchCommittedText",
                                  "(Ljava/lang/String;J)V",
                                  javastr,
-#ifndef XAWT_HACK
-                                 awt_util_nowMillisUTC_offset(event->time));
-#else
                                  event->time);
-#endif
         }
         break;
 
--- a/jdk/src/solaris/native/sun/xawt/XlibWrapper.c	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/solaris/native/sun/xawt/XlibWrapper.c	Wed Jul 05 18:59:05 2017 +0200
@@ -1946,13 +1946,16 @@
 JNIEXPORT jboolean JNICALL
 Java_sun_awt_X11_XlibWrapper_XNextSecondaryLoopEvent(JNIEnv *env, jclass clazz,
                                                      jlong display, jlong ptr) {
+    uint32_t timeout = 1;
+
     AWT_CHECK_HAVE_LOCK();
     exitSecondaryLoop = False;
     while (!exitSecondaryLoop) {
         if (XCheckIfEvent((Display*) jlong_to_ptr(display), (XEvent*) jlong_to_ptr(ptr), secondary_loop_event, NULL)) {
             return JNI_TRUE;
         }
-        AWT_WAIT(AWT_SECONDARY_LOOP_TIMEOUT);
+        timeout = (timeout < AWT_SECONDARY_LOOP_TIMEOUT) ? (timeout << 1) : AWT_SECONDARY_LOOP_TIMEOUT;
+        AWT_WAIT(timeout);
     }
     return JNI_FALSE;
 }
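The rewritten loop replaces a fixed AWT_WAIT timeout with one that doubles on every idle iteration and is capped at AWT_SECONDARY_LOOP_TIMEOUT, so an event that never arrives costs fewer wakeups over time. A minimal Java sketch of the same backoff pattern; the BooleanSupplier condition and the 5000 ms cap are assumptions for illustration, not values taken from the AWT code.

    import java.util.function.BooleanSupplier;

    public class BackoffWait {
        // Assumed cap, playing the role of AWT_SECONDARY_LOOP_TIMEOUT.
        private static final long MAX_TIMEOUT_MS = 5_000;

        // Polls until the condition holds, waiting with an exponentially growing,
        // capped timeout between checks (like the XNextSecondaryLoopEvent loop).
        static void waitUntil(BooleanSupplier condition) throws InterruptedException {
            long timeout = 1; // start with a 1 ms wait
            final Object lock = new Object();
            synchronized (lock) {
                while (!condition.getAsBoolean()) {
                    lock.wait(timeout); // bounded wait, like AWT_WAIT(timeout)
                    timeout = Math.min(timeout << 1, MAX_TIMEOUT_MS); // double, but cap
                }
            }
        }

        public static void main(String[] args) throws InterruptedException {
            long deadline = System.currentTimeMillis() + 200;
            waitUntil(() -> System.currentTimeMillis() >= deadline);
            System.out.println("condition met");
        }
    }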
--- a/jdk/src/windows/classes/sun/util/locale/provider/HostLocaleProviderAdapterImpl.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/windows/classes/sun/util/locale/provider/HostLocaleProviderAdapterImpl.java	Wed Jul 05 18:59:05 2017 +0200
@@ -48,7 +48,6 @@
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicReferenceArray;
 import java.util.spi.CalendarDataProvider;
-import java.util.spi.CalendarNameProvider;
 import java.util.spi.CurrencyNameProvider;
 import java.util.spi.LocaleNameProvider;
 import sun.util.spi.CalendarProvider;
@@ -364,32 +363,6 @@
         };
     }
 
-    public static CalendarNameProvider getCalendarNameProvider() {
-        return new CalendarNameProvider() {
-            @Override
-            public Locale[] getAvailableLocales() {
-                return getSupportedCalendarLocales();
-            }
-
-            @Override
-            public boolean isSupportedLocale(Locale locale) {
-                return isSupportedCalendarLocale(locale);
-            }
-
-            @Override
-            public String getDisplayName(String calType, int field, int value,
-                                         int style, Locale locale) {
-                return null;
-            }
-
-            @Override
-            public Map<String, Integer> getDisplayNames(String calType,
-                                         int field, int style, Locale locale) {
-                return null;
-            }
-        };
-    }
-
     public static CalendarProvider getCalendarProvider() {
         return new CalendarProvider() {
             @Override
--- a/jdk/src/windows/native/java/net/DualStackPlainDatagramSocketImpl.c	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/windows/native/java/net/DualStackPlainDatagramSocketImpl.c	Wed Jul 05 18:59:05 2017 +0200
@@ -256,14 +256,14 @@
     packetBuffer = (*env)->GetObjectField(env, dpObj, dp_bufID);
     packetBufferOffset = (*env)->GetIntField(env, dpObj, dp_offsetID);
     packetBufferLen = (*env)->GetIntField(env, dpObj, dp_bufLengthID);
+    /* Note: the buffer needn't be greater than 65,536 (0xFFFF)
+     * the max size of an IP packet. Anything bigger is truncated anyway.
+     */
+    if (packetBufferLen > MAX_PACKET_LEN) {
+        packetBufferLen = MAX_PACKET_LEN;
+    }
 
     if (packetBufferLen > MAX_BUFFER_LEN) {
-        /* Note: the buffer needn't be greater than 65,536 (0xFFFF)
-         * the max size of an IP packet. Anything bigger is truncated anyway.
-         */
-        if (packetBufferLen > MAX_PACKET_LEN) {
-            packetBufferLen = MAX_PACKET_LEN;
-        }
         fullPacket = (char *)malloc(packetBufferLen);
         if (!fullPacket) {
             JNU_ThrowOutOfMemoryError(env, "Native heap allocation failed");
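The hunk above (and the matching one in TwoStacksPlainDatagramSocketImpl.c further down) clamps the caller-supplied packet length to MAX_PACKET_LEN before the buffer is sized, rather than only on the malloc path, since an IP datagram cannot exceed 64 KB anyway. A rough Java-level sketch of the clamp; the 65536 constant is an assumption standing in for the native MAX_PACKET_LEN.

    public class PacketClamp {
        // Assumed stand-in for the native MAX_PACKET_LEN constant.
        private static final int MAX_PACKET_LEN = 65536;

        // Returns a receive buffer no larger than the biggest possible IP datagram;
        // anything beyond that would be truncated by the network stack anyway.
        static byte[] receiveBuffer(int requestedLen) {
            return new byte[Math.min(requestedLen, MAX_PACKET_LEN)];
        }

        public static void main(String[] args) {
            System.out.println(receiveBuffer(1_000_000).length); // prints 65536
        }
    }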
--- a/jdk/src/windows/native/java/net/TwoStacksPlainDatagramSocketImpl.c	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/src/windows/native/java/net/TwoStacksPlainDatagramSocketImpl.c	Wed Jul 05 18:59:05 2017 +0200
@@ -145,7 +145,7 @@
 /*
  * This function returns JNI_TRUE if the datagram size exceeds the underlying
  * provider's ability to send to the target address. The following OS
- * oddies have been observed :-
+ * oddities have been observed :-
  *
  * 1. On Windows 95/98 if we try to send a datagram > 12k to an application
  *    on the same machine then the send will fail silently.
@@ -218,7 +218,7 @@
 
             /*
              * Step 3: On Windows 95/98 then enumerate the IP addresses on
-             * this machine. This is necesary because we need to check if the
+             * this machine. This is necessary because we need to check if the
              * datagram is being sent to an application on the same machine.
              */
             if (is95or98) {
@@ -565,8 +565,8 @@
 
     if (xp_or_later) {
         /* SIO_UDP_CONNRESET fixes a bug introduced in Windows 2000, which
-         * returns connection reset errors un connected UDP sockets (as well
-         * as connected sockets. The solution is to only enable this feature
+         * returns connection reset errors on connected UDP sockets (as well
+         * as connected sockets). The solution is to only enable this feature
          * when the socket is connected
          */
         DWORD x1, x2; /* ignored result codes */
@@ -690,6 +690,12 @@
     fd = (*env)->GetIntField(env, fdObj, IO_fd_fdID);
 
     packetBufferLen = (*env)->GetIntField(env, packet, dp_lengthID);
+    /* Note: the buffer needn't be greater than 65,536 (0xFFFF)...
+     * the maximum size of an IP packet. Anything bigger is truncated anyway.
+     */
+    if (packetBufferLen > MAX_PACKET_LEN) {
+        packetBufferLen = MAX_PACKET_LEN;
+    }
 
     if (connected) {
         addrp = 0; /* arg to JVM_Sendto () null in this case */
@@ -728,7 +734,7 @@
         }
 
         /* When JNI-ifying the JDK's IO routines, we turned
-         * read's and write's of byte arrays of size greater
+         * reads and writes of byte arrays of size greater
          * than 2048 bytes into several operations of size 2048.
          * This saves a malloc()/memcpy()/free() for big
          * buffers.  This is OK for file IO and TCP, but that
--- a/jdk/test/ProblemList.txt	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/ProblemList.txt	Wed Jul 05 18:59:05 2017 +0200
@@ -122,9 +122,6 @@
 
 # jdk_lang
 
-# 8009615
-java/lang/instrument/IsModifiableClassAgent.java                generic-all
-
 # 6944188
 java/lang/management/ThreadMXBean/ThreadStateTest.java          generic-all
 
@@ -137,6 +134,9 @@
 # 8008200
 java/lang/Class/asSubclass/BasicUnit.java                       generic-all
 
+# 8015780
+java/lang/reflect/Method/GenericStringTest.java			generic-all
+
 ############################################################################
 
 # jdk_management
@@ -199,12 +199,6 @@
 # 7143960
 java/net/DatagramSocket/SendDatagramToBadAddress.java            macosx-all
 
-# 8014720
-java/net/ResponseCache/B6181108.java                             generic-all
-
-# 8014723
-sun/misc/URLClassPath/ClassnameCharTest.java                     generic-all
-
 # 8014719
 sun/net/www/http/HttpClient/ProxyTest.java                       generic-all
 
@@ -236,9 +230,6 @@
 # 7132677
 java/nio/channels/Selector/OutOfBand.java                       macosx-all
 
-# 8003895
-java/nio/channels/AsynchronousChannelGroup/Unbounded.java       windows-amd64
-
 ############################################################################
 
 # jdk_rmi
@@ -277,6 +268,13 @@
 sun/security/pkcs11/ec/ReadPKCS12.java                          solaris-all
 sun/security/pkcs11/sslecc/ClientJSSEServerJSSE.java            solaris-all
 
+# 8005247
+sun/security/pkcs11/ec/TestECDSA.java				solaris-all
+
+# 8009438
+sun/security/pkcs11/Secmod/AddPrivateKey.java			linux-all
+sun/security/pkcs11/Secmod/TrustAnchors.java 			linux-all
+
 # 7041639, Solaris DSA keypair generation bug (Note: jdk_util also affected)
 java/security/KeyPairGenerator/SolarisShortDSA.java             solaris-all
 sun/security/tools/jarsigner/onlymanifest.sh                    solaris-all
@@ -331,6 +329,8 @@
 # Tests take too long, on sparcs see 7143279
 tools/pack200/CommandLineTests.java                             solaris-all, macosx-all
 tools/pack200/Pack200Test.java                                  solaris-all, macosx-all
+# 8015666 
+tools/pack200/TimeStamp.java                                    generic-all
 
 # 8007410
 tools/launcher/FXLauncherTest.java                              linux-all
--- a/jdk/test/com/sun/crypto/provider/Mac/HmacPBESHA1.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/com/sun/crypto/provider/Mac/HmacPBESHA1.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -23,8 +23,8 @@
 
 /**
  * @test
- * @bug 4893959
- * @summary basic test for HmacPBESHA1
+ * @bug 4893959 8013069
+ * @summary basic test for PBE MAC algorithms.
  * @author Valerie Peng
  */
 import java.io.PrintStream;
@@ -68,8 +68,9 @@
         }
         Mac mac = Mac.getInstance(algo, PROVIDER);
         byte[] plainText = new byte[30];
-
-        mac.init(key);
+        PBEParameterSpec spec =
+            new PBEParameterSpec("saltValue".getBytes(), 250);
+        mac.init(key, spec);
         mac.update(plainText);
         byte[] value1 = mac.doFinal();
         if (value1.length != length) {
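The updated test now initializes the PBE MAC with an explicit PBEParameterSpec (salt plus iteration count) instead of a key-only init. A standalone sketch of that init pattern, mirroring the calls used in this test and in MacClone below; the password, salt and iteration count are illustrative values.

    import javax.crypto.Mac;
    import javax.crypto.SecretKey;
    import javax.crypto.spec.PBEParameterSpec;
    import javax.crypto.spec.SecretKeySpec;

    public class PbeMacSketch {
        public static void main(String[] args) throws Exception {
            // PBE MACs take the password bytes as the key material.
            SecretKey key = new SecretKeySpec("password".getBytes(), "PBE");
            // Salt and iteration count travel in the PBEParameterSpec.
            PBEParameterSpec spec =
                new PBEParameterSpec("saltValue".getBytes(), 250);
            Mac mac = Mac.getInstance("HmacPBESHA1", "SunJCE");
            mac.init(key, spec);
            byte[] tag = mac.doFinal("some data".getBytes());
            System.out.println("MAC length: " + tag.length); // 20 for SHA-1
        }
    }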
--- a/jdk/test/com/sun/crypto/provider/Mac/MacClone.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/com/sun/crypto/provider/Mac/MacClone.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -23,12 +23,13 @@
 
 /*
  * @test
- * @bug 7087021
- * @summary MacClone
+ * @bug 7087021 8013069
+ * @summary Clone tests for all MAC algorithms.
  * @author Jan Luehe
  */
+import java.security.spec.AlgorithmParameterSpec;
 import javax.crypto.*;
-import javax.crypto.spec.SecretKeySpec;
+import javax.crypto.spec.*;
 
 public class MacClone {
 
@@ -39,18 +40,23 @@
         KeyGenerator kgen = KeyGenerator.getInstance("DES");
         SecretKey skey = kgen.generateKey();
         for (String algo : algos) {
-            doTest(algo, skey);
+            doTest(algo, skey, null);
         }
 
-        String[] algos2 = { "HmacPBESHA1" };
+        String[] algos2 = { "HmacPBESHA1", "PBEWithHmacSHA1",
+                            "PBEWithHmacSHA224", "PBEWithHmacSHA256",
+                            "PBEWithHmacSHA384", "PBEWithHmacSHA512" };
         skey = new SecretKeySpec("whatever".getBytes(), "PBE");
+        PBEParameterSpec params =
+            new PBEParameterSpec("1234567890".getBytes(), 500);
         for (String algo : algos2) {
-            doTest(algo, skey);
+            doTest(algo, skey, params);
         }
         System.out.println("Test Passed");
     }
 
-    private static void doTest(String algo, SecretKey skey) throws Exception {
+    private static void doTest(String algo, SecretKey skey,
+        AlgorithmParameterSpec params) throws Exception {
         //
         // Clone an uninitialized Mac object
         //
@@ -72,7 +78,7 @@
         // Clone an initialized Mac object
         //
         mac = Mac.getInstance(algo, "SunJCE");
-        mac.init(skey);
+        mac.init(skey, params);
         macClone = (Mac)mac.clone();
         System.out.println(macClone.getProvider().toString());
         System.out.println(macClone.getAlgorithm());
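The clone test initializes a Mac (passing parameters where the algorithm needs them), clones it, and runs both copies over the same data. A brief sketch of that clone-and-compare idea using one of the PBE algorithms listed above; the input data is illustrative.

    import java.util.Arrays;
    import javax.crypto.Mac;
    import javax.crypto.SecretKey;
    import javax.crypto.spec.PBEParameterSpec;
    import javax.crypto.spec.SecretKeySpec;

    public class MacCloneSketch {
        public static void main(String[] args) throws Exception {
            SecretKey key = new SecretKeySpec("whatever".getBytes(), "PBE");
            PBEParameterSpec params =
                new PBEParameterSpec("1234567890".getBytes(), 500);

            Mac mac = Mac.getInstance("PBEWithHmacSHA256", "SunJCE");
            mac.init(key, params);
            mac.update("hello".getBytes());

            // The clone carries over algorithm, key, parameters and buffered input.
            Mac copy = (Mac) mac.clone();

            byte[] a = mac.doFinal();
            byte[] b = copy.doFinal();
            System.out.println("clones agree: " + Arrays.equals(a, b)); // true
        }
    }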
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/com/sun/crypto/provider/TLS/TestLeadingZeroes.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,420 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8014618
+ * @summary Need to strip leading zeros in TlsPremasterSecret of DHKeyAgreement
+ * @author Pasi Eronen
+ */
+
+import java.io.*;
+import java.security.*;
+import java.security.spec.*;
+import java.security.interfaces.*;
+import javax.crypto.*;
+import javax.crypto.spec.*;
+import javax.crypto.interfaces.*;
+import com.sun.crypto.provider.SunJCE;
+
+/**
+ * Test that leading zeroes are stripped in TlsPremasterSecret case,
+ * but are left as-is in other cases.
+ *
+ * We use pre-generated keypairs, since with randomly generated keypairs,
+ * a leading zero happens only (roughly) 1 out of 256 cases.
+ */
+
+public class TestLeadingZeroes {
+
+    private static final String SUNJCE = "SunJCE";
+
+    private TestLeadingZeroes() {}
+
+    public static void main(String argv[]) throws Exception {
+        // Add JCE to the list of providers
+        SunJCE jce = new SunJCE();
+        Security.addProvider(jce);
+
+        TestLeadingZeroes keyAgree = new TestLeadingZeroes();
+        keyAgree.run();
+        System.out.println("Test Passed");
+    }
+
+    private void run() throws Exception {
+
+        // decode pre-generated keypairs
+        KeyFactory kfac = KeyFactory.getInstance("DH");
+        PublicKey alicePubKey =
+            kfac.generatePublic(new X509EncodedKeySpec(alicePubKeyEnc));
+        PublicKey bobPubKey =
+            kfac.generatePublic(new X509EncodedKeySpec(bobPubKeyEnc));
+        PrivateKey alicePrivKey =
+            kfac.generatePrivate(new PKCS8EncodedKeySpec(alicePrivKeyEnc));
+        PrivateKey bobPrivKey =
+            kfac.generatePrivate(new PKCS8EncodedKeySpec(bobPrivKeyEnc));
+
+        // generate normal shared secret
+        KeyAgreement aliceKeyAgree = KeyAgreement.getInstance("DH", SUNJCE);
+        aliceKeyAgree.init(alicePrivKey);
+        aliceKeyAgree.doPhase(bobPubKey, true);
+        byte[] sharedSecret = aliceKeyAgree.generateSecret();
+        System.out.println("shared secret:\n" + toHexString(sharedSecret));
+
+        // verify that leading zero is present
+        if (sharedSecret.length != 128) {
+            throw new Exception("Unexpected shared secret length");
+        }
+        if (sharedSecret[0] != 0) {
+            throw new Exception("First byte is not zero as expected");
+        }
+
+        // now, test TLS premaster secret
+        aliceKeyAgree.init(alicePrivKey);
+        aliceKeyAgree.doPhase(bobPubKey, true);
+        byte[] tlsPremasterSecret =
+            aliceKeyAgree.generateSecret("TlsPremasterSecret").getEncoded();
+        System.out.println(
+            "tls premaster secret:\n" + toHexString(tlsPremasterSecret));
+
+        // check that leading zero has been stripped
+        if (tlsPremasterSecret.length != 127) {
+            throw new Exception("Unexpected TLS premaster secret length");
+        }
+        if (tlsPremasterSecret[0] == 0) {
+            throw new Exception("First byte is zero");
+        }
+        for (int i = 0; i < tlsPremasterSecret.length; i++) {
+            if (tlsPremasterSecret[i] != sharedSecret[i+1]) {
+                throw new Exception("Shared secrets differ");
+            }
+        }
+
+    }
+
+    /*
+     * Converts a byte to hex digit and writes to the supplied buffer
+     */
+    private void byte2hex(byte b, StringBuffer buf) {
+        char[] hexChars = { '0', '1', '2', '3', '4', '5', '6', '7', '8',
+                            '9', 'A', 'B', 'C', 'D', 'E', 'F' };
+        int high = ((b & 0xf0) >> 4);
+        int low = (b & 0x0f);
+        buf.append(hexChars[high]);
+        buf.append(hexChars[low]);
+    }
+
+    /*
+     * Converts a byte array to hex string
+     */
+    private String toHexString(byte[] block) {
+        StringBuffer buf = new StringBuffer();
+
+        int len = block.length;
+
+        for (int i = 0; i < len; i++) {
+             byte2hex(block[i], buf);
+             if (i < len-1) {
+                 buf.append(":");
+             }
+        }
+        return buf.toString();
+    }
+
+    private static final byte alicePubKeyEnc[] = {
+        (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x24,
+        (byte)0x30, (byte)0x81, (byte)0x99, (byte)0x06,
+        (byte)0x09, (byte)0x2A, (byte)0x86, (byte)0x48,
+        (byte)0x86, (byte)0xF7, (byte)0x0D, (byte)0x01,
+        (byte)0x03, (byte)0x01, (byte)0x30, (byte)0x81,
+        (byte)0x8B, (byte)0x02, (byte)0x81, (byte)0x81,
+        (byte)0x00, (byte)0xF4, (byte)0x88, (byte)0xFD,
+        (byte)0x58, (byte)0x4E, (byte)0x49, (byte)0xDB,
+        (byte)0xCD, (byte)0x20, (byte)0xB4, (byte)0x9D,
+        (byte)0xE4, (byte)0x91, (byte)0x07, (byte)0x36,
+        (byte)0x6B, (byte)0x33, (byte)0x6C, (byte)0x38,
+        (byte)0x0D, (byte)0x45, (byte)0x1D, (byte)0x0F,
+        (byte)0x7C, (byte)0x88, (byte)0xB3, (byte)0x1C,
+        (byte)0x7C, (byte)0x5B, (byte)0x2D, (byte)0x8E,
+        (byte)0xF6, (byte)0xF3, (byte)0xC9, (byte)0x23,
+        (byte)0xC0, (byte)0x43, (byte)0xF0, (byte)0xA5,
+        (byte)0x5B, (byte)0x18, (byte)0x8D, (byte)0x8E,
+        (byte)0xBB, (byte)0x55, (byte)0x8C, (byte)0xB8,
+        (byte)0x5D, (byte)0x38, (byte)0xD3, (byte)0x34,
+        (byte)0xFD, (byte)0x7C, (byte)0x17, (byte)0x57,
+        (byte)0x43, (byte)0xA3, (byte)0x1D, (byte)0x18,
+        (byte)0x6C, (byte)0xDE, (byte)0x33, (byte)0x21,
+        (byte)0x2C, (byte)0xB5, (byte)0x2A, (byte)0xFF,
+        (byte)0x3C, (byte)0xE1, (byte)0xB1, (byte)0x29,
+        (byte)0x40, (byte)0x18, (byte)0x11, (byte)0x8D,
+        (byte)0x7C, (byte)0x84, (byte)0xA7, (byte)0x0A,
+        (byte)0x72, (byte)0xD6, (byte)0x86, (byte)0xC4,
+        (byte)0x03, (byte)0x19, (byte)0xC8, (byte)0x07,
+        (byte)0x29, (byte)0x7A, (byte)0xCA, (byte)0x95,
+        (byte)0x0C, (byte)0xD9, (byte)0x96, (byte)0x9F,
+        (byte)0xAB, (byte)0xD0, (byte)0x0A, (byte)0x50,
+        (byte)0x9B, (byte)0x02, (byte)0x46, (byte)0xD3,
+        (byte)0x08, (byte)0x3D, (byte)0x66, (byte)0xA4,
+        (byte)0x5D, (byte)0x41, (byte)0x9F, (byte)0x9C,
+        (byte)0x7C, (byte)0xBD, (byte)0x89, (byte)0x4B,
+        (byte)0x22, (byte)0x19, (byte)0x26, (byte)0xBA,
+        (byte)0xAB, (byte)0xA2, (byte)0x5E, (byte)0xC3,
+        (byte)0x55, (byte)0xE9, (byte)0x2F, (byte)0x78,
+        (byte)0xC7, (byte)0x02, (byte)0x01, (byte)0x02,
+        (byte)0x02, (byte)0x02, (byte)0x02, (byte)0x00,
+        (byte)0x03, (byte)0x81, (byte)0x85, (byte)0x00,
+        (byte)0x02, (byte)0x81, (byte)0x81, (byte)0x00,
+        (byte)0xEE, (byte)0xD6, (byte)0xB1, (byte)0xA3,
+        (byte)0xB4, (byte)0x78, (byte)0x2B, (byte)0x35,
+        (byte)0xEF, (byte)0xCD, (byte)0x17, (byte)0x86,
+        (byte)0x63, (byte)0x2B, (byte)0x97, (byte)0x0E,
+        (byte)0x7A, (byte)0xD1, (byte)0xFF, (byte)0x7A,
+        (byte)0xEB, (byte)0x57, (byte)0x61, (byte)0xA1,
+        (byte)0xF7, (byte)0x90, (byte)0x11, (byte)0xA7,
+        (byte)0x79, (byte)0x28, (byte)0x69, (byte)0xBA,
+        (byte)0xA7, (byte)0xB2, (byte)0x37, (byte)0x17,
+        (byte)0xAE, (byte)0x3C, (byte)0x92, (byte)0x89,
+        (byte)0x88, (byte)0xE5, (byte)0x7E, (byte)0x8E,
+        (byte)0xF0, (byte)0x24, (byte)0xD0, (byte)0xE1,
+        (byte)0xC4, (byte)0xB0, (byte)0x26, (byte)0x5A,
+        (byte)0x1E, (byte)0xBD, (byte)0xA0, (byte)0xCF,
+        (byte)0x3E, (byte)0x97, (byte)0x2A, (byte)0x13,
+        (byte)0x92, (byte)0x3B, (byte)0x39, (byte)0xD0,
+        (byte)0x1D, (byte)0xA3, (byte)0x6B, (byte)0x3E,
+        (byte)0xC2, (byte)0xBB, (byte)0x14, (byte)0xB6,
+        (byte)0xE2, (byte)0x4C, (byte)0x0E, (byte)0x5B,
+        (byte)0x4B, (byte)0xA4, (byte)0x9D, (byte)0xA6,
+        (byte)0x21, (byte)0xB0, (byte)0xF9, (byte)0xDE,
+        (byte)0x55, (byte)0xAE, (byte)0x5C, (byte)0x29,
+        (byte)0x0E, (byte)0xC1, (byte)0xFC, (byte)0xBA,
+        (byte)0x51, (byte)0xD3, (byte)0xB6, (byte)0x6D,
+        (byte)0x75, (byte)0x72, (byte)0xDF, (byte)0x43,
+        (byte)0xAB, (byte)0x94, (byte)0x21, (byte)0x6E,
+        (byte)0x0C, (byte)0xD1, (byte)0x93, (byte)0x54,
+        (byte)0x56, (byte)0x7D, (byte)0x4B, (byte)0x90,
+        (byte)0xF1, (byte)0x94, (byte)0x45, (byte)0xD4,
+        (byte)0x2A, (byte)0x71, (byte)0xA1, (byte)0xB8,
+        (byte)0xDD, (byte)0xAA, (byte)0x05, (byte)0xF0,
+        (byte)0x27, (byte)0x37, (byte)0xBD, (byte)0x44
+    };
+
+    private static final byte alicePrivKeyEnc[] = {
+        (byte)0x30, (byte)0x81, (byte)0xE3, (byte)0x02,
+        (byte)0x01, (byte)0x00, (byte)0x30, (byte)0x81,
+        (byte)0x99, (byte)0x06, (byte)0x09, (byte)0x2A,
+        (byte)0x86, (byte)0x48, (byte)0x86, (byte)0xF7,
+        (byte)0x0D, (byte)0x01, (byte)0x03, (byte)0x01,
+        (byte)0x30, (byte)0x81, (byte)0x8B, (byte)0x02,
+        (byte)0x81, (byte)0x81, (byte)0x00, (byte)0xF4,
+        (byte)0x88, (byte)0xFD, (byte)0x58, (byte)0x4E,
+        (byte)0x49, (byte)0xDB, (byte)0xCD, (byte)0x20,
+        (byte)0xB4, (byte)0x9D, (byte)0xE4, (byte)0x91,
+        (byte)0x07, (byte)0x36, (byte)0x6B, (byte)0x33,
+        (byte)0x6C, (byte)0x38, (byte)0x0D, (byte)0x45,
+        (byte)0x1D, (byte)0x0F, (byte)0x7C, (byte)0x88,
+        (byte)0xB3, (byte)0x1C, (byte)0x7C, (byte)0x5B,
+        (byte)0x2D, (byte)0x8E, (byte)0xF6, (byte)0xF3,
+        (byte)0xC9, (byte)0x23, (byte)0xC0, (byte)0x43,
+        (byte)0xF0, (byte)0xA5, (byte)0x5B, (byte)0x18,
+        (byte)0x8D, (byte)0x8E, (byte)0xBB, (byte)0x55,
+        (byte)0x8C, (byte)0xB8, (byte)0x5D, (byte)0x38,
+        (byte)0xD3, (byte)0x34, (byte)0xFD, (byte)0x7C,
+        (byte)0x17, (byte)0x57, (byte)0x43, (byte)0xA3,
+        (byte)0x1D, (byte)0x18, (byte)0x6C, (byte)0xDE,
+        (byte)0x33, (byte)0x21, (byte)0x2C, (byte)0xB5,
+        (byte)0x2A, (byte)0xFF, (byte)0x3C, (byte)0xE1,
+        (byte)0xB1, (byte)0x29, (byte)0x40, (byte)0x18,
+        (byte)0x11, (byte)0x8D, (byte)0x7C, (byte)0x84,
+        (byte)0xA7, (byte)0x0A, (byte)0x72, (byte)0xD6,
+        (byte)0x86, (byte)0xC4, (byte)0x03, (byte)0x19,
+        (byte)0xC8, (byte)0x07, (byte)0x29, (byte)0x7A,
+        (byte)0xCA, (byte)0x95, (byte)0x0C, (byte)0xD9,
+        (byte)0x96, (byte)0x9F, (byte)0xAB, (byte)0xD0,
+        (byte)0x0A, (byte)0x50, (byte)0x9B, (byte)0x02,
+        (byte)0x46, (byte)0xD3, (byte)0x08, (byte)0x3D,
+        (byte)0x66, (byte)0xA4, (byte)0x5D, (byte)0x41,
+        (byte)0x9F, (byte)0x9C, (byte)0x7C, (byte)0xBD,
+        (byte)0x89, (byte)0x4B, (byte)0x22, (byte)0x19,
+        (byte)0x26, (byte)0xBA, (byte)0xAB, (byte)0xA2,
+        (byte)0x5E, (byte)0xC3, (byte)0x55, (byte)0xE9,
+        (byte)0x2F, (byte)0x78, (byte)0xC7, (byte)0x02,
+        (byte)0x01, (byte)0x02, (byte)0x02, (byte)0x02,
+        (byte)0x02, (byte)0x00, (byte)0x04, (byte)0x42,
+        (byte)0x02, (byte)0x40, (byte)0x36, (byte)0x4D,
+        (byte)0xD0, (byte)0x58, (byte)0x64, (byte)0x91,
+        (byte)0x78, (byte)0xA2, (byte)0x4B, (byte)0x79,
+        (byte)0x46, (byte)0xFE, (byte)0xC9, (byte)0xD9,
+        (byte)0xCA, (byte)0x5C, (byte)0xF9, (byte)0xFD,
+        (byte)0x6C, (byte)0x5D, (byte)0x76, (byte)0x3A,
+        (byte)0x41, (byte)0x6D, (byte)0x44, (byte)0x62,
+        (byte)0x75, (byte)0x93, (byte)0x81, (byte)0x93,
+        (byte)0x00, (byte)0x4C, (byte)0xB1, (byte)0xD8,
+        (byte)0x7D, (byte)0x9D, (byte)0xF3, (byte)0x16,
+        (byte)0x2C, (byte)0x6C, (byte)0x9F, (byte)0x7A,
+        (byte)0x84, (byte)0xA3, (byte)0x7A, (byte)0xC1,
+        (byte)0x4F, (byte)0x60, (byte)0xE3, (byte)0xB5,
+        (byte)0x86, (byte)0x28, (byte)0x08, (byte)0x4D,
+        (byte)0x94, (byte)0xB6, (byte)0x04, (byte)0x0D,
+        (byte)0xAC, (byte)0xBD, (byte)0x1F, (byte)0x42,
+        (byte)0x8F, (byte)0x1B
+    };
+
+    private static final byte bobPubKeyEnc[] = {
+        (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x23,
+        (byte)0x30, (byte)0x81, (byte)0x99, (byte)0x06,
+        (byte)0x09, (byte)0x2A, (byte)0x86, (byte)0x48,
+        (byte)0x86, (byte)0xF7, (byte)0x0D, (byte)0x01,
+        (byte)0x03, (byte)0x01, (byte)0x30, (byte)0x81,
+        (byte)0x8B, (byte)0x02, (byte)0x81, (byte)0x81,
+        (byte)0x00, (byte)0xF4, (byte)0x88, (byte)0xFD,
+        (byte)0x58, (byte)0x4E, (byte)0x49, (byte)0xDB,
+        (byte)0xCD, (byte)0x20, (byte)0xB4, (byte)0x9D,
+        (byte)0xE4, (byte)0x91, (byte)0x07, (byte)0x36,
+        (byte)0x6B, (byte)0x33, (byte)0x6C, (byte)0x38,
+        (byte)0x0D, (byte)0x45, (byte)0x1D, (byte)0x0F,
+        (byte)0x7C, (byte)0x88, (byte)0xB3, (byte)0x1C,
+        (byte)0x7C, (byte)0x5B, (byte)0x2D, (byte)0x8E,
+        (byte)0xF6, (byte)0xF3, (byte)0xC9, (byte)0x23,
+        (byte)0xC0, (byte)0x43, (byte)0xF0, (byte)0xA5,
+        (byte)0x5B, (byte)0x18, (byte)0x8D, (byte)0x8E,
+        (byte)0xBB, (byte)0x55, (byte)0x8C, (byte)0xB8,
+        (byte)0x5D, (byte)0x38, (byte)0xD3, (byte)0x34,
+        (byte)0xFD, (byte)0x7C, (byte)0x17, (byte)0x57,
+        (byte)0x43, (byte)0xA3, (byte)0x1D, (byte)0x18,
+        (byte)0x6C, (byte)0xDE, (byte)0x33, (byte)0x21,
+        (byte)0x2C, (byte)0xB5, (byte)0x2A, (byte)0xFF,
+        (byte)0x3C, (byte)0xE1, (byte)0xB1, (byte)0x29,
+        (byte)0x40, (byte)0x18, (byte)0x11, (byte)0x8D,
+        (byte)0x7C, (byte)0x84, (byte)0xA7, (byte)0x0A,
+        (byte)0x72, (byte)0xD6, (byte)0x86, (byte)0xC4,
+        (byte)0x03, (byte)0x19, (byte)0xC8, (byte)0x07,
+        (byte)0x29, (byte)0x7A, (byte)0xCA, (byte)0x95,
+        (byte)0x0C, (byte)0xD9, (byte)0x96, (byte)0x9F,
+        (byte)0xAB, (byte)0xD0, (byte)0x0A, (byte)0x50,
+        (byte)0x9B, (byte)0x02, (byte)0x46, (byte)0xD3,
+        (byte)0x08, (byte)0x3D, (byte)0x66, (byte)0xA4,
+        (byte)0x5D, (byte)0x41, (byte)0x9F, (byte)0x9C,
+        (byte)0x7C, (byte)0xBD, (byte)0x89, (byte)0x4B,
+        (byte)0x22, (byte)0x19, (byte)0x26, (byte)0xBA,
+        (byte)0xAB, (byte)0xA2, (byte)0x5E, (byte)0xC3,
+        (byte)0x55, (byte)0xE9, (byte)0x2F, (byte)0x78,
+        (byte)0xC7, (byte)0x02, (byte)0x01, (byte)0x02,
+        (byte)0x02, (byte)0x02, (byte)0x02, (byte)0x00,
+        (byte)0x03, (byte)0x81, (byte)0x84, (byte)0x00,
+        (byte)0x02, (byte)0x81, (byte)0x80, (byte)0x2C,
+        (byte)0x40, (byte)0xFA, (byte)0xF6, (byte)0xA6,
+        (byte)0xF8, (byte)0xAC, (byte)0xC2, (byte)0x4F,
+        (byte)0xCD, (byte)0xC7, (byte)0x37, (byte)0x93,
+        (byte)0xE5, (byte)0xE4, (byte)0x5E, (byte)0x18,
+        (byte)0x14, (byte)0xE6, (byte)0x50, (byte)0xDA,
+        (byte)0x55, (byte)0x38, (byte)0x5D, (byte)0x24,
+        (byte)0xF5, (byte)0x42, (byte)0x68, (byte)0x5F,
+        (byte)0xF5, (byte)0x15, (byte)0xC8, (byte)0x9B,
+        (byte)0x5D, (byte)0x06, (byte)0x3D, (byte)0xE1,
+        (byte)0x52, (byte)0x2F, (byte)0x98, (byte)0xFF,
+        (byte)0x37, (byte)0xBB, (byte)0x75, (byte)0x48,
+        (byte)0x48, (byte)0xE9, (byte)0x65, (byte)0x84,
+        (byte)0x37, (byte)0xBB, (byte)0xB3, (byte)0xE9,
+        (byte)0x36, (byte)0x01, (byte)0xB4, (byte)0x6A,
+        (byte)0x1C, (byte)0xB2, (byte)0x11, (byte)0x82,
+        (byte)0xCE, (byte)0x3D, (byte)0x65, (byte)0xE5,
+        (byte)0x3C, (byte)0x89, (byte)0xE9, (byte)0x52,
+        (byte)0x19, (byte)0xBD, (byte)0x58, (byte)0xF6,
+        (byte)0xA2, (byte)0x03, (byte)0xA8, (byte)0xB2,
+        (byte)0xA5, (byte)0xDB, (byte)0xEB, (byte)0xF5,
+        (byte)0x94, (byte)0xF9, (byte)0x46, (byte)0xBE,
+        (byte)0x45, (byte)0x4C, (byte)0x65, (byte)0xD2,
+        (byte)0xD1, (byte)0xCF, (byte)0xFF, (byte)0xFF,
+        (byte)0xFA, (byte)0x38, (byte)0xF1, (byte)0x72,
+        (byte)0xAB, (byte)0xB9, (byte)0x14, (byte)0x4E,
+        (byte)0xF5, (byte)0xF0, (byte)0x7A, (byte)0x8E,
+        (byte)0x45, (byte)0xFD, (byte)0x5B, (byte)0xF9,
+        (byte)0xA2, (byte)0x97, (byte)0x1B, (byte)0xAE,
+        (byte)0x2C, (byte)0x7B, (byte)0x6B, (byte)0x7C,
+        (byte)0x98, (byte)0xFE, (byte)0x58, (byte)0xDD,
+        (byte)0xBE, (byte)0xF6, (byte)0x1C, (byte)0x8E,
+        (byte)0xD0, (byte)0xA1, (byte)0x72
+    };
+
+    private static final byte bobPrivKeyEnc[] = {
+        (byte)0x30, (byte)0x81, (byte)0xE4, (byte)0x02,
+        (byte)0x01, (byte)0x00, (byte)0x30, (byte)0x81,
+        (byte)0x99, (byte)0x06, (byte)0x09, (byte)0x2A,
+        (byte)0x86, (byte)0x48, (byte)0x86, (byte)0xF7,
+        (byte)0x0D, (byte)0x01, (byte)0x03, (byte)0x01,
+        (byte)0x30, (byte)0x81, (byte)0x8B, (byte)0x02,
+        (byte)0x81, (byte)0x81, (byte)0x00, (byte)0xF4,
+        (byte)0x88, (byte)0xFD, (byte)0x58, (byte)0x4E,
+        (byte)0x49, (byte)0xDB, (byte)0xCD, (byte)0x20,
+        (byte)0xB4, (byte)0x9D, (byte)0xE4, (byte)0x91,
+        (byte)0x07, (byte)0x36, (byte)0x6B, (byte)0x33,
+        (byte)0x6C, (byte)0x38, (byte)0x0D, (byte)0x45,
+        (byte)0x1D, (byte)0x0F, (byte)0x7C, (byte)0x88,
+        (byte)0xB3, (byte)0x1C, (byte)0x7C, (byte)0x5B,
+        (byte)0x2D, (byte)0x8E, (byte)0xF6, (byte)0xF3,
+        (byte)0xC9, (byte)0x23, (byte)0xC0, (byte)0x43,
+        (byte)0xF0, (byte)0xA5, (byte)0x5B, (byte)0x18,
+        (byte)0x8D, (byte)0x8E, (byte)0xBB, (byte)0x55,
+        (byte)0x8C, (byte)0xB8, (byte)0x5D, (byte)0x38,
+        (byte)0xD3, (byte)0x34, (byte)0xFD, (byte)0x7C,
+        (byte)0x17, (byte)0x57, (byte)0x43, (byte)0xA3,
+        (byte)0x1D, (byte)0x18, (byte)0x6C, (byte)0xDE,
+        (byte)0x33, (byte)0x21, (byte)0x2C, (byte)0xB5,
+        (byte)0x2A, (byte)0xFF, (byte)0x3C, (byte)0xE1,
+        (byte)0xB1, (byte)0x29, (byte)0x40, (byte)0x18,
+        (byte)0x11, (byte)0x8D, (byte)0x7C, (byte)0x84,
+        (byte)0xA7, (byte)0x0A, (byte)0x72, (byte)0xD6,
+        (byte)0x86, (byte)0xC4, (byte)0x03, (byte)0x19,
+        (byte)0xC8, (byte)0x07, (byte)0x29, (byte)0x7A,
+        (byte)0xCA, (byte)0x95, (byte)0x0C, (byte)0xD9,
+        (byte)0x96, (byte)0x9F, (byte)0xAB, (byte)0xD0,
+        (byte)0x0A, (byte)0x50, (byte)0x9B, (byte)0x02,
+        (byte)0x46, (byte)0xD3, (byte)0x08, (byte)0x3D,
+        (byte)0x66, (byte)0xA4, (byte)0x5D, (byte)0x41,
+        (byte)0x9F, (byte)0x9C, (byte)0x7C, (byte)0xBD,
+        (byte)0x89, (byte)0x4B, (byte)0x22, (byte)0x19,
+        (byte)0x26, (byte)0xBA, (byte)0xAB, (byte)0xA2,
+        (byte)0x5E, (byte)0xC3, (byte)0x55, (byte)0xE9,
+        (byte)0x2F, (byte)0x78, (byte)0xC7, (byte)0x02,
+        (byte)0x01, (byte)0x02, (byte)0x02, (byte)0x02,
+        (byte)0x02, (byte)0x00, (byte)0x04, (byte)0x43,
+        (byte)0x02, (byte)0x41, (byte)0x00, (byte)0xE0,
+        (byte)0x31, (byte)0xE7, (byte)0x77, (byte)0xB8,
+        (byte)0xD0, (byte)0x7E, (byte)0x0A, (byte)0x9B,
+        (byte)0x94, (byte)0xD5, (byte)0x3D, (byte)0x33,
+        (byte)0x62, (byte)0x32, (byte)0x51, (byte)0xCE,
+        (byte)0x74, (byte)0x5C, (byte)0xA5, (byte)0x72,
+        (byte)0xD9, (byte)0x36, (byte)0xF3, (byte)0x8A,
+        (byte)0x3F, (byte)0x8B, (byte)0xC6, (byte)0xFE,
+        (byte)0xEF, (byte)0x94, (byte)0x8B, (byte)0x50,
+        (byte)0x41, (byte)0x9B, (byte)0x14, (byte)0xC8,
+        (byte)0xE9, (byte)0x1F, (byte)0x24, (byte)0x1F,
+        (byte)0x65, (byte)0x8E, (byte)0xD3, (byte)0x85,
+        (byte)0xD0, (byte)0x68, (byte)0x6C, (byte)0xF1,
+        (byte)0x79, (byte)0x45, (byte)0xD0, (byte)0x06,
+        (byte)0xA4, (byte)0xB8, (byte)0xE0, (byte)0x64,
+        (byte)0xF5, (byte)0x38, (byte)0x72, (byte)0x97,
+        (byte)0x00, (byte)0x23, (byte)0x5F
+    };
+}
+
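The test pins down that generateSecret("TlsPremasterSecret") drops the leading zero byte of the DH shared secret while the plain generateSecret() keeps it. The stripping itself amounts to copying from the first non-zero byte onward; a small sketch, where the helper name is made up for illustration.

    import java.util.Arrays;

    public class StripLeadingZeroes {
        // Illustrative helper: drops leading zero bytes, which is effectively
        // what the TlsPremasterSecret key format does to the shared secret.
        static byte[] stripLeadingZeroes(byte[] secret) {
            int start = 0;
            while (start < secret.length - 1 && secret[start] == 0) {
                start++;
            }
            return Arrays.copyOfRange(secret, start, secret.length);
        }

        public static void main(String[] args) {
            byte[] shared = { 0x00, 0x12, 0x34, 0x56 };
            byte[] premaster = stripLeadingZeroes(shared);
            System.out.println(shared.length + " -> " + premaster.length); // 4 -> 3
        }
    }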
--- a/jdk/test/com/sun/jmx/remote/NotificationMarshalVersions/TestSerializationMismatch.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/com/sun/jmx/remote/NotificationMarshalVersions/TestSerializationMismatch.java	Wed Jul 05 18:59:05 2017 +0200
@@ -12,7 +12,7 @@
  * @bug 6937053 8005472
  *
  * @run clean TestSerializationMismatch
- * @run main TestSerializationMismatch
+ * @run main/othervm TestSerializationMismatch
  *
  */
 public class TestSerializationMismatch {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/com/sun/management/DiagnosticCommandMBean/DcmdMBeanDoubleInvocationTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug     7150256
+ * @summary Basic Test for the DiagnosticCommandMBean
+ * @author  Frederic Parain
+ *
+ * @run main/othervm -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=8125 DcmdMBeanDoubleInvocationTest
+ */
+
+
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.management.Descriptor;
+import javax.management.InstanceNotFoundException;
+import javax.management.IntrospectionException;
+import javax.management.MBeanInfo;
+import javax.management.MBeanOperationInfo;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
+import javax.management.*;
+import javax.management.remote.*;
+
+public class DcmdMBeanDoubleInvocationTest {
+
+    private static String HOTSPOT_DIAGNOSTIC_MXBEAN_NAME =
+        "com.sun.management:type=DiagnosticCommand";
+
+    public static void main(String[] args) {
+        MBeanServerConnection mbs = null;
+        try {
+            JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:8125/jmxrmi");
+            JMXConnector connector = JMXConnectorFactory.connect(url);
+            mbs = connector.getMBeanServerConnection();
+        } catch(Throwable t) {
+            t.printStackTrace();
+        }
+        ObjectName name;
+        try {
+            name = new ObjectName(HOTSPOT_DIAGNOSTIC_MXBEAN_NAME);
+            MBeanInfo info = mbs.getMBeanInfo(name);
+            String[] helpArgs = {"-all", "\n", "VM.version"};
+            Object[] dcmdArgs = {helpArgs};
+            String[] signature = {String[].class.getName()};
+            String result = (String) mbs.invoke(name, "help", dcmdArgs, signature);
+            System.out.println(result);
+         } catch (RuntimeMBeanException ex) {
+            if (ex.getCause() instanceof IllegalArgumentException) {
+                System.out.println("Test passed");
+                return;
+            } else {
+                ex.printStackTrace();
+                throw new RuntimeException("TEST FAILED");
+            }
+        } catch (InstanceNotFoundException | IntrospectionException
+                | ReflectionException | MalformedObjectNameException
+                | MBeanException|IOException ex) {
+            ex.printStackTrace();
+            throw new RuntimeException("TEST FAILED");
+        }
+        System.out.println("Double commands have not been detected");
+        throw new RuntimeException("TEST FAILED");
+    }
+}
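This test drives the com.sun.management:type=DiagnosticCommand MBean through a remote JMX connector and expects an IllegalArgumentException (wrapped in a RuntimeMBeanException) when two commands are smuggled into one help invocation. The same bean can also be exercised locally without a connector; a minimal sketch using the platform MBeanServer and the same operation signature as the tests.

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class LocalDcmdSketch {
        public static void main(String[] args) throws Exception {
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            ObjectName name =
                new ObjectName("com.sun.management:type=DiagnosticCommand");
            // One String[] argument, exactly as in the tests above.
            Object[] dcmdArgs = { new String[] { "-all" } };
            String[] signature = { String[].class.getName() };
            String help = (String) mbs.invoke(name, "help", dcmdArgs, signature);
            System.out.println(help);
        }
    }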
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/com/sun/management/DiagnosticCommandMBean/DcmdMBeanInvocationTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug     7150256
+ * @summary Basic Test for the DiagnosticCommandMBean
+ * @author  Frederic Parain
+ *
+ * @run main/othervm -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=8129 DcmdMBeanInvocationTest
+ */
+
+
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.management.Descriptor;
+import javax.management.InstanceNotFoundException;
+import javax.management.IntrospectionException;
+import javax.management.MBeanInfo;
+import javax.management.MBeanOperationInfo;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
+import javax.management.*;
+import javax.management.remote.*;
+
+public class DcmdMBeanInvocationTest {
+
+    private static String HOTSPOT_DIAGNOSTIC_MXBEAN_NAME =
+        "com.sun.management:type=DiagnosticCommand";
+
+    public static void main(String[] args) {
+        MBeanServerConnection mbs = null;
+        try {
+            JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:8129/jmxrmi");
+            JMXConnector connector = JMXConnectorFactory.connect(url);
+            mbs = connector.getMBeanServerConnection();
+        } catch(Throwable t) {
+            t.printStackTrace();
+        }
+        ObjectName name;
+        try {
+            name = new ObjectName(HOTSPOT_DIAGNOSTIC_MXBEAN_NAME);
+            MBeanInfo info = mbs.getMBeanInfo(name);
+            String[] helpArgs = {"-all"};
+            Object[] dcmdArgs = {helpArgs};
+            String[] signature = {String[].class.getName()};
+            String result = (String) mbs.invoke(name, "help", dcmdArgs, signature);
+            System.out.println(result);
+        } catch (InstanceNotFoundException | IntrospectionException
+                | ReflectionException | MalformedObjectNameException
+                | MBeanException|IOException ex) {
+            ex.printStackTrace();
+            throw new RuntimeException("TEST FAILED");
+        }
+        System.out.println("Test passed");
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/com/sun/management/DiagnosticCommandMBean/DcmdMBeanPermissionsTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,242 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug     7150256
+ * @summary Permissions Tests for the DiagnosticCommandMBean
+ * @author  Frederic Parain
+ *
+ * @run main/othervm DcmdMBeanPermissionsTest
+ */
+
+import java.lang.management.ManagementFactory;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.ReflectPermission;
+import java.security.Permission;
+import java.util.HashSet;
+import java.util.Iterator;
+import javax.management.Descriptor;
+import javax.management.InstanceNotFoundException;
+import javax.management.IntrospectionException;
+import javax.management.MBeanException;
+import javax.management.MBeanInfo;
+import javax.management.MBeanOperationInfo;
+import javax.management.MBeanPermission;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
+import javax.management.RuntimeMBeanException;
+
+/**
+ *
+ * @author fparain
+ */
+public class DcmdMBeanPermissionsTest {
+
+    private static String HOTSPOT_DIAGNOSTIC_MXBEAN_NAME =
+        "com.sun.management:type=DiagnosticCommand";
+
+    static public class CustomSecurityManager extends SecurityManager {
+
+        private HashSet<Permission> grantedPermissions;
+
+        public CustomSecurityManager() {
+            grantedPermissions = new HashSet<Permission>();
+        }
+
+        public final void grantPermission(final Permission perm) {
+            grantedPermissions.add(perm);
+        }
+
+        public final void denyPermission(final Permission perm) {
+            Iterator<Permission> it = grantedPermissions.iterator();
+            while (it.hasNext()) {
+                Permission p = it.next();
+                if (p.equals(perm)) {
+                    it.remove();
+                }
+            }
+        }
+
+        public final void checkPermission(final Permission perm) {
+            for (Permission p : grantedPermissions) {
+                if (p.implies(perm)) {
+                    return;
+                }
+            }
+            throw new SecurityException(perm.toString());
+        }
+    };
+
+    static Permission createPermission(String classname, String name,
+            String action) {
+        Permission permission = null;
+        try {
+            Class c = Class.forName(classname);
+            if (action == null) {
+                try {
+                    Constructor constructor = c.getConstructor(String.class);
+                    permission = (Permission) constructor.newInstance(name);
+
+                } catch (InstantiationException | IllegalAccessException
+                        | IllegalArgumentException | InvocationTargetException
+                        | NoSuchMethodException | SecurityException ex) {
+                    ex.printStackTrace();
+                    throw new RuntimeException("TEST FAILED");
+                }
+            }
+            if (permission == null) {
+                try {
+                    Constructor constructor = c.getConstructor(String.class,
+                            String.class);
+                    permission = (Permission) constructor.newInstance(
+                            name,
+                            action);
+                } catch (InstantiationException | IllegalAccessException
+                        | IllegalArgumentException | InvocationTargetException
+                        | NoSuchMethodException | SecurityException ex) {
+                    ex.printStackTrace();
+                    throw new RuntimeException("TEST FAILED");
+                }
+            }
+        } catch (ClassNotFoundException ex) {
+            ex.printStackTrace();
+            throw new RuntimeException("TEST FAILED");
+        }
+        if (permission == null) {
+            throw new RuntimeException("TEST FAILED");
+        }
+        return permission;
+    }
+
+    // return true if the invocation triggered a SecurityException
+    static boolean invokeOperation(MBeanServer mbs, ObjectName on,
+            MBeanOperationInfo opInfo) {
+        try {
+            if (opInfo.getSignature().length == 0) {
+                mbs.invoke(on, opInfo.getName(),
+                        new Object[0], new String[0]);
+            } else {
+                mbs.invoke(on, opInfo.getName(),
+                        new Object[1], new String[]{ String[].class.getName()});
+            }
+        } catch (SecurityException ex) {
+            ex.printStackTrace();
+            return true;
+        } catch (RuntimeMBeanException ex) {
+            if (ex.getCause() instanceof SecurityException) {
+                //ex.printStackTrace();
+                return true;
+            }
+        } catch (MBeanException | InstanceNotFoundException
+                | ReflectionException ex) {
+            throw new RuntimeException("TEST FAILED");
+        }
+        return false;
+    }
+
+    static void testOperation(MBeanServer mbs, CustomSecurityManager sm,
+            ObjectName on, MBeanOperationInfo opInfo) {
+        System.out.println("Testing " + opInfo.getName());
+        Descriptor desc = opInfo.getDescriptor();
+        if (desc.getFieldValue("dcmd.permissionClass") == null) {
+            // No special permission required, execution should not trigger
+            // any security exception
+            if (invokeOperation(mbs, on, opInfo)) {
+                throw new RuntimeException("TEST FAILED");
+            }
+        } else {
+            // Building the required permission
+            Permission reqPerm = createPermission(
+                    (String)desc.getFieldValue("dcmd.permissionClass"),
+                    (String)desc.getFieldValue("dcmd.permissionName"),
+                    (String)desc.getFieldValue("dcmd.permissionAction"));
+            // Paranoid mode: check that the SecurityManager has not already
+            // been granted the permission
+            sm.denyPermission(reqPerm);
+            // A special permission is required for this operation,
+            // invoking it without the permission granted must trigger
+            // a security exception
+            if(!invokeOperation(mbs, on, opInfo)) {
+                throw new RuntimeException("TEST FAILED");
+            }
+            // grant the permission and re-try invoking the operation
+            sm.grantPermission(reqPerm);
+            if(invokeOperation(mbs, on, opInfo)) {
+                throw new RuntimeException("TEST FAILED");
+            }
+            // Clean up
+            sm.denyPermission(reqPerm);
+        }
+    }
+
+    public static void main(final String[] args) {
+        final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+        ObjectName on = null;
+        try {
+            on = new ObjectName(HOTSPOT_DIAGNOSTIC_MXBEAN_NAME);
+        } catch (MalformedObjectNameException ex) {
+            ex.printStackTrace();
+            throw new RuntimeException("TEST FAILED");
+        }
+        MBeanInfo info = null;
+        try {
+            info = mbs.getMBeanInfo(on);
+        } catch (InstanceNotFoundException | IntrospectionException
+                | ReflectionException ex) {
+            ex.printStackTrace();
+            throw new RuntimeException("TEST FAILED");
+        }
+        CustomSecurityManager sm = new CustomSecurityManager();
+        System.setSecurityManager(sm);
+        // Set of permission required to run the test cleanly
+        // Some permissions are required by the MBeanServer and other
+        // platform services (RuntimePermission("createClassLoader"),
+        // ReflectPermission("suppressAccessChecks"),
+        // java.util.logging.LoggingPermission("control"),
+        // RuntimePermission("exitVM.97")).
+        // Other permissions are required by commands being invoked
+        // in the test (for instance, RuntimePermission("modifyThreadGroup")
+        // and RuntimePermission("modifyThread") are checked when
+        // runFinalization() is invoked by the gcRunFinalization command).
+        sm.grantPermission(new RuntimePermission("createClassLoader"));
+        sm.grantPermission(new ReflectPermission("suppressAccessChecks"));
+        sm.grantPermission(new java.util.logging.LoggingPermission("control", ""));
+        sm.grantPermission(new java.lang.RuntimePermission("exitVM.97"));
+        sm.grantPermission(new java.lang.RuntimePermission("modifyThreadGroup"));
+        sm.grantPermission(new java.lang.RuntimePermission("modifyThread"));
+        for(MBeanOperationInfo opInfo : info.getOperations()) {
+            Permission opPermission = new MBeanPermission(info.getClassName(),
+                    opInfo.getName(),
+                    on,
+                    "invoke");
+            sm.grantPermission(opPermission);
+            testOperation(mbs, sm, on, opInfo);
+            sm.denyPermission(opPermission);
+        }
+        System.out.println("TEST PASSED");
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/com/sun/management/DiagnosticCommandMBean/DcmdMBeanTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug     7150256
+ * @summary Basic Test for the DiagnosticCommandMBean
+ * @author  Frederic Parain
+ *
+ * @run main/othervm -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=8127 DcmdMBeanTest
+ */
+
+
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.management.Descriptor;
+import javax.management.InstanceNotFoundException;
+import javax.management.IntrospectionException;
+import javax.management.MBeanInfo;
+import javax.management.MBeanOperationInfo;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
+import javax.management.*;
+import javax.management.remote.*;
+
+public class DcmdMBeanTest {
+
+    private static String HOTSPOT_DIAGNOSTIC_MXBEAN_NAME =
+        "com.sun.management:type=DiagnosticCommand";
+
+    public static void main(String[] args) {
+        MBeanServerConnection mbs = null;
+        try {
+            JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:8127/jmxrmi");
+            JMXConnector connector = JMXConnectorFactory.connect(url);
+            mbs = connector.getMBeanServerConnection();
+        } catch(Throwable t) {
+            t.printStackTrace();
+        }
+        ObjectName name;
+        try {
+            name = new ObjectName(HOTSPOT_DIAGNOSTIC_MXBEAN_NAME);
+            MBeanInfo info = mbs.getMBeanInfo(name);
+            // The test should check that the MBean doesn't have any
+            // attribute, notification or constructor. The current version
+            // only checks operations.
+            System.out.println("Class Name:"+info.getClassName());
+            System.out.println("Description:"+info.getDescription());
+            MBeanOperationInfo[] opInfo = info.getOperations();
+            System.out.println("Operations:");
+            for(int i=0; i<opInfo.length; i++) {
+                printOperation(opInfo[i]);
+                System.out.println("\n@@@@@@\n");
+            }
+        } catch (InstanceNotFoundException|IntrospectionException|ReflectionException
+                 |MalformedObjectNameException|IOException ex) {
+            Logger.getLogger(DcmdMBeanTest.class.getName()).log(Level.SEVERE, null, ex);
+        }
+    }
+
+    static void printOperation(MBeanOperationInfo info) {
+        System.out.println("Name: "+info.getName());
+        System.out.println("Description: "+info.getDescription());
+        System.out.println("Return Type: "+info.getReturnType());
+        System.out.println("Impact: "+info.getImpact());
+        Descriptor desc = info.getDescriptor();
+        System.out.println("Descriptor");
+        for(int i=0; i<desc.getFieldNames().length; i++) {
+            if(desc.getFieldNames()[i].compareTo("dcmd.arguments") == 0) {
+                System.out.println("\t"+desc.getFieldNames()[i]+":");
+                Descriptor desc2 =
+                        (Descriptor)desc.getFieldValue(desc.getFieldNames()[i]);
+                for(int j=0; j<desc2.getFieldNames().length; j++) {
+                    System.out.println("\t\t"+desc2.getFieldNames()[j]+"=");
+                    Descriptor desc3 =
+                            (Descriptor)desc2.getFieldValue(desc2.getFieldNames()[j]);
+                    for(int k=0; k<desc3.getFieldNames().length; k++) {
+                        System.out.println("\t\t\t"+desc3.getFieldNames()[k]+"="
+                                           +desc3.getFieldValue(desc3.getFieldNames()[k]));
+                    }
+                }
+            } else {
+                System.out.println("\t"+desc.getFieldNames()[i]+"="
+                        +desc.getFieldValue(desc.getFieldNames()[i]));
+            }
+        }
+    }
+}
+
--- a/jdk/test/demo/zipfs/ZipFSTester.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/demo/zipfs/ZipFSTester.java	Wed Jul 05 18:59:05 2017 +0200
@@ -29,6 +29,7 @@
 import java.nio.file.attribute.*;
 import java.net.*;
 import java.util.*;
+import java.util.concurrent.TimeUnit;
 import java.util.zip.*;
 
 import static java.nio.file.StandardOpenOption.*;
@@ -48,6 +49,7 @@
             test0(fs);
             test1(fs);
             test2(fs);   // more tests
+            testTime(Paths.get(args[0]));
         }
     }
 
@@ -337,6 +339,46 @@
         Files.delete(fs3Path);
     }
 
+    // test file timestamps
+    static void testTime(Path src) throws Exception {
+        // create a new filesystem, copy this file into it
+        Map<String, Object> env = new HashMap<String, Object>();
+        env.put("create", "true");
+        Path fsPath = getTempPath();
+        FileSystem fs = newZipFileSystem(fsPath, env);
+
+        System.out.println("test copy with timestamps...");
+        // copy the source file into the zip file system
+        Path dst = getPathWithParents(fs, "me");
+        Files.copy(src, dst, COPY_ATTRIBUTES);
+        checkEqual(src, dst);
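+        // COPY_ATTRIBUTES is expected to carry the timestamps over to the zip entry;
+        // they are read back and compared below.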
+
+        BasicFileAttributes attrs = Files
+                        .getFileAttributeView(src, BasicFileAttributeView.class)
+                        .readAttributes();
+        System.out.println("mtime: " + attrs.lastModifiedTime());
+        System.out.println("ctime: " + attrs.creationTime());
+        System.out.println("atime: " + attrs.lastAccessTime());
+        System.out.println(" ==============>");
+        BasicFileAttributes dstAttrs = Files
+                        .getFileAttributeView(dst, BasicFileAttributeView.class)
+                        .readAttributes();
+        System.out.println("mtime: " + dstAttrs.lastModifiedTime());
+        System.out.println("ctime: " + dstAttrs.creationTime());
+        System.out.println("atime: " + dstAttrs.lastAccessTime());
+
+        // 1-second granularity
+        if (attrs.lastModifiedTime().to(TimeUnit.SECONDS) !=
+            dstAttrs.lastModifiedTime().to(TimeUnit.SECONDS) ||
+            attrs.lastAccessTime().to(TimeUnit.SECONDS) !=
+            dstAttrs.lastAccessTime().to(TimeUnit.SECONDS) ||
+            attrs.creationTime().to(TimeUnit.SECONDS) !=
+            dstAttrs.creationTime().to(TimeUnit.SECONDS)) {
+            throw new RuntimeException("Timestamp Copy Failed!");
+        }
+        Files.delete(fsPath);
+    }
+
     private static FileSystem newZipFileSystem(Path path, Map<String, ?> env)
         throws Exception
     {
--- a/jdk/test/demo/zipfs/basic.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/demo/zipfs/basic.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -22,7 +22,7 @@
 #
 # @test
 # @bug 6990846 7009092 7009085 7015391 7014948 7005986 7017840 7007596
-# 7157656 8002390
+# 7157656 8002390 7012868 7012856
 # @summary Test ZipFileSystem demo
 # @build Basic PathOps ZipFSTester
 # @run shell basic.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/awt/Focus/ResetMostRecentFocusOwnerTest/ResetMostRecentFocusOwnerTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+  @test
+  @bug      8013773
+  @summary  Tests that disabled component is not retained as most recent focus owner.
+  @author   Anton.Tarasov: area=awt.focus
+  @library  ../../regtesthelpers
+  @build    Util
+  @run      main ResetMostRecentFocusOwnerTest
+*/
+
+import java.applet.Applet;
+import java.awt.AWTEvent;
+import java.awt.FlowLayout;
+import java.awt.Robot;
+import java.awt.Toolkit;
+import java.awt.event.AWTEventListener;
+import java.awt.event.FocusEvent;
+import java.awt.event.WindowEvent;
+import javax.swing.JButton;
+import javax.swing.JFrame;
+import test.java.awt.regtesthelpers.Util;
+
+public class ResetMostRecentFocusOwnerTest extends Applet {
+
+    public static void main(String[] args) {
+        ResetMostRecentFocusOwnerTest app = new ResetMostRecentFocusOwnerTest();
+        app.init();
+        app.start();
+    }
+
+    @Override
+    public void start() {
+
+        Toolkit.getDefaultToolkit().addAWTEventListener(new AWTEventListener() {
+            public void eventDispatched(AWTEvent e) {
+                System.err.println(e);
+            }
+        }, FocusEvent.FOCUS_EVENT_MASK | WindowEvent.WINDOW_FOCUS_EVENT_MASK);
+
+        boolean gained = false;
+        final Robot robot = Util.createRobot();
+
+        JFrame frame1 = new JFrame("Main Frame");
+        final JButton b1 = new JButton("button1");
+        frame1.add(b1);
+        frame1.pack();
+        frame1.setLocation(0, 300);
+
+        Util.showWindowWait(frame1);
+
+        final JFrame frame2 = new JFrame("Test Frame");
+        final JButton b2 = new JButton("button2");
+        frame2.add(b2);
+        frame2.pack();
+        frame2.setLocation(300, 300);
+
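+        // Requesting focus on a disabled component must not record it as the
+        // most recent focus owner (see the @bug 8013773 summary above).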
+        b2.setEnabled(false);
+        b2.requestFocus();
+
+        Util.showWindowWait(frame2);
+
+        robot.delay(500);
+
+        //
+        // It's expected that the focus is restored to <button1>.
+        // If not, click <button1> to set focus on it.
+        //
+        if (!b1.hasFocus()) {
+            gained = Util.trackFocusGained(b1, new Runnable() {
+                public void run() {
+                    Util.clickOnComp(b1, robot);
+                }
+            }, 5000, false);
+
+            if (!gained) {
+                throw new RuntimeException("Unexpected state: focus is not on <button1>");
+            }
+        }
+
+        robot.delay(500);
+
+        //
+        // Click <button2>, check that focus is set on the parent frame.
+        //
+        gained = false;
+        gained = Util.trackFocusGained(frame2, new Runnable() {
+            public void run() {
+                Util.clickOnComp(b2, robot);
+            }
+        }, 5000, false);
+
+        if (!gained) {
+            throw new RuntimeException("Test failed: focus wasn't set to <frame2>");
+        }
+
+        System.out.println("Test passed.");
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/awt/FontClass/SurrogateTest/SuppCharTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8015556
+ * @summary Surrogate pairs do not render properly on MacOS X.
+ */
+
+import java.util.Locale;
+import java.awt.*;
+import java.awt.font.*;
+import javax.swing.*;
+
+public class SuppCharTest {
+
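+   // "\uD840\uDC01" and "\uD840\uDC00" are the surrogate pairs for the
+   // supplementary characters U+20001 and U+20000.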
+   static String str = "ABC\uD840\uDC01\uD840\uDC00AB";
+   static String EXTB_FONT = "MingLiU-ExtB";
+
+   public static void main(String args[]) throws Exception {
+
+      final Font font = new Font(EXTB_FONT, Font.PLAIN, 36);
+      if (!EXTB_FONT.equalsIgnoreCase(font.getFamily(Locale.ENGLISH))) {
+         return;
+      }
+
+      SwingUtilities.invokeLater(new Runnable(){
+        @Override
+        public void run(){
+            JFrame f = new JFrame("Test Supplementary Char Support");
+            Component c = new SuppCharComp(font, str);
+            f.add("Center", c);
+            JButton b = new JButton(str);
+            b.setFont(font);
+            f.add("South", b);
+            f.pack();
+            f.setVisible(true);
+        }
+      });
+
+      /* If a supplementary character was found, 'invisible glyphs'
+       * with value 65535 will be inserted in the place of the 2nd (low)
+       * char index. So we are looking here to make sure such substitutions
+       * took place.
+       */
+      FontRenderContext frc = new FontRenderContext(null, false, false);
+      GlyphVector gv = font.createGlyphVector(frc, str);
+      int numGlyphs = gv.getNumGlyphs();
+      int[] codes = gv.getGlyphCodes(0, numGlyphs, null);
+      boolean foundInvisibleGlyph = false;
+      for (int i=0; i<numGlyphs;i++) {
+           if (codes[i] == 65535) {
+               foundInvisibleGlyph = true;
+               break;
+           }
+      }
+
+      if (!foundInvisibleGlyph) {
+           throw new RuntimeException("No invisible glyphs");
+      }
+
+      if (font.canDisplayUpTo(str) != -1) {
+          throw new RuntimeException("Font can't display all chars");
+      }
+
+   }
+}
+
+class SuppCharComp extends Component {
+
+  static final int w=400, h=250;
+  public Dimension preferredSize() {
+     return new Dimension(w,h);
+  }
+
+  String str = null;
+  Font font = null;
+  public SuppCharComp(Font font, String str) {
+    this.font = font;
+    this.str = str;
+  }
+  public void paint(Graphics g) {
+     Graphics2D g2d = (Graphics2D)g.create();
+     g2d.setColor(Color.white);
+     g2d.fillRect(0,0,w,h);
+     g2d.setColor(Color.black);
+     int y = 0;
+     g2d.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS,
+                          RenderingHints.VALUE_FRACTIONALMETRICS_ON);
+     g2d.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING,
+                          RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
+
+     g2d.setFont(font);
+     g2d.drawString(str, 10, 50);
+
+     FontRenderContext frc = g2d.getFontRenderContext();
+     GlyphVector gv = font.createGlyphVector(frc, str);
+     g2d.drawGlyphVector(gv, 10, 100);
+     TextLayout tl = new TextLayout(str, font, frc);
+     tl.draw(g2d, 10, 150);
+     char[] ca = str.toCharArray();
+     g2d.drawChars(ca, 0, ca.length, 10, 200);
+
+  }
+
+}
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/awt/TextArea/Mixing/TextAreaMixing.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/* @test
+ * @bug 8013189
+ * @run main TextAreaMixing
+ * @summary TextArea should support HW/LW mixing
+ * @author anthony.petrov@oracle.com
+ */
+
+import java.awt.*;
+import java.awt.event.*;
+import javax.swing.*;
+
+public class TextAreaMixing {
+
+    private static volatile boolean menuClicked = false;
+    private static JMenuItem menuItem;
+
+    public static void main(String[] args) throws Exception {
+        // The bug is only reproducible on X11, but there's no reason
+        // for this test not to pass on any platform.
+
+        final JFrame frame = new JFrame("JFrame");
+        frame.setLayout(new GridLayout(0, 1));
+        frame.setSize(200, 200);
+
+        JMenuBar menuBar = new JMenuBar();
+        JMenu menu = new JMenu("Test Menu");
+
+        for (int i = 0; i < 6; i++) {
+            JMenuItem mi = new JMenuItem(Integer.toString(i));
+            mi.addActionListener(new ActionListener() {
+                @Override
+                public void actionPerformed(ActionEvent e) {
+                    menuClicked = true;
+                }
+            });
+            menu.add(mi);
+
+            // Choose a random (OK, the fourth) menu item to click on
+            if (i == 3) {
+                menuItem = mi;
+            }
+        }
+        menuBar.add(menu);
+        frame.setJMenuBar(menuBar);
+
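+        // Adding a heavyweight AWT TextArea underneath the lightweight Swing menu
+        // is the HW/LW mixing scenario under test.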
+        frame.getContentPane().add(new TextArea());
+        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+        frame.setVisible(true);
+
+        Thread.sleep(2000);
+
+        Robot robot = new Robot();
+
+        // Open the menu
+        Point loc = menu.getLocationOnScreen();
+        robot.mouseMove(loc.x + menu.getWidth() / 2, loc.y + menu.getHeight() / 2);
+        robot.mousePress(InputEvent.BUTTON1_DOWN_MASK);
+        robot.mouseRelease(InputEvent.BUTTON1_DOWN_MASK);
+
+        Thread.sleep(500);
+
+        // Click an item
+        loc = menuItem.getLocationOnScreen();
+        robot.mouseMove(loc.x + menuItem.getWidth() / 2, loc.y + menuItem.getHeight() / 2);
+        robot.mousePress(InputEvent.BUTTON1_DOWN_MASK);
+        robot.mouseRelease(InputEvent.BUTTON1_DOWN_MASK);
+
+        Thread.sleep(500);
+
+        frame.dispose();
+
+        if (!menuClicked) {
+            throw new RuntimeException("A menu item has never been clicked.");
+        }
+    }
+}
--- a/jdk/test/java/awt/Window/Grab/GrabTest.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/awt/Window/Grab/GrabTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -175,7 +175,7 @@
 
         // 6. Check that press on the outside area causes ungrab
         Point loc = f.getLocationOnScreen();
-        robot.mouseMove(loc.x + 100, loc.y + f.getSize().height + 1);
+        robot.mouseMove(loc.x + 100, loc.y + f.getSize().height + 10);
         Util.waitForIdle(robot);
         robot.mousePress(InputEvent.BUTTON1_MASK);
         robot.delay(50);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/awt/Window/WindowClosedEvents/WindowClosedEventOnDispose.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,203 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+  @test
+  @bug 8015500
+  @summary DisposeAction multiplies the WINDOW_CLOSED event.
+  @author jlm@joseluismartin.info
+  @run main WindowClosedEventOnDispose
+ */
+
+
+import java.awt.Toolkit;
+import java.awt.event.WindowAdapter;
+import java.awt.event.WindowEvent;
+
+import javax.swing.JDialog;
+import javax.swing.JFrame;
+import javax.swing.SwingUtilities;
+
+/**
+ * WindowClosedEventOnDispose.java
+ * Summary: tests that a Window doesn't multiply the WINDOW_CLOSED event
+ * on dispose.
+ * The test fails if more events than expected are fired.
+ */
+public class WindowClosedEventOnDispose {
+
+    private static int N_LOOPS = 5;
+    private static int N_DIALOGS = 2;
+
+    public static void main(String args[]) throws Exception {
+        testWithFrame();
+        testWithoutFrame();
+        testHiddenChildDispose();
+        testHiddenWindowDispose();
+    }
+
+    /**
+     * Test the WINDOW_CLOSED event received by a dialog
+     * that has an owner window.
+     * @throws Exception
+     */
+    public static void testWithFrame() throws Exception {
+        doTest(true);
+    }
+
+    /**
+     * Test the WINDOW_CLOSED event received by a dialog
+     * that doesn't have an owner window.
+     * @throws Exception
+     */
+    public static void testWithoutFrame() throws Exception  {
+        System.out.println("Run without owner Frame");
+        doTest(false);
+    }
+
+    /**
+     * Test if a dialog that has never been shown fires
+     * the WINDOW_CLOSED event on parent dispose().
+     * @throws Exception
+     */
+    public static void testHiddenChildDispose() throws Exception {
+        JFrame f = new JFrame();
+        JDialog dlg = new JDialog(f);
+        Listener l = new Listener();
+        dlg.addWindowListener(l);
+        f.dispose();
+        waitEvents();
+
+        assertEquals(0, l.getCount());
+    }
+
+    /**
+     * Test if a dialog fires the WINDOW_CLOSED event
+     * on parent dispose().
+     * @throws Exception
+     */
+    public static void testVisibleChildParentDispose() throws Exception {
+        JFrame f = new JFrame();
+        JDialog dlg = new JDialog(f);
+        Listener l = new Listener();
+        dlg.addWindowListener(l);
+        dlg.setVisible(true);
+        f.dispose();
+        waitEvents();
+
+        assertEquals(1, l.getCount());
+    }
+
+    /**
+     * Test if a Window that has never been shown fires the
+     * WINDOW_CLOSED event on dispose().
+     */
+    public static void testHiddenWindowDispose() throws Exception {
+        JFrame f = new JFrame();
+        Listener l = new Listener();
+        f.addWindowListener(l);
+        f.dispose();
+        waitEvents();
+
+        assertEquals(0, l.getCount());
+    }
+
+    /**
+     * Test if a JDialog receives the correct number
+     * of WINDOW_CLOSED events.
+     * @param useFrame true to use an owner frame
+     * @throws Exception
+     */
+    private static void doTest(final boolean useFrame) throws Exception {
+        final Listener l  = new Listener();
+        final JFrame f = new JFrame();
+
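+        // Each iteration shows N_DIALOGS dialogs and disposes them, so exactly
+        // N_LOOPS * N_DIALOGS WINDOW_CLOSED events are expected in total.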
+        for (int i = 0; i < N_LOOPS; i++) {
+
+            SwingUtilities.invokeLater(new Runnable() {
+
+                public void run() {
+                    JDialog[] dialogs = new JDialog[N_DIALOGS];
+                    for (int i = 0; i < N_DIALOGS; i++) {
+                        if (useFrame) {
+                            dialogs[i]= new JDialog(f);
+                        }
+                        else {
+                            dialogs[i] = new JDialog();
+                        }
+
+                        dialogs[i].addWindowListener(l);
+                        dialogs[i].setVisible(true);
+                    }
+
+                    // Dispose all
+                    for (JDialog d : dialogs)
+                        d.dispose();
+
+                    f.dispose();
+                }
+            });
+        }
+
+        waitEvents();
+
+        assertEquals(N_DIALOGS * N_LOOPS, l.getCount());
+    }
+
+    private static void waitEvents() throws InterruptedException {
+        // Wait until events are dispatched
+        while (Toolkit.getDefaultToolkit().getSystemEventQueue().peekEvent() != null)
+            Thread.sleep(100);
+    }
+
+    /**
+     * @param expected the expected value
+     * @param real the real value
+     */
+    private static void assertEquals(int expected, int real) throws Exception {
+        if (expected != real) {
+            throw new Exception("Expected events: " + expected + " Received Events: " + real);
+        }
+    }
+
+}
+
+/**
+ * Listener to count events
+ */
+class Listener extends WindowAdapter {
+
+    private volatile int count = 0;
+
+    public void windowClosed(WindowEvent e) {
+        count++;
+    }
+
+    public int getCount() {
+        return count;
+    }
+
+    public void setCount(int count) {
+        this.count = count;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/beans/XMLEncoder/Test6989223.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 6989223
+ * @summary Tests Rectangle2D.Double encoding
+ * @author Sergey Malenkov
+ */
+
+import java.awt.geom.Rectangle2D;
+
+public class Test6989223 extends AbstractTest {
+    public static void main(String[] args) {
+        new Test6989223().test(true);
+    }
+
+    protected Object getObject() {
+        return new Bean(1, 2, 3, 4);
+    }
+
+    @Override
+    protected Object getAnotherObject() {
+        return new Bean(1, 2, 3, 5);
+    }
+
+    public static class Bean extends Rectangle2D.Double {
+        public Bean() {
+        }
+
+        public Bean(double x, double y, double w, double h) {
+            super(x, y, w, h);
+        }
+
+        @Override
+        public boolean equals(Object object) {
+            return super.equals(object); // to avoid recursion during validation
+        }
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/beans/XMLEncoder/Test7080156.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 7080156 7094245
+ * @summary Tests beans with public arrays
+ * @author Sergey Malenkov
+ */
+
+public class Test7080156 extends AbstractTest {
+    public static void main(String[] args) {
+        new Test7080156().test(true);
+    }
+
+    protected Object getObject() {
+        Bean bean = new Bean();
+        bean.setArray("something");
+        return bean;
+    }
+
+    @Override
+    protected Object getAnotherObject() {
+        Bean bean = new Bean();
+        bean.setArray("some", "thing");
+        return bean;
+    }
+
+    public static class Bean {
+        public String[] array = {"default"};
+
+        public void setArray(String... array) {
+            this.array = array;
+        }
+
+        public String[] getArray() {
+            return this.array;
+        }
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/beans/XMLEncoder/Test8013557.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8013557
+ * @summary Tests beans with public fields
+ * @author Sergey Malenkov
+ */
+
+public class Test8013557 extends AbstractTest {
+    public static void main(String[] args) {
+        new Test8013557().test(true);
+    }
+
+    protected Object getObject() {
+        return new Bean(new Value("something"));
+    }
+
+    @Override
+    protected Object getAnotherObject() {
+        return new Bean(new Value());
+    }
+
+    public static class Bean {
+        public Value value;
+
+        public Bean() {
+            this.value = new Value();
+        }
+
+        public Bean(Value value) {
+            this.value = value;
+        }
+
+        public void setValue(Value value) {
+            this.value = value;
+        }
+
+        public Value getValue() {
+            return this.value;
+        }
+    }
+
+    public static class Value {
+        private String string;
+
+        public Value() {
+            this.string = "default";
+        }
+
+        public Value(String value) {
+            this.string = value;
+        }
+
+        public void setString(String string) {
+            this.string = string;
+        }
+
+        public String getString() {
+            return this.string;
+        }
+    }
+}
--- a/jdk/test/java/io/pathNames/General.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/io/pathNames/General.java	Wed Jul 05 18:59:05 2017 +0200
@@ -277,8 +277,8 @@
     {
         check(ans, ask + slash);
         checkNames(depth, create,
-                   ans,
-                   ask);
+                   ans.endsWith(File.separator) ? ans : ans + File.separator,
+                   ask + slash);
     }
 
 
@@ -308,9 +308,6 @@
                                   String ans, String ask)
         throws Exception
     {
-        ans = ans.endsWith(File.separator) ? ans : ans + File.separator;
-        ask = ask.endsWith(File.separator) ? ask : ask + File.separator;
-
         int d = depth - 1;
         File f = new File(ans);
         String n;
--- a/jdk/test/java/io/pathNames/GeneralWin32.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/io/pathNames/GeneralWin32.java	Wed Jul 05 18:59:05 2017 +0200
@@ -50,13 +50,13 @@
     private static final int DEPTH = 2;
     private static String baseDir = null;
     private static String userDir = null;
+    private static String relative = null;
 
     /* Pathnames relative to working directory */
 
     private static void checkCaseLookup() throws IOException {
         /* Use long names here to avoid 8.3 format, which Samba servers often
            force to lowercase */
-        String relative = baseDir.substring(userDir.length() + 1);
         File d1 = new File(relative, "XyZzY0123");
         File d2 = new File(d1, "FOO_bar_BAZ");
         File f = new File(d2, "GLORPified");
@@ -79,9 +79,9 @@
            case of filenames, rather than just using the input case */
         File y = new File(userDir, f.getPath());
         String ans = y.getPath();
-        check(ans, relative + "\\" + "XyZzY0123\\FOO_bar_BAZ\\GLORPified");
-        check(ans, relative + "\\" + "xyzzy0123\\foo_bar_baz\\glorpified");
-        check(ans, relative + "\\" + "XYZZY0123\\FOO_BAR_BAZ\\GLORPIFIED");
+        check(ans, relative + "XyZzY0123\\FOO_bar_BAZ\\GLORPified");
+        check(ans, relative + "xyzzy0123\\foo_bar_baz\\glorpified");
+        check(ans, relative + "XYZZY0123\\FOO_BAR_BAZ\\GLORPIFIED");
     }
 
     private static void checkWild(File f) throws Exception {
@@ -103,8 +103,7 @@
     private static void checkRelativePaths() throws Exception {
         checkCaseLookup();
         checkWildCards();
-        String relative = baseDir.substring(userDir.length() + 1);
-        checkNames(3, true, baseDir.toString(), relative);
+        checkNames(3, true, baseDir, relative);
     }
 
 
@@ -136,7 +135,6 @@
         String ans = exists ? df.getAbsolutePath() : d;
         if (!ans.endsWith("\\"))
             ans = ans + "\\";
-        String relative = baseDir.substring(userDir.length() + 1);
         checkNames(depth, false, ans + relative, d + relative);
     }
 
@@ -171,15 +169,16 @@
             return;
         }
         if (args.length > 0) debug = true;
-        userDir = System.getProperty("user.dir");
-        baseDir = initTestData(6);
+        userDir = System.getProperty("user.dir") + '\\';
+        baseDir = initTestData(6) + '\\';
+        relative = baseDir.substring(userDir.length());
         checkRelativePaths();
         checkDrivePaths();
         checkUncPaths();
     }
 
     private static String initTestData(int maxDepth) throws IOException {
-        File parent = new File(System.getProperty("user.dir"));
+        File parent = new File(userDir);
         String baseDir = null;
         maxDepth = maxDepth < DEPTH + 2 ? DEPTH + 2 : maxDepth;
         for (int i = 0; i < maxDepth; i ++) {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/lang/IntegralPrimitiveToString.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,194 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.Arrays;
+import java.util.List;
+import java.util.function.LongFunction;
+import java.util.function.Function;
+
+import static org.testng.Assert.assertEquals;
+
+/**
+ * @test
+ * @run testng IntegralPrimitiveToString
+ * @summary test string conversions for primitive integral types.
+ * @author Mike Duigou
+ */
+public class IntegralPrimitiveToString {
+
+    @Test(dataProvider="numbers")
+    public <N extends Number> void testToString(String description,
+        Function<N, BigInteger> converter,
+        Function<N, BigInteger> unsignedConverter,
+        N[] values,
+        Stringifier<N>[] stringifiers) {
+        System.out.printf("%s : conversions: %d values: %d\n", description, stringifiers.length, values.length);
+        for( N value : values) {
+            BigInteger asBigInt = converter.apply(value);
+            BigInteger asUnsignedBigInt = unsignedConverter.apply(value);
+            for(Stringifier<N> stringifier : stringifiers) {
+                stringifier.assertMatchingToString(value, asBigInt, asUnsignedBigInt, description);
+            }
+        }
+    }
+
+    static class Stringifier<N extends Number> {
+        final boolean signed;
+        final int  radix;
+        final Function<N,String> toString;
+        Stringifier(boolean signed, int radix, Function<N,String> toString) {
+            this.signed = signed;
+            this.radix = radix;
+            this.toString = toString;
+        }
+
+        public void assertMatchingToString(N value, BigInteger asSigned, BigInteger asUnsigned, String description) {
+            String expected = signed
+                ? asSigned.toString(radix)
+                : asUnsigned.toString(radix);
+
+            String actual = toString.apply(value);
+
+            assertEquals(actual, expected, description + " conversion should be the same");
+        }
+    }
+
+    @DataProvider(name="numbers", parallel=true)
+    public Iterator<Object[]> testSetProvider() {
+
+    return Arrays.asList(
+        new Object[] { "Byte",
+            (Function<Byte,BigInteger>) b -> BigInteger.valueOf((long) b),
+            (Function<Byte,BigInteger>) b -> BigInteger.valueOf(Integer.toUnsignedLong((byte) b)),
+            numberProvider((LongFunction<Byte>) l -> Byte.valueOf((byte) l), Byte.SIZE),
+            new Stringifier[] {
+                new Stringifier<Byte>(true, 10, b -> b.toString()),
+                new Stringifier<Byte>(true, 10, b -> Byte.toString(b))
+            }
+        },
+        new Object[] { "Short",
+            (Function<Short,BigInteger>) s -> BigInteger.valueOf((long) s),
+            (Function<Short,BigInteger>) s -> BigInteger.valueOf(Integer.toUnsignedLong((short) s)),
+            numberProvider((LongFunction<Short>) l -> Short.valueOf((short) l), Short.SIZE),
+            new Stringifier[] {
+                new Stringifier<Short>(true, 10, s -> s.toString()),
+                new Stringifier<Short>(true, 10, s -> Short.toString( s))
+            }
+        },
+        new Object[] { "Integer",
+            (Function<Integer,BigInteger>) i -> BigInteger.valueOf((long) i),
+            (Function<Integer,BigInteger>) i -> BigInteger.valueOf(Integer.toUnsignedLong(i)),
+            numberProvider((LongFunction<Integer>) l -> Integer.valueOf((int) l), Integer.SIZE),
+            new Stringifier[] {
+                new Stringifier<Integer>(true, 10, i -> i.toString()),
+                new Stringifier<Integer>(true, 10, i -> Integer.toString(i)),
+                new Stringifier<Integer>(false, 2, Integer::toBinaryString),
+                new Stringifier<Integer>(false, 16, Integer::toHexString),
+                new Stringifier<Integer>(false, 8, Integer::toOctalString),
+                new Stringifier<Integer>(true, 2, i -> Integer.toString(i, 2)),
+                new Stringifier<Integer>(true, 8, i -> Integer.toString(i, 8)),
+                new Stringifier<Integer>(true, 10, i -> Integer.toString(i, 10)),
+                new Stringifier<Integer>(true, 16, i -> Integer.toString(i, 16)),
+                new Stringifier<Integer>(true, Character.MAX_RADIX, i -> Integer.toString(i, Character.MAX_RADIX)),
+                new Stringifier<Integer>(false, 10, i -> Integer.toUnsignedString(i)),
+                new Stringifier<Integer>(false, 2, i -> Integer.toUnsignedString(i, 2)),
+                new Stringifier<Integer>(false, 8, i -> Integer.toUnsignedString(i, 8)),
+                new Stringifier<Integer>(false, 10, i -> Integer.toUnsignedString(i, 10)),
+                new Stringifier<Integer>(false, 16, i -> Integer.toUnsignedString(i, 16)),
+                new Stringifier<Integer>(false, Character.MAX_RADIX, i -> Integer.toUnsignedString(i, Character.MAX_RADIX))
+            }
+        },
+        new Object[] { "Long",
+            (Function<Long, BigInteger>) BigInteger::valueOf,
+            (Function<Long, BigInteger>) l -> {
+                if (l >= 0) {
+                    return BigInteger.valueOf((long) l);
+                } else {
+                    int upper = (int)(l >>> 32);
+                    int lower = (int) (long) l;
+
+                    // return (upper << 32) + lower
+                    return (BigInteger.valueOf(Integer.toUnsignedLong(upper))).shiftLeft(32).
+                    add(BigInteger.valueOf(Integer.toUnsignedLong(lower)));
+                }
+            },
+            numberProvider((LongFunction<Long>) Long::valueOf, Long.SIZE),
+            new Stringifier[] {
+                new Stringifier<Long>(true, 10, l -> l.toString()),
+                new Stringifier<Long>(true, 10, l -> Long.toString(l)),
+                new Stringifier<Long>(false, 2, Long::toBinaryString),
+                new Stringifier<Long>(false, 16, Long::toHexString),
+                new Stringifier<Long>(false, 8, Long::toOctalString),
+                new Stringifier<Long>(true, 2, l -> Long.toString(l, 2)),
+                new Stringifier<Long>(true, 8, l -> Long.toString(l, 8)),
+                new Stringifier<Long>(true, 10, l -> Long.toString(l, 10)),
+                new Stringifier<Long>(true, 16, l -> Long.toString(l, 16)),
+                new Stringifier<Long>(true, Character.MAX_RADIX, l -> Long.toString(l, Character.MAX_RADIX)),
+                new Stringifier<Long>(false, 10, Long::toUnsignedString),
+                new Stringifier<Long>(false, 2, l -> Long.toUnsignedString(l, 2)),
+                new Stringifier<Long>(false, 8, l-> Long.toUnsignedString(l, 8)),
+                new Stringifier<Long>(false, 10, l -> Long.toUnsignedString(l, 10)),
+                new Stringifier<Long>(false, 16, l -> Long.toUnsignedString(l, 16)),
+                new Stringifier<Long>(false, Character.MAX_RADIX, l -> Long.toUnsignedString(l, Character.MAX_RADIX))
+            }
+        }
+        ).iterator();
+    }
+    private static final long[] SOME_PRIMES = {
+        3L, 5L, 7L, 11L, 13L, 17L, 19L, 23L, 29L, 31L, 37L, 41L, 43L, 47L, 53L,
+        59L, 61L, 71L, 73L, 79L, 83L, 89L, 97L, 101L, 103L, 107L, 109L, 113L,
+        5953L, 5981L, 5987L, 6007L, 6011L, 6029L, 6037L, 6043L, 6047L, 6053L,
+        16369L, 16381L, 16411L, 32749L, 32771L, 65521L, 65537L,
+        (long) Integer.MAX_VALUE };
+
+    public <N extends Number> N[] numberProvider(LongFunction<N> boxer, int bits, N... extras) {
+        List<N> numbers = new ArrayList<>();
+
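+        // Generate values around every power of two within the type's bit width,
+        // plus arithmetic and bitwise combinations with a set of primes, to cover
+        // boundary cases.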
+        for(int bitmag = 0; bitmag < bits; bitmag++) {
+            long value = 1L << bitmag;
+            numbers.add(boxer.apply(value));
+            numbers.add(boxer.apply(value - 1));
+            numbers.add(boxer.apply(value + 1));
+            numbers.add(boxer.apply(-value));
+            for(int divisor = 0; divisor < SOME_PRIMES.length && value < SOME_PRIMES[divisor]; divisor++) {
+                numbers.add(boxer.apply(value - SOME_PRIMES[divisor]));
+                numbers.add(boxer.apply(value + SOME_PRIMES[divisor]));
+                numbers.add(boxer.apply(value * SOME_PRIMES[divisor]));
+                numbers.add(boxer.apply(value / SOME_PRIMES[divisor]));
+                numbers.add(boxer.apply(value | SOME_PRIMES[divisor]));
+                numbers.add(boxer.apply(value & SOME_PRIMES[divisor]));
+                numbers.add(boxer.apply(value ^ SOME_PRIMES[divisor]));
+            }
+        }
+
+        numbers.addAll(Arrays.asList(extras));
+
+        return (N[]) numbers.toArray(new Number[numbers.size()]);
+    }
+}
--- a/jdk/test/java/lang/management/MXBean/MXBeanBehavior.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/lang/management/MXBean/MXBeanBehavior.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2005, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2005, 2013 Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -36,6 +36,10 @@
 import javax.management.*;
 
 public class MXBeanBehavior {
+    // Exclude list: list of platform MBeans that are not MXBeans
+    public static final HashSet<String> excludeList = new HashSet<>(
+            Arrays.asList("com.sun.management:type=DiagnosticCommand"));
+
     public static void main(String[] args) throws Exception {
         MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
 
@@ -92,6 +96,10 @@
        by generic MXBean tests.
      */
     private static void test(MBeanServer mbs, ObjectName name) throws Exception {
+        if(excludeList.contains(name.getCanonicalName())) {
+            // Skipping not MXBean objects.
+            return;
+        }
         System.out.println("Testing: " + name);
 
         MBeanInfo mbi = mbs.getMBeanInfo(name);
--- a/jdk/test/java/lang/management/ManagementFactory/MBeanServerMXBeanUnsupportedTest.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/lang/management/ManagementFactory/MBeanServerMXBeanUnsupportedTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2006, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2006, 2013 Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -36,6 +36,8 @@
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
+import java.util.Arrays;
+import java.util.HashSet;
 import javax.management.MBeanServer;
 import javax.management.MBeanServerBuilder;
 import javax.management.MBeanServerDelegate;
@@ -81,6 +83,9 @@
     public static class MBeanServerForwarderInvocationHandler
             implements InvocationHandler {
 
+        public static final HashSet<String> excludeList = new HashSet<String>(
+            Arrays.asList("com.sun.management:type=DiagnosticCommand"));
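+        // The DiagnosticCommand platform MBean is not an MXBean, so it is skipped
+        // by the MXBean-only checks below.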
+
         public static MBeanServerForwarder newProxyInstance() {
 
             final InvocationHandler handler =
@@ -126,15 +131,17 @@
                 if (domain.equals("java.lang") ||
                     domain.equals("java.util.logging") ||
                     domain.equals("com.sun.management")) {
-                    String mxbean = (String)
-                        mbs.getMBeanInfo(name).getDescriptor().getFieldValue("mxbean");
-                    if (mxbean == null || !mxbean.equals("true")) {
-                        throw new RuntimeException(
+                    if(!excludeList.contains(name.getCanonicalName())) {
+                        String mxbean = (String)
+                            mbs.getMBeanInfo(name).getDescriptor().getFieldValue("mxbean");
+                        if (mxbean == null || !mxbean.equals("true")) {
+                            throw new RuntimeException(
                                 "Platform MBeans must be MXBeans!");
-                    }
-                    if (!(mbean instanceof StandardMBean)) {
-                        throw new RuntimeException(
+                        }
+                        if (!(mbean instanceof StandardMBean)) {
+                            throw new RuntimeException(
                                 "MXBeans must be wrapped in StandardMBean!");
+                        }
                     }
                 }
                 return result;
--- a/jdk/test/java/net/CookieHandler/TestHttpCookie.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/net/CookieHandler/TestHttpCookie.java	Wed Jul 05 18:59:05 2017 +0200
@@ -243,6 +243,10 @@
         test("set-cookie2: Customer = \"WILE_E_COYOTE\"; Version = \"1\"; Path = \"/acme\"")
         .n("Customer").v("WILE_E_COYOTE").ver(1).p("/acme");
 
+        // $NAME is reserved; result should be null
+        test("set-cookie2: $Customer = \"WILE_E_COYOTE\"; Version = \"1\"; Path = \"/acme\"")
+        .nil();
+
         // a 'full' cookie
         test("set-cookie2: Customer=\"WILE_E_COYOTE\"" +
                 ";Version=\"1\"" +
--- a/jdk/test/java/net/InterfaceAddress/NetworkPrefixLength.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/net/InterfaceAddress/NetworkPrefixLength.java	Wed Jul 05 18:59:05 2017 +0200
@@ -22,7 +22,7 @@
  */
 
 /* @test
- * @bug 6707289
+ * @bug 6707289 7107883
  * @summary InterfaceAddress.getNetworkPrefixLength() does not conform to Javadoc
  */
 
@@ -47,6 +47,14 @@
                     passed = false;
                     debug(nic.getName(), iaddr);
                 }
+                InetAddress ia = iaddr.getAddress();
+                if (ia.isLoopbackAddress() && ia instanceof Inet4Address) {
+                    // assumption: prefix length will always be 8
+                    if (iaddr.getNetworkPrefixLength() != 8) {
+                        out.println("Expected prefix of 8, got " + iaddr);
+                        passed = false;
+                    }
+                }
             }
         }
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/nio/Buffer/Chars.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,137 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * @test
+ * @bug 8014854
+ * @summary Exercises CharBuffer#chars on each of the CharBuffer types
+ * @run testng Chars
+ */
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.CharBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import static org.testng.Assert.assertEquals;
+
+public class Chars {
+
+    static final Random RAND = new Random();
+
+    static final int SIZE = 128 + RAND.nextInt(1024);
+
+    /**
+     * Randomize the char buffer's position and limit.
+     */
+    static CharBuffer randomizeRange(CharBuffer cb) {
+        int mid = cb.capacity() >>> 1;
+        int start = RAND.nextInt(mid);
+        int end = mid + RAND.nextInt(mid);
+        cb.position(start);
+        cb.limit(end);
+        return cb;
+    }
+
+    /**
+     * Randomize the char buffer's contents, position and limit.
+     */
+    static CharBuffer randomize(CharBuffer cb) {
+        while (cb.hasRemaining()) {
+            cb.put((char)RAND.nextInt());
+        }
+        return randomizeRange(cb);
+    }
+
+    /**
+     * Sums the remaining chars in the char buffer.
+     */
+    static int intSum(CharBuffer cb) {
+        int sum = 0;
+        cb.mark();
+        while (cb.hasRemaining()) {
+            sum += cb.get();
+        }
+        cb.reset();
+        return sum;
+    }
+
+    /**
+     * Creates char buffers to test, adding them to the given list.
+     */
+    static void addCases(CharBuffer cb, List<CharBuffer> buffers) {
+        randomize(cb);
+        buffers.add(cb);
+
+        buffers.add(cb.slice());
+        buffers.add(cb.duplicate());
+        buffers.add(cb.asReadOnlyBuffer());
+
+        buffers.add(randomizeRange(cb.slice()));
+        buffers.add(randomizeRange(cb.duplicate()));
+        buffers.add(randomizeRange(cb.asReadOnlyBuffer()));
+    }
+
+    @DataProvider(name = "charbuffers")
+    public Object[][] createCharBuffers() {
+        List<CharBuffer> buffers = new ArrayList<>();
+
+        // heap
+        addCases(CharBuffer.allocate(SIZE), buffers);
+        addCases(CharBuffer.wrap(new char[SIZE]), buffers);
+        addCases(ByteBuffer.allocate(SIZE*2).order(ByteOrder.BIG_ENDIAN).asCharBuffer(),
+                 buffers);
+        addCases(ByteBuffer.allocate(SIZE*2).order(ByteOrder.LITTLE_ENDIAN).asCharBuffer(),
+                 buffers);
+
+        // direct
+        addCases(ByteBuffer.allocateDirect(SIZE*2).order(ByteOrder.BIG_ENDIAN).asCharBuffer(),
+                 buffers);
+        addCases(ByteBuffer.allocateDirect(SIZE*2).order(ByteOrder.LITTLE_ENDIAN).asCharBuffer(),
+                 buffers);
+
+        // read-only buffer backed by a CharSequence
+        buffers.add(CharBuffer.wrap(randomize(CharBuffer.allocate(SIZE))));
+
+        Object[][] params = new Object[buffers.size()][];
+        for (int i = 0; i < buffers.size(); i++) {
+            CharBuffer cb = buffers.get(i);
+            params[i] = new Object[] { cb.getClass().getName(), cb };
+        }
+
+        return params;
+    }
+
+    @Test(dataProvider = "charbuffers")
+    public void testChars(String type, CharBuffer cb) {
+        System.out.format("%s position=%d, limit=%d%n", type, cb.position(), cb.limit());
+        int expected = intSum(cb);
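+        // The sum over the stream returned by chars() must match the manually
+        // computed sum, both sequentially and in parallel.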
+        assertEquals(cb.chars().sum(), expected);
+        assertEquals(cb.chars().parallel().sum(), expected);
+    }
+}
--- a/jdk/test/java/nio/channels/AsynchronousChannelGroup/Unbounded.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/nio/channels/AsynchronousChannelGroup/Unbounded.java	Wed Jul 05 18:59:05 2017 +0200
@@ -43,47 +43,24 @@
     static volatile boolean finished;
 
     public static void main(String[] args) throws Exception {
-        // all accepted connections are added to a queue
-        final ArrayBlockingQueue<AsynchronousSocketChannel> queue =
-            new ArrayBlockingQueue<AsynchronousSocketChannel>(CONCURRENCY_COUNT);
-
         // create listener to accept connections
-        final AsynchronousServerSocketChannel listener =
+        AsynchronousServerSocketChannel listener =
             AsynchronousServerSocketChannel.open()
                 .bind(new InetSocketAddress(0));
-        listener.accept((Void)null, new CompletionHandler<AsynchronousSocketChannel,Void>() {
-            public void completed(AsynchronousSocketChannel ch, Void att) {
-                queue.add(ch);
-                listener.accept((Void)null, this);
-            }
-            public void failed(Throwable exc, Void att) {
-                if (!finished) {
-                    failed = true;
-                    System.err.println("accept failed: " + exc);
-                }
-            }
-        });
-        System.out.println("Listener created.");
+
+        // establish connections
 
-        // establish lots of connections
+        AsynchronousSocketChannel[] clients = new AsynchronousSocketChannel[CONCURRENCY_COUNT];
+        AsynchronousSocketChannel[] peers = new AsynchronousSocketChannel[CONCURRENCY_COUNT];
+
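+        // Pair each client connect with a blocking accept so that clients[i] and
+        // peers[i] are the two ends of the same connection.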
         int port = ((InetSocketAddress)(listener.getLocalAddress())).getPort();
         SocketAddress sa = new InetSocketAddress(InetAddress.getLocalHost(), port);
-        AsynchronousSocketChannel[] channels =
-            new AsynchronousSocketChannel[CONCURRENCY_COUNT];
+
         for (int i=0; i<CONCURRENCY_COUNT; i++) {
-            int attempts = 0;
-            for (;;) {
-                try {
-                    channels[i] = AsynchronousSocketChannel.open();
-                    channels[i].connect(sa).get();
-                    break;
-                } catch (IOException x) {
-                    // probably resource issue so back off and retry
-                    if (++attempts >= 3)
-                        throw x;
-                    Thread.sleep(50);
-                }
-            }
+            clients[i] = AsynchronousSocketChannel.open();
+            Future<Void> result = clients[i].connect(sa);
+            peers[i] = listener.accept().get();
+            result.get();
         }
         System.out.println("All connection established.");
 
@@ -91,9 +68,9 @@
         final CyclicBarrier barrier = new CyclicBarrier(CONCURRENCY_COUNT+1);
 
         // initiate a read operation on each channel.
-        for (int i=0; i<CONCURRENCY_COUNT; i++) {
+        for (AsynchronousSocketChannel client: clients) {
             ByteBuffer buf = ByteBuffer.allocateDirect(100);
-            channels[i].read( buf, channels[i],
+            client.read(buf, client,
                 new CompletionHandler<Integer,AsynchronousSocketChannel>() {
                     public void completed(Integer bytesRead, AsynchronousSocketChannel ch) {
                         try {
@@ -113,13 +90,10 @@
         System.out.println("All read operations outstanding.");
 
         // write data to each of the accepted connections
-        int remaining = CONCURRENCY_COUNT;
-        while (remaining > 0) {
-            AsynchronousSocketChannel ch = queue.take();
-            ch.write(ByteBuffer.wrap("welcome".getBytes())).get();
-            ch.shutdownOutput();
-            ch.close();
-            remaining--;
+        for (AsynchronousSocketChannel peer: peers) {
+            peer.write(ByteBuffer.wrap("welcome".getBytes())).get();
+            peer.shutdownOutput();
+            peer.close();
         }
 
         // wait for all threads to reach the barrier
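
The reworked test above pairs each client connect() with a blocking accept().get() on the listener, so the accept-side queue and CompletionHandler are no longer needed. A minimal standalone sketch of that pairing, assuming only the java.nio.channels asynchronous-channel API used in the test (the class name ConnectAcceptPairing is illustrative):

    import java.net.InetAddress;
    import java.net.InetSocketAddress;
    import java.net.SocketAddress;
    import java.nio.channels.AsynchronousServerSocketChannel;
    import java.nio.channels.AsynchronousSocketChannel;
    import java.util.concurrent.Future;

    public class ConnectAcceptPairing {
        public static void main(String[] args) throws Exception {
            // Listener bound to an ephemeral port on the local host.
            AsynchronousServerSocketChannel listener =
                AsynchronousServerSocketChannel.open().bind(new InetSocketAddress(0));
            int port = ((InetSocketAddress) listener.getLocalAddress()).getPort();
            SocketAddress sa = new InetSocketAddress(InetAddress.getLocalHost(), port);

            // Pair one client connect with one accepted peer, waiting on both futures.
            AsynchronousSocketChannel client = AsynchronousSocketChannel.open();
            Future<Void> connectResult = client.connect(sa);
            AsynchronousSocketChannel peer = listener.accept().get();
            connectResult.get();

            peer.close();
            client.close();
            listener.close();
        }
    }
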
--- a/jdk/test/java/nio/file/Files/StreamTest.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/nio/file/Files/StreamTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -476,15 +476,25 @@
     }
 
     public void testSecurityException() throws IOException {
-        Path triggerFile = testFolder.resolve(Paths.get("dir", "SecurityException"));
-        Files.createFile(triggerFile);
-        Path sampleFile = testFolder.resolve(Paths.get("dir", "sample"));
-        Files.createFile(sampleFile);
-        Path triggerDir = testFolder.resolve(Paths.get("dir2", "SecurityException"));
-        Files.createDirectories(triggerDir);
+        Path empty = testFolder.resolve("empty");
+        Path triggerFile = Files.createFile(empty.resolve("SecurityException"));
+        Path sampleFile = Files.createDirectories(empty.resolve("sample"));
+
+        Path dir2 = testFolder.resolve("dir2");
+        Path triggerDir = Files.createDirectories(dir2.resolve("SecurityException"));
         Files.createFile(triggerDir.resolve("fileInSE"));
-        Path sample = testFolder.resolve(Paths.get("dir2", "file"));
-        Files.createFile(sample);
+        Path sample = Files.createFile(dir2.resolve("file"));
+
+        Path triggerLink = null;
+        Path linkTriggerDir = null;
+        Path linkTriggerFile = null;
+        if (supportsLinks) {
+            Path dir = testFolder.resolve("dir");
+            triggerLink = Files.createSymbolicLink(dir.resolve("SecurityException"), empty);
+            linkTriggerDir = Files.createSymbolicLink(dir.resolve("lnDirSE"), triggerDir);
+            linkTriggerFile = Files.createSymbolicLink(dir.resolve("lnFileSE"), triggerFile);
+        }
+
         FaultyFileSystem.FaultyFSProvider fsp = FaultyFileSystem.FaultyFSProvider.getInstance();
         FaultyFileSystem fs = (FaultyFileSystem) fsp.newFileSystem(testFolder, null);
 
@@ -492,10 +502,10 @@
             fsp.setFaultyMode(false);
             Path fakeRoot = fs.getRoot();
             // validate setting
-            try (CloseableStream<Path> s = Files.list(fakeRoot.resolve("dir"))) {
+            try (CloseableStream<Path> s = Files.list(fakeRoot.resolve("empty"))) {
                 String[] result = s.map(path -> path.getFileName().toString())
                                    .toArray(String[]::new);
-                assertEqualsNoOrder(result, new String[] { "d1","f1", "lnDir2", "SecurityException", "sample" });
+                assertEqualsNoOrder(result, new String[] { "SecurityException", "sample" });
             }
 
             try (CloseableStream<Path> s = Files.walk(fakeRoot.resolve("dir2"))) {
@@ -504,13 +514,21 @@
                 assertEqualsNoOrder(result, new String[] { "dir2", "SecurityException", "fileInSE", "file" });
             }
 
+            if (supportsLinks) {
+                try (CloseableStream<Path> s = Files.list(fakeRoot.resolve("dir"))) {
+                    String[] result = s.map(path -> path.getFileName().toString())
+                                       .toArray(String[]::new);
+                    assertEqualsNoOrder(result, new String[] { "d1", "f1", "lnDir2", "SecurityException", "lnDirSE", "lnFileSE" });
+                }
+            }
+
             // execute test
             fsp.setFaultyMode(true);
             // ignore file cause SecurityException
-            try (CloseableStream<Path> s = Files.walk(fakeRoot.resolve("dir"))) {
+            try (CloseableStream<Path> s = Files.walk(fakeRoot.resolve("empty"))) {
                 String[] result = s.map(path -> path.getFileName().toString())
                                    .toArray(String[]::new);
-                assertEqualsNoOrder(result, new String[] { "dir", "d1","f1", "lnDir2", "sample" });
+                assertEqualsNoOrder(result, new String[] { "empty", "sample" });
             }
             // skip folder cause SecurityException
             try (CloseableStream<Path> s = Files.walk(fakeRoot.resolve("dir2"))) {
@@ -519,11 +537,29 @@
                 assertEqualsNoOrder(result, new String[] { "dir2", "file" });
             }
 
+            if (supportsLinks) {
+                // not following links
+                try (CloseableStream<Path> s = Files.walk(fakeRoot.resolve("dir"))) {
+                    String[] result = s.map(path -> path.getFileName().toString())
+                                       .toArray(String[]::new);
+                    assertEqualsNoOrder(result, new String[] { "dir", "d1", "f1", "lnDir2", "lnDirSE", "lnFileSE" });
+                }
+
+                // following links
+                try (CloseableStream<Path> s = Files.walk(fakeRoot.resolve("dir"), FileVisitOption.FOLLOW_LINKS)) {
+                    String[] result = s.map(path -> path.getFileName().toString())
+                                       .toArray(String[]::new);
+                    // ?? Should fileInSE show up?
+                    // With FaultyFS it does, as no exception is thrown for the link to "SecurityException" when reading "lnXxxSE"
+                    assertEqualsNoOrder(result, new String[] { "dir", "d1", "f1", "lnDir2", "file", "lnDirSE", "lnFileSE", "fileInSE" });
+                }
+            }
+
             // list instead of walk
-            try (CloseableStream<Path> s = Files.list(fakeRoot.resolve("dir"))) {
+            try (CloseableStream<Path> s = Files.list(fakeRoot.resolve("empty"))) {
                 String[] result = s.map(path -> path.getFileName().toString())
                                    .toArray(String[]::new);
-                assertEqualsNoOrder(result, new String[] { "d1","f1", "lnDir2", "sample" });
+                assertEqualsNoOrder(result, new String[] { "sample" });
             }
             try (CloseableStream<Path> s = Files.list(fakeRoot.resolve("dir2"))) {
                 String[] result = s.map(path -> path.getFileName().toString())
@@ -578,6 +614,11 @@
             if (fs != null) {
                 fs.close();
             }
+            if (supportsLinks) {
+                Files.delete(triggerLink);
+                Files.delete(linkTriggerDir);
+                Files.delete(linkTriggerFile);
+            }
             Files.delete(triggerFile);
             Files.delete(sampleFile);
             Files.delete(sample);
@@ -589,7 +630,6 @@
         try (CloseableStream<String> s = Files.lines(testFolder.resolve("notExist"), Charset.forName("UTF-8"))) {
             s.forEach(l -> fail("File does not even exist!"));
         } catch (IOException ioe) {
-            ioe.printStackTrace(System.err);
             assertTrue(ioe instanceof NoSuchFileException);
         }
     }
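
The link-following cases added above hinge on the FileVisitOption.FOLLOW_LINKS argument to Files.walk: without it a symbolic link is reported but not descended into, with it the walk continues into the link target (which is why "fileInSE" appears). A small sketch of the distinction, written against the Stream-returning Files.walk of later JDK 8 builds rather than the interim CloseableStream type used by the test; the class name and path argument are illustrative:

    import java.io.IOException;
    import java.nio.file.FileVisitOption;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.stream.Stream;

    public class WalkFollowLinksDemo {
        public static void main(String[] args) throws IOException {
            Path start = Paths.get(args.length > 0 ? args[0] : ".");

            // Default: symbolic links are reported but not descended into.
            try (Stream<Path> s = Files.walk(start)) {
                s.forEach(System.out::println);
            }

            // With FOLLOW_LINKS the walk also descends into link targets.
            try (Stream<Path> s = Files.walk(start, FileVisitOption.FOLLOW_LINKS)) {
                s.forEach(System.out::println);
            }
        }
    }
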
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/security/AccessController/LimitedDoPrivileged.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,215 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8014097
+ * @summary Test the limited privilege scope version of doPrivileged
+ */
+
+import java.security.*;
+import java.util.*;
+
+public class LimitedDoPrivileged {
+    /*
+     * Test variations of doPrivileged() and doPrivileged() with a limited privilege scope
+     * in a sandbox with the usual default permission to read the system properties for the
+     * file and path separators.
+     *
+     * By passing in an "assigned" AccessControlContext that has
+     * no default permissions we can test how code privileges are being scoped.
+     */
+
+    private static final ProtectionDomain domain =
+        new ProtectionDomain(null, null, null, null);
+    private static final AccessControlContext acc =
+        new AccessControlContext(new ProtectionDomain[] { domain });
+    private static final PropertyPermission pathPerm =
+        new PropertyPermission("path.separator", "read");
+    private static final PropertyPermission filePerm =
+        new PropertyPermission("file.separator", "read");
+
+    public static void main(String[] args) throws Exception {
+        /*
+         * Verify that we have the usual default property read permission.
+         */
+        AccessController.getContext().checkPermission(filePerm);
+        AccessController.getContext().checkPermission(pathPerm);
+        System.out.println("test 1 passed");
+
+        /*
+         * Inject the "no permission" AccessControlContext.
+         */
+        AccessController.doPrivileged(new PrivilegedAction() {
+            public Object run() {
+
+                /*
+                 * Verify that we no longer have the "path.separator" permission.
+                 */
+                try {
+                    AccessController.getContext().checkPermission(pathPerm);
+                } catch (AccessControlException ace) {
+                    System.out.println("test 2 passed");
+                }
+
+                /*
+                 * Verify that we can give ourselves limited privilege to read
+                 * any system property starting with "path.".
+                 */
+                AccessController.doPrivileged
+                    (new PrivilegedAction() {
+                        public Object run() {
+                            AccessController.getContext().checkPermission(pathPerm);
+                            return null;
+                        }
+                }, null, new PropertyPermission("path.*", "read"));
+                System.out.println("test 3 passed");
+
+                /*
+                 * Verify that if we give ourselves limited privilege to read
+                 * any system property starting with "path.", it won't give us
+                 * the ability to read "file.separator".
+                 */
+                try {
+                    AccessController.doPrivileged
+                        (new PrivilegedAction() {
+                            public Object run() {
+                                AccessController.getContext().checkPermission(filePerm);
+                                return null;
+                            }
+                    }, null, new PropertyPermission("path.*", "read"));
+                } catch (AccessControlException ace) {
+                    System.out.println("test 4 passed");
+                }
+
+                /*
+                 * Verify that capturing and passing in the context with no default
+                 * system property permission grants will prevent access that succeeded
+                 * earlier without the context assignment.
+                 */
+                final AccessControlContext context = AccessController.getContext();
+                try {
+                    AccessController.doPrivileged
+                        (new PrivilegedAction() {
+                            public Object run() {
+                                AccessController.getContext().checkPermission(pathPerm);
+                                return null;
+                            }
+                    }, context, new PropertyPermission("path.*", "read"));
+                } catch (AccessControlException ace) {
+                    System.out.println("test 5 passed");
+                }
+
+                /*
+                 * Verify that we can give ourselves full privilege to read
+                 * any system property starting with "path.".
+                 */
+                AccessController.doPrivileged
+                     (new PrivilegedAction() {
+                        public Object run() {
+                            AccessController.getContext().checkPermission(pathPerm);
+                            return null;
+                        }
+                });
+                System.out.println("test 6 passed");
+
+                /*
+                 * Verify that capturing and passing in the context with no default
+                 * system property permission grants will prevent access that succeeded
+                 * earlier without the context assignment.
+                 */
+                try {
+                    AccessController.doPrivileged
+                        (new PrivilegedAction() {
+                            public Object run() {
+                                AccessController.getContext().checkPermission(pathPerm);
+                                return null;
+                            }
+                    }, context);
+                } catch (AccessControlException ace) {
+                    System.out.println("test 7 passed");
+                }
+
+                /*
+                 * Verify that we can give ourselves limited privilege to read
+                 * any system property starting with "path." when a limited
+                 * privilege scope context is captured and passed to a regular
+                 * doPrivileged() as an assigned context.
+                 */
+                AccessController.doPrivileged
+                     (new PrivilegedAction() {
+                        public Object run() {
+
+                            /*
+                             * Capture the limited privilege scope and inject it into the
+                             * regular doPrivileged().
+                             */
+                            final AccessControlContext limitedContext = AccessController.getContext();
+                            AccessController.doPrivileged
+                                (new PrivilegedAction() {
+                                    public Object run() {
+                                        AccessController.getContext().checkPermission(pathPerm);
+                                        return null;
+                                }
+                            }, limitedContext);
+                            return null;
+                        }
+                }, null, new PropertyPermission("path.*", "read"));
+                System.out.println("test 8 passed");
+
+                /*
+                 * Verify that giving ourselves limited privilege to read
+                 * any system property starting with "path." won't give us
+                 * the ability to read "file.separator" when a limited
+                 * privilege scope context is captured and passed to a regular
+                 * doPrivileged() as an assigned context.
+                 */
+                AccessController.doPrivileged
+                     (new PrivilegedAction() {
+                        public Object run() {
+
+                            /*
+                             * Capture the limited privilege scope and inject it into the
+                             * regular doPrivileged().
+                             */
+                            final AccessControlContext limitedContext = AccessController.getContext();
+                            try {
+                                AccessController.doPrivileged
+                                    (new PrivilegedAction() {
+                                        public Object run() {
+                                            AccessController.getContext().checkPermission(filePerm);
+                                            return null;
+                                    }
+                                }, limitedContext);
+                            } catch (AccessControlException ace) {
+                                System.out.println("test 9 passed");
+                            }
+                            return null;
+                        }
+                }, null, new PropertyPermission("path.*", "read"));
+
+                return null;
+            }
+        }, acc);
+    }
+}
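
The essence of the new test is the three-argument doPrivileged overload added by 8014097, which asserts the caller's privileges only for the listed permissions. A condensed sketch of tests 3 and 4 above, assuming the same JDK 8 java.security API; the class name LimitedScopeDemo is illustrative, and whether the checks pass or throw depends on the policy in effect, as it does in the test:

    import java.security.AccessControlContext;
    import java.security.AccessControlException;
    import java.security.AccessController;
    import java.security.PrivilegedAction;
    import java.security.ProtectionDomain;
    import java.util.PropertyPermission;

    public class LimitedScopeDemo {
        public static void main(String[] args) {
            // A context built from a ProtectionDomain with no permissions, as in the test.
            AccessControlContext noPerms = new AccessControlContext(
                    new ProtectionDomain[] { new ProtectionDomain(null, null, null, null) });
            PropertyPermission pathPerm = new PropertyPermission("path.separator", "read");
            PropertyPermission filePerm = new PropertyPermission("file.separator", "read");

            AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
                // Limited scope asserts only "path.*" reads: this check is expected to pass.
                AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
                    AccessController.getContext().checkPermission(pathPerm);
                    return null;
                }, null, new PropertyPermission("path.*", "read"));

                // The same limit does not cover "file.separator", so this check
                // is expected to fail with an AccessControlException.
                try {
                    AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
                        AccessController.getContext().checkPermission(filePerm);
                        return null;
                    }, null, new PropertyPermission("path.*", "read"));
                } catch (AccessControlException expected) {
                    System.out.println("file.separator correctly denied");
                }
                return null;
            }, noPerms);
        }
    }
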
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/util/Iterator/PrimitiveIteratorDefaults.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+import org.testng.annotations.Test;
+
+import java.util.PrimitiveIterator;
+import java.util.function.Consumer;
+import java.util.function.DoubleConsumer;
+import java.util.function.IntConsumer;
+import java.util.function.LongConsumer;
+
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * @test
+ * @run testng PrimitiveIteratorDefaults
+ * @summary test default methods on PrimitiveIterator
+ */
+@Test
+public class PrimitiveIteratorDefaults {
+
+    public void testIntForEachRemainingWithNull() {
+        PrimitiveIterator.OfInt i = new PrimitiveIterator.OfInt() {
+            @Override
+            public int nextInt() {
+                return 0;
+            }
+
+            @Override
+            public boolean hasNext() {
+                return false;
+            }
+        };
+
+        executeAndCatch(() -> i.forEachRemaining((IntConsumer) null));
+        executeAndCatch(() -> i.forEachRemaining((Consumer<Integer>) null));
+    }
+
+    public void testLongForEachRemainingWithNull() {
+        PrimitiveIterator.OfLong i = new PrimitiveIterator.OfLong() {
+            @Override
+            public long nextLong() {
+                return 0;
+            }
+
+            @Override
+            public boolean hasNext() {
+                return false;
+            }
+        };
+
+        executeAndCatch(() -> i.forEachRemaining((LongConsumer) null));
+        executeAndCatch(() -> i.forEachRemaining((Consumer<Long>) null));
+    }
+
+    public void testDoubleForEachRemainingWithNull() {
+        PrimitiveIterator.OfDouble i = new PrimitiveIterator.OfDouble() {
+            @Override
+            public double nextDouble() {
+                return 0;
+            }
+
+            @Override
+            public boolean hasNext() {
+                return false;
+            }
+        };
+
+        executeAndCatch(() -> i.forEachRemaining((DoubleConsumer) null));
+        executeAndCatch(() -> i.forEachRemaining((Consumer<Double>) null));
+    }
+
+    private void executeAndCatch(Runnable r) {
+        executeAndCatch(NullPointerException.class, r);
+    }
+
+    private void executeAndCatch(Class<? extends Exception> expected, Runnable r) {
+        Exception caught = null;
+        try {
+            r.run();
+        }
+        catch (Exception e) {
+            caught = e;
+        }
+
+        assertNotNull(caught,
+                      String.format("No Exception was thrown, expected an Exception of %s to be thrown",
+                                    expected.getName()));
+        assertTrue(expected.isInstance(caught),
+                   String.format("Exception thrown %s not an instance of %s",
+                                 caught.getClass().getName(), expected.getName()));
+    }
+
+}
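
The null-handling checks above target the two forEachRemaining overloads each primitive iterator carries: the primitive-consumer form and the boxing default inherited from Iterator. A small sketch of the two call shapes, assuming the JDK 8 java.util.stream API only to obtain an iterator; the class name is illustrative:

    import java.util.PrimitiveIterator;
    import java.util.function.IntConsumer;
    import java.util.stream.IntStream;

    public class ForEachRemainingDemo {
        public static void main(String[] args) {
            // Primitive overload: no boxing.
            PrimitiveIterator.OfInt it = IntStream.range(0, 3).iterator();
            it.forEachRemaining((IntConsumer) v -> System.out.println("int " + v));

            // Boxing overload inherited from Iterator<Integer>; the default
            // implementation adapts it, boxing each value.
            PrimitiveIterator.OfInt it2 = IntStream.range(0, 3).iterator();
            it2.forEachRemaining((Integer v) -> System.out.println("Integer " + v));

            // Passing null to either overload throws NullPointerException,
            // which is what the test above verifies.
        }
    }
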
--- a/jdk/test/java/util/Locale/LocaleCategory.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/Locale/LocaleCategory.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 #
 # Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4700857 6997928 7079486
--- a/jdk/test/java/util/Locale/LocaleProviders.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/Locale/LocaleProviders.java	Wed Jul 05 18:59:05 2017 +0200
@@ -64,6 +64,10 @@
                 bug8013086Test(args[1], args[2]);
                 break;
 
+            case "bug8013903Test":
+                bug8013903Test();
+                break;
+
             default:
                 throw new RuntimeException("Test method '"+methodName+"' not found.");
         }
@@ -195,4 +199,30 @@
             // ParseException is fine in this test, as it's not "UTC"
 }
     }
+
+    static void bug8013903Test() {
+        if (System.getProperty("os.name").startsWith("Windows")) {
+            Date sampleDate = new Date(0x10000000000L);
+            String fallbackResult = "Heisei 16.Nov.03 (Wed) AM 11:53:47";
+            String jreResult = "\u5e73\u6210 16.11.03 (\u6c34) \u5348\u524d 11:53:47";
+            Locale l = new Locale("ja", "JP", "JP");
+            SimpleDateFormat sdf = new SimpleDateFormat("GGGG yyyy.MMM.dd '('E')' a hh:mm:ss", l);
+            String result = sdf.format(sampleDate);
+            System.out.println(result);
+            if (LocaleProviderAdapter.getAdapterPreference()
+                .contains(LocaleProviderAdapter.Type.JRE)) {
+                if (!jreResult.equals(result)) {
+                    throw new RuntimeException("Format failed. result: \"" +
+                        result + "\", expected: \"" + jreResult + "\"");
+                }
+            } else {
+                // should be FALLBACK, as Windows HOST does not return
+                // display names
+                if (!fallbackResult.equals(result)) {
+                    throw new RuntimeException("Format failed. result: \"" +
+                        result + "\", expected: \"" + fallbackResult + "\"");
+                }
+            }
+        }
+    }
 }
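
The new bug8013903Test formats a date with the Japanese imperial-era variant locale and accepts either the JRE-localized era string or the English fallback, depending on which adapter java.locale.providers selects. A minimal sketch of the formatting call itself, independent of the adapter machinery; the class name is illustrative and the pattern, locale, and sample date mirror the test:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.Locale;

    public class JapaneseEraFormatDemo {
        public static void main(String[] args) {
            // The "JP" variant selects the Japanese imperial calendar with the JRE provider.
            Locale l = new Locale("ja", "JP", "JP");
            SimpleDateFormat sdf =
                new SimpleDateFormat("GGGG yyyy.MMM.dd '('E')' a hh:mm:ss", l);
            System.out.println(sdf.format(new Date(0x10000000000L)));
        }
    }
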
--- a/jdk/test/java/util/Locale/LocaleProviders.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/Locale/LocaleProviders.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 #
 # Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,11 +21,10 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 6336885 7196799 7197573 7198834 8000245 8000615 8001440 8010666
-#      8013086 8013233
+#      8013086 8013233 8013903
 # @summary tests for "java.locale.providers" system property
 # @compile -XDignore.symbol.file LocaleProviders.java
 # @run shell/timeout=600 LocaleProviders.sh
@@ -300,4 +300,18 @@
 PARAM3=
 runTest
 
+# testing 8013903 fix. (Windows only)
+METHODNAME=bug8013903Test
+PREFLIST=HOST,JRE
+PARAM1=
+PARAM2=
+PARAM3=
+runTest
+METHODNAME=bug8013903Test
+PREFLIST=HOST
+PARAM1=
+PARAM2=
+PARAM3=
+runTest
+
 exit $result
--- a/jdk/test/java/util/Locale/data/deflocale.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/Locale/data/deflocale.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 #
 #
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/util/Map/CheckRandomHashSeed.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * @test
+ * @bug 8005698
+ * @summary Check operation of jdk.map.useRandomSeed property
+ * @run main CheckRandomHashSeed
+ * @run main/othervm -Djdk.map.useRandomSeed=false CheckRandomHashSeed
+ * @run main/othervm -Djdk.map.useRandomSeed=bogus CheckRandomHashSeed
+ * @run main/othervm -Djdk.map.useRandomSeed=true CheckRandomHashSeed true
+ * @author Brent Christian
+ */
+import java.lang.reflect.Field;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Hashtable;
+import java.util.WeakHashMap;
+
+public class CheckRandomHashSeed {
+    private final static String PROP_NAME = "jdk.map.useRandomSeed";
+    static boolean expectRandom = false;
+
+    public static void main(String[] args) {
+        if (args.length > 0 && args[0].equals("true")) {
+            expectRandom = true;
+        }
+        String hashSeedProp = System.getProperty(PROP_NAME);
+        boolean propSet = (null != hashSeedProp)
+                ? Boolean.parseBoolean(hashSeedProp) : false;
+        if (expectRandom != propSet) {
+            throw new Error("Error in test setup: " + (expectRandom ? "" : "not " ) + "expecting random hashSeed, but " + PROP_NAME + " is " + (propSet ? "" : "not ") + "enabled");
+        }
+
+        testMap(new HashMap());
+        testMap(new LinkedHashMap());
+        testMap(new WeakHashMap());
+        testMap(new Hashtable());
+    }
+
+    private static void testMap(Map map) {
+        int hashSeed = getHashSeed(map);
+        boolean hashSeedIsZero = (hashSeed == 0);
+
+        if (expectRandom != hashSeedIsZero) {
+            System.out.println("Test passed for " + map.getClass().getSimpleName() + " - expectRandom: " + expectRandom + ", hashSeed: " + hashSeed);
+        } else {
+            throw new Error ("Test FAILED for " + map.getClass().getSimpleName() + " -  expectRandom: " + expectRandom + ", hashSeed: " + hashSeed);
+        }
+    }
+
+    private static int getHashSeed(Map map) {
+        try {
+            if (map instanceof HashMap || map instanceof LinkedHashMap) {
+                map.put("Key", "Value");
+                Field hashSeedField = HashMap.class.getDeclaredField("hashSeed");
+                hashSeedField.setAccessible(true);
+                int hashSeed = hashSeedField.getInt(map);
+                return hashSeed;
+            } else {
+                map.put("Key", "Value");
+                Field hashSeedField = map.getClass().getDeclaredField("hashSeed");
+                hashSeedField.setAccessible(true);
+                int hashSeed = hashSeedField.getInt(map);
+                return hashSeed;
+            }
+        } catch(Exception e) {
+            e.printStackTrace();
+            throw new Error(e);
+        }
+    }
+}
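
CheckRandomHashSeed above relies on plain java.lang.reflect access to a non-public field. A generic sketch of that pattern, with Holder and its seed field as illustrative stand-ins for the map classes and their hashSeed field in this interim build:

    import java.lang.reflect.Field;

    public class PrivateFieldReadDemo {
        static class Holder {
            private final int seed = 42;
        }

        public static void main(String[] args) throws Exception {
            Holder h = new Holder();
            Field f = Holder.class.getDeclaredField("seed");
            f.setAccessible(true);           // bypass private access for the read
            int value = f.getInt(h);
            System.out.println("seed = " + value);
        }
    }
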
--- a/jdk/test/java/util/Map/Collisions.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/Map/Collisions.java	Wed Jul 05 18:59:05 2017 +0200
@@ -26,6 +26,7 @@
  * @bug 7126277
  * @run main Collisions -shortrun
  * @run main/othervm -Djdk.map.althashing.threshold=0 Collisions -shortrun
+ * @run main/othervm -Djdk.map.useRandomSeed=true Collisions -shortrun
  * @summary Ensure Maps behave well with lots of hashCode() collisions.
  * @author Mike Duigou
  */
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/util/Map/InPlaceOpsCollisions.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,665 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8005698
+ * @run main InPlaceOpsCollisions -shortrun
+ * @run main/othervm -Djdk.map.useRandomSeed=true InPlaceOpsCollisions -shortrun
+ * @summary Ensure overrides of in-place operations in Maps behave well with lots of collisions.
+ * @author Brent Christian
+ */
+import java.util.*;
+import java.util.function.*;
+
+public class InPlaceOpsCollisions {
+
+    /**
+     * Number of elements per map.
+     */
+    private static final int TEST_SIZE = 5000;
+
+    final static class HashableInteger implements Comparable<HashableInteger> {
+
+        final int value;
+        final int hashmask; //yes duplication
+
+        HashableInteger(int value, int hashmask) {
+            this.value = value;
+            this.hashmask = hashmask;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj instanceof HashableInteger) {
+                HashableInteger other = (HashableInteger) obj;
+
+                return other.value == value;
+            }
+
+            return false;
+        }
+
+        @Override
+        public int hashCode() {
+            return value % hashmask;
+        }
+
+        @Override
+        public int compareTo(HashableInteger o) {
+            return value - o.value;
+        }
+
+        @Override
+        public String toString() {
+            return Integer.toString(value);
+        }
+    }
+
+    static HashableInteger EXTRA_INT_VAL;
+    static String EXTRA_STRING_VAL;
+
+    private static Object[][] makeTestData(int size) {
+        HashableInteger UNIQUE_OBJECTS[] = new HashableInteger[size];
+        HashableInteger COLLIDING_OBJECTS[] = new HashableInteger[size];
+        String UNIQUE_STRINGS[] = new String[size];
+        String COLLIDING_STRINGS[] = new String[size];
+
+        for (int i = 0; i < size; i++) {
+            UNIQUE_OBJECTS[i] = new HashableInteger(i, Integer.MAX_VALUE);
+            COLLIDING_OBJECTS[i] = new HashableInteger(i, 10);
+            UNIQUE_STRINGS[i] = unhash(i);
+            COLLIDING_STRINGS[i] = (0 == i % 2)
+                    ? UNIQUE_STRINGS[i / 2]
+                    : "\u0000\u0000\u0000\u0000\u0000" + COLLIDING_STRINGS[i - 1];
+        }
+        EXTRA_INT_VAL = new HashableInteger(size, Integer.MAX_VALUE);
+        EXTRA_STRING_VAL = new String ("Extra Value");
+
+        return new Object[][] {
+            new Object[]{"Unique Objects", UNIQUE_OBJECTS},
+            new Object[]{"Colliding Objects", COLLIDING_OBJECTS},
+            new Object[]{"Unique Strings", UNIQUE_STRINGS},
+            new Object[]{"Colliding Strings", COLLIDING_STRINGS}
+        };
+    }
+
+    /**
+     * Returns a string with a hash equal to the argument.
+     *
+     * @return string with a hash equal to the argument.
+     */
+    public static String unhash(int target) {
+        StringBuilder answer = new StringBuilder();
+        if (target < 0) {
+            // String with hash of Integer.MIN_VALUE, 0x80000000
+            answer.append("\u0915\u0009\u001e\u000c\u0002");
+
+            if (target == Integer.MIN_VALUE) {
+                return answer.toString();
+            }
+            // Find target without sign bit set
+            target = target & Integer.MAX_VALUE;
+        }
+
+        unhash0(answer, target);
+        return answer.toString();
+    }
+
+    private static void unhash0(StringBuilder partial, int target) {
+        int div = target / 31;
+        int rem = target % 31;
+
+        if (div <= Character.MAX_VALUE) {
+            if (div != 0) {
+                partial.append((char) div);
+            }
+            partial.append((char) rem);
+        } else {
+            unhash0(partial, div);
+            partial.append((char) rem);
+        }
+    }
+
+    private static void realMain(String[] args) throws Throwable {
+        boolean shortRun = args.length > 0 && args[0].equals("-shortrun");
+
+        Object[][] mapKeys = makeTestData(shortRun ? (TEST_SIZE / 2) : TEST_SIZE);
+
+        // loop through data sets
+        for (Object[] keys_desc : mapKeys) {
+            Map<Object, Object>[] maps = (Map<Object, Object>[]) new Map[]{
+                        new HashMap<>(),
+                        new LinkedHashMap<>(),
+                    };
+
+            // for each map type.
+            for (Map<Object, Object> map : maps) {
+                String desc = (String) keys_desc[0];
+                Object[] keys = (Object[]) keys_desc[1];
+                try {
+                    testInPlaceOps(map, desc, keys);
+                } catch(Exception all) {
+                    unexpected("Failed for " + map.getClass().getName() + " with " + desc, all);
+                }
+            }
+        }
+    }
+
+    private static <T> void testInsertion(Map<T, T> map, String keys_desc, T[] keys) {
+        check("map empty", (map.size() == 0) && map.isEmpty());
+
+        for (int i = 0; i < keys.length; i++) {
+            check(String.format("insertion: map expected size m%d != i%d", map.size(), i),
+                    map.size() == i);
+            check(String.format("insertion: put(%s[%d])", keys_desc, i), null == map.put(keys[i], keys[i]));
+            check(String.format("insertion: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+            check(String.format("insertion: containsValue(%s[%d])", keys_desc, i), map.containsValue(keys[i]));
+        }
+
+        check(String.format("map expected size m%d != k%d", map.size(), keys.length),
+                map.size() == keys.length);
+    }
+
+
+    private static <T> void testInPlaceOps(Map<T, T> map, String keys_desc, T[] keys) {
+        System.out.println(map.getClass() + " : " + keys_desc + ", testInPlaceOps");
+        System.out.flush();
+
+        testInsertion(map, keys_desc, keys);
+        testPutIfAbsent(map, keys_desc, keys);
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testRemoveMapping(map, keys_desc, keys);
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testReplaceOldValue(map, keys_desc, keys);
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testReplaceIfMapped(map, keys_desc, keys);
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testComputeIfAbsent(map, keys_desc, keys, (k) -> getExtraVal(keys[0]));
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testComputeIfAbsent(map, keys_desc, keys, (k) -> null);
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testComputeIfPresent(map, keys_desc, keys, (k, v) -> getExtraVal(keys[0]));
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testComputeIfPresent(map, keys_desc, keys, (k, v) -> null);
+
+        if (!keys_desc.contains("Strings")) { // avoid parseInt() number format error
+            map.clear();
+            testInsertion(map, keys_desc, keys);
+            testComputeNonNull(map, keys_desc, keys);
+        }
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testComputeNull(map, keys_desc, keys);
+
+        if (!keys_desc.contains("Strings")) { // avoid parseInt() number format error
+            map.clear();
+            testInsertion(map, keys_desc, keys);
+            testMergeNonNull(map, keys_desc, keys);
+        }
+
+        map.clear();
+        testInsertion(map, keys_desc, keys);
+        testMergeNull(map, keys_desc, keys);
+    }
+
+
+
+    private static <T> void testPutIfAbsent(Map<T, T> map, String keys_desc, T[] keys) {
+        T extraVal = getExtraVal(keys[0]);
+        T retVal;
+        removeOddKeys(map, keys);
+        for (int i = 0; i < keys.length; i++) {
+            retVal = map.putIfAbsent(keys[i], extraVal);
+            if (i % 2 == 0) { // even: not absent, not put
+                check(String.format("putIfAbsent: (%s[%d]) retVal", keys_desc, i), retVal == keys[i]);
+                check(String.format("putIfAbsent: get(%s[%d])", keys_desc, i), keys[i] == map.get(keys[i]));
+                check(String.format("putIfAbsent: containsValue(%s[%d])", keys_desc, i), map.containsValue(keys[i]));
+            } else { // odd: absent, was put
+                check(String.format("putIfAbsent: (%s[%d]) retVal", keys_desc, i), retVal == null);
+                check(String.format("putIfAbsent: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
+                check(String.format("putIfAbsent: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
+            }
+            check(String.format("insertion: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+        }
+        check(String.format("map expected size m%d != k%d", map.size(), keys.length),
+                map.size() == keys.length);
+    }
+
+    private static <T> void testRemoveMapping(Map<T, T> map, String keys_desc, T[] keys) {
+        T extraVal = getExtraVal(keys[0]);
+        boolean removed;
+        int removes = 0;
+        remapOddKeys(map, keys);
+        for (int i = 0; i < keys.length; i++) {
+            removed = map.remove(keys[i], keys[i]);
+            if (i % 2 == 0) { // even: original mapping, should be removed
+                check(String.format("removeMapping: retVal(%s[%d])", keys_desc, i), removed);
+                check(String.format("removeMapping: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
+                check(String.format("removeMapping: !containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
+                check(String.format("removeMapping: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
+                removes++;
+            } else { // odd: new mapping, not removed
+                check(String.format("removeMapping: retVal(%s[%d])", keys_desc, i), !removed);
+                check(String.format("removeMapping: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
+                check(String.format("removeMapping: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+                check(String.format("removeMapping: containsValue(%s[%d])", keys_desc, i), map.containsValue(extraVal));
+            }
+        }
+        check(String.format("map expected size m%d != k%d", map.size(), keys.length - removes),
+                map.size() == keys.length - removes);
+    }
+
+    private static <T> void testReplaceOldValue(Map<T, T> map, String keys_desc, T[] keys) {
+        // remap odds to extraVal
+        // call replace to replace for extraVal, for all keys
+        // check that all keys map to value from keys array
+        T extraVal = getExtraVal(keys[0]);
+        boolean replaced;
+        remapOddKeys(map, keys);
+
+        for (int i = 0; i < keys.length; i++) {
+            replaced = map.replace(keys[i], extraVal, keys[i]);
+            if (i % 2 == 0) { // even: original mapping, should not be replaced
+                check(String.format("replaceOldValue: retVal(%s[%d])", keys_desc, i), !replaced);
+            } else { // odd: new mapping, should be replaced
+                check(String.format("replaceOldValue: retVal(%s[%d])", keys_desc, i), replaced);
+            }
+            check(String.format("replaceOldValue: get(%s[%d])", keys_desc, i), keys[i] == map.get(keys[i]));
+            check(String.format("replaceOldValue: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+            check(String.format("replaceOldValue: containsValue(%s[%d])", keys_desc, i), map.containsValue(keys[i]));
+//            removes++;
+        }
+        check(String.format("replaceOldValue: !containsValue(%s[%s])", keys_desc, extraVal.toString()), !map.containsValue(extraVal));
+        check(String.format("map expected size m%d != k%d", map.size(), keys.length),
+                map.size() == keys.length);
+    }
+
+    // TODO: Test case for key mapped to null value
+    private static <T> void testReplaceIfMapped(Map<T, T> map, String keys_desc, T[] keys) {
+        // remove odd keys
+        // call replace for all keys[]
+        // odd keys should remain absent, even keys should be mapped to EXTRA, no value from keys[] should be in map
+        T extraVal = getExtraVal(keys[0]);
+        int expectedSize1 = 0;
+        removeOddKeys(map, keys);
+        int expectedSize2 = map.size();
+
+        for (int i = 0; i < keys.length; i++) {
+            T retVal = map.replace(keys[i], extraVal);
+            if (i % 2 == 0) { // even: still in map, should be replaced
+                check(String.format("replaceIfMapped: retVal(%s[%d])", keys_desc, i), retVal == keys[i]);
+                check(String.format("replaceIfMapped: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
+                check(String.format("replaceIfMapped: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+                expectedSize1++;
+            } else { // odd: was removed, should not be replaced
+                check(String.format("replaceIfMapped: retVal(%s[%d])", keys_desc, i), retVal == null);
+                check(String.format("replaceIfMapped: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
+                check(String.format("replaceIfMapped: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
+            }
+            check(String.format("replaceIfMapped: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
+        }
+        check(String.format("replaceIfMapped: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
+        check(String.format("map expected size#1 m%d != k%d", map.size(), expectedSize1),
+                map.size() == expectedSize1);
+        check(String.format("map expected size#2 m%d != k%d", map.size(), expectedSize2),
+                map.size() == expectedSize2);
+
+    }
+
+    private static <T> void testComputeIfAbsent(Map<T, T> map, String keys_desc, T[] keys,
+                                                Function<T,T> mappingFunction) {
+        // remove a third of the keys
+        // call computeIfAbsent for all keys, func returns EXTRA
+        // check that removed keys now -> EXTRA, other keys -> original val
+        T expectedVal = mappingFunction.apply(keys[0]);
+        T retVal;
+        int expectedSize = 0;
+        removeThirdKeys(map, keys);
+        for (int i = 0; i < keys.length; i++) {
+            retVal = map.computeIfAbsent(keys[i], mappingFunction);
+            if (i % 3 != 2) { // key present, not computed
+                check(String.format("computeIfAbsent: (%s[%d]) retVal", keys_desc, i), retVal == keys[i]);
+                check(String.format("computeIfAbsent: get(%s[%d])", keys_desc, i), keys[i] == map.get(keys[i]));
+                check(String.format("computeIfAbsent: containsValue(%s[%d])", keys_desc, i), map.containsValue(keys[i]));
+                check(String.format("insertion: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+                expectedSize++;
+            } else { // key absent, computed unless function return null
+                check(String.format("computeIfAbsent: (%s[%d]) retVal", keys_desc, i), retVal == expectedVal);
+                check(String.format("computeIfAbsent: get(%s[%d])", keys_desc, i), expectedVal == map.get(keys[i]));
+                check(String.format("computeIfAbsent: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
+                // mapping should not be added if function returns null
+                check(String.format("insertion: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]) != (expectedVal == null));
+                if (expectedVal != null) { expectedSize++; }
+            }
+        }
+        if (expectedVal != null) {
+            check(String.format("computeIfAbsent: containsValue(%s[%s])", keys_desc, expectedVal), map.containsValue(expectedVal));
+        }
+        check(String.format("map expected size m%d != k%d", map.size(), expectedSize),
+                map.size() == expectedSize);
+    }
+
+    private static <T> void testComputeIfPresent(Map<T, T> map, String keys_desc, T[] keys,
+                                                BiFunction<T,T,T> mappingFunction) {
+        // remove a third of the keys
+        // call computeIfPresent() for all keys[]
+        // removed keys should remain absent; remaining keys map to the function result (or are removed if it returns null)
+        // no value from keys[] should be in map
+        T funcResult = mappingFunction.apply(keys[0], keys[0]);
+        int expectedSize1 = 0;
+        removeThirdKeys(map, keys);
+
+        for (int i = 0; i < keys.length; i++) {
+            T retVal = map.computeIfPresent(keys[i], mappingFunction);
+            if (i % 3 != 2) { // key present
+                if (funcResult == null) { // was removed
+                    check(String.format("computeIfPresent: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
+                } else { // value was replaced
+                    check(String.format("computeIfPresent: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+                    expectedSize1++;
+                }
+                check(String.format("computeIfPresent: retVal(%s[%s])", keys_desc, i), retVal == funcResult);
+                check(String.format("computeIfPresent: get(%s[%d])", keys_desc, i), funcResult == map.get(keys[i]));
+
+            } else { // odd: was removed, should not be replaced
+                check(String.format("computeIfPresent: retVal(%s[%d])", keys_desc, i), retVal == null);
+                check(String.format("computeIfPresent: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
+                check(String.format("computeIfPresent: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
+            }
+            check(String.format("computeIfPresent: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
+        }
+        check(String.format("map expected size#1 m%d != k%d", map.size(), expectedSize1),
+                map.size() == expectedSize1);
+    }
+
+    private static <T> void testComputeNonNull(Map<T, T> map, String keys_desc, T[] keys) {
+        // remove a third of the keys
+        // call compute() for all keys[]
+        // all keys should be present: removed keys -> EXTRA, others to k-1
+        BiFunction<T,T,T> mappingFunction = (k, v) -> {
+                if (v == null) {
+                    return getExtraVal(keys[0]);
+                } else {
+                    return keys[Integer.parseInt(k.toString()) - 1];
+                }
+            };
+        T extraVal = getExtraVal(keys[0]);
+        removeThirdKeys(map, keys);
+        for (int i = 1; i < keys.length; i++) {
+            T retVal = map.compute(keys[i], mappingFunction);
+            if (i % 3 != 2) { // key present, should be mapped to k-1
+                check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == keys[i-1]);
+                check(String.format("compute: get(%s[%d])", keys_desc, i), keys[i-1] == map.get(keys[i]));
+            } else { // odd: was removed, should be replaced with EXTRA
+                check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == extraVal);
+                check(String.format("compute: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
+            }
+            check(String.format("compute: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+        }
+        check(String.format("map expected size#1 m%d != k%d", map.size(), keys.length),
+                map.size() == keys.length);
+        check(String.format("compute: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
+        check(String.format("compute: !containsValue(%s,[null])", keys_desc), !map.containsValue(null));
+    }
+
+    private static <T> void testComputeNull(Map<T, T> map, String keys_desc, T[] keys) {
+        // remove a third of the keys
+        // call compute() for all keys[]
+        // removed keys should -> EXTRA
+        // for other keys: func returns null, should have no mapping
+        BiFunction<T,T,T> mappingFunction = (k, v) -> {
+            // if absent/null -> EXTRA
+            // if present -> null
+            if (v == null) {
+                return getExtraVal(keys[0]);
+            } else {
+                return null;
+            }
+        };
+        T extraVal = getExtraVal(keys[0]);
+        int expectedSize = 0;
+        removeThirdKeys(map, keys);
+        for (int i = 0; i < keys.length; i++) {
+            T retVal = map.compute(keys[i], mappingFunction);
+            if (i % 3 != 2) { // key present, func returned null, should be absent from map
+                check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == null);
+                check(String.format("compute: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
+                check(String.format("compute: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
+                check(String.format("compute: containsValue(%s[%s])", keys_desc, i), !map.containsValue(keys[i]));
+            } else { // odd: was removed, should now be mapped to EXTRA
+                check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == extraVal);
+                check(String.format("compute: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
+                check(String.format("compute: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+                expectedSize++;
+            }
+        }
+        check(String.format("compute: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
+        check(String.format("map expected size#1 m%d != k%d", map.size(), expectedSize),
+                map.size() == expectedSize);
+    }
+
+    private static <T> void testMergeNonNull(Map<T, T> map, String keys_desc, T[] keys) {
+        // remove a third of the keys
+        // call merge() for all keys[]
+        // all keys should be present: removed keys now -> EXTRA, other keys -> k-1
+
+        // Map to preceding key
+        BiFunction<T,T,T> mappingFunction = (k, v) -> keys[Integer.parseInt(k.toString()) - 1];
+        T extraVal = getExtraVal(keys[0]);
+        removeThirdKeys(map, keys);
+        for (int i = 1; i < keys.length; i++) {
+            T retVal = map.merge(keys[i], extraVal, mappingFunction);
+            if (i % 3 != 2) { // key present, should be mapped to k-1
+                check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == keys[i-1]);
+                check(String.format("compute: get(%s[%d])", keys_desc, i), keys[i-1] == map.get(keys[i]));
+            } else { // odd: was removed, should be replaced with EXTRA
+                check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == extraVal);
+                check(String.format("compute: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
+            }
+            check(String.format("compute: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+        }
+
+        check(String.format("map expected size#1 m%d != k%d", map.size(), keys.length),
+                map.size() == keys.length);
+        check(String.format("compute: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
+        check(String.format("compute: !containsValue(%s,[null])", keys_desc), !map.containsValue(null));
+
+    }
+
+    private static <T> void testMergeNull(Map<T, T> map, String keys_desc, T[] keys) {
+        // remove a third of the keys
+        // call merge() for all keys[]
+        // result: removed keys -> EXTRA, other keys absent
+
+        BiFunction<T,T,T> mappingFunction = (k, v) -> null;
+        T extraVal = getExtraVal(keys[0]);
+        int expectedSize = 0;
+        removeThirdKeys(map, keys);
+        for (int i = 0; i < keys.length; i++) {
+            T retVal = map.merge(keys[i], extraVal, mappingFunction);
+            if (i % 3 != 2) { // key present, func returned null, should be absent from map
+                check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == null);
+                check(String.format("compute: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
+                check(String.format("compute: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
+            } else { // odd: was removed, should now be mapped to EXTRA
+                check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == extraVal);
+                check(String.format("compute: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
+                check(String.format("compute: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
+                expectedSize++;
+            }
+            check(String.format("compute: containsValue(%s[%s])", keys_desc, i), !map.containsValue(keys[i]));
+        }
+        check(String.format("compute: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
+        check(String.format("map expected size#1 m%d != k%d", map.size(), expectedSize),
+                map.size() == expectedSize);
+    }
+
+    /*
+     * Return the EXTRA val for the key type being used
+     */
+    private static <T> T getExtraVal(T key) {
+        if (key instanceof HashableInteger) {
+            return (T)EXTRA_INT_VAL;
+        } else {
+            return (T)EXTRA_STRING_VAL;
+        }
+    }
+
+    /*
+     * Remove half of the keys
+     */
+    private static <T> void removeOddKeys(Map<T, T> map, /*String keys_desc, */ T[] keys) {
+        int removes = 0;
+        for (int i = 0; i < keys.length; i++) {
+            if (i % 2 != 0) {
+                map.remove(keys[i]);
+                removes++;
+            }
+        }
+        check(String.format("map expected size m%d != k%d", map.size(), keys.length - removes),
+                map.size() == keys.length - removes);
+    }
+
+    /*
+     * Remove every third key.
+     * This should leave some removed keys in TreeBins for, e.g., computeIfAbsent
+     * with a function that returns null.
+     *
+     * TODO: consider using this in other tests (and maybe adding a remapThirdKeys)
+     */
+    private static <T> void removeThirdKeys(Map<T, T> map, /*String keys_desc, */ T[] keys) {
+        int removes = 0;
+        for (int i = 0; i < keys.length; i++) {
+            if (i % 3 == 2) {
+                map.remove(keys[i]);
+                removes++;
+            }
+        }
+        check(String.format("map expected size m%d != k%d", map.size(), keys.length - removes),
+                map.size() == keys.length - removes);
+    }
+
+    /*
+     * Re-map the odd-indexed keys to the EXTRA value
+     */
+    private static <T> void remapOddKeys(Map<T, T> map, /*String keys_desc, */ T[] keys) {
+        T extraVal = getExtraVal(keys[0]);
+        for (int i = 0; i < keys.length; i++) {
+            if (i % 2 != 0) {
+                map.put(keys[i], extraVal);
+            }
+        }
+    }
+
+    //--------------------- Infrastructure ---------------------------
+    static volatile int passed = 0, failed = 0;
+
+    static void pass() {
+        passed++;
+    }
+
+    static void fail() {
+        failed++;
+        (new Error("Failure")).printStackTrace(System.err);
+    }
+
+    static void fail(String msg) {
+        failed++;
+        (new Error("Failure: " + msg)).printStackTrace(System.err);
+    }
+
+    static void abort() {
+        fail();
+        System.exit(1);
+    }
+
+    static void abort(String msg) {
+        fail(msg);
+        System.exit(1);
+    }
+
+    static void unexpected(String msg, Throwable t) {
+        System.err.println("Unexpected: " + msg);
+        unexpected(t);
+    }
+
+    static void unexpected(Throwable t) {
+        failed++;
+        t.printStackTrace(System.err);
+    }
+
+    static void check(boolean cond) {
+        if (cond) {
+            pass();
+        } else {
+            fail();
+        }
+    }
+
+    static void check(String desc, boolean cond) {
+        if (cond) {
+            pass();
+        } else {
+            fail(desc);
+        }
+    }
+
+    static void equal(Object x, Object y) {
+        if (Objects.equals(x, y)) {
+            pass();
+        } else {
+            fail(x + " not equal to " + y);
+        }
+    }
+
+    public static void main(String[] args) throws Throwable {
+        Thread.currentThread().setName(Collisions.class.getName());
+//        Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
+        try {
+            realMain(args);
+        } catch (Throwable t) {
+            unexpected(t);
+        }
+
+        System.out.printf("%nPassed = %d, failed = %d%n%n", passed, failed);
+        if (failed > 0) {
+            throw new Error("Some tests failed");
+        }
+    }
+}
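
For reference, the Map.merge() contract that testMerge/testMergeNull exercise can be summarized in a small stand-alone sketch (plain HashMap; the class name MergeSketch is illustrative and not part of the changeset):

import java.util.HashMap;
import java.util.Map;

public class MergeSketch {
    public static void main(String[] args) {
        Map<String, Integer> m = new HashMap<>();
        m.put("present", 1);

        // Key present: merge() applies the remapping function to (oldValue, givenValue).
        Integer r1 = m.merge("present", 10, (oldV, newV) -> oldV + newV);
        System.out.println(r1 + " / " + m.get("present"));          // 11 / 11

        // Key absent: the remapping function is not invoked; the given value is stored.
        Integer r2 = m.merge("absent", 42, (oldV, newV) -> { throw new AssertionError(); });
        System.out.println(r2 + " / " + m.get("absent"));           // 42 / 42

        // Key present but the function returns null: the mapping is removed and null is returned.
        Integer r3 = m.merge("present", 10, (oldV, newV) -> null);
        System.out.println(r3 + " / " + m.containsKey("present"));  // null / false
    }
}

These three cases correspond to the "key present", "key previously removed" and "function returned null" branches checked above.
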
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/util/Map/TreeBinSplitBackToEntries.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,255 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+import java.util.*;
+import java.lang.reflect.Field;
+
+/*
+ * @test
+ * @bug 8005698
+ * @summary Test the case where TreeBin.splitTreeBin() converts a bin back to an Entry list
+ * @run main TreeBinSplitBackToEntries unused
+ * @author Brent Christian
+ */
+
+public class TreeBinSplitBackToEntries {
+    private static int EXPECTED_TREE_THRESHOLD = 16;
+
+    // Easiest if this covers one bit higher than 'bit' in splitTreeBin() on the
+    // call where the TreeBin is converted back to an Entry list
+    private static int HASHMASK = 0x7F;
+    private static boolean verbose = false;
+    private static boolean fastFail = false;
+    private static boolean failed = false;
+
+    static void printlnIfVerbose(String msg) {
+        if (verbose) { System.out.println(msg); }
+    }
+
+    public static void main(String[] args) {
+        for (String arg : args) {
+            switch(arg) {
+                case "-verbose":
+                    verbose = true;
+                    break;
+                case "-fastfail":
+                    fastFail = true;
+                    break;
+            }
+        }
+        checkTreeThreshold();
+        testMapHiTree();
+        testMapLoTree();
+        if (failed) {
+            System.out.println("Test Failed");
+            System.exit(1);
+        } else {
+            System.out.println("Test Passed");
+        }
+    }
+
+    public static void checkTreeThreshold() {
+        int threshold = -1;
+        try {
+            Class treeBinClass = Class.forName("java.util.HashMap$TreeBin");
+            Field treeThreshold = treeBinClass.getDeclaredField("TREE_THRESHOLD");
+            treeThreshold.setAccessible(true);
+            threshold = treeThreshold.getInt(treeBinClass);
+        } catch (ClassNotFoundException|NoSuchFieldException|IllegalAccessException e) {
+            e.printStackTrace();
+            throw new Error("Problem accessing TreeBin.TREE_THRESHOLD", e);
+        }
+        check("Expected TREE_THRESHOLD: " + EXPECTED_TREE_THRESHOLD +", found: " + threshold,
+              threshold == EXPECTED_TREE_THRESHOLD);
+        printlnIfVerbose("TREE_THRESHOLD: " + threshold);
+    }
+
+    public static void testMapHiTree() {
+        Object[][] mapKeys = makeHiTreeTestData();
+        testMapsForKeys(mapKeys, "hiTree");
+    }
+
+    public static void testMapLoTree() {
+        Object[][] mapKeys = makeLoTreeTestData();
+
+        testMapsForKeys(mapKeys, "loTree");
+    }
+
+    public static void testMapsForKeys(Object[][] mapKeys, String desc) {
+        // loop through data sets
+        for (Object[] keys_desc : mapKeys) {
+            Map<Object, Object>[] maps = (Map<Object, Object>[]) new Map[]{
+              new HashMap<>(4, 0.8f),
+              new LinkedHashMap<>(4, 0.8f),
+            };
+            // for each map type.
+            for (Map<Object, Object> map : maps) {
+                Object[] keys = (Object[]) keys_desc[1];
+                System.out.println(desc + ": testPutThenGet() for " + map.getClass());
+                testPutThenGet(map, keys);
+            }
+        }
+    }
+
+    private static <T> void testPutThenGet(Map<T, T> map, T[] keys) {
+        for (T key : keys) {
+            printlnIfVerbose("put()ing 0x" + Integer.toHexString(Integer.parseInt(key.toString())) + ", hashCode=" + Integer.toHexString(key.hashCode()));
+            map.put(key, key);
+        }
+        for (T key : keys) {
+            check("key: 0x" + Integer.toHexString(Integer.parseInt(key.toString())) + " not found in resulting " + map.getClass().getSimpleName(), map.get(key) != null);
+        }
+    }
+
+    /* Data to force a non-empty loTree in TreeBin.splitTreeBin() to be converted back
+     * into an Entry list
+     */
+    private static Object[][] makeLoTreeTestData() {
+        HashableInteger COLLIDING_OBJECTS[] = new HashableInteger[] {
+            new HashableInteger( 0x23, HASHMASK),
+            new HashableInteger( 0x123, HASHMASK),
+            new HashableInteger( 0x323, HASHMASK),
+            new HashableInteger( 0x523, HASHMASK),
+
+            new HashableInteger( 0x723, HASHMASK),
+            new HashableInteger( 0x923, HASHMASK),
+            new HashableInteger( 0xB23, HASHMASK),
+            new HashableInteger( 0xD23, HASHMASK),
+
+            new HashableInteger( 0xF23, HASHMASK),
+            new HashableInteger( 0xF123, HASHMASK),
+            new HashableInteger( 0x1023, HASHMASK),
+            new HashableInteger( 0x1123, HASHMASK),
+
+            new HashableInteger( 0x1323, HASHMASK),
+            new HashableInteger( 0x1523, HASHMASK),
+            new HashableInteger( 0x1723, HASHMASK),
+            new HashableInteger( 0x1923, HASHMASK),
+
+            new HashableInteger( 0x1B23, HASHMASK),
+            new HashableInteger( 0x1D23, HASHMASK),
+            new HashableInteger( 0x3123, HASHMASK),
+            new HashableInteger( 0x3323, HASHMASK),
+            new HashableInteger( 0x3523, HASHMASK),
+
+            new HashableInteger( 0x3723, HASHMASK),
+            new HashableInteger( 0x1001, HASHMASK),
+            new HashableInteger( 0x4001, HASHMASK),
+            new HashableInteger( 0x1, HASHMASK),
+        };
+        return new Object[][] {
+            new Object[]{"Colliding Objects", COLLIDING_OBJECTS},
+        };
+    }
+
+    /* Data to force the hiTree in TreeBin.splitTreeBin() to be converted back
+     * into an Entry list
+     */
+    private static Object[][] makeHiTreeTestData() {
+        HashableInteger COLLIDING_OBJECTS[] = new HashableInteger[] {
+            new HashableInteger( 0x1, HASHMASK),
+            new HashableInteger( 0x101, HASHMASK),
+            new HashableInteger( 0x301, HASHMASK),
+            new HashableInteger( 0x501, HASHMASK),
+            new HashableInteger( 0x701, HASHMASK),
+
+            new HashableInteger( 0x1001, HASHMASK),
+            new HashableInteger( 0x1101, HASHMASK),
+            new HashableInteger( 0x1301, HASHMASK),
+
+            new HashableInteger( 0x1501, HASHMASK),
+            new HashableInteger( 0x1701, HASHMASK),
+            new HashableInteger( 0x4001, HASHMASK),
+            new HashableInteger( 0x4101, HASHMASK),
+            new HashableInteger( 0x4301, HASHMASK),
+
+            new HashableInteger( 0x4501, HASHMASK),
+            new HashableInteger( 0x4701, HASHMASK),
+            new HashableInteger( 0x8001, HASHMASK),
+            new HashableInteger( 0x8101, HASHMASK),
+
+
+            new HashableInteger( 0x8301, HASHMASK),
+            new HashableInteger( 0x8501, HASHMASK),
+            new HashableInteger( 0x8701, HASHMASK),
+            new HashableInteger( 0x9001, HASHMASK),
+
+            new HashableInteger( 0x23, HASHMASK),
+            new HashableInteger( 0x123, HASHMASK),
+            new HashableInteger( 0x323, HASHMASK),
+            new HashableInteger( 0x523, HASHMASK),
+        };
+        return new Object[][] {
+            new Object[]{"Colliding Objects", COLLIDING_OBJECTS},
+        };
+    }
+
+    static void check(String desc, boolean cond) {
+        if (!cond) {
+            fail(desc);
+        }
+    }
+
+    static void fail(String msg) {
+        failed = true;
+        (new Error("Failure: " + msg)).printStackTrace(System.err);
+        if (fastFail) {
+            System.exit(1);
+        }
+    }
+
+    final static class HashableInteger implements Comparable<HashableInteger> {
+        final int value;
+        final int hashmask; //yes duplication
+
+        HashableInteger(int value, int hashmask) {
+            this.value = value;
+            this.hashmask = hashmask;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj instanceof HashableInteger) {
+                HashableInteger other = (HashableInteger) obj;
+                return other.value == value;
+            }
+            return false;
+        }
+
+        @Override
+        public int hashCode() {
+            // This version ANDs the mask
+            return value & hashmask;
+        }
+
+        @Override
+        public int compareTo(HashableInteger o) {
+            return value - o.value;
+        }
+
+        @Override
+        public String toString() {
+            return Integer.toString(value);
+        }
+    }
+}
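
As a side note, the collisions this test depends on come entirely from the masked hashCode() above: with HASHMASK = 0x7F, values that differ only in bits above bit 6 hash identically and therefore land in the same HashMap bucket (and, past TREE_THRESHOLD, in the same TreeBin). A minimal sketch (MaskedHashSketch is illustrative, not part of the changeset):

public class MaskedHashSketch {
    static final int HASHMASK = 0x7F;

    public static void main(String[] args) {
        int[] values = { 0x23, 0x123, 0x323, 0x1023 };
        for (int v : values) {
            // Each value prints hash=0x23: the bits above the mask are discarded,
            // so all of these keys collide when used as HashableInteger keys.
            System.out.printf("value=0x%X hash=0x%X%n", v, v & HASHMASK);
        }
    }
}
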
--- a/jdk/test/java/util/PluggableLocale/BreakIteratorProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/BreakIteratorProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440
--- a/jdk/test/java/util/PluggableLocale/CalendarDataProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/CalendarDataProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 7058207 8000986
--- a/jdk/test/java/util/PluggableLocale/ClasspathTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/ClasspathTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 6388652
--- a/jdk/test/java/util/PluggableLocale/CollatorProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/CollatorProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440
--- a/jdk/test/java/util/PluggableLocale/CurrencyNameProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/CurrencyNameProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440 7199750 8000997
--- a/jdk/test/java/util/PluggableLocale/DateFormatProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/DateFormatProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440 7003643
--- a/jdk/test/java/util/PluggableLocale/DateFormatSymbolsProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/DateFormatSymbolsProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440 7200341
--- a/jdk/test/java/util/PluggableLocale/DecimalFormatSymbolsProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/DecimalFormatSymbolsProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440
--- a/jdk/test/java/util/PluggableLocale/ExecTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/ExecTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 #
 # Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 #
 #
--- a/jdk/test/java/util/PluggableLocale/GenericTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/GenericTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440
--- a/jdk/test/java/util/PluggableLocale/LocaleNameProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/LocaleNameProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440 8000273
--- a/jdk/test/java/util/PluggableLocale/NumberFormatProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/NumberFormatProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440 7003643
--- a/jdk/test/java/util/PluggableLocale/TimeZoneNameProviderTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/PluggableLocale/TimeZoneNameProviderTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # 
 # Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -20,7 +21,6 @@
 # or visit www.oracle.com if you need additional information or have any
 # questions.
 #
-#!/bin/sh
 #
 # @test
 # @bug 4052440 8003267
--- a/jdk/test/java/util/ResourceBundle/Bug6299235Test.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/ResourceBundle/Bug6299235Test.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,4 +1,4 @@
-#
+#!/bin/sh
 # Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 #
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/util/Spliterator/SpliteratorCollisions.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,707 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * @test
+ * @bug 8005698
+ * @run testng SpliteratorCollisions
+ * @summary Spliterator traversing and splitting hash maps containing colliding hashes
+ * @author Brent Christian
+ */
+
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Spliterator;
+import java.util.TreeSet;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.function.LongConsumer;
+import java.util.function.Supplier;
+import java.util.function.UnaryOperator;
+
+import static org.testng.Assert.*;
+import static org.testng.Assert.assertEquals;
+
+@Test
+public class SpliteratorCollisions {
+
+    private static List<Integer> SIZES = Arrays.asList(0, 1, 10, 100, 1000);
+
+    private static class SpliteratorDataBuilder<T> {
+        List<Object[]> data;
+        List<T> exp;
+        Map<T, T> mExp;
+
+        SpliteratorDataBuilder(List<Object[]> data, List<T> exp) {
+            this.data = data;
+            this.exp = exp;
+            this.mExp = createMap(exp);
+        }
+
+        Map<T, T> createMap(List<T> l) {
+            Map<T, T> m = new LinkedHashMap<>();
+            for (T t : l) {
+                m.put(t, t);
+            }
+            return m;
+        }
+
+        void add(String description, Collection<?> expected, Supplier<Spliterator<?>> s) {
+            description = joiner(description).toString();
+            data.add(new Object[]{description, expected, s});
+        }
+
+        void add(String description, Supplier<Spliterator<?>> s) {
+            add(description, exp, s);
+        }
+
+        void addCollection(Function<Collection<T>, ? extends Collection<T>> c) {
+            add("new " + c.apply(Collections.<T>emptyList()).getClass().getName() + ".spliterator()",
+                () -> c.apply(exp).spliterator());
+        }
+
+        void addList(Function<Collection<T>, ? extends List<T>> l) {
+            // @@@ If collection is instance of List then add sub-list tests
+            addCollection(l);
+        }
+
+        void addMap(Function<Map<T, T>, ? extends Map<T, T>> m) {
+            String description = "new " + m.apply(Collections.<T, T>emptyMap()).getClass().getName();
+            add(description + ".keySet().spliterator()", () -> m.apply(mExp).keySet().spliterator());
+            add(description + ".values().spliterator()", () -> m.apply(mExp).values().spliterator());
+            add(description + ".entrySet().spliterator()", mExp.entrySet(), () -> m.apply(mExp).entrySet().spliterator());
+        }
+
+        StringBuilder joiner(String description) {
+            return new StringBuilder(description).
+                    append(" {").
+                    append("size=").append(exp.size()).
+                    append("}");
+        }
+    }
+
+    static Object[][] spliteratorDataProvider;
+
+    @DataProvider(name = "HashableIntSpliterator")
+    public static Object[][] spliteratorDataProvider() {
+        if (spliteratorDataProvider != null) {
+            return spliteratorDataProvider;
+        }
+
+        List<Object[]> data = new ArrayList<>();
+        for (int size : SIZES) {
+            List<HashableInteger> exp = listIntRange(size, false);
+            SpliteratorDataBuilder<HashableInteger> db = new SpliteratorDataBuilder<>(data, exp);
+
+            // Maps
+            db.addMap(HashMap::new);
+            db.addMap(LinkedHashMap::new);
+
+            // Collections that use HashMap
+            db.addCollection(HashSet::new);
+            db.addCollection(LinkedHashSet::new);
+            db.addCollection(TreeSet::new);
+        }
+        return spliteratorDataProvider = data.toArray(new Object[0][]);
+    }
+
+    static Object[][] spliteratorDataProviderWithNull;
+
+    @DataProvider(name = "HashableIntSpliteratorWithNull")
+    public static Object[][] spliteratorNullDataProvider() {
+        if (spliteratorDataProviderWithNull != null) {
+            return spliteratorDataProviderWithNull;
+        }
+
+        List<Object[]> data = new ArrayList<>();
+        for (int size : SIZES) {
+            // listIntRange(size, true) prepends the null element and returns an unmodifiable list
+            List<HashableInteger> exp = listIntRange(size, true);
+            SpliteratorDataBuilder<HashableInteger> db = new SpliteratorDataBuilder<>(data, exp);
+
+            // Maps
+            db.addMap(HashMap::new);
+            db.addMap(LinkedHashMap::new);
+            // TODO: add this back in if we decide to keep TreeBin in WeakHashMap
+            //db.addMap(WeakHashMap::new);
+
+            // Collections that use HashMap
+            db.addCollection(HashSet::new);
+            db.addCollection(LinkedHashSet::new);
+//            db.addCollection(TreeSet::new);
+
+        }
+        return spliteratorDataProviderWithNull = data.toArray(new Object[0][]);
+    }
+
+    final static class HashableInteger implements Comparable<HashableInteger> {
+
+        final int value;
+        final int hashmask; //yes duplication
+
+        HashableInteger(int value, int hashmask) {
+            this.value = value;
+            this.hashmask = hashmask;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj instanceof HashableInteger) {
+                HashableInteger other = (HashableInteger) obj;
+
+                return other.value == value;
+            }
+
+            return false;
+        }
+
+        @Override
+        public int hashCode() {
+            return value % hashmask;
+        }
+
+        @Override
+        public int compareTo(HashableInteger o) {
+            return value - o.value;
+        }
+
+        @Override
+        public String toString() {
+            return Integer.toString(value);
+        }
+    }
+
+    private static List<HashableInteger> listIntRange(int upTo, boolean withNull) {
+        List<HashableInteger> exp = new ArrayList<>();
+        if (withNull) {
+            exp.add(null);
+        }
+        for (int i = 0; i < upTo; i++) {
+            exp.add(new HashableInteger(i, 10));
+        }
+        return Collections.unmodifiableList(exp);
+    }
+
+    @Test(dataProvider = "HashableIntSpliterator")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testNullPointerException(String description, Collection exp, Supplier<Spliterator> s) {
+        executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining(null));
+        executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance(null));
+    }
+
+    @Test(dataProvider = "HashableIntSpliteratorWithNull")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testNullPointerExceptionWithNull(String description, Collection exp, Supplier<Spliterator> s) {
+        executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining(null));
+        executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance(null));
+    }
+
+
+    @Test(dataProvider = "HashableIntSpliterator")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testForEach(String description, Collection exp, Supplier<Spliterator> s) {
+        testForEach(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliteratorWithNull")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testForEachWithNull(String description, Collection exp, Supplier<Spliterator> s) {
+        testForEach(exp, s, (Consumer<Object> b) -> b);
+    }
+
+
+    @Test(dataProvider = "HashableIntSpliterator")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testTryAdvance(String description, Collection exp, Supplier<Spliterator> s) {
+        testTryAdvance(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliteratorWithNull")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testTryAdvanceWithNull(String description, Collection exp, Supplier<Spliterator> s) {
+        testTryAdvance(exp, s, (Consumer<Object> b) -> b);
+    }
+
+/* skip this test until 8013649 is fixed
+    @Test(dataProvider = "HashableIntSpliterator")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testMixedTryAdvanceForEach(String description, Collection exp, Supplier<Spliterator> s) {
+        testMixedTryAdvanceForEach(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliteratorWithNull")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testMixedTryAdvanceForEachWithNull(String description, Collection exp, Supplier<Spliterator> s) {
+        testMixedTryAdvanceForEach(exp, s, (Consumer<Object> b) -> b);
+    }
+*/
+
+    @Test(dataProvider = "HashableIntSpliterator")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testSplitAfterFullTraversal(String description, Collection exp, Supplier<Spliterator> s) {
+        testSplitAfterFullTraversal(s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliteratorWithNull")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testSplitAfterFullTraversalWithNull(String description, Collection exp, Supplier<Spliterator> s) {
+        testSplitAfterFullTraversal(s, (Consumer<Object> b) -> b);
+    }
+
+
+    @Test(dataProvider = "HashableIntSpliterator")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testSplitOnce(String description, Collection exp, Supplier<Spliterator> s) {
+        testSplitOnce(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliteratorWithNull")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testSplitOnceWithNull(String description, Collection exp, Supplier<Spliterator> s) {
+        testSplitOnce(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliterator")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testSplitSixDeep(String description, Collection exp, Supplier<Spliterator> s) {
+        testSplitSixDeep(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliteratorWithNull")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testSplitSixDeepWithNull(String description, Collection exp, Supplier<Spliterator> s) {
+        testSplitSixDeep(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliterator")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testSplitUntilNull(String description, Collection exp, Supplier<Spliterator> s) {
+        testSplitUntilNull(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    @Test(dataProvider = "HashableIntSpliteratorWithNull")
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public void testSplitUntilNullWithNull(String description, Collection exp, Supplier<Spliterator> s) {
+        testSplitUntilNull(exp, s, (Consumer<Object> b) -> b);
+    }
+
+    private static <T, S extends Spliterator<T>> void testForEach(
+            Collection<T> exp,
+            Supplier<S> supplier,
+            UnaryOperator<Consumer<T>> boxingAdapter) {
+        S spliterator = supplier.get();
+        long sizeIfKnown = spliterator.getExactSizeIfKnown();
+        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
+
+        ArrayList<T> fromForEach = new ArrayList<>();
+        spliterator = supplier.get();
+        Consumer<T> addToFromForEach = boxingAdapter.apply(fromForEach::add);
+        spliterator.forEachRemaining(addToFromForEach);
+
+        // Assert that forEach now produces no elements
+        spliterator.forEachRemaining(boxingAdapter.apply(e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
+        // Assert that tryAdvance now produces no elements
+        spliterator.tryAdvance(boxingAdapter.apply(e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));
+
+        // assert that size, tryAdvance, and forEach are consistent
+        if (sizeIfKnown >= 0) {
+            assertEquals(sizeIfKnown, exp.size());
+        }
+        if (exp.contains(null)) {
+            assertTrue(fromForEach.contains(null));
+        }
+        assertEquals(fromForEach.size(), exp.size());
+
+        assertContents(fromForEach, exp, isOrdered);
+    }
+
+    private static <T, S extends Spliterator<T>> void testTryAdvance(
+            Collection<T> exp,
+            Supplier<S> supplier,
+            UnaryOperator<Consumer<T>> boxingAdapter) {
+        S spliterator = supplier.get();
+        long sizeIfKnown = spliterator.getExactSizeIfKnown();
+        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
+
+        spliterator = supplier.get();
+        ArrayList<T> fromTryAdvance = new ArrayList<>();
+        Consumer<T> addToFromTryAdvance = boxingAdapter.apply(fromTryAdvance::add);
+        while (spliterator.tryAdvance(addToFromTryAdvance)) { }
+
+        // Assert that forEach now produces no elements
+        spliterator.forEachRemaining(boxingAdapter.apply(e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
+        // Assert that tryAdvance now produces no elements
+        spliterator.tryAdvance(boxingAdapter.apply(e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));
+
+        // assert that size, tryAdvance, and forEach are consistent
+        if (sizeIfKnown >= 0) {
+            assertEquals(sizeIfKnown, exp.size());
+        }
+        assertEquals(fromTryAdvance.size(), exp.size());
+
+        assertContents(fromTryAdvance, exp, isOrdered);
+    }
+
+    private static <T, S extends Spliterator<T>> void testMixedTryAdvanceForEach(
+            Collection<T> exp,
+            Supplier<S> supplier,
+            UnaryOperator<Consumer<T>> boxingAdapter) {
+        S spliterator = supplier.get();
+        long sizeIfKnown = spliterator.getExactSizeIfKnown();
+        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
+
+        // tryAdvance first few elements, then forEach rest
+        ArrayList<T> dest = new ArrayList<>();
+        spliterator = supplier.get();
+        Consumer<T> addToDest = boxingAdapter.apply(dest::add);
+        for (int i = 0; i < 10 && spliterator.tryAdvance(addToDest); i++) { }
+        spliterator.forEachRemaining(addToDest);
+
+        // Assert that forEach now produces no elements
+        spliterator.forEachRemaining(boxingAdapter.apply(e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
+        // Assert that tryAdvance now produces no elements
+        spliterator.tryAdvance(boxingAdapter.apply(e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));
+
+        if (sizeIfKnown >= 0) {
+            assertEquals(sizeIfKnown, dest.size());
+        }
+        assertEquals(dest.size(), exp.size());
+
+        if (isOrdered) {
+            assertEquals(dest, exp);
+        }
+        else {
+            assertContentsUnordered(dest, exp);
+        }
+    }
+
+    private static <T, S extends Spliterator<T>> void testSplitAfterFullTraversal(
+            Supplier<S> supplier,
+            UnaryOperator<Consumer<T>> boxingAdapter) {
+        // Full traversal using tryAdvance
+        Spliterator<T> spliterator = supplier.get();
+        while (spliterator.tryAdvance(boxingAdapter.apply(e -> { }))) { }
+        Spliterator<T> split = spliterator.trySplit();
+        assertNull(split);
+
+        // Full traversal using forEach
+        spliterator = supplier.get();
+        spliterator.forEachRemaining(boxingAdapter.apply(e -> { }));
+        split = spliterator.trySplit();
+        assertNull(split);
+
+        // Full traversal using tryAdvance then forEach
+        spliterator = supplier.get();
+        spliterator.tryAdvance(boxingAdapter.apply(e -> { }));
+        spliterator.forEachRemaining(boxingAdapter.apply(e -> { }));
+        split = spliterator.trySplit();
+        assertNull(split);
+    }
+
+    private static <T, S extends Spliterator<T>> void testSplitOnce(
+            Collection<T> exp,
+            Supplier<S> supplier,
+            UnaryOperator<Consumer<T>> boxingAdapter) {
+        S spliterator = supplier.get();
+        long sizeIfKnown = spliterator.getExactSizeIfKnown();
+        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
+
+        ArrayList<T> fromSplit = new ArrayList<>();
+        Spliterator<T> s1 = supplier.get();
+        Spliterator<T> s2 = s1.trySplit();
+        long s1Size = s1.getExactSizeIfKnown();
+        long s2Size = (s2 != null) ? s2.getExactSizeIfKnown() : 0;
+
+        Consumer<T> addToFromSplit = boxingAdapter.apply(fromSplit::add);
+        if (s2 != null)
+            s2.forEachRemaining(addToFromSplit);
+        s1.forEachRemaining(addToFromSplit);
+
+        if (sizeIfKnown >= 0) {
+            assertEquals(sizeIfKnown, fromSplit.size());
+            if (s1Size >= 0 && s2Size >= 0)
+                assertEquals(sizeIfKnown, s1Size + s2Size);
+        }
+        assertContents(fromSplit, exp, isOrdered);
+    }
+
+    private static <T, S extends Spliterator<T>> void testSplitSixDeep(
+            Collection<T> exp,
+            Supplier<S> supplier,
+            UnaryOperator<Consumer<T>> boxingAdapter) {
+        S spliterator = supplier.get();
+        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
+
+        for (int depth=0; depth < 6; depth++) {
+            List<T> dest = new ArrayList<>();
+            spliterator = supplier.get();
+
+            assertSpliterator(spliterator);
+
+            // verify splitting with forEach
+            visit(depth, 0, dest, spliterator, boxingAdapter, spliterator.characteristics(), false);
+            assertContents(dest, exp, isOrdered);
+
+            // verify splitting with tryAdvance
+            dest.clear();
+            spliterator = supplier.get();
+            visit(depth, 0, dest, spliterator, boxingAdapter, spliterator.characteristics(), true);
+            assertContents(dest, exp, isOrdered);
+        }
+    }
+
+    private static <T, S extends Spliterator<T>> void visit(int depth, int curLevel,
+                                                            List<T> dest, S spliterator, UnaryOperator<Consumer<T>> boxingAdapter,
+                                                            int rootCharacteristics, boolean useTryAdvance) {
+        if (curLevel < depth) {
+            long beforeSize = spliterator.getExactSizeIfKnown();
+            Spliterator<T> split = spliterator.trySplit();
+            if (split != null) {
+                assertSpliterator(split, rootCharacteristics);
+                assertSpliterator(spliterator, rootCharacteristics);
+
+                if ((rootCharacteristics & Spliterator.SUBSIZED) != 0 &&
+                    (rootCharacteristics & Spliterator.SIZED) != 0) {
+                    assertEquals(beforeSize, split.estimateSize() + spliterator.estimateSize());
+                }
+                visit(depth, curLevel + 1, dest, split, boxingAdapter, rootCharacteristics, useTryAdvance);
+            }
+            visit(depth, curLevel + 1, dest, spliterator, boxingAdapter, rootCharacteristics, useTryAdvance);
+        }
+        else {
+            long sizeIfKnown = spliterator.getExactSizeIfKnown();
+            if (useTryAdvance) {
+                Consumer<T> addToDest = boxingAdapter.apply(dest::add);
+                int count = 0;
+                while (spliterator.tryAdvance(addToDest)) {
+                    ++count;
+                }
+
+                if (sizeIfKnown >= 0)
+                    assertEquals(sizeIfKnown, count);
+
+                // Assert that forEach now produces no elements
+                spliterator.forEachRemaining(boxingAdapter.apply(e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
+
+                Spliterator<T> split = spliterator.trySplit();
+                assertNull(split);
+            }
+            else {
+                List<T> leafDest = new ArrayList<>();
+                Consumer<T> addToLeafDest = boxingAdapter.apply(leafDest::add);
+                spliterator.forEachRemaining(addToLeafDest);
+
+                if (sizeIfKnown >= 0)
+                    assertEquals(sizeIfKnown, leafDest.size());
+
+                // Assert that tryAdvance now produces no elements
+                spliterator.tryAdvance(boxingAdapter.apply(e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));
+
+                Spliterator<T> split = spliterator.trySplit();
+                assertNull(split);
+
+                dest.addAll(leafDest);
+            }
+        }
+    }
+
+    private static <T, S extends Spliterator<T>> void testSplitUntilNull(
+            Collection<T> exp,
+            Supplier<S> supplier,
+            UnaryOperator<Consumer<T>> boxingAdapter) {
+        Spliterator<T> s = supplier.get();
+        boolean isOrdered = s.hasCharacteristics(Spliterator.ORDERED);
+        assertSpliterator(s);
+
+        List<T> splits = new ArrayList<>();
+        Consumer<T> c = boxingAdapter.apply(splits::add);
+
+        testSplitUntilNull(new SplitNode<T>(c, s));
+        assertContents(splits, exp, isOrdered);
+    }
+
+    private static class SplitNode<T> {
+        // Constant for every node
+        final Consumer<T> c;
+        final int rootCharacteristics;
+
+        final Spliterator<T> s;
+
+        SplitNode(Consumer<T> c, Spliterator<T> s) {
+            this(c, s.characteristics(), s);
+        }
+
+        private SplitNode(Consumer<T> c, int rootCharacteristics, Spliterator<T> s) {
+            this.c = c;
+            this.rootCharacteristics = rootCharacteristics;
+            this.s = s;
+        }
+
+        SplitNode<T> fromSplit(Spliterator<T> split) {
+            return new SplitNode<>(c, rootCharacteristics, split);
+        }
+    }
+
+    /**
+     * Set the maximum stack capacity to 0.25MB. This should be more than enough to detect a bad spliterator
+     * while not unduly disrupting test infrastructure, given that the test data sizes used are small.
+     * Note that j.u.c.ForkJoinPool sets the max queue size to 64M (1 << 26).
+     */
+    private static final int MAXIMUM_STACK_CAPACITY = 1 << 18; // 0.25MB
+
+    private static <T> void testSplitUntilNull(SplitNode<T> e) {
+        // Use an explicit stack to avoid a StackOverflowException when testing a Spliterator
+        // that when repeatedly split produces a right-balanced (and maybe degenerate) tree, or
+        // for a spliterator that is badly behaved.
+        Deque<SplitNode<T>> stack = new ArrayDeque<>();
+        stack.push(e);
+
+        int iteration = 0;
+        while (!stack.isEmpty()) {
+            assertTrue(iteration++ < MAXIMUM_STACK_CAPACITY, "Exceeded maximum stack modification count of 1 << 18");
+
+            e = stack.pop();
+            Spliterator<T> parentAndRightSplit = e.s;
+
+            long parentEstimateSize = parentAndRightSplit.estimateSize();
+            assertTrue(parentEstimateSize >= 0,
+                       String.format("Split size estimate %d < 0", parentEstimateSize));
+
+            long parentSize = parentAndRightSplit.getExactSizeIfKnown();
+            Spliterator<T> leftSplit = parentAndRightSplit.trySplit();
+            if (leftSplit == null) {
+                parentAndRightSplit.forEachRemaining(e.c);
+                continue;
+            }
+
+            assertSpliterator(leftSplit, e.rootCharacteristics);
+            assertSpliterator(parentAndRightSplit, e.rootCharacteristics);
+
+            if (parentEstimateSize != Long.MAX_VALUE && leftSplit.estimateSize() > 0 && parentAndRightSplit.estimateSize() > 0) {
+                assertTrue(leftSplit.estimateSize() < parentEstimateSize,
+                           String.format("Left split size estimate %d >= parent split size estimate %d", leftSplit.estimateSize(), parentEstimateSize));
+                assertTrue(parentAndRightSplit.estimateSize() < parentEstimateSize,
+                           String.format("Right split size estimate %d >= parent split size estimate %d", parentAndRightSplit.estimateSize(), parentEstimateSize));
+            }
+            else {
+                assertTrue(leftSplit.estimateSize() <= parentEstimateSize,
+                           String.format("Left split size estimate %d > parent split size estimate %d", leftSplit.estimateSize(), parentEstimateSize));
+                assertTrue(parentAndRightSplit.estimateSize() <= parentEstimateSize,
+                           String.format("Right split size estimate %d > parent split size estimate %d", parentAndRightSplit.estimateSize(), parentEstimateSize));
+            }
+
+            long leftSize = leftSplit.getExactSizeIfKnown();
+            long rightSize = parentAndRightSplit.getExactSizeIfKnown();
+            if (parentSize >= 0 && leftSize >= 0 && rightSize >= 0)
+                assertEquals(parentSize, leftSize + rightSize,
+                             String.format("exact left split size %d + exact right split size %d != parent exact split size %d",
+                                           leftSize, rightSize, parentSize));
+
+            // Add right side to stack first so left side is popped off first
+            stack.push(e.fromSplit(parentAndRightSplit));
+            stack.push(e.fromSplit(leftSplit));
+        }
+    }
+
+    private static void assertSpliterator(Spliterator<?> s, int rootCharacteristics) {
+        if ((rootCharacteristics & Spliterator.SUBSIZED) != 0) {
+            assertTrue(s.hasCharacteristics(Spliterator.SUBSIZED),
+                       "Child split is not SUBSIZED when root split is SUBSIZED");
+        }
+        assertSpliterator(s);
+    }
+
+    private static void assertSpliterator(Spliterator<?> s) {
+        if (s.hasCharacteristics(Spliterator.SUBSIZED)) {
+            assertTrue(s.hasCharacteristics(Spliterator.SIZED));
+        }
+        if (s.hasCharacteristics(Spliterator.SIZED)) {
+            assertTrue(s.estimateSize() != Long.MAX_VALUE);
+            assertTrue(s.getExactSizeIfKnown() >= 0);
+        }
+        try {
+            s.getComparator();
+            assertTrue(s.hasCharacteristics(Spliterator.SORTED));
+        } catch (IllegalStateException e) {
+            assertFalse(s.hasCharacteristics(Spliterator.SORTED));
+        }
+    }
+
+    private static<T> void assertContents(Collection<T> actual, Collection<T> expected, boolean isOrdered) {
+        if (isOrdered) {
+            assertEquals(actual, expected);
+        }
+        else {
+            assertContentsUnordered(actual, expected);
+        }
+    }
+
+    private static<T> void assertContentsUnordered(Iterable<T> actual, Iterable<T> expected) {
+        assertEquals(toBoxedMultiset(actual), toBoxedMultiset(expected));
+    }
+
+    private static <T> Map<T, HashableInteger> toBoxedMultiset(Iterable<T> c) {
+        Map<T, HashableInteger> result = new HashMap<>();
+        c.forEach((Consumer) e -> {
+            if (result.containsKey((T)e)) {
+                result.put((T)e, new HashableInteger(((HashableInteger)result.get(e)).value + 1, 10));
+            } else {
+                result.put((T)e, new HashableInteger(1, 10));
+            }
+        });
+        return result;
+    }
+
+    private void executeAndCatch(Class<? extends Exception> expected, Runnable r) {
+        Exception caught = null;
+        try {
+            r.run();
+        }
+        catch (Exception e) {
+            caught = e;
+        }
+
+        assertNotNull(caught,
+                      String.format("No Exception was thrown, expected an Exception of %s to be thrown",
+                                    expected.getName()));
+        assertTrue(expected.isInstance(caught),
+                   String.format("Exception thrown %s not an instance of %s",
+                                 caught.getClass().getName(), expected.getName()));
+    }
+
+}
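
For orientation, the Spliterator operations exercised above are tryAdvance (consume one element), forEachRemaining (drain the rest) and trySplit (hand part of the remaining elements to a new Spliterator). A minimal sketch against a plain HashSet (SpliteratorSketch is illustrative, not part of the changeset):

import java.util.HashSet;
import java.util.Set;
import java.util.Spliterator;

public class SpliteratorSketch {
    public static void main(String[] args) {
        Set<Integer> set = new HashSet<>();
        for (int i = 0; i < 10; i++) {
            set.add(i);
        }

        Spliterator<Integer> s = set.spliterator();
        s.tryAdvance(e -> System.out.println("first: " + e));        // consume a single element

        Spliterator<Integer> other = s.trySplit();                   // may be null for small sources
        if (other != null) {
            other.forEachRemaining(e -> System.out.println("split: " + e));
        }
        s.forEachRemaining(e -> System.out.println("rest: " + e));   // drain what is left

        // After full traversal the tests above expect no further elements and no further splits.
        System.out.println("tryAdvance after exhaustion: " + s.tryAdvance(e -> { }));
        System.out.println("trySplit after exhaustion:   " + s.trySplit());
    }
}
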
--- a/jdk/test/java/util/Spliterator/SpliteratorTraversingAndSplittingTest.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/Spliterator/SpliteratorTraversingAndSplittingTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -128,6 +128,10 @@
 
         void addMap(Function<Map<T, T>, ? extends Map<T, T>> m) {
             String description = "new " + m.apply(Collections.<T, T>emptyMap()).getClass().getName();
+            addMap(m, description);
+        }
+
+        void addMap(Function<Map<T, T>, ? extends Map<T, T>> m, String description) {
             add(description + ".keySet().spliterator()", () -> m.apply(mExp).keySet().spliterator());
             add(description + ".values().spliterator()", () -> m.apply(mExp).values().spliterator());
             add(description + ".entrySet().spliterator()", mExp.entrySet(), () -> m.apply(mExp).entrySet().spliterator());
@@ -399,12 +403,36 @@
 
             db.addMap(HashMap::new);
 
+            db.addMap(m -> {
+                // Create a Map ensuring that for large sizes
+                // buckets will contain 2 or more entries
+                HashMap<Integer, Integer> cm = new HashMap<>(1, m.size() + 1);
+                // Don't use putAll, which inflates the table by
+                // m.size() * loadFactor, creating a very sparse map
+                // for 1000 entries and defeating the purpose of this test.
+                // It would also cause the split-until-null test to fail,
+                // because the number of valid splits would exceed the
+                // threshold.
+                for (Map.Entry<Integer, Integer> e : m.entrySet())
+                    cm.put(e.getKey(), e.getValue());
+                return cm;
+            }, "new java.util.HashMap(1, size + 1)");
+
             db.addMap(LinkedHashMap::new);
 
             db.addMap(IdentityHashMap::new);
 
             db.addMap(WeakHashMap::new);
 
+            db.addMap(m -> {
+                // Create a Map ensuring that for large sizes
+                // buckets will consist of 2 or more entries
+                WeakHashMap<Integer, Integer> cm = new WeakHashMap<>(1, m.size() + 1);
+                for (Map.Entry<Integer, Integer> e : m.entrySet())
+                    cm.put(e.getKey(), e.getValue());
+                return cm;
+            }, "new java.util.WeakHashMap(1, size + 1)");
+
             // @@@  Descending maps etc
             db.addMap(TreeMap::new);
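
The two-argument constructor used in the new hunk above is worth a note: HashMap(initialCapacity, loadFactor) with a load factor larger than the eventual element count keeps capacity * loadFactor above the number of entries, so the table is never resized on account of load and entries are forced to share buckets. A rough stand-alone sketch under that assumption (DenseHashMapSketch is illustrative, not part of the changeset):

import java.util.HashMap;
import java.util.Map;

public class DenseHashMapSketch {
    public static void main(String[] args) {
        int size = 1000;

        // A load factor of size + 1 keeps the resize threshold above the entry count,
        // so the table stays small and buckets typically hold several entries.
        Map<Integer, Integer> dense = new HashMap<>(1, size + 1);
        for (int i = 0; i < size; i++) {
            dense.put(i, i);   // per-entry put, deliberately avoiding putAll
        }

        System.out.println("entries stored: " + dense.size());   // 1000
    }
}

This mirrors the hunk's approach for the 1000-element case, where putAll would instead presize the table and leave it sparse.
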
 
--- a/jdk/test/java/util/jar/TestExtra.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/jar/TestExtra.java	Wed Jul 05 18:59:05 2017 +0200
@@ -23,7 +23,7 @@
 
 /**
  * @test
- * @bug 6480504
+ * @bug 6480504 6303183
  * @summary Test that client-provided data in the extra field is written and
  * read correctly, taking into account the JAR_MAGIC written into the extra
  * field of the first entry of JAR files.
@@ -117,8 +117,7 @@
         ZipInputStream zis = getInputStream();
 
         ze = zis.getNextEntry();
-        byte[] e = ze.getExtra();
-        check(e.length == 8, "expected extra length is 8, got " + e.length);
+        checkExtra(data, ze.getExtra());
         checkEntry(ze, 0, 0);
     }
 
@@ -140,10 +139,43 @@
         ZipInputStream zis = getInputStream();
         ze = zis.getNextEntry();
         byte[] e = ze.getExtra();
-        check(e.length == 8, "expected extra length is 8, got " + e.length);
+        checkExtra(data, ze.getExtra());
         checkEntry(ze, 0, 0);
     }
 
+    // Check that all "expected" extra fields are equal to their
+    // corresponding fields in "extra". The "extra" might have
+    // timestamp fields added by ZOS (ZipOutputStream).
+    static void checkExtra(byte[] expected, byte[] extra) {
+        if (expected == null)
+            return;
+        int off = 0;
+        int len = expected.length;
+        while (off + 4 < len) {
+            int tag = get16(expected, off);
+            int sz = get16(expected, off + 2);
+            int off0 = 0;
+            int len0 = extra.length;
+            boolean matched = false;
+            while (off0 + 4 < len0) {
+                int tag0 = get16(extra, off0);
+                int sz0 = get16(extra, off0 + 2);
+                if (tag == tag0 && sz == sz0) {
+                    matched = true;
+                    for (int i = 0; i < sz; i++) {
+                        if (expected[off + i] != extra[off0 +i])
+                            matched = false;
+                    }
+                    break;
+                }
+                off0 += (4 + sz0);
+            }
+            if (!matched) {
+                fail("Expected extra data [tag=" + tag + ", sz=" + sz + "] check failed");
+            }
+            off += (4 + sz);
+        }
+    }
 
     /** Check that the entry's extra data is correct. */
     void checkEntry(ZipEntry ze, int count, int dataLength) {
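
For context, the extra field that checkExtra() walks is a sequence of [2-byte tag][2-byte size][size bytes of data] records, with both 16-bit values stored little-endian; that is what the test's get16 helper is assumed to read. A small stand-alone sketch of the same walk (ExtraFieldSketch and its local get16 are illustrative, not part of the changeset):

public class ExtraFieldSketch {
    // Reads an unsigned 16-bit little-endian value, mirroring what the test's get16 is assumed to do.
    static int get16(byte[] b, int off) {
        return (b[off] & 0xff) | ((b[off + 1] & 0xff) << 8);
    }

    public static void main(String[] args) {
        // A single record: tag 0xCAFE, size 2, followed by two data bytes.
        byte[] extra = { (byte) 0xFE, (byte) 0xCA, 0x02, 0x00, 0x01, 0x02 };

        int off = 0;
        while (off + 4 <= extra.length) {
            int tag = get16(extra, off);
            int sz = get16(extra, off + 2);
            System.out.printf("tag=0x%04X size=%d%n", tag, sz);
            off += 4 + sz;   // skip the header plus its data to reach the next record
        }
    }
}
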
--- a/jdk/test/java/util/stream/test/org/openjdk/tests/java/util/stream/SpliteratorLateBindingFailFastTest.java	Mon Jun 10 17:04:18 2013 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,358 +0,0 @@
-/*
- * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package org.openjdk.tests.java.util.stream;
-
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.ConcurrentModificationException;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.PriorityQueue;
-import java.util.Set;
-import java.util.Spliterator;
-import java.util.Stack;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.Vector;
-import java.util.WeakHashMap;
-import java.util.function.Consumer;
-import java.util.function.Function;
-import java.util.function.Supplier;
-
-import static org.testng.Assert.*;
-
-/**
- * @test
- * @summary Spliterator last-binding and fail-fast tests
- * @run testng SpliteratorLateBindingFailFastTest
- */
-
-@Test(groups = { "serialization-hostile" })
-public class SpliteratorLateBindingFailFastTest {
-
-    private interface Source<T> {
-        Collection<T> asCollection();
-        void update();
-    }
-
-    private static class SpliteratorDataBuilder<T> {
-        final List<Object[]> data;
-
-        final T newValue;
-
-        final List<T> exp;
-
-        final Map<T, T> mExp;
-
-        SpliteratorDataBuilder(List<Object[]> data, T newValue, List<T> exp) {
-            this.data = data;
-            this.newValue = newValue;
-            this.exp = exp;
-            this.mExp = createMap(exp);
-        }
-
-        Map<T, T> createMap(List<T> l) {
-            Map<T, T> m = new LinkedHashMap<>();
-            for (T t : l) {
-                m.put(t, t);
-            }
-            return m;
-        }
-
-        void add(String description, Supplier<Source<?>> s) {
-            description = joiner(description).toString();
-            data.add(new Object[]{description, s});
-        }
-
-        void addCollection(Function<Collection<T>, ? extends Collection<T>> f) {
-            class CollectionSource implements Source<T> {
-                final Collection<T> c = f.apply(exp);
-
-                final Consumer<Collection<T>> updater;
-
-                CollectionSource(Consumer<Collection<T>> updater) {
-                    this.updater = updater;
-                }
-
-                @Override
-                public Collection<T> asCollection() {
-                    return c;
-                }
-
-                @Override
-                public void update() {
-                    updater.accept(c);
-                }
-            }
-
-            String description = "new " + f.apply(Collections.<T>emptyList()).getClass().getName() + ".spliterator() ";
-            add(description + "ADD", () -> new CollectionSource(c -> c.add(newValue)));
-            add(description + "REMOVE", () -> new CollectionSource(c -> c.remove(c.iterator().next())));
-        }
-
-        void addList(Function<Collection<T>, ? extends List<T>> l) {
-            // @@@ If the collection is an instance of List then add sub-list tests
-            addCollection(l);
-        }
-
-        void addMap(Function<Map<T, T>, ? extends Map<T, T>> mapConstructor) {
-            class MapSource<U> implements Source<U> {
-                final Map<T, T> m = mapConstructor.apply(mExp);
-
-                final Collection<U> c;
-
-                final Consumer<Map<T, T>> updater;
-
-                MapSource(Function<Map<T, T>, Collection<U>> f, Consumer<Map<T, T>> updater) {
-                    this.c = f.apply(m);
-                    this.updater = updater;
-                }
-
-                @Override
-                public Collection<U> asCollection() {
-                    return c;
-                }
-
-                @Override
-                public void update() {
-                    updater.accept(m);
-                }
-            }
-
-            Map<String, Consumer<Map<T, T>>> actions = new HashMap<>();
-            actions.put("ADD", m -> m.put(newValue, newValue));
-            actions.put("REMOVE", m -> m.remove(m.keySet().iterator().next()));
-
-            String description = "new " + mapConstructor.apply(Collections.<T, T>emptyMap()).getClass().getName();
-            for (Map.Entry<String, Consumer<Map<T, T>>> e : actions.entrySet()) {
-                add(description + ".keySet().spliterator() " + e.getKey(),
-                    () -> new MapSource<T>(m -> m.keySet(), e.getValue()));
-                add(description + ".values().spliterator() " + e.getKey(),
-                    () -> new MapSource<T>(m -> m.values(), e.getValue()));
-                add(description + ".entrySet().spliterator() " + e.getKey(),
-                    () -> new MapSource<Map.Entry<T, T>>(m -> m.entrySet(), e.getValue()));
-            }
-        }
-
-        StringBuilder joiner(String description) {
-            return new StringBuilder(description).
-                    append(" {").
-                    append("size=").append(exp.size()).
-                    append("}");
-        }
-    }
-
-    static Object[][] spliteratorDataProvider;
-
-    @DataProvider(name = "Source")
-    public static Object[][] spliteratorDataProvider() {
-        if (spliteratorDataProvider != null) {
-            return spliteratorDataProvider;
-        }
-
-        List<Object[]> data = new ArrayList<>();
-        SpliteratorDataBuilder<Integer> db = new SpliteratorDataBuilder<>(data, 5, Arrays.asList(1, 2, 3, 4));
-
-        // Collections
-
-        db.addList(ArrayList::new);
-
-        db.addList(LinkedList::new);
-
-        db.addList(Vector::new);
-
-
-        db.addCollection(HashSet::new);
-
-        db.addCollection(LinkedHashSet::new);
-
-        db.addCollection(TreeSet::new);
-
-
-        db.addCollection(c -> { Stack<Integer> s = new Stack<>(); s.addAll(c); return s;});
-
-        db.addCollection(PriorityQueue::new);
-
-        // ArrayDeque fails some tests since its fail-fast support is weaker
-        // than that of other collections and limited to detecting most, but
-        // not all, removals.  It probably requires its own test since it is
-        // difficult to abstract out the conditions under which it fails fast.
-//        db.addCollection(ArrayDeque::new);
-
-        // Maps
-
-        db.addMap(HashMap::new);
-
-        db.addMap(LinkedHashMap::new);
-
-        // This fails when run through jtreg but passes when run through
-        // ant
-//        db.addMap(IdentityHashMap::new);
-
-        db.addMap(WeakHashMap::new);
-
-        // @@@  Descending maps etc
-        db.addMap(TreeMap::new);
-
-        return spliteratorDataProvider = data.toArray(new Object[0][]);
-    }
-
-    @Test(dataProvider = "Source")
-    public <T> void lateBindingTestWithForEach(String description, Supplier<Source<T>> ss) {
-        Source<T> source = ss.get();
-        Collection<T> c = source.asCollection();
-        Spliterator<T> s = c.spliterator();
-
-        source.update();
-
-        Set<T> r = new HashSet<>();
-        s.forEachRemaining(r::add);
-
-        assertEquals(r, new HashSet<>(c));
-    }
-
-    @Test(dataProvider = "Source")
-    public <T> void lateBindingTestWithTryAdvance(String description, Supplier<Source<T>> ss) {
-        Source<T> source = ss.get();
-        Collection<T> c = source.asCollection();
-        Spliterator<T> s = c.spliterator();
-
-        source.update();
-
-        Set<T> r = new HashSet<>();
-        while (s.tryAdvance(r::add)) { }
-
-        assertEquals(r, new HashSet<>(c));
-    }
-
-    @Test(dataProvider = "Source")
-    public <T> void lateBindingTestWithCharacteristics(String description, Supplier<Source<T>> ss) {
-        Source<T> source = ss.get();
-        Collection<T> c = source.asCollection();
-        Spliterator<T> s = c.spliterator();
-        s.characteristics();
-
-        Set<T> r = new HashSet<>();
-        s.forEachRemaining(r::add);
-
-        assertEquals(r, new HashSet<>(c));
-    }
-
-
-    @Test(dataProvider = "Source")
-    public <T> void testFailFastTestWithTryAdvance(String description, Supplier<Source<T>> ss) {
-        {
-            Source<T> source = ss.get();
-            Collection<T> c = source.asCollection();
-            Spliterator<T> s = c.spliterator();
-
-            s.tryAdvance(e -> {
-            });
-            source.update();
-
-            executeAndCatch(() -> s.tryAdvance(e -> { }));
-        }
-
-        {
-            Source<T> source = ss.get();
-            Collection<T> c = source.asCollection();
-            Spliterator<T> s = c.spliterator();
-
-            s.tryAdvance(e -> {
-            });
-            source.update();
-
-            executeAndCatch(() -> s.forEachRemaining(e -> {
-            }));
-        }
-    }
-
-    @Test(dataProvider = "Source")
-    public <T> void testFailFastTestWithForEach(String description, Supplier<Source<T>> ss) {
-        Source<T> source = ss.get();
-        Collection<T> c = source.asCollection();
-        Spliterator<T> s = c.spliterator();
-
-        executeAndCatch(() -> s.forEachRemaining(e -> {
-            source.update();
-        }));
-    }
-
-    @Test(dataProvider = "Source")
-    public <T> void testFailFastTestWithEstimateSize(String description, Supplier<Source<T>> ss) {
-        {
-            Source<T> source = ss.get();
-            Collection<T> c = source.asCollection();
-            Spliterator<T> s = c.spliterator();
-
-            s.estimateSize();
-            source.update();
-
-            executeAndCatch(() -> s.tryAdvance(e -> { }));
-        }
-
-        {
-            Source<T> source = ss.get();
-            Collection<T> c = source.asCollection();
-            Spliterator<T> s = c.spliterator();
-
-            s.estimateSize();
-            source.update();
-
-            executeAndCatch(() -> s.forEachRemaining(e -> {
-            }));
-        }
-    }
-
-    private void executeAndCatch(Runnable r) {
-        executeAndCatch(ConcurrentModificationException.class, r);
-    }
-
-    private void executeAndCatch(Class<? extends Exception> expected, Runnable r) {
-        Exception caught = null;
-        try {
-            r.run();
-        }
-        catch (Exception e) {
-            caught = e;
-        }
-
-        assertNotNull(caught,
-                      String.format("No Exception was thrown, expected an Exception of %s to be thrown",
-                                    expected.getName()));
-        assertTrue(expected.isInstance(caught),
-                   String.format("Exception thrown %s not an instance of %s",
-                                 caught.getClass().getName(), expected.getName()));
-    }
-
-}
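The file removed above exercised two properties of collection spliterators: late binding (the spliterator binds to its source on first traversal, so updates made before traversal are still observed) and fail-fast behaviour (structural interference after binding should surface as a ConcurrentModificationException). A minimal, self-contained sketch of both checks against ArrayList follows; it is illustrative only, not part of this changeset, and the class name SpliteratorBehaviourSketch is invented here.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.ConcurrentModificationException;
import java.util.List;
import java.util.Spliterator;

public class SpliteratorBehaviourSketch {
    public static void main(String[] args) {
        // Late binding: the spliterator binds to the source on first traversal,
        // so an element added after creation but before traversal is still seen.
        List<Integer> lateBound = new ArrayList<>(Arrays.asList(1, 2, 3));
        Spliterator<Integer> s1 = lateBound.spliterator();
        lateBound.add(4);
        s1.forEachRemaining(e -> System.out.print(e + " "));   // prints: 1 2 3 4
        System.out.println();

        // Fail-fast: once traversal has started, a structural modification of the
        // source is expected to surface as a ConcurrentModificationException.
        List<Integer> failFast = new ArrayList<>(Arrays.asList(1, 2, 3));
        Spliterator<Integer> s2 = failFast.spliterator();
        s2.tryAdvance(e -> { });            // start traversal (binds the spliterator)
        failFast.add(4);                    // interfere with the source
        try {
            s2.forEachRemaining(e -> { });
            System.out.println("no exception (unexpected for ArrayList)");
        } catch (ConcurrentModificationException expected) {
            System.out.println("ConcurrentModificationException, as expected");
        }
    }
}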
--- a/jdk/test/java/util/stream/test/org/openjdk/tests/java/util/stream/SpliteratorTraversingAndSplittingTest.java	Mon Jun 10 17:04:18 2013 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1411 +0,0 @@
-/*
- * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package org.openjdk.tests.java.util.stream;
-
-/**
- * @test
- * @summary Spliterator traversing and splitting tests
- * @run testng SpliteratorTraversingAndSplittingTest
- */
-
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.AbstractCollection;
-import java.util.AbstractList;
-import java.util.AbstractSet;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Deque;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.IdentityHashMap;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.PriorityQueue;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.Spliterator;
-import java.util.Spliterators;
-import java.util.Stack;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.Vector;
-import java.util.WeakHashMap;
-import java.util.concurrent.ArrayBlockingQueue;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.ConcurrentSkipListMap;
-import java.util.concurrent.ConcurrentSkipListSet;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.CopyOnWriteArraySet;
-import java.util.concurrent.LinkedBlockingDeque;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.LinkedTransferQueue;
-import java.util.concurrent.PriorityBlockingQueue;
-import java.util.function.Consumer;
-import java.util.function.DoubleConsumer;
-import java.util.function.Function;
-import java.util.function.IntConsumer;
-import java.util.function.LongConsumer;
-import java.util.function.Supplier;
-import java.util.function.UnaryOperator;
-
-import static org.testng.Assert.*;
-import static org.testng.Assert.assertEquals;
-
-@Test(groups = { "serialization-hostile" })
-public class SpliteratorTraversingAndSplittingTest {
-
-    private static List<Integer> SIZES = Arrays.asList(0, 1, 10, 100, 1000);
-
-    private static class SpliteratorDataBuilder<T> {
-        List<Object[]> data;
-
-        List<T> exp;
-
-        Map<T, T> mExp;
-
-        SpliteratorDataBuilder(List<Object[]> data, List<T> exp) {
-            this.data = data;
-            this.exp = exp;
-            this.mExp = createMap(exp);
-        }
-
-        Map<T, T> createMap(List<T> l) {
-            Map<T, T> m = new LinkedHashMap<>();
-            for (T t : l) {
-                m.put(t, t);
-            }
-            return m;
-        }
-
-        void add(String description, Collection<?> expected, Supplier<Spliterator<?>> s) {
-            description = joiner(description).toString();
-            data.add(new Object[]{description, expected, s});
-        }
-
-        void add(String description, Supplier<Spliterator<?>> s) {
-            add(description, exp, s);
-        }
-
-        void addCollection(Function<Collection<T>, ? extends Collection<T>> c) {
-            add("new " + c.apply(Collections.<T>emptyList()).getClass().getName() + ".spliterator()",
-                () -> c.apply(exp).spliterator());
-        }
-
-        void addList(Function<Collection<T>, ? extends List<T>> l) {
-            // @@@ If the collection is an instance of List then add sub-list tests
-            addCollection(l);
-        }
-
-        void addMap(Function<Map<T, T>, ? extends Map<T, T>> m) {
-            String description = "new " + m.apply(Collections.<T, T>emptyMap()).getClass().getName();
-            add(description + ".keySet().spliterator()", () -> m.apply(mExp).keySet().spliterator());
-            add(description + ".values().spliterator()", () -> m.apply(mExp).values().spliterator());
-            add(description + ".entrySet().spliterator()", mExp.entrySet(), () -> m.apply(mExp).entrySet().spliterator());
-        }
-
-        StringBuilder joiner(String description) {
-            return new StringBuilder(description).
-                    append(" {").
-                    append("size=").append(exp.size()).
-                    append("}");
-        }
-    }
-
-    static Object[][] spliteratorDataProvider;
-
-    @DataProvider(name = "Spliterator<Integer>")
-    public static Object[][] spliteratorDataProvider() {
-        if (spliteratorDataProvider != null) {
-            return spliteratorDataProvider;
-        }
-
-        List<Object[]> data = new ArrayList<>();
-        for (int size : SIZES) {
-            List<Integer> exp = listIntRange(size);
-            SpliteratorDataBuilder<Integer> db = new SpliteratorDataBuilder<>(data, exp);
-
-            // Direct spliterator methods
-
-            db.add("Spliterators.spliterator(Collection, ...)",
-                   () -> Spliterators.spliterator(exp, 0));
-
-            db.add("Spliterators.spliterator(Iterator, ...)",
-                   () -> Spliterators.spliterator(exp.iterator(), exp.size(), 0));
-
-            db.add("Spliterators.spliteratorUnknownSize(Iterator, ...)",
-                   () -> Spliterators.spliteratorUnknownSize(exp.iterator(), 0));
-
-            db.add("Spliterators.spliterator(Spliterators.iteratorFromSpliterator(Spliterator), ...)",
-                   () -> Spliterators.spliterator(Spliterators.iteratorFromSpliterator(exp.spliterator()), exp.size(), 0));
-
-            db.add("Spliterators.spliterator(T[], ...)",
-                   () -> Spliterators.spliterator(exp.toArray(new Integer[0]), 0));
-
-            db.add("Arrays.spliterator(T[], ...)",
-                   () -> Arrays.spliterator(exp.toArray(new Integer[0])));
-
-            class SpliteratorFromIterator extends Spliterators.AbstractSpliterator<Integer> {
-                Iterator<Integer> it;
-
-                SpliteratorFromIterator(Iterator<Integer> it, long est) {
-                    super(est, Spliterator.SIZED);
-                    this.it = it;
-                }
-
-                @Override
-                public boolean tryAdvance(Consumer<? super Integer> action) {
-                    if (action == null)
-                        throw new NullPointerException();
-                    if (it.hasNext()) {
-                        action.accept(it.next());
-                        return true;
-                    }
-                    else {
-                        return false;
-                    }
-                }
-            }
-            db.add("new Spliterators.AbstractSpliterator()",
-                   () -> new SpliteratorFromIterator(exp.iterator(), exp.size()));
-
-            // Collections
-
-            // default method implementations
-
-            class AbstractCollectionImpl extends AbstractCollection<Integer> {
-                Collection<Integer> c;
-
-                AbstractCollectionImpl(Collection<Integer> c) {
-                    this.c = c;
-                }
-
-                @Override
-                public Iterator<Integer> iterator() {
-                    return c.iterator();
-                }
-
-                @Override
-                public int size() {
-                    return c.size();
-                }
-            }
-            db.addCollection(
-                    c -> new AbstractCollectionImpl(c));
-
-            class AbstractListImpl extends AbstractList<Integer> {
-                List<Integer> l;
-
-                AbstractListImpl(Collection<Integer> c) {
-                    this.l = new ArrayList<>(c);
-                }
-
-                @Override
-                public Integer get(int index) {
-                    return l.get(index);
-                }
-
-                @Override
-                public int size() {
-                    return l.size();
-                }
-            }
-            db.addCollection(
-                    c -> new AbstractListImpl(c));
-
-            class AbstractSetImpl extends AbstractSet<Integer> {
-                Set<Integer> s;
-
-                AbstractSetImpl(Collection<Integer> c) {
-                    this.s = new HashSet<>(c);
-                }
-
-                @Override
-                public Iterator<Integer> iterator() {
-                    return s.iterator();
-                }
-
-                @Override
-                public int size() {
-                    return s.size();
-                }
-            }
-            db.addCollection(
-                    c -> new AbstractSetImpl(c));
-
-            class AbstractSortedSetImpl extends AbstractSet<Integer> implements SortedSet<Integer> {
-                SortedSet<Integer> s;
-
-                AbstractSortedSetImpl(Collection<Integer> c) {
-                    this.s = new TreeSet<>(c);
-                }
-
-                @Override
-                public Iterator<Integer> iterator() {
-                    return s.iterator();
-                }
-
-                @Override
-                public int size() {
-                    return s.size();
-                }
-
-                @Override
-                public Comparator<? super Integer> comparator() {
-                    return s.comparator();
-                }
-
-                @Override
-                public SortedSet<Integer> subSet(Integer fromElement, Integer toElement) {
-                    return s.subSet(fromElement, toElement);
-                }
-
-                @Override
-                public SortedSet<Integer> headSet(Integer toElement) {
-                    return s.headSet(toElement);
-                }
-
-                @Override
-                public SortedSet<Integer> tailSet(Integer fromElement) {
-                    return s.tailSet(fromElement);
-                }
-
-                @Override
-                public Integer first() {
-                    return s.first();
-                }
-
-                @Override
-                public Integer last() {
-                    return s.last();
-                }
-
-                @Override
-                public Spliterator<Integer> spliterator() {
-                    return SortedSet.super.spliterator();
-                }
-            }
-            db.addCollection(
-                    c -> new AbstractSortedSetImpl(c));
-
-            //
-
-            db.add("Arrays.asList().spliterator()",
-                   () -> Spliterators.spliterator(Arrays.asList(exp.toArray(new Integer[0])), 0));
-
-            db.addList(ArrayList::new);
-
-            db.addList(LinkedList::new);
-
-            db.addList(Vector::new);
-
-
-            db.addCollection(HashSet::new);
-
-            db.addCollection(LinkedHashSet::new);
-
-            db.addCollection(TreeSet::new);
-
-
-            db.addCollection(c -> { Stack<Integer> s = new Stack<>(); s.addAll(c); return s;});
-
-            db.addCollection(PriorityQueue::new);
-
-            db.addCollection(ArrayDeque::new);
-
-
-            db.addCollection(ConcurrentSkipListSet::new);
-
-            if (size > 0) {
-                db.addCollection(c -> {
-                    ArrayBlockingQueue<Integer> abq = new ArrayBlockingQueue<>(size);
-                    abq.addAll(c);
-                    return abq;
-                });
-            }
-
-            db.addCollection(PriorityBlockingQueue::new);
-
-            db.addCollection(LinkedBlockingQueue::new);
-
-            db.addCollection(LinkedTransferQueue::new);
-
-            db.addCollection(ConcurrentLinkedQueue::new);
-
-            db.addCollection(LinkedBlockingDeque::new);
-
-            db.addCollection(CopyOnWriteArrayList::new);
-
-            db.addCollection(CopyOnWriteArraySet::new);
-
-            if (size == 1) {
-                db.addCollection(c -> Collections.singleton(exp.get(0)));
-                db.addCollection(c -> Collections.singletonList(exp.get(0)));
-            }
-
-            // Collections.synchronized/unmodifiable/checked wrappers
-            db.addCollection(Collections::unmodifiableCollection);
-            db.addCollection(c -> Collections.unmodifiableSet(new HashSet<>(c)));
-            db.addCollection(c -> Collections.unmodifiableSortedSet(new TreeSet<>(c)));
-            db.addList(c -> Collections.unmodifiableList(new ArrayList<>(c)));
-            db.addMap(Collections::unmodifiableMap);
-            db.addMap(m -> Collections.unmodifiableSortedMap(new TreeMap<>(m)));
-
-            db.addCollection(Collections::synchronizedCollection);
-            db.addCollection(c -> Collections.synchronizedSet(new HashSet<>(c)));
-            db.addCollection(c -> Collections.synchronizedSortedSet(new TreeSet<>(c)));
-            db.addList(c -> Collections.synchronizedList(new ArrayList<>(c)));
-            db.addMap(Collections::synchronizedMap);
-            db.addMap(m -> Collections.synchronizedSortedMap(new TreeMap<>(m)));
-
-            db.addCollection(c -> Collections.checkedCollection(c, Integer.class));
-            db.addCollection(c -> Collections.checkedQueue(new ArrayDeque<>(c), Integer.class));
-            db.addCollection(c -> Collections.checkedSet(new HashSet<>(c), Integer.class));
-            db.addCollection(c -> Collections.checkedSortedSet(new TreeSet<>(c), Integer.class));
-            db.addList(c -> Collections.checkedList(new ArrayList<>(c), Integer.class));
-            db.addMap(c -> Collections.checkedMap(c, Integer.class, Integer.class));
-            db.addMap(m -> Collections.checkedSortedMap(new TreeMap<>(m), Integer.class, Integer.class));
-
-            // Maps
-
-            db.addMap(HashMap::new);
-
-            db.addMap(LinkedHashMap::new);
-
-            db.addMap(IdentityHashMap::new);
-
-            db.addMap(WeakHashMap::new);
-
-            // @@@  Descending maps etc
-            db.addMap(TreeMap::new);
-
-            db.addMap(ConcurrentHashMap::new);
-
-            db.addMap(ConcurrentSkipListMap::new);
-        }
-
-        return spliteratorDataProvider = data.toArray(new Object[0][]);
-    }
-
-    private static List<Integer> listIntRange(int upTo) {
-        List<Integer> exp = new ArrayList<>();
-        for (int i = 0; i < upTo; i++)
-            exp.add(i);
-        return Collections.unmodifiableList(exp);
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testNullPointerException(String description, Collection exp, Supplier<Spliterator> s) {
-        executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining(null));
-        executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance(null));
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testForEach(String description, Collection exp, Supplier<Spliterator> s) {
-        testForEach(exp, s, (Consumer<Object> b) -> b);
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testTryAdvance(String description, Collection exp, Supplier<Spliterator> s) {
-        testTryAdvance(exp, s, (Consumer<Object> b) -> b);
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testMixedTryAdvanceForEach(String description, Collection exp, Supplier<Spliterator> s) {
-        testMixedTryAdvanceForEach(exp, s, (Consumer<Object> b) -> b);
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testMixedTraverseAndSplit(String description, Collection exp, Supplier<Spliterator> s) {
-        testMixedTraverseAndSplit(exp, s, (Consumer<Object> b) -> b);
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testSplitAfterFullTraversal(String description, Collection exp, Supplier<Spliterator> s) {
-        testSplitAfterFullTraversal(s, (Consumer<Object> b) -> b);
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testSplitOnce(String description, Collection exp, Supplier<Spliterator> s) {
-        testSplitOnce(exp, s, (Consumer<Object> b) -> b);
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testSplitSixDeep(String description, Collection exp, Supplier<Spliterator> s) {
-        testSplitSixDeep(exp, s, (Consumer<Object> b) -> b);
-    }
-
-    @Test(dataProvider = "Spliterator<Integer>")
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    public void testSplitUntilNull(String description, Collection exp, Supplier<Spliterator> s) {
-        testSplitUntilNull(exp, s, (Consumer<Object> b) -> b);
-    }
-
-    //
-
-    private static class SpliteratorOfIntDataBuilder {
-        List<Object[]> data;
-
-        List<Integer> exp;
-
-        SpliteratorOfIntDataBuilder(List<Object[]> data, List<Integer> exp) {
-            this.data = data;
-            this.exp = exp;
-        }
-
-        void add(String description, List<Integer> expected, Supplier<Spliterator.OfInt> s) {
-            description = joiner(description).toString();
-            data.add(new Object[]{description, expected, s});
-        }
-
-        void add(String description, Supplier<Spliterator.OfInt> s) {
-            add(description, exp, s);
-        }
-
-        StringBuilder joiner(String description) {
-            return new StringBuilder(description).
-                    append(" {").
-                    append("size=").append(exp.size()).
-                    append("}");
-        }
-    }
-
-    static Object[][] spliteratorOfIntDataProvider;
-
-    @DataProvider(name = "Spliterator.OfInt")
-    public static Object[][] spliteratorOfIntDataProvider() {
-        if (spliteratorOfIntDataProvider != null) {
-            return spliteratorOfIntDataProvider;
-        }
-
-        List<Object[]> data = new ArrayList<>();
-        for (int size : SIZES) {
-            int[] exp = arrayIntRange(size);
-            SpliteratorOfIntDataBuilder db = new SpliteratorOfIntDataBuilder(data, listIntRange(size));
-
-            db.add("Spliterators.spliterator(int[], ...)",
-                   () -> Spliterators.spliterator(exp, 0));
-
-            db.add("Arrays.spliterator(int[], ...)",
-                   () -> Arrays.spliterator(exp));
-
-            db.add("Spliterators.spliterator(PrimitiveIterator.OfInt, ...)",
-                   () -> Spliterators.spliterator(Spliterators.iteratorFromSpliterator(Arrays.spliterator(exp)), exp.length, 0));
-
-            db.add("Spliterators.spliteratorUnknownSize(PrimitiveIterator.OfInt, ...)",
-                   () -> Spliterators.spliteratorUnknownSize(Spliterators.iteratorFromSpliterator(Arrays.spliterator(exp)), 0));
-
-            class IntSpliteratorFromArray extends Spliterators.AbstractIntSpliterator {
-                int[] a;
-                int index = 0;
-
-                IntSpliteratorFromArray(int[] a) {
-                    super(a.length, Spliterator.SIZED);
-                    this.a = a;
-                }
-
-                @Override
-                public boolean tryAdvance(IntConsumer action) {
-                    if (action == null)
-                        throw new NullPointerException();
-                    if (index < a.length) {
-                        action.accept(a[index++]);
-                        return true;
-                    }
-                    else {
-                        return false;
-                    }
-                }
-            }
-            db.add("new Spliterators.AbstractIntAdvancingSpliterator()",
-                   () -> new IntSpliteratorFromArray(exp));
-        }
-
-        return spliteratorOfIntDataProvider = data.toArray(new Object[0][]);
-    }
-
-    private static int[] arrayIntRange(int upTo) {
-        int[] exp = new int[upTo];
-        for (int i = 0; i < upTo; i++)
-            exp[i] = i;
-        return exp;
-    }
-
-    private static UnaryOperator<Consumer<Integer>> intBoxingConsumer() {
-        class BoxingAdapter implements Consumer<Integer>, IntConsumer {
-            private final Consumer<Integer> b;
-
-            BoxingAdapter(Consumer<Integer> b) {
-                this.b = b;
-            }
-
-            @Override
-            public void accept(Integer value) {
-                throw new IllegalStateException();
-            }
-
-            @Override
-            public void accept(int value) {
-                b.accept(value);
-            }
-        }
-
-        return b -> new BoxingAdapter(b);
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntNullPointerException(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining((IntConsumer) null));
-        executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance((IntConsumer) null));
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntForEach(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        testForEach(exp, s, intBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntTryAdvance(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        testTryAdvance(exp, s, intBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntMixedTryAdvanceForEach(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        testMixedTryAdvanceForEach(exp, s, intBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntMixedTraverseAndSplit(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        testMixedTraverseAndSplit(exp, s, intBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntSplitAfterFullTraversal(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        testSplitAfterFullTraversal(s, intBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntSplitOnce(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        testSplitOnce(exp, s, intBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntSplitSixDeep(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        testSplitSixDeep(exp, s, intBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfInt")
-    public void testIntSplitUntilNull(String description, Collection<Integer> exp, Supplier<Spliterator.OfInt> s) {
-        testSplitUntilNull(exp, s, intBoxingConsumer());
-    }
-
-    //
-
-    private static class SpliteratorOfLongDataBuilder {
-        List<Object[]> data;
-
-        List<Long> exp;
-
-        SpliteratorOfLongDataBuilder(List<Object[]> data, List<Long> exp) {
-            this.data = data;
-            this.exp = exp;
-        }
-
-        void add(String description, List<Long> expected, Supplier<Spliterator.OfLong> s) {
-            description = joiner(description).toString();
-            data.add(new Object[]{description, expected, s});
-        }
-
-        void add(String description, Supplier<Spliterator.OfLong> s) {
-            add(description, exp, s);
-        }
-
-        StringBuilder joiner(String description) {
-            return new StringBuilder(description).
-                    append(" {").
-                    append("size=").append(exp.size()).
-                    append("}");
-        }
-    }
-
-    static Object[][] spliteratorOfLongDataProvider;
-
-    @DataProvider(name = "Spliterator.OfLong")
-    public static Object[][] spliteratorOfLongDataProvider() {
-        if (spliteratorOfLongDataProvider != null) {
-            return spliteratorOfLongDataProvider;
-        }
-
-        List<Object[]> data = new ArrayList<>();
-        for (int size : SIZES) {
-            long[] exp = arrayLongRange(size);
-            SpliteratorOfLongDataBuilder db = new SpliteratorOfLongDataBuilder(data, listLongRange(size));
-
-            db.add("Spliterators.spliterator(long[], ...)",
-                   () -> Spliterators.spliterator(exp, 0));
-
-            db.add("Arrays.spliterator(long[], ...)",
-                   () -> Arrays.spliterator(exp));
-
-            db.add("Spliterators.spliterator(PrimitiveIterator.OfLong, ...)",
-                   () -> Spliterators.spliterator(Spliterators.iteratorFromSpliterator(Arrays.spliterator(exp)), exp.length, 0));
-
-            db.add("Spliterators.spliteratorUnknownSize(PrimitiveIterator.OfLong, ...)",
-                   () -> Spliterators.spliteratorUnknownSize(Spliterators.iteratorFromSpliterator(Arrays.spliterator(exp)), 0));
-
-            class LongSpliteratorFromArray extends Spliterators.AbstractLongSpliterator {
-                long[] a;
-                int index = 0;
-
-                LongSpliteratorFromArray(long[] a) {
-                    super(a.length, Spliterator.SIZED);
-                    this.a = a;
-                }
-
-                @Override
-                public boolean tryAdvance(LongConsumer action) {
-                    if (action == null)
-                        throw new NullPointerException();
-                    if (index < a.length) {
-                        action.accept(a[index++]);
-                        return true;
-                    }
-                    else {
-                        return false;
-                    }
-                }
-            }
-            db.add("new Spliterators.AbstractLongAdvancingSpliterator()",
-                   () -> new LongSpliteratorFromArray(exp));
-        }
-
-        return spliteratorOfLongDataProvider = data.toArray(new Object[0][]);
-    }
-
-    private static List<Long> listLongRange(int upTo) {
-        List<Long> exp = new ArrayList<>();
-        for (long i = 0; i < upTo; i++)
-            exp.add(i);
-        return Collections.unmodifiableList(exp);
-    }
-
-    private static long[] arrayLongRange(int upTo) {
-        long[] exp = new long[upTo];
-        for (int i = 0; i < upTo; i++)
-            exp[i] = i;
-        return exp;
-    }
-
-    private static UnaryOperator<Consumer<Long>> longBoxingConsumer() {
-        class BoxingAdapter implements Consumer<Long>, LongConsumer {
-            private final Consumer<Long> b;
-
-            BoxingAdapter(Consumer<Long> b) {
-                this.b = b;
-            }
-
-            @Override
-            public void accept(Long value) {
-                throw new IllegalStateException();
-            }
-
-            @Override
-            public void accept(long value) {
-                b.accept(value);
-            }
-        }
-
-        return b -> new BoxingAdapter(b);
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongNullPointerException(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining((LongConsumer) null));
-        executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance((LongConsumer) null));
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongForEach(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        testForEach(exp, s, longBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongTryAdvance(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        testTryAdvance(exp, s, longBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongMixedTryAdvanceForEach(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        testMixedTryAdvanceForEach(exp, s, longBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongMixedTraverseAndSplit(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        testMixedTraverseAndSplit(exp, s, longBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongSplitAfterFullTraversal(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        testSplitAfterFullTraversal(s, longBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongSplitOnce(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        testSplitOnce(exp, s, longBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongSplitSixDeep(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        testSplitSixDeep(exp, s, longBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfLong")
-    public void testLongSplitUntilNull(String description, Collection<Long> exp, Supplier<Spliterator.OfLong> s) {
-        testSplitUntilNull(exp, s, longBoxingConsumer());
-    }
-
-    //
-
-    private static class SpliteratorOfDoubleDataBuilder {
-        List<Object[]> data;
-
-        List<Double> exp;
-
-        SpliteratorOfDoubleDataBuilder(List<Object[]> data, List<Double> exp) {
-            this.data = data;
-            this.exp = exp;
-        }
-
-        void add(String description, List<Double> expected, Supplier<Spliterator.OfDouble> s) {
-            description = joiner(description).toString();
-            data.add(new Object[]{description, expected, s});
-        }
-
-        void add(String description, Supplier<Spliterator.OfDouble> s) {
-            add(description, exp, s);
-        }
-
-        StringBuilder joiner(String description) {
-            return new StringBuilder(description).
-                    append(" {").
-                    append("size=").append(exp.size()).
-                    append("}");
-        }
-    }
-
-    static Object[][] spliteratorOfDoubleDataProvider;
-
-    @DataProvider(name = "Spliterator.OfDouble")
-    public static Object[][] spliteratorOfDoubleDataProvider() {
-        if (spliteratorOfDoubleDataProvider != null) {
-            return spliteratorOfDoubleDataProvider;
-        }
-
-        List<Object[]> data = new ArrayList<>();
-        for (int size : SIZES) {
-            double[] exp = arrayDoubleRange(size);
-            SpliteratorOfDoubleDataBuilder db = new SpliteratorOfDoubleDataBuilder(data, listDoubleRange(size));
-
-            db.add("Spliterators.spliterator(double[], ...)",
-                   () -> Spliterators.spliterator(exp, 0));
-
-            db.add("Arrays.spliterator(double[], ...)",
-                   () -> Arrays.spliterator(exp));
-
-            db.add("Spliterators.spliterator(PrimitiveIterator.OfDouble, ...)",
-                   () -> Spliterators.spliterator(Spliterators.iteratorFromSpliterator(Arrays.spliterator(exp)), exp.length, 0));
-
-            db.add("Spliterators.spliteratorUnknownSize(PrimitiveIterator.OfDouble, ...)",
-                   () -> Spliterators.spliteratorUnknownSize(Spliterators.iteratorFromSpliterator(Arrays.spliterator(exp)), 0));
-
-            class DoubleSpliteratorFromArray extends Spliterators.AbstractDoubleSpliterator {
-                double[] a;
-                int index = 0;
-
-                DoubleSpliteratorFromArray(double[] a) {
-                    super(a.length, Spliterator.SIZED);
-                    this.a = a;
-                }
-
-                @Override
-                public boolean tryAdvance(DoubleConsumer action) {
-                    if (action == null)
-                        throw new NullPointerException();
-                    if (index < a.length) {
-                        action.accept(a[index++]);
-                        return true;
-                    }
-                    else {
-                        return false;
-                    }
-                }
-            }
-            db.add("new Spliterators.AbstractDoubleAdvancingSpliterator()",
-                   () -> new DoubleSpliteratorFromArray(exp));
-        }
-
-        return spliteratorOfDoubleDataProvider = data.toArray(new Object[0][]);
-    }
-
-    private static List<Double> listDoubleRange(int upTo) {
-        List<Double> exp = new ArrayList<>();
-        for (double i = 0; i < upTo; i++)
-            exp.add(i);
-        return Collections.unmodifiableList(exp);
-    }
-
-    private static double[] arrayDoubleRange(int upTo) {
-        double[] exp = new double[upTo];
-        for (int i = 0; i < upTo; i++)
-            exp[i] = i;
-        return exp;
-    }
-
-    private static UnaryOperator<Consumer<Double>> doubleBoxingConsumer() {
-        class BoxingAdapter implements Consumer<Double>, DoubleConsumer {
-            private final Consumer<Double> b;
-
-            BoxingAdapter(Consumer<Double> b) {
-                this.b = b;
-            }
-
-            @Override
-            public void accept(Double value) {
-                throw new IllegalStateException();
-            }
-
-            @Override
-            public void accept(double value) {
-                b.accept(value);
-            }
-        }
-
-        return b -> new BoxingAdapter(b);
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleNullPointerException(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining((DoubleConsumer) null));
-        executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance((DoubleConsumer) null));
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleForEach(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        testForEach(exp, s, doubleBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleTryAdvance(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        testTryAdvance(exp, s, doubleBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleMixedTryAdvanceForEach(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        testMixedTryAdvanceForEach(exp, s, doubleBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleMixedTraverseAndSplit(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        testMixedTraverseAndSplit(exp, s, doubleBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleSplitAfterFullTraversal(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        testSplitAfterFullTraversal(s, doubleBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleSplitOnce(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        testSplitOnce(exp, s, doubleBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleSplitSixDeep(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        testSplitSixDeep(exp, s, doubleBoxingConsumer());
-    }
-
-    @Test(dataProvider = "Spliterator.OfDouble")
-    public void testDoubleSplitUntilNull(String description, Collection<Double> exp, Supplier<Spliterator.OfDouble> s) {
-        testSplitUntilNull(exp, s, doubleBoxingConsumer());
-    }
-
-    //
-
-    private static <T, S extends Spliterator<T>> void testForEach(
-            Collection<T> exp,
-            Supplier<S> supplier,
-            UnaryOperator<Consumer<T>> boxingAdapter) {
-        S spliterator = supplier.get();
-        long sizeIfKnown = spliterator.getExactSizeIfKnown();
-        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
-
-        ArrayList<T> fromForEach = new ArrayList<>();
-        spliterator = supplier.get();
-        Consumer<T> addToFromForEach = boxingAdapter.apply(fromForEach::add);
-        spliterator.forEachRemaining(addToFromForEach);
-
-        // Assert that forEach now produces no elements
-        spliterator.forEachRemaining(boxingAdapter.apply(
-                e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
-        // Assert that tryAdvance now produces no elements
-        spliterator.tryAdvance(boxingAdapter.apply(
-                e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));
-
-        // assert that size, tryAdvance, and forEach are consistent
-        if (sizeIfKnown >= 0) {
-            assertEquals(sizeIfKnown, exp.size());
-        }
-        assertEquals(fromForEach.size(), exp.size());
-
-        assertContents(fromForEach, exp, isOrdered);
-    }
-
-    private static <T, S extends Spliterator<T>> void testTryAdvance(
-            Collection<T> exp,
-            Supplier<S> supplier,
-            UnaryOperator<Consumer<T>> boxingAdapter) {
-        S spliterator = supplier.get();
-        long sizeIfKnown = spliterator.getExactSizeIfKnown();
-        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
-
-        spliterator = supplier.get();
-        ArrayList<T> fromTryAdvance = new ArrayList<>();
-        Consumer<T> addToFromTryAdvance = boxingAdapter.apply(fromTryAdvance::add);
-        while (spliterator.tryAdvance(addToFromTryAdvance)) { }
-
-        // Assert that forEach now produces no elements
-        spliterator.forEachRemaining(boxingAdapter.apply(
-                e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
-        // Assert that tryAdvance now produces no elements
-        spliterator.tryAdvance(boxingAdapter.apply(
-                e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));
-
-        // assert that size, tryAdvance, and forEach are consistent
-        if (sizeIfKnown >= 0) {
-            assertEquals(sizeIfKnown, exp.size());
-        }
-        assertEquals(fromTryAdvance.size(), exp.size());
-
-        assertContents(fromTryAdvance, exp, isOrdered);
-    }
-
-    private static <T, S extends Spliterator<T>> void testMixedTryAdvanceForEach(
-            Collection<T> exp,
-            Supplier<S> supplier,
-            UnaryOperator<Consumer<T>> boxingAdapter) {
-        S spliterator = supplier.get();
-        long sizeIfKnown = spliterator.getExactSizeIfKnown();
-        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
-
-        // tryAdvance first few elements, then forEach rest
-        ArrayList<T> dest = new ArrayList<>();
-        spliterator = supplier.get();
-        Consumer<T> addToDest = boxingAdapter.apply(dest::add);
-        for (int i = 0; i < 10 && spliterator.tryAdvance(addToDest); i++) { }
-        spliterator.forEachRemaining(addToDest);
-
-        // Assert that forEach now produces no elements
-        spliterator.forEachRemaining(boxingAdapter.apply(
-                e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
-        // Assert that tryAdvance now produces no elements
-        spliterator.tryAdvance(boxingAdapter.apply(
-                e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));
-
-        if (sizeIfKnown >= 0) {
-            assertEquals(sizeIfKnown, dest.size());
-        }
-        assertEquals(dest.size(), exp.size());
-
-        if (isOrdered) {
-            assertEquals(dest, exp);
-        }
-        else {
-            assertContentsUnordered(dest, exp);
-        }
-    }
-
-    private static <T, S extends Spliterator<T>> void testMixedTraverseAndSplit(
-            Collection<T> exp,
-            Supplier<S> supplier,
-            UnaryOperator<Consumer<T>> boxingAdapter) {
-        S spliterator = supplier.get();
-        long sizeIfKnown = spliterator.getExactSizeIfKnown();
-        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
-
-        ArrayList<T> dest = new ArrayList<>();
-        spliterator = supplier.get();
-        Consumer<T> b = boxingAdapter.apply(dest::add);
-
-        Spliterator<T> spl1, spl2, spl3;
-        spliterator.tryAdvance(b);
-        spl2 = spliterator.trySplit();
-        if (spl2 != null) {
-            spl2.tryAdvance(b);
-            spl1 = spl2.trySplit();
-            if (spl1 != null) {
-                spl1.tryAdvance(b);
-                spl1.forEachRemaining(b);
-            }
-            spl2.tryAdvance(b);
-            spl2.forEachRemaining(b);
-        }
-        spliterator.tryAdvance(b);
-        spl3 = spliterator.trySplit();
-        if (spl3 != null) {
-            spl3.tryAdvance(b);
-            spl3.forEachRemaining(b);
-        }
-        spliterator.tryAdvance(b);
-        spliterator.forEachRemaining(b);
-
-        if (sizeIfKnown >= 0) {
-            assertEquals(sizeIfKnown, dest.size());
-        }
-        assertEquals(dest.size(), exp.size());
-
-        if (isOrdered) {
-            assertEquals(dest, exp);
-        }
-        else {
-            assertContentsUnordered(dest, exp);
-        }
-    }
-
-    private static <T, S extends Spliterator<T>> void testSplitAfterFullTraversal(
-            Supplier<S> supplier,
-            UnaryOperator<Consumer<T>> boxingAdapter) {
-        // Full traversal using tryAdvance
-        Spliterator<T> spliterator = supplier.get();
-        while (spliterator.tryAdvance(boxingAdapter.apply(e -> { }))) { }
-        Spliterator<T> split = spliterator.trySplit();
-        assertNull(split);
-
-        // Full traversal using forEach
-        spliterator = supplier.get();
-        spliterator.forEachRemaining(boxingAdapter.apply(e -> {
-        }));
-        split = spliterator.trySplit();
-        assertNull(split);
-
-        // Full traversal using tryAdvance then forEach
-        spliterator = supplier.get();
-        spliterator.tryAdvance(boxingAdapter.apply(e -> { }));
-        spliterator.forEachRemaining(boxingAdapter.apply(e -> {
-        }));
-        split = spliterator.trySplit();
-        assertNull(split);
-    }
-
-    private static <T, S extends Spliterator<T>> void testSplitOnce(
-            Collection<T> exp,
-            Supplier<S> supplier,
-            UnaryOperator<Consumer<T>> boxingAdapter) {
-        S spliterator = supplier.get();
-        long sizeIfKnown = spliterator.getExactSizeIfKnown();
-        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
-
-        ArrayList<T> fromSplit = new ArrayList<>();
-        Spliterator<T> s1 = supplier.get();
-        Spliterator<T> s2 = s1.trySplit();
-        long s1Size = s1.getExactSizeIfKnown();
-        long s2Size = (s2 != null) ? s2.getExactSizeIfKnown() : 0;
-        Consumer<T> addToFromSplit = boxingAdapter.apply(fromSplit::add);
-        if (s2 != null)
-            s2.forEachRemaining(addToFromSplit);
-        s1.forEachRemaining(addToFromSplit);
-
-        if (sizeIfKnown >= 0) {
-            assertEquals(sizeIfKnown, fromSplit.size());
-            if (s1Size >= 0 && s2Size >= 0)
-                assertEquals(sizeIfKnown, s1Size + s2Size);
-        }
-        assertContents(fromSplit, exp, isOrdered);
-    }
-
-    private static <T, S extends Spliterator<T>> void testSplitSixDeep(
-            Collection<T> exp,
-            Supplier<S> supplier,
-            UnaryOperator<Consumer<T>> boxingAdapter) {
-        S spliterator = supplier.get();
-        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);
-
-        for (int depth=0; depth < 6; depth++) {
-            List<T> dest = new ArrayList<>();
-            spliterator = supplier.get();
-
-            assertSpliterator(spliterator);
-
-            // verify splitting with forEach
-            visit(depth, 0, dest, spliterator, boxingAdapter, spliterator.characteristics(), false);
-            assertContents(dest, exp, isOrdered);
-
-            // verify splitting with tryAdvance
-            dest.clear();
-            spliterator = supplier.get();
-            visit(depth, 0, dest, spliterator, boxingAdapter, spliterator.characteristics(), true);
-            assertContents(dest, exp, isOrdered);
-        }
-    }
-
-    private static <T, S extends Spliterator<T>>
-    void visit(int depth, int curLevel,
-               List<T> dest, S spliterator, UnaryOperator<Consumer<T>> boxingAdapter,
-               int rootCharacteristics, boolean useTryAdvance) {
-        if (curLevel < depth) {
-            long beforeSize = spliterator.getExactSizeIfKnown();
-            Spliterator<T> split = spliterator.trySplit();
-            if (split != null) {
-                assertSpliterator(split, rootCharacteristics);
-                assertSpliterator(spliterator, rootCharacteristics);
-
-                if ((rootCharacteristics & Spliterator.SUBSIZED) != 0 &&
-                    (rootCharacteristics & Spliterator.SIZED) != 0) {
-                    assertEquals(beforeSize, split.estimateSize() + spliterator.estimateSize());
-                }
-                visit(depth, curLevel + 1, dest, split, boxingAdapter, rootCharacteristics, useTryAdvance);
-            }
-            visit(depth, curLevel + 1, dest, spliterator, boxingAdapter, rootCharacteristics, useTryAdvance);
-        }
-        else {
-            long sizeIfKnown = spliterator.getExactSizeIfKnown();
-            if (useTryAdvance) {
-                Consumer<T> addToDest = boxingAdapter.apply(dest::add);
-                int count = 0;
-                while (spliterator.tryAdvance(addToDest)) {
-                    ++count;
-                }
-
-                if (sizeIfKnown >= 0)
-                    assertEquals(sizeIfKnown, count);
-
-                // Assert that forEach now produces no elements
-                spliterator.forEachRemaining(boxingAdapter.apply(
-                        e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
-
-                Spliterator<T> split = spliterator.trySplit();
-                assertNull(split);
-            }
-            else {
-                List<T> leafDest = new ArrayList<>();
-                Consumer<T> addToLeafDest = boxingAdapter.apply(leafDest::add);
-                spliterator.forEachRemaining(addToLeafDest);
-
-                if (sizeIfKnown >= 0)
-                    assertEquals(sizeIfKnown, leafDest.size());
-
-                // Assert that forEach now produces no elements
-                spliterator.tryAdvance(boxingAdapter.apply(
-                        e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));
-
-                Spliterator<T> split = spliterator.trySplit();
-                assertNull(split);
-
-                dest.addAll(leafDest);
-            }
-        }
-    }
-
-    private static <T, S extends Spliterator<T>> void testSplitUntilNull(
-            Collection<T> exp,
-            Supplier<S> supplier,
-            UnaryOperator<Consumer<T>> boxingAdapter) {
-        Spliterator<T> s = supplier.get();
-        boolean isOrdered = s.hasCharacteristics(Spliterator.ORDERED);
-        assertSpliterator(s);
-
-        List<T> splits = new ArrayList<>();
-        Consumer<T> c = boxingAdapter.apply(splits::add);
-
-        testSplitUntilNull(new SplitNode<T>(c, s));
-        assertContents(splits, exp, isOrdered);
-    }
-
-    private static class SplitNode<T> {
-        // Constant for every node
-        final Consumer<T> c;
-        final int rootCharacteristics;
-
-        final Spliterator<T> s;
-
-        SplitNode(Consumer<T> c, Spliterator<T> s) {
-            this(c, s.characteristics(), s);
-        }
-
-        private SplitNode(Consumer<T> c, int rootCharacteristics, Spliterator<T> s) {
-            this.c = c;
-            this.rootCharacteristics = rootCharacteristics;
-            this.s = s;
-        }
-
-        SplitNode<T> fromSplit(Spliterator<T> split) {
-            return new SplitNode<>(c, rootCharacteristics, split);
-        }
-    }
-
-    /**
-     * Set the maximum stack capacity to 0.25MB. This should be more than enough to detect a bad spliterator
-     * while not unduly disrupting test infrastructure given the test data sizes that are used are small.
-     * Note that j.u.c.ForkJoinPool sets the max queue size to 64M (1 << 26).
-     */
-    private static final int MAXIMUM_STACK_CAPACITY = 1 << 18; // 0.25MB
-
-    private static <T> void testSplitUntilNull(SplitNode<T> e) {
-        // Use an explicit stack to avoid a StackOverflowException when testing a Spliterator
-        // that when repeatedly split produces a right-balanced (and maybe degenerate) tree, or
-        // for a spliterator that is badly behaved.
-        Deque<SplitNode<T>> stack = new ArrayDeque<>();
-        stack.push(e);
-
-        int iteration = 0;
-        while (!stack.isEmpty()) {
-            assertTrue(iteration++ < MAXIMUM_STACK_CAPACITY, "Exceeded maximum stack modification count of 1 << 18");
-
-            e = stack.pop();
-            Spliterator<T> parentAndRightSplit = e.s;
-
-            long parentEstimateSize = parentAndRightSplit.estimateSize();
-            assertTrue(parentEstimateSize >= 0,
-                       String.format("Split size estimate %d < 0", parentEstimateSize));
-
-            long parentSize = parentAndRightSplit.getExactSizeIfKnown();
-            Spliterator<T> leftSplit = parentAndRightSplit.trySplit();
-            if (leftSplit == null) {
-                parentAndRightSplit.forEachRemaining(e.c);
-                continue;
-            }
-
-            assertSpliterator(leftSplit, e.rootCharacteristics);
-            assertSpliterator(parentAndRightSplit, e.rootCharacteristics);
-
-            if (parentEstimateSize != Long.MAX_VALUE && leftSplit.estimateSize() > 0 && parentAndRightSplit.estimateSize() > 0) {
-                assertTrue(leftSplit.estimateSize() < parentEstimateSize,
-                           String.format("Left split size estimate %d >= parent split size estimate %d",
-                                         leftSplit.estimateSize(), parentEstimateSize));
-                assertTrue(parentAndRightSplit.estimateSize() < parentEstimateSize,
-                           String.format("Right split size estimate %d >= parent split size estimate %d",
-                                         leftSplit.estimateSize(), parentEstimateSize));
-            }
-            else {
-                assertTrue(leftSplit.estimateSize() <= parentEstimateSize,
-                           String.format("Left split size estimate %d > parent split size estimate %d",
-                                         leftSplit.estimateSize(), parentEstimateSize));
-                assertTrue(parentAndRightSplit.estimateSize() <= parentEstimateSize,
-                           String.format("Right split size estimate %d > parent split size estimate %d",
-                                         leftSplit.estimateSize(), parentEstimateSize));
-            }
-
-            long leftSize = leftSplit.getExactSizeIfKnown();
-            long rightSize = parentAndRightSplit.getExactSizeIfKnown();
-            if (parentSize >= 0 && leftSize >= 0 && rightSize >= 0)
-                assertEquals(parentSize, leftSize + rightSize,
-                             String.format("exact left split size %d + exact right split size %d != parent exact split size %d",
-                                           leftSize, rightSize, parentSize));
-
-            // Add right side to stack first so left side is popped off first
-            stack.push(e.fromSplit(parentAndRightSplit));
-            stack.push(e.fromSplit(leftSplit));
-        }
-    }
-
-    private static void assertSpliterator(Spliterator<?> s, int rootCharacteristics) {
-        if ((rootCharacteristics & Spliterator.SUBSIZED) != 0) {
-            assertTrue(s.hasCharacteristics(Spliterator.SUBSIZED),
-                       "Child split is not SUBSIZED when root split is SUBSIZED");
-        }
-        assertSpliterator(s);
-    }
-
-    private static void assertSpliterator(Spliterator<?> s) {
-        if (s.hasCharacteristics(Spliterator.SUBSIZED)) {
-            assertTrue(s.hasCharacteristics(Spliterator.SIZED));
-        }
-        if (s.hasCharacteristics(Spliterator.SIZED)) {
-            assertTrue(s.estimateSize() != Long.MAX_VALUE);
-            assertTrue(s.getExactSizeIfKnown() >= 0);
-        }
-        try {
-            s.getComparator();
-            assertTrue(s.hasCharacteristics(Spliterator.SORTED));
-        } catch (IllegalStateException e) {
-            assertFalse(s.hasCharacteristics(Spliterator.SORTED));
-        }
-    }
-
-    private static<T> void assertContents(Collection<T> actual, Collection<T> expected, boolean isOrdered) {
-        if (isOrdered) {
-            assertEquals(actual, expected);
-        }
-        else {
-            assertContentsUnordered(actual, expected);
-        }
-    }
-
-    private static<T> void assertContentsUnordered(Iterable<T> actual, Iterable<T> expected) {
-        assertEquals(toBoxedMultiset(actual), toBoxedMultiset(expected));
-    }
-
-    private static <T> Map<T, Integer> toBoxedMultiset(Iterable<T> c) {
-        Map<T, Integer> result = new HashMap<>();
-        c.forEach(e -> {
-            if (result.containsKey(e)) result.put(e, result.get(e) + 1);
-            else result.put(e, 1);
-        });
-        return result;
-    }
-
-    private void executeAndCatch(Class<? extends Exception> expected, Runnable r) {
-        Exception caught = null;
-        try {
-            r.run();
-        }
-        catch (Exception e) {
-            caught = e;
-        }
-
-        assertNotNull(caught,
-                      String.format("No Exception was thrown, expected an Exception of %s to be thrown",
-                                    expected.getName()));
-        assertTrue(expected.isInstance(caught),
-                   String.format("Exception thrown %s not an instance of %s",
-                                 caught.getClass().getName(), expected.getName()));
-    }
-
-}
--- a/jdk/test/java/util/zip/StoredCRC.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/zip/StoredCRC.java	Wed Jul 05 18:59:05 2017 +0200
@@ -77,9 +77,9 @@
                 unexpected(t);
             }
 
-            // Test that data corruption is detected.  Offset 39 was
+            // Test that data corruption is detected.  "offset" was
             // determined to be in the entry's uncompressed data.
-            data[39] ^= 1;
+            data[getDataOffset(data) + 4] ^= 1;
 
             zis = new ZipInputStream(
                 new ByteArrayInputStream(data));
@@ -97,6 +97,15 @@
         }
     }
 
+    public static int getDataOffset(byte[] b) {
+        final int LOCHDR = 30;       // LOC header size
+        final int LOCEXT = 28;       // extra field length
+        final int LOCNAM = 26;       // filename length
+        int lenExt = Byte.toUnsignedInt(b[LOCEXT]) | (Byte.toUnsignedInt(b[LOCEXT + 1]) << 8);
+        int lenNam = Byte.toUnsignedInt(b[LOCNAM]) | (Byte.toUnsignedInt(b[LOCNAM + 1]) << 8);
+        return LOCHDR + lenExt + lenNam;
+    }
+
     //--------------------- Infrastructure ---------------------------
     static volatile int passed = 0, failed = 0;
     static boolean pass() {passed++; return true;}
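
For reference, a minimal standalone sketch (not part of the patch; class and method names are illustrative) of the LOC-header arithmetic behind the new getDataOffset helper: a ZIP local file header is 30 bytes, the file-name length is a little-endian 16-bit value at offset 26, and the extra-field length is at offset 28, so the entry's data begins at 30 + nameLen + extraLen.

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class LocOffsetSketch {
    // Little-endian unsigned 16-bit read, as in the test helpers.
    static int get16(byte[] b, int off) {
        return Byte.toUnsignedInt(b[off]) | (Byte.toUnsignedInt(b[off + 1]) << 8);
    }

    // Offset of the first entry's (possibly compressed) data in a ZIP byte array.
    static int firstEntryDataOffset(byte[] zip) {
        final int LOCHDR = 30;   // fixed size of the local file header
        final int LOCNAM = 26;   // offset of the file-name length field
        final int LOCEXT = 28;   // offset of the extra-field length field
        return LOCHDR + get16(zip, LOCNAM) + get16(zip, LOCEXT);
    }

    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ZipOutputStream zos = new ZipOutputStream(baos)) {
            zos.putNextEntry(new ZipEntry("a.txt"));
            zos.write("hello".getBytes(StandardCharsets.US_ASCII));
            zos.closeEntry();
        }
        System.out.println("first entry data starts at offset "
                + firstEntryDataOffset(baos.toByteArray()));
    }
}
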
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/util/zip/TestExtraTime.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * @test
+ * @bug 4759491 6303183 7012868
+ * @summary Test that ZipOutputStream and ZipInputStream handle the timestamp in the extra field correctly
+ */
+
+import java.io.*;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+
+public class TestExtraTime {
+
+    public static void main(String[] args) throws Throwable {
+
+        File src = new File(System.getProperty("test.src", "."), "TestExtraTime.java");
+        if (src.exists()) {
+            long mtime = src.lastModified();
+            test(mtime, null);
+            test(10, null);  // times before the MS-DOS epoch (1980) cannot be stored as DOS time
+            test(mtime, TimeZone.getTimeZone("Asia/Shanghai"));
+        }
+    }
+
+    private static void test(long mtime, TimeZone tz) throws Throwable {
+        TimeZone tz0 = TimeZone.getDefault();
+        if (tz != null) {
+            TimeZone.setDefault(tz);
+        }
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        ZipOutputStream zos = new ZipOutputStream(baos);
+        ZipEntry ze = new ZipEntry("TestExtraTime.java");
+
+        ze.setTime(mtime);
+        zos.putNextEntry(ze);
+        zos.write(new byte[] { 1, 2, 3, 4 });
+        zos.close();
+        if (tz != null) {
+            TimeZone.setDefault(tz0);
+        }
+        ZipInputStream zis = new ZipInputStream(
+                                 new ByteArrayInputStream(baos.toByteArray()));
+        ze = zis.getNextEntry();
+        zis.close();
+
+        System.out.printf("%tc  => %tc%n", mtime, ze.getTime());
+
+        if (TimeUnit.MILLISECONDS.toSeconds(mtime) !=
+            TimeUnit.MILLISECONDS.toSeconds(ze.getTime()))
+            throw new RuntimeException("Timestamp storing failed!");
+
+    }
+}
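
A minimal round-trip sketch (not part of the patch; names are illustrative) of why the test compares timestamps at second granularity: plain DOS timestamps in a ZIP entry have only 2-second resolution and cannot represent times before 1980, so anything finer than a second can only survive if an extended-timestamp extra field is written and read back.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

public class ZipTimeRoundTripSketch {
    public static void main(String[] args) throws Exception {
        long mtime = System.currentTimeMillis();

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ZipOutputStream zos = new ZipOutputStream(baos)) {
            ZipEntry ze = new ZipEntry("entry");
            ze.setTime(mtime);
            zos.putNextEntry(ze);
            zos.write(new byte[] { 1, 2, 3, 4 });
        }

        try (ZipInputStream zis =
                 new ZipInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
            ZipEntry read = zis.getNextEntry();
            // Print both values; how closely they agree depends on which
            // timestamp fields the implementation wrote for this entry.
            System.out.printf("written %tc%nread    %tc%n", mtime, read.getTime());
        }
    }
}
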
--- a/jdk/test/java/util/zip/ZipFile/Assortment.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/java/util/zip/ZipFile/Assortment.java	Wed Jul 05 18:59:05 2017 +0200
@@ -22,7 +22,7 @@
  */
 
 /* @test
- * @bug 4770745 6234507
+ * @bug 4770745 6234507 6303183
  * @summary test a variety of zip file entries
  * @author Martin Buchholz
  */
@@ -54,6 +54,44 @@
         check(condition, "Something's wrong");
     }
 
+    static final int get16(byte b[], int off) {
+        return Byte.toUnsignedInt(b[off]) | (Byte.toUnsignedInt(b[off+1]) << 8);
+    }
+
+    // Check that all "expected" extra fields are equal to their
+    // corresponding fields in "extra". The "extra" data might have
+    // timestamp fields added by ZOS.
+    static boolean equalsExtraData(byte[] expected, byte[] extra) {
+        if (expected == null)
+            return true;
+        int off = 0;
+        int len = expected.length;
+        while (off + 4 < len) {
+            int tag = get16(expected, off);
+            int sz = get16(expected, off + 2);
+            int off0 = 0;
+            int len0 = extra.length;
+            boolean matched = false;
+            while (off0 + 4 < len0) {
+                int tag0 = get16(extra, off0);
+                int sz0 = get16(extra, off0 + 2);
+                if (tag == tag0 && sz == sz0) {
+                    matched = true;
+                    for (int i = 0; i < sz; i++) {
+                        if (expected[off + i] != extra[off0 + i])
+                            matched = false;
+                    }
+                    break;
+                }
+                off0 += (4 + sz0);
+            }
+            if (!matched)
+                return false;
+            off += (4 + sz);
+        }
+        return true;
+    }
+
     private static class Entry {
         private String name;
         private int    method;
@@ -109,7 +147,7 @@
             check((((comment == null) || comment.equals(""))
                    && (e.getComment() == null))
                   || comment.equals(e.getComment()));
-            check(Arrays.equals(extra, e.getExtra()));
+            check(equalsExtraData(extra, e.getExtra()));
             check(Arrays.equals(data, getData(f, e)));
             check(e.getSize() == data.length);
             check((method == ZipEntry.DEFLATED) ||
@@ -129,8 +167,7 @@
 
             byte[] extra = (this.extra != null && this.extra.length == 0) ?
                 null : this.extra;
-            check(Arrays.equals(extra, e.getExtra()));
-
+            check(equalsExtraData(extra, e.getExtra()));
             check(name.equals(e.getName()));
             check(method == e.getMethod());
             check(e.getSize() == -1 || e.getSize() == data.length);
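
For context, a standalone sketch (not part of the patch; names are illustrative) of the extra-field layout that equalsExtraData walks: the field is a sequence of records, each consisting of a 2-byte little-endian header ID, a 2-byte little-endian data size, and that many data bytes.

import java.util.ArrayList;
import java.util.List;

public class ExtraFieldRecordsSketch {
    static int get16(byte[] b, int off) {
        return Byte.toUnsignedInt(b[off]) | (Byte.toUnsignedInt(b[off + 1]) << 8);
    }

    // Lists every well-formed (tag, size) record found in an extra field.
    static List<String> records(byte[] extra) {
        List<String> out = new ArrayList<>();
        int off = 0;
        while (extra != null && off + 4 <= extra.length) {
            int tag = get16(extra, off);
            int size = get16(extra, off + 2);
            if (off + 4 + size > extra.length) {
                break;                      // truncated record, stop
            }
            out.add(String.format("0x%04X:%d", tag, size));
            off += 4 + size;
        }
        return out;
    }

    public static void main(String[] args) {
        // Hypothetical extra field: one record with tag 0xCAFE and two data bytes.
        byte[] extra = { (byte) 0xFE, (byte) 0xCA, 0x02, 0x00, 0x11, 0x22 };
        System.out.println(records(extra));    // prints [0xCAFE:2]
    }
}
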
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/javax/crypto/Cipher/CipherStreamClose.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 7160837
+ * @summary Make sure Cipher IO streams don't call an extra doFinal if close()
+ * is called multiple times.  Additionally, verify that encryption and decryption
+ * through the streams match the results of non-stream crypto operations.
+ */
+
+import java.io.*;
+import java.security.DigestOutputStream;
+import java.security.DigestInputStream;
+import java.security.MessageDigest;
+import java.util.Arrays;
+
+import javax.crypto.Cipher;
+import javax.crypto.CipherOutputStream;
+import javax.crypto.CipherInputStream;
+import javax.crypto.SecretKey;
+import javax.crypto.spec.SecretKeySpec;
+import javax.xml.bind.DatatypeConverter;
+
+public class CipherStreamClose {
+    private static final String message = "This is the sample message";
+    static boolean debug = false;
+
+    /*
+     * This method does encryption by cipher.doFinal(), and not with
+     * CipherOutputStream
+     */
+    public static byte[] blockEncrypt(String message, SecretKey key)
+        throws Exception {
+
+        byte[] data;
+        Cipher encCipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
+        encCipher.init(Cipher.ENCRYPT_MODE, key);
+        try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
+            try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
+                oos.writeObject(message);
+            }
+            data = bos.toByteArray();
+        }
+
+        if (debug) {
+            System.out.println(DatatypeConverter.printHexBinary(data));
+        }
+        return encCipher.doFinal(data);
+
+    }
+
+    /*
+     * This method does decryption by cipher.doFinal(), and not with
+     * CipherInputStream
+     */
+    public static Object blockDecrypt(byte[] data, SecretKey key)
+        throws Exception {
+
+        Cipher c = Cipher.getInstance("AES/ECB/PKCS5Padding");
+        c.init(Cipher.DECRYPT_MODE, key);
+        data = c.doFinal(data);
+        try (ByteArrayInputStream bis = new ByteArrayInputStream(data)) {
+            try (ObjectInputStream ois = new ObjectInputStream(bis)) {
+                return ois.readObject();
+            }
+        }
+    }
+
+    public static byte[] streamEncrypt(String message, SecretKey key,
+        MessageDigest digest)
+        throws Exception {
+
+        byte[] data;
+        Cipher encCipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
+        encCipher.init(Cipher.ENCRYPT_MODE, key);
+        try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DigestOutputStream dos = new DigestOutputStream(bos, digest);
+            CipherOutputStream cos = new CipherOutputStream(dos, encCipher)) {
+            try (ObjectOutputStream oos = new ObjectOutputStream(cos)) {
+                oos.writeObject(message);
+            }
+            data = bos.toByteArray();
+        }
+
+        if (debug) {
+            System.out.println(DatatypeConverter.printHexBinary(data));
+        }
+        return data;
+    }
+
+    public static Object streamDecrypt(byte[] data, SecretKey key,
+        MessageDigest digest) throws Exception {
+
+        Cipher decCipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
+        decCipher.init(Cipher.DECRYPT_MODE, key);
+        digest.reset();
+        try (ByteArrayInputStream bis = new ByteArrayInputStream(data);
+            DigestInputStream dis = new DigestInputStream(bis, digest);
+            CipherInputStream cis = new CipherInputStream(dis, decCipher)) {
+
+            try (ObjectInputStream ois = new ObjectInputStream(cis)) {
+                return ois.readObject();
+            }
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        MessageDigest digest = MessageDigest.getInstance("SHA1");
+        SecretKeySpec key = new SecretKeySpec(
+            DatatypeConverter.parseHexBinary(
+            "12345678123456781234567812345678"), "AES");
+
+        // Run 'message' through streamEncrypt
+        byte[] se = streamEncrypt(message, key, digest);
+        // 'digest' already has the value from the stream, just finish the op
+        byte[] sd = digest.digest();
+        digest.reset();
+        // Run 'message' through blockEncrypt
+        byte[] be = blockEncrypt(message, key);
+        // Take digest of encrypted blockEncrypt result
+        byte[] bd = digest.digest(be);
+        // Verify both returned the same value
+        if (!Arrays.equals(sd, bd)) {
+            System.err.println("Stream: "+DatatypeConverter.printHexBinary(se)+
+                "\t Digest: "+DatatypeConverter.printHexBinary(sd));
+            System.err.println("Block : "+DatatypeConverter.printHexBinary(be)+
+                "\t Digest: "+DatatypeConverter.printHexBinary(bd));
+            throw new Exception("stream & block encryption does not match");
+        }
+
+        digest.reset();
+        // Sanity check: Decrypt separately from stream to verify operations
+        String bm = (String) blockDecrypt(be, key);
+        if (message.compareTo(bm) != 0) {
+            System.err.println("Expected: "+message+"\nBlock:    "+bm);
+            throw new Exception("Block decryption does not match expected");
+        }
+
+        // Have decryption and digest included in the object stream
+        String sm = (String) streamDecrypt(se, key, digest);
+        if (message.compareTo(sm) != 0) {
+            System.err.println("Expected: "+message+"\nStream:   "+sm);
+            throw new Exception("Stream decryption does not match expected.");
+        }
+    }
+}
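
A compact sketch (not part of the patch; the all-zero key and class name are illustrative) of the property under test: a second close() on a CipherOutputStream must not trigger another doFinal, and the ciphertext it wrote should still decrypt with a plain Cipher.doFinal().

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import javax.crypto.Cipher;
import javax.crypto.CipherOutputStream;
import javax.crypto.spec.SecretKeySpec;

public class DoubleCloseSketch {
    public static void main(String[] args) throws Exception {
        SecretKeySpec key = new SecretKeySpec(new byte[16], "AES");   // throwaway key
        byte[] plaintext = "sample".getBytes(StandardCharsets.US_ASCII);

        Cipher enc = Cipher.getInstance("AES/ECB/PKCS5Padding");
        enc.init(Cipher.ENCRYPT_MODE, key);

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        CipherOutputStream cos = new CipherOutputStream(bos, enc);
        cos.write(plaintext);
        cos.close();
        cos.close();   // second close: must be a no-op, no extra doFinal

        Cipher dec = Cipher.getInstance("AES/ECB/PKCS5Padding");
        dec.init(Cipher.DECRYPT_MODE, key);
        byte[] recovered = dec.doFinal(bos.toByteArray());
        System.out.println(Arrays.equals(plaintext, recovered));   // expected: true
    }
}
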
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/javax/swing/JComboBox/6337518/bug6337518.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/* @test
+   @bug 6337518
+   @summary Null Arrow Button Throws Exception in BasicComboBoxUI
+   @author Anton Litvinov
+*/
+
+import javax.swing.*;
+import javax.swing.plaf.basic.*;
+
+public class bug6337518 extends BasicComboBoxUI {
+    @Override
+    protected JButton createArrowButton() {
+        return null;
+    }
+
+    public static void main(String[] args) throws Exception {
+        SwingUtilities.invokeAndWait(new Runnable() {
+            @Override
+            public void run() {
+                JComboBox comboBox = new JComboBox();
+                comboBox.setUI(new bug6337518());
+            }
+        });
+    }
+}
--- a/jdk/test/javax/swing/JTabbedPane/4624207/bug4624207.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/javax/swing/JTabbedPane/4624207/bug4624207.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -37,6 +37,8 @@
 import java.awt.event.FocusEvent;
 import java.awt.event.FocusListener;
 import java.awt.event.KeyEvent;
+
+import sun.awt.OSInfo;
 import sun.awt.SunToolkit;
 
 public class bug4624207 implements ChangeListener, FocusListener {
@@ -99,7 +101,7 @@
 
         toolkit.realSync();
 
-        if ("Aqua".equals(UIManager.getLookAndFeel().getID())) {
+        if (OSInfo.getOSType() == OSInfo.OSType.MACOSX) {
             Util.hitKeys(robot, KeyEvent.VK_CONTROL, KeyEvent.VK_ALT, KeyEvent.VK_B);
         } else {
             Util.hitKeys(robot, KeyEvent.VK_ALT, KeyEvent.VK_B);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/javax/swing/JTable/7068740/bug7068740.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,134 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/* @test
+   @bug 7068740
+   @summary JTable wrapped in JLayer can't use PGUP/PGDOWN keys
+   @author Vladislav Karnaukhov
+   @run main bug7068740
+*/
+
+import sun.awt.SunToolkit;
+
+import javax.swing.*;
+import javax.swing.plaf.LayerUI;
+import javax.swing.plaf.metal.MetalLookAndFeel;
+import javax.swing.table.DefaultTableModel;
+import java.awt.*;
+import java.awt.event.KeyEvent;
+import java.lang.reflect.InvocationTargetException;
+
+public class bug7068740 extends JFrame {
+
+    private static Robot robot = null;
+    private static JTable table = null;
+    private static SunToolkit toolkit = null;
+
+    bug7068740() {
+        super();
+        setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
+
+        DefaultTableModel model = new DefaultTableModel() {
+            @Override
+            public int getRowCount() {
+                return 20;
+            }
+
+            @Override
+            public int getColumnCount() {
+                return 2;
+            }
+
+            @Override
+            public Object getValueAt(int row, int column) {
+                return "(" + row + "," + column + ")";
+            }
+        };
+
+        table = new JTable(model);
+        LayerUI<JComponent> layerUI = new LayerUI<>();
+        JLayer<JComponent> layer = new JLayer<>(table, layerUI);
+        JScrollPane scrollPane = new JScrollPane(layer);
+        add(scrollPane);
+        pack();
+        setLocationRelativeTo(null);
+    }
+
+    private static void setUp() {
+        try {
+            if (robot == null) {
+                robot = new Robot();
+                robot.setAutoDelay(20);
+            }
+
+            if (toolkit == null) {
+                toolkit = (SunToolkit) Toolkit.getDefaultToolkit();
+            }
+
+            SwingUtilities.invokeAndWait(new Runnable() {
+                @Override
+                public void run() {
+                    bug7068740 test = new bug7068740();
+                    test.setVisible(true);
+                }
+            });
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+            throw new RuntimeException("Test failed");
+        } catch (InvocationTargetException e) {
+            e.printStackTrace();
+            throw new RuntimeException("Test failed");
+        } catch (AWTException e) {
+            e.printStackTrace();
+            throw new RuntimeException("Test failed");
+        }
+    }
+
+    private static void doTest() {
+        toolkit.realSync();
+        table.setRowSelectionInterval(0, 0);
+
+        robot.keyPress(KeyEvent.VK_PAGE_DOWN);
+        toolkit.realSync();
+        if (table.getSelectedRow() != 19) {
+            throw new RuntimeException("Test failed");
+        }
+
+        robot.keyPress(KeyEvent.VK_PAGE_UP);
+        toolkit.realSync();
+        if (table.getSelectedRow() != 0) {
+            throw new RuntimeException("Test failed");
+        }
+    }
+
+    public static void main(String[] args) {
+        try {
+            UIManager.setLookAndFeel(new MetalLookAndFeel());
+            setUp();
+            doTest();
+        } catch (UnsupportedLookAndFeelException e) {
+            e.printStackTrace();
+            throw new RuntimeException("Test failed");
+        }
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/javax/swing/KeyboardManager/8013370/Test8013370.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+import java.awt.Robot;
+import java.awt.Toolkit;
+import java.awt.event.ActionEvent;
+import java.awt.event.KeyEvent;
+
+import javax.swing.AbstractAction;
+import javax.swing.InputMap;
+import javax.swing.JFrame;
+import javax.swing.JMenuBar;
+import javax.swing.JMenuItem;
+import javax.swing.KeyStroke;
+import sun.awt.SunToolkit;
+
+import static java.awt.event.InputEvent.CTRL_DOWN_MASK;
+import static javax.swing.JComponent.WHEN_IN_FOCUSED_WINDOW;
+import static javax.swing.JOptionPane.showMessageDialog;
+import static javax.swing.SwingUtilities.invokeAndWait;
+
+/*
+ * @test
+ * @bug 8013370
+ * @summary Ensure that the key stroke passed to processKeyBinding is not null
+ * @author Sergey Malenkov
+ */
+
+public class Test8013370 implements Runnable {
+    public static void main(String[] args) throws Exception {
+        Test8013370 task = new Test8013370();
+        invokeAndWait(task);
+
+        Robot robot = new Robot();
+        robot.waitForIdle();
+        robot.keyPress(KeyEvent.VK_CONTROL);
+        robot.keyRelease(KeyEvent.VK_CONTROL);
+        robot.waitForIdle();
+
+        invokeAndWait(task);
+        task.validate();
+    }
+
+    private JFrame frame;
+    private boolean error;
+
+    @Override
+    public void run() {
+        if (this.frame == null) {
+            JMenuBar menu = new JMenuBar() {
+                @Override
+                protected boolean processKeyBinding(KeyStroke stroke, KeyEvent event, int condition, boolean pressed) {
+                    if (stroke == null) {
+                        Test8013370.this.error = true;
+                        return false;
+                    }
+                    return super.processKeyBinding(stroke, event, condition, pressed);
+                }
+            };
+            menu.add(new JMenuItem("Menu"));
+
+            InputMap map = menu.getInputMap(WHEN_IN_FOCUSED_WINDOW);
+            // We add more than 8 actions because the ArrayTable is converted
+            // from an array to a hashtable when more than 8 values are added.
+            for (int i = 0; i < 9; i++) {
+                String name = " Action #" + i;
+                map.put(KeyStroke.getKeyStroke(KeyEvent.VK_A + i, CTRL_DOWN_MASK), name);
+
+                menu.getActionMap().put(name, new AbstractAction(name) {
+                    @Override
+                    public void actionPerformed(ActionEvent event) {
+                        showMessageDialog(null, getValue(NAME));
+                    }
+                });
+            }
+            this.frame = new JFrame("8013370");
+            this.frame.setJMenuBar(menu);
+            this.frame.setVisible(true);
+        }
+        else {
+            this.frame.dispose();
+        }
+    }
+
+    private void validate() {
+        if (this.error) {
+            throw new Error("KeyStroke is null");
+        }
+    }
+}
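
A headless sketch (not part of the patch; names are illustrative) of the detail the comment in the test relies on: InputMap stores its bindings in a javax.swing ArrayTable, which switches from a plain array to a hashtable once more than 8 entries are present, so registering 9 bindings exercises that path.

import static java.awt.event.InputEvent.CTRL_DOWN_MASK;

import java.awt.event.KeyEvent;
import javax.swing.InputMap;
import javax.swing.KeyStroke;

public class InputMapGrowthSketch {
    public static void main(String[] args) {
        InputMap map = new InputMap();
        // Nine entries is enough to push the backing storage past the
        // array-to-hashtable threshold mentioned in the test comment.
        for (int i = 0; i < 9; i++) {
            map.put(KeyStroke.getKeyStroke(KeyEvent.VK_A + i, CTRL_DOWN_MASK),
                    "action-" + i);
        }
        System.out.println("bindings: " + map.size());   // prints 9
    }
}
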
--- a/jdk/test/sun/java2d/X11SurfaceData/SharedMemoryPixmapsTest/SharedMemoryPixmapsTest.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/sun/java2d/X11SurfaceData/SharedMemoryPixmapsTest/SharedMemoryPixmapsTest.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 #
 # Copyright (c) 2005, 2008, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -21,7 +22,6 @@
 # questions.
 #
 
-#!/bin/sh
 # @test
 # @bug 6363434 6588884
 # @summary Verify that shared memory pixmaps are not broken
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/sun/java2d/loops/RenderToCustomBufferTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+/**
+ * @test
+ * @bug     8015606
+ * @summary Test verifies whether text is rendered correctly to
+ *          a custom buffered image.
+ *
+ * @run     main RenderToCustomBufferTest
+ */
+
+import java.awt.Color;
+import java.awt.Font;
+import java.awt.Graphics2D;
+import java.awt.RenderingHints;
+import java.awt.Transparency;
+import java.awt.color.ColorSpace;
+import java.awt.image.BufferedImage;
+import java.awt.image.ColorModel;
+import java.awt.image.ComponentColorModel;
+import java.awt.image.DataBuffer;
+import java.awt.image.WritableRaster;
+
+public class RenderToCustomBufferTest {
+    public static void main(String[] args) {
+        final BufferedImage dst_custom = createCustomBuffer();
+        final BufferedImage dst_dcm = new BufferedImage(width, height,
+                BufferedImage.TYPE_INT_RGB);
+
+        renderTo(dst_custom);
+        renderTo(dst_dcm);
+
+        check(dst_custom, dst_dcm);
+    }
+
+    private static void check(BufferedImage a, BufferedImage b) {
+        for (int y = 0; y < height; y++) {
+            for (int x = 0; x < width; x++) {
+                int pa = a.getRGB(x, y);
+                int pb = b.getRGB(x, y);
+
+                if (pa != pb) {
+                    String msg = String.format(
+                        "Point [%d, %d] has different colors: %08X and %08X",
+                        x, y, pa, pb);
+                    throw new RuntimeException("Test failed: " + msg);
+                }
+            }
+        }
+    }
+
+    private static BufferedImage createCustomBuffer() {
+        ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
+        ColorModel cm = new ComponentColorModel(cs, false, false,
+                Transparency.OPAQUE, DataBuffer.TYPE_FLOAT);
+        WritableRaster wr = cm.createCompatibleWritableRaster(width, height);
+
+        return new BufferedImage(cm, wr, false, null);
+    }
+
+    private static void renderTo(BufferedImage dst) {
+        System.out.println("The buffer: " + dst);
+        Graphics2D g = dst.createGraphics();
+
+        final int w = dst.getWidth();
+        final int h = dst.getHeight();
+
+        g.setColor(Color.blue);
+        g.fillRect(0, 0, w, h);
+
+        g.setColor(Color.red);
+        Font f = g.getFont();
+        g.setFont(f.deriveFont(48f));
+
+        g.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING,
+               RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
+
+        // NB: this clip triggers the problem
+        g.setClip(50, 50, 200, 100);
+
+        g.drawString("AA Text", 52, 90);
+
+        g.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING,
+               RenderingHints.VALUE_TEXT_ANTIALIAS_OFF);
+
+        // NB: this clip triggers the problem
+        g.setClip(50, 100, 100, 100);
+        g.drawString("Text", 52, 148);
+
+        g.dispose();
+    }
+
+    private static final int width = 230;
+    private static final int height = 150;
+}
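
A small sketch (not part of the patch; names are illustrative) of the kind of destination the test renders into: a BufferedImage backed by a float component raster, for which no standard TYPE_ constant exists, so Java 2D falls back to its general rendering loops.

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.WritableRaster;

public class FloatBufferSketch {
    public static void main(String[] args) {
        ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
        ColorModel cm = new ComponentColorModel(cs, false, false,
                Transparency.OPAQUE, DataBuffer.TYPE_FLOAT);
        WritableRaster wr = cm.createCompatibleWritableRaster(16, 16);
        BufferedImage img = new BufferedImage(cm, wr, false, null);

        Graphics2D g = img.createGraphics();
        g.setColor(Color.red);
        g.fillRect(0, 0, 16, 16);
        g.dispose();

        // TYPE_CUSTOM (0) and an opaque red pixel are expected here.
        System.out.printf("type=%d rgb(0,0)=%08X%n", img.getType(), img.getRGB(0, 0));
    }
}
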
--- a/jdk/test/sun/management/jdp/JdpUnitTest.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/sun/management/jdp/JdpUnitTest.java	Wed Jul 05 18:59:05 2017 +0200
@@ -32,6 +32,12 @@
 
 public class JdpUnitTest {
 
+
+    static byte[] russian_name = {(byte)0xd0,(byte)0xbf,(byte)0xd1,(byte)0x80,(byte)0xd0,(byte)0xbe,(byte)0xd0,(byte)0xb2,
+                                  (byte)0xd0,(byte)0xb5,(byte)0xd1,(byte)0x80,(byte)0xd0,(byte)0xba,(byte)0xd0,(byte)0xb0,
+                                  (byte)0x20,(byte)0xd1,(byte)0x81,(byte)0xd0,(byte)0xb2,(byte)0xd1,(byte)0x8f,(byte)0xd0,
+                                  (byte)0xb7,(byte)0xd0,(byte)0xb8,(byte)0x0a};
+
     /**
      * This test tests that complete packet is build correctly
      */
@@ -42,7 +48,7 @@
         {
             JdpJmxPacket p1 = new JdpJmxPacket(UUID.randomUUID(), "fake://unit-test");
             p1.setMainClass("FakeUnitTest");
-            p1.setInstanceName("Fake");
+            p1.setInstanceName(new String(russian_name, "UTF-8"));
             byte[] b = p1.getPacketData();
 
             JdpJmxPacket p2 = new JdpJmxPacket(b);
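
The byte array above is just a UTF-8 encoded Cyrillic phrase, so the packet round trip now has to preserve a non-ASCII instance name; a trivial sketch (not part of the patch; the sample string is illustrative) of the encode/decode round trip this depends on:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class Utf8RoundTripSketch {
    public static void main(String[] args) {
        String name = "проверка связи";                        // any non-ASCII instance name
        byte[] encoded = name.getBytes(StandardCharsets.UTF_8);
        String decoded = new String(encoded, StandardCharsets.UTF_8);
        System.out.println(decoded.equals(name));               // true
        System.out.println(Arrays.equals(encoded,
                decoded.getBytes(StandardCharsets.UTF_8)));     // true
    }
}
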
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/sun/net/www/protocol/http/HttpStreams.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * @test
+ * @bug 8011719
+ * @summary Basic checks to verify behavior of returned input streams
+ */
+
+import com.sun.net.httpserver.HttpExchange;
+import com.sun.net.httpserver.HttpHandler;
+import com.sun.net.httpserver.HttpServer;
+import java.io.*;
+import java.net.*;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+
+public class HttpStreams {
+
+    void client(String u) throws Exception {
+        byte[] ba = new byte[5];
+        HttpURLConnection urlc = (HttpURLConnection)(new URL(u)).openConnection();
+        int resp = urlc.getResponseCode();
+        InputStream is;
+        if (resp == 200)
+            is = urlc.getInputStream();
+        else
+            is = urlc.getErrorStream();
+
+        expectNoThrow(() -> { is.read(); }, "read on open stream should not throw :" + u);
+        expectNoThrow(() -> { is.close(); }, "close should never throw: " + u);
+        expectNoThrow(() -> { is.close(); }, "close should never throw: " + u);
+        expectThrow(() -> { is.read(); }, "read on closed stream should throw: " + u);
+        expectThrow(() -> { is.read(ba); }, "read on closed stream should throw: " + u);
+        expectThrow(() -> { is.read(ba, 0, 2); }, "read on closed stream should throw: " + u);
+    }
+
+    void test() throws Exception {
+        HttpServer server = null;
+        try {
+            server = startHttpServer();
+            String baseUrl = "http://localhost:" + server.getAddress().getPort() + "/";
+            client(baseUrl +  "chunked/");
+            client(baseUrl +  "fixed/");
+            client(baseUrl +  "error/");
+            client(baseUrl +  "chunkedError/");
+
+            // Test with a response cache
+            ResponseCache ch = ResponseCache.getDefault();
+            ResponseCache.setDefault(new TrivialCacheHandler());
+            try {
+                client(baseUrl +  "chunked/");
+                client(baseUrl +  "fixed/");
+                client(baseUrl +  "error/");
+                client(baseUrl +  "chunkedError/");
+            } finally {
+                ResponseCache.setDefault(ch);
+            }
+        } finally {
+            if (server != null)
+                server.stop(0);
+        }
+
+        System.out.println("passed: " + pass + ", failed: " + fail);
+        if (fail > 0)
+            throw new RuntimeException("some tests failed check output");
+    }
+
+    public static void main(String[] args) throws Exception {
+        (new HttpStreams()).test();
+    }
+
+    // HTTP Server
+    HttpServer startHttpServer() throws IOException {
+        HttpServer httpServer = HttpServer.create(new InetSocketAddress(0), 0);
+        httpServer.createContext("/chunked/", new ChunkedHandler());
+        httpServer.createContext("/fixed/", new FixedHandler());
+        httpServer.createContext("/error/", new ErrorHandler());
+        httpServer.createContext("/chunkedError/", new ChunkedErrorHandler());
+        httpServer.start();
+        return httpServer;
+    }
+
+    static abstract class AbstractHandler implements HttpHandler {
+        @Override
+        public void handle(HttpExchange t) throws IOException {
+            try (InputStream is = t.getRequestBody()) {
+                while (is.read() != -1);
+            }
+            t.sendResponseHeaders(respCode(), length());
+            try (OutputStream os = t.getResponseBody()) {
+                os.write(message());
+            }
+            t.close();
+        }
+
+        abstract int respCode();
+        abstract int length();
+        abstract byte[] message();
+    }
+
+    static class ChunkedHandler extends AbstractHandler {
+        static final byte[] ba =
+                "Hello there from chunked handler!".getBytes(StandardCharsets.US_ASCII);
+        int respCode() { return 200; }
+        int length() { return 0; }
+        byte[] message() { return ba; }
+    }
+
+    static class FixedHandler extends AbstractHandler {
+        static final byte[] ba =
+                "Hello there from fixed handler!".getBytes(StandardCharsets.US_ASCII);
+        int respCode() { return 200; }
+        int length() { return ba.length; }
+        byte[] message() { return ba; }
+    }
+
+    static class ErrorHandler extends AbstractHandler {
+        static final byte[] ba =
+                "This is an error mesg from the server!".getBytes(StandardCharsets.US_ASCII);
+        int respCode() { return 400; }
+        int length() { return ba.length; }
+        byte[] message() { return ba; }
+    }
+
+    static class ChunkedErrorHandler extends ErrorHandler {
+        int length() { return 0; }
+    }
+
+    static class TrivialCacheHandler extends ResponseCache
+    {
+       public CacheResponse get(URI uri, String rqstMethod, Map rqstHeaders) {
+          return null;
+       }
+
+       public CacheRequest put(URI uri, URLConnection conn) {
+          return new TrivialCacheRequest();
+       }
+    }
+
+    static class TrivialCacheRequest extends CacheRequest
+    {
+       ByteArrayOutputStream baos = new ByteArrayOutputStream();
+       public void abort() {}
+       public OutputStream getBody() throws IOException { return baos; }
+    }
+
+    static interface ThrowableRunnable {
+        void run() throws IOException;
+    }
+
+    void expectThrow(ThrowableRunnable r, String msg) {
+        try { r.run(); fail(msg); } catch (IOException x) { pass(); }
+    }
+
+    void expectNoThrow(ThrowableRunnable r, String msg) {
+        try { r.run(); pass(); } catch (IOException x) { fail(msg, x); }
+    }
+
+    private int pass;
+    private int fail;
+    void pass() { pass++; }
+    void fail(String msg, Exception x) { System.out.println(msg); x.printStackTrace(); fail++; }
+    void fail(String msg) { System.out.println(msg); Thread.dumpStack(); fail++; }
+}
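
A minimal sketch (not part of the patch; the class name and argument handling are illustrative) of the stream-selection rule that client() applies: 2xx responses are read from getInputStream(), everything else from getErrorStream(), and a stream that has already been closed may be closed again, while further reads are expected to throw IOException.

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class ResponseStreamSketch {
    // Pick the stream that actually carries the response body.
    static InputStream responseStream(HttpURLConnection conn) throws IOException {
        int code = conn.getResponseCode();
        return (code >= 200 && code < 300) ? conn.getInputStream()
                                           : conn.getErrorStream();
    }

    public static void main(String[] args) throws Exception {
        if (args.length == 0) {
            System.out.println("usage: java ResponseStreamSketch <url>");
            return;
        }
        HttpURLConnection conn =
                (HttpURLConnection) new URL(args[0]).openConnection();
        InputStream in = responseStream(conn);
        if (in != null) {
            while (in.read() != -1) { /* drain the body */ }
            in.close();
            in.close();   // closing again is harmless; a read() now would be expected to throw
        }
    }
}
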
--- a/jdk/test/sun/rmi/rmic/manifestClassPath/run.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/sun/rmi/rmic/manifestClassPath/run.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 #
 # Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -21,7 +22,6 @@
 # questions.
 #
 
-#!/bin/sh
 # @test
 # @bug 6473331 6485027 6934615
 # @summary Test handling of the Class-Path attribute in jar file manifests
--- a/jdk/test/sun/rmi/rmic/newrmic/equivalence/batch.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/sun/rmi/rmic/newrmic/equivalence/batch.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 #
 # Copyright (c) 2003, Oracle and/or its affiliates. All rights reserved.
 # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -21,7 +22,6 @@
 # questions.
 #
 
-#!/bin/sh
 #
 # Usage: batch.sh classpath classes...
 #
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/sun/security/pkcs11/tls/TestLeadingZeroesP11.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,410 @@
+/*
+ * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8014618
+ * @summary Need to strip leading zeros in TlsPremasterSecret of DHKeyAgreement
+ * @library ..
+ * @author Pasi Eronen
+ */
+
+import java.io.*;
+import java.security.*;
+import java.security.spec.*;
+import java.security.interfaces.*;
+import javax.crypto.*;
+import javax.crypto.spec.*;
+import javax.crypto.interfaces.*;
+
+/**
+ * Test that leading zeroes are stripped in the TlsPremasterSecret case,
+ * but are left as-is in other cases.
+ *
+ * We use pre-generated keypairs, since with randomly generated keypairs,
+ * a leading zero occurs in only (roughly) 1 out of 256 cases.
+ */
+
+public class TestLeadingZeroesP11 extends PKCS11Test {
+
+    public static void main(String[] args) throws Exception {
+        main(new TestLeadingZeroesP11());
+    }
+
+    public void main(Provider p) throws Exception {
+
+        // decode pre-generated keypairs
+        KeyFactory kfac = KeyFactory.getInstance("DH", p);
+        PublicKey alicePubKey =
+            kfac.generatePublic(new X509EncodedKeySpec(alicePubKeyEnc));
+        PublicKey bobPubKey =
+            kfac.generatePublic(new X509EncodedKeySpec(bobPubKeyEnc));
+        PrivateKey alicePrivKey =
+            kfac.generatePrivate(new PKCS8EncodedKeySpec(alicePrivKeyEnc));
+        PrivateKey bobPrivKey =
+            kfac.generatePrivate(new PKCS8EncodedKeySpec(bobPrivKeyEnc));
+
+        // generate normal shared secret
+        KeyAgreement aliceKeyAgree = KeyAgreement.getInstance("DH", p);
+        aliceKeyAgree.init(alicePrivKey);
+        aliceKeyAgree.doPhase(bobPubKey, true);
+        byte[] sharedSecret = aliceKeyAgree.generateSecret();
+        System.out.println("shared secret:\n" + toHexString(sharedSecret));
+
+        // verify that leading zero is present
+        if (sharedSecret.length != 128) {
+            throw new Exception("Unexpected shared secret length");
+        }
+        if (sharedSecret[0] != 0) {
+            throw new Exception("First byte is not zero as expected");
+        }
+
+        // now, test TLS premaster secret
+        aliceKeyAgree.init(alicePrivKey);
+        aliceKeyAgree.doPhase(bobPubKey, true);
+        byte[] tlsPremasterSecret =
+            aliceKeyAgree.generateSecret("TlsPremasterSecret").getEncoded();
+        System.out.println(
+            "tls premaster secret:\n" + toHexString(tlsPremasterSecret));
+
+        // check that leading zero has been stripped
+        if (tlsPremasterSecret.length != 127) {
+            throw new Exception("Unexpected TLS premaster secret length");
+        }
+        if (tlsPremasterSecret[0] == 0) {
+            throw new Exception("First byte is zero");
+        }
+        for (int i = 0; i < tlsPremasterSecret.length; i++) {
+            if (tlsPremasterSecret[i] != sharedSecret[i+1]) {
+                throw new Exception("Shared secrets differ");
+            }
+        }
+
+    }
+
+    /*
+     * Converts a byte to hex digit and writes to the supplied buffer
+     */
+    private void byte2hex(byte b, StringBuffer buf) {
+        char[] hexChars = { '0', '1', '2', '3', '4', '5', '6', '7', '8',
+                            '9', 'A', 'B', 'C', 'D', 'E', 'F' };
+        int high = ((b & 0xf0) >> 4);
+        int low = (b & 0x0f);
+        buf.append(hexChars[high]);
+        buf.append(hexChars[low]);
+    }
+
+    /*
+     * Converts a byte array to hex string
+     */
+    private String toHexString(byte[] block) {
+        StringBuffer buf = new StringBuffer();
+
+        int len = block.length;
+
+        for (int i = 0; i < len; i++) {
+             byte2hex(block[i], buf);
+             if (i < len-1) {
+                 buf.append(":");
+             }
+        }
+        return buf.toString();
+    }
+
+    private static final byte alicePubKeyEnc[] = {
+        (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x24,
+        (byte)0x30, (byte)0x81, (byte)0x99, (byte)0x06,
+        (byte)0x09, (byte)0x2A, (byte)0x86, (byte)0x48,
+        (byte)0x86, (byte)0xF7, (byte)0x0D, (byte)0x01,
+        (byte)0x03, (byte)0x01, (byte)0x30, (byte)0x81,
+        (byte)0x8B, (byte)0x02, (byte)0x81, (byte)0x81,
+        (byte)0x00, (byte)0xF4, (byte)0x88, (byte)0xFD,
+        (byte)0x58, (byte)0x4E, (byte)0x49, (byte)0xDB,
+        (byte)0xCD, (byte)0x20, (byte)0xB4, (byte)0x9D,
+        (byte)0xE4, (byte)0x91, (byte)0x07, (byte)0x36,
+        (byte)0x6B, (byte)0x33, (byte)0x6C, (byte)0x38,
+        (byte)0x0D, (byte)0x45, (byte)0x1D, (byte)0x0F,
+        (byte)0x7C, (byte)0x88, (byte)0xB3, (byte)0x1C,
+        (byte)0x7C, (byte)0x5B, (byte)0x2D, (byte)0x8E,
+        (byte)0xF6, (byte)0xF3, (byte)0xC9, (byte)0x23,
+        (byte)0xC0, (byte)0x43, (byte)0xF0, (byte)0xA5,
+        (byte)0x5B, (byte)0x18, (byte)0x8D, (byte)0x8E,
+        (byte)0xBB, (byte)0x55, (byte)0x8C, (byte)0xB8,
+        (byte)0x5D, (byte)0x38, (byte)0xD3, (byte)0x34,
+        (byte)0xFD, (byte)0x7C, (byte)0x17, (byte)0x57,
+        (byte)0x43, (byte)0xA3, (byte)0x1D, (byte)0x18,
+        (byte)0x6C, (byte)0xDE, (byte)0x33, (byte)0x21,
+        (byte)0x2C, (byte)0xB5, (byte)0x2A, (byte)0xFF,
+        (byte)0x3C, (byte)0xE1, (byte)0xB1, (byte)0x29,
+        (byte)0x40, (byte)0x18, (byte)0x11, (byte)0x8D,
+        (byte)0x7C, (byte)0x84, (byte)0xA7, (byte)0x0A,
+        (byte)0x72, (byte)0xD6, (byte)0x86, (byte)0xC4,
+        (byte)0x03, (byte)0x19, (byte)0xC8, (byte)0x07,
+        (byte)0x29, (byte)0x7A, (byte)0xCA, (byte)0x95,
+        (byte)0x0C, (byte)0xD9, (byte)0x96, (byte)0x9F,
+        (byte)0xAB, (byte)0xD0, (byte)0x0A, (byte)0x50,
+        (byte)0x9B, (byte)0x02, (byte)0x46, (byte)0xD3,
+        (byte)0x08, (byte)0x3D, (byte)0x66, (byte)0xA4,
+        (byte)0x5D, (byte)0x41, (byte)0x9F, (byte)0x9C,
+        (byte)0x7C, (byte)0xBD, (byte)0x89, (byte)0x4B,
+        (byte)0x22, (byte)0x19, (byte)0x26, (byte)0xBA,
+        (byte)0xAB, (byte)0xA2, (byte)0x5E, (byte)0xC3,
+        (byte)0x55, (byte)0xE9, (byte)0x2F, (byte)0x78,
+        (byte)0xC7, (byte)0x02, (byte)0x01, (byte)0x02,
+        (byte)0x02, (byte)0x02, (byte)0x02, (byte)0x00,
+        (byte)0x03, (byte)0x81, (byte)0x85, (byte)0x00,
+        (byte)0x02, (byte)0x81, (byte)0x81, (byte)0x00,
+        (byte)0xEE, (byte)0xD6, (byte)0xB1, (byte)0xA3,
+        (byte)0xB4, (byte)0x78, (byte)0x2B, (byte)0x35,
+        (byte)0xEF, (byte)0xCD, (byte)0x17, (byte)0x86,
+        (byte)0x63, (byte)0x2B, (byte)0x97, (byte)0x0E,
+        (byte)0x7A, (byte)0xD1, (byte)0xFF, (byte)0x7A,
+        (byte)0xEB, (byte)0x57, (byte)0x61, (byte)0xA1,
+        (byte)0xF7, (byte)0x90, (byte)0x11, (byte)0xA7,
+        (byte)0x79, (byte)0x28, (byte)0x69, (byte)0xBA,
+        (byte)0xA7, (byte)0xB2, (byte)0x37, (byte)0x17,
+        (byte)0xAE, (byte)0x3C, (byte)0x92, (byte)0x89,
+        (byte)0x88, (byte)0xE5, (byte)0x7E, (byte)0x8E,
+        (byte)0xF0, (byte)0x24, (byte)0xD0, (byte)0xE1,
+        (byte)0xC4, (byte)0xB0, (byte)0x26, (byte)0x5A,
+        (byte)0x1E, (byte)0xBD, (byte)0xA0, (byte)0xCF,
+        (byte)0x3E, (byte)0x97, (byte)0x2A, (byte)0x13,
+        (byte)0x92, (byte)0x3B, (byte)0x39, (byte)0xD0,
+        (byte)0x1D, (byte)0xA3, (byte)0x6B, (byte)0x3E,
+        (byte)0xC2, (byte)0xBB, (byte)0x14, (byte)0xB6,
+        (byte)0xE2, (byte)0x4C, (byte)0x0E, (byte)0x5B,
+        (byte)0x4B, (byte)0xA4, (byte)0x9D, (byte)0xA6,
+        (byte)0x21, (byte)0xB0, (byte)0xF9, (byte)0xDE,
+        (byte)0x55, (byte)0xAE, (byte)0x5C, (byte)0x29,
+        (byte)0x0E, (byte)0xC1, (byte)0xFC, (byte)0xBA,
+        (byte)0x51, (byte)0xD3, (byte)0xB6, (byte)0x6D,
+        (byte)0x75, (byte)0x72, (byte)0xDF, (byte)0x43,
+        (byte)0xAB, (byte)0x94, (byte)0x21, (byte)0x6E,
+        (byte)0x0C, (byte)0xD1, (byte)0x93, (byte)0x54,
+        (byte)0x56, (byte)0x7D, (byte)0x4B, (byte)0x90,
+        (byte)0xF1, (byte)0x94, (byte)0x45, (byte)0xD4,
+        (byte)0x2A, (byte)0x71, (byte)0xA1, (byte)0xB8,
+        (byte)0xDD, (byte)0xAA, (byte)0x05, (byte)0xF0,
+        (byte)0x27, (byte)0x37, (byte)0xBD, (byte)0x44
+    };
+
+    private static final byte alicePrivKeyEnc[] = {
+        (byte)0x30, (byte)0x81, (byte)0xE3, (byte)0x02,
+        (byte)0x01, (byte)0x00, (byte)0x30, (byte)0x81,
+        (byte)0x99, (byte)0x06, (byte)0x09, (byte)0x2A,
+        (byte)0x86, (byte)0x48, (byte)0x86, (byte)0xF7,
+        (byte)0x0D, (byte)0x01, (byte)0x03, (byte)0x01,
+        (byte)0x30, (byte)0x81, (byte)0x8B, (byte)0x02,
+        (byte)0x81, (byte)0x81, (byte)0x00, (byte)0xF4,
+        (byte)0x88, (byte)0xFD, (byte)0x58, (byte)0x4E,
+        (byte)0x49, (byte)0xDB, (byte)0xCD, (byte)0x20,
+        (byte)0xB4, (byte)0x9D, (byte)0xE4, (byte)0x91,
+        (byte)0x07, (byte)0x36, (byte)0x6B, (byte)0x33,
+        (byte)0x6C, (byte)0x38, (byte)0x0D, (byte)0x45,
+        (byte)0x1D, (byte)0x0F, (byte)0x7C, (byte)0x88,
+        (byte)0xB3, (byte)0x1C, (byte)0x7C, (byte)0x5B,
+        (byte)0x2D, (byte)0x8E, (byte)0xF6, (byte)0xF3,
+        (byte)0xC9, (byte)0x23, (byte)0xC0, (byte)0x43,
+        (byte)0xF0, (byte)0xA5, (byte)0x5B, (byte)0x18,
+        (byte)0x8D, (byte)0x8E, (byte)0xBB, (byte)0x55,
+        (byte)0x8C, (byte)0xB8, (byte)0x5D, (byte)0x38,
+        (byte)0xD3, (byte)0x34, (byte)0xFD, (byte)0x7C,
+        (byte)0x17, (byte)0x57, (byte)0x43, (byte)0xA3,
+        (byte)0x1D, (byte)0x18, (byte)0x6C, (byte)0xDE,
+        (byte)0x33, (byte)0x21, (byte)0x2C, (byte)0xB5,
+        (byte)0x2A, (byte)0xFF, (byte)0x3C, (byte)0xE1,
+        (byte)0xB1, (byte)0x29, (byte)0x40, (byte)0x18,
+        (byte)0x11, (byte)0x8D, (byte)0x7C, (byte)0x84,
+        (byte)0xA7, (byte)0x0A, (byte)0x72, (byte)0xD6,
+        (byte)0x86, (byte)0xC4, (byte)0x03, (byte)0x19,
+        (byte)0xC8, (byte)0x07, (byte)0x29, (byte)0x7A,
+        (byte)0xCA, (byte)0x95, (byte)0x0C, (byte)0xD9,
+        (byte)0x96, (byte)0x9F, (byte)0xAB, (byte)0xD0,
+        (byte)0x0A, (byte)0x50, (byte)0x9B, (byte)0x02,
+        (byte)0x46, (byte)0xD3, (byte)0x08, (byte)0x3D,
+        (byte)0x66, (byte)0xA4, (byte)0x5D, (byte)0x41,
+        (byte)0x9F, (byte)0x9C, (byte)0x7C, (byte)0xBD,
+        (byte)0x89, (byte)0x4B, (byte)0x22, (byte)0x19,
+        (byte)0x26, (byte)0xBA, (byte)0xAB, (byte)0xA2,
+        (byte)0x5E, (byte)0xC3, (byte)0x55, (byte)0xE9,
+        (byte)0x2F, (byte)0x78, (byte)0xC7, (byte)0x02,
+        (byte)0x01, (byte)0x02, (byte)0x02, (byte)0x02,
+        (byte)0x02, (byte)0x00, (byte)0x04, (byte)0x42,
+        (byte)0x02, (byte)0x40, (byte)0x36, (byte)0x4D,
+        (byte)0xD0, (byte)0x58, (byte)0x64, (byte)0x91,
+        (byte)0x78, (byte)0xA2, (byte)0x4B, (byte)0x79,
+        (byte)0x46, (byte)0xFE, (byte)0xC9, (byte)0xD9,
+        (byte)0xCA, (byte)0x5C, (byte)0xF9, (byte)0xFD,
+        (byte)0x6C, (byte)0x5D, (byte)0x76, (byte)0x3A,
+        (byte)0x41, (byte)0x6D, (byte)0x44, (byte)0x62,
+        (byte)0x75, (byte)0x93, (byte)0x81, (byte)0x93,
+        (byte)0x00, (byte)0x4C, (byte)0xB1, (byte)0xD8,
+        (byte)0x7D, (byte)0x9D, (byte)0xF3, (byte)0x16,
+        (byte)0x2C, (byte)0x6C, (byte)0x9F, (byte)0x7A,
+        (byte)0x84, (byte)0xA3, (byte)0x7A, (byte)0xC1,
+        (byte)0x4F, (byte)0x60, (byte)0xE3, (byte)0xB5,
+        (byte)0x86, (byte)0x28, (byte)0x08, (byte)0x4D,
+        (byte)0x94, (byte)0xB6, (byte)0x04, (byte)0x0D,
+        (byte)0xAC, (byte)0xBD, (byte)0x1F, (byte)0x42,
+        (byte)0x8F, (byte)0x1B
+    };
+
+    private static final byte bobPubKeyEnc[] = {
+        (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x23,
+        (byte)0x30, (byte)0x81, (byte)0x99, (byte)0x06,
+        (byte)0x09, (byte)0x2A, (byte)0x86, (byte)0x48,
+        (byte)0x86, (byte)0xF7, (byte)0x0D, (byte)0x01,
+        (byte)0x03, (byte)0x01, (byte)0x30, (byte)0x81,
+        (byte)0x8B, (byte)0x02, (byte)0x81, (byte)0x81,
+        (byte)0x00, (byte)0xF4, (byte)0x88, (byte)0xFD,
+        (byte)0x58, (byte)0x4E, (byte)0x49, (byte)0xDB,
+        (byte)0xCD, (byte)0x20, (byte)0xB4, (byte)0x9D,
+        (byte)0xE4, (byte)0x91, (byte)0x07, (byte)0x36,
+        (byte)0x6B, (byte)0x33, (byte)0x6C, (byte)0x38,
+        (byte)0x0D, (byte)0x45, (byte)0x1D, (byte)0x0F,
+        (byte)0x7C, (byte)0x88, (byte)0xB3, (byte)0x1C,
+        (byte)0x7C, (byte)0x5B, (byte)0x2D, (byte)0x8E,
+        (byte)0xF6, (byte)0xF3, (byte)0xC9, (byte)0x23,
+        (byte)0xC0, (byte)0x43, (byte)0xF0, (byte)0xA5,
+        (byte)0x5B, (byte)0x18, (byte)0x8D, (byte)0x8E,
+        (byte)0xBB, (byte)0x55, (byte)0x8C, (byte)0xB8,
+        (byte)0x5D, (byte)0x38, (byte)0xD3, (byte)0x34,
+        (byte)0xFD, (byte)0x7C, (byte)0x17, (byte)0x57,
+        (byte)0x43, (byte)0xA3, (byte)0x1D, (byte)0x18,
+        (byte)0x6C, (byte)0xDE, (byte)0x33, (byte)0x21,
+        (byte)0x2C, (byte)0xB5, (byte)0x2A, (byte)0xFF,
+        (byte)0x3C, (byte)0xE1, (byte)0xB1, (byte)0x29,
+        (byte)0x40, (byte)0x18, (byte)0x11, (byte)0x8D,
+        (byte)0x7C, (byte)0x84, (byte)0xA7, (byte)0x0A,
+        (byte)0x72, (byte)0xD6, (byte)0x86, (byte)0xC4,
+        (byte)0x03, (byte)0x19, (byte)0xC8, (byte)0x07,
+        (byte)0x29, (byte)0x7A, (byte)0xCA, (byte)0x95,
+        (byte)0x0C, (byte)0xD9, (byte)0x96, (byte)0x9F,
+        (byte)0xAB, (byte)0xD0, (byte)0x0A, (byte)0x50,
+        (byte)0x9B, (byte)0x02, (byte)0x46, (byte)0xD3,
+        (byte)0x08, (byte)0x3D, (byte)0x66, (byte)0xA4,
+        (byte)0x5D, (byte)0x41, (byte)0x9F, (byte)0x9C,
+        (byte)0x7C, (byte)0xBD, (byte)0x89, (byte)0x4B,
+        (byte)0x22, (byte)0x19, (byte)0x26, (byte)0xBA,
+        (byte)0xAB, (byte)0xA2, (byte)0x5E, (byte)0xC3,
+        (byte)0x55, (byte)0xE9, (byte)0x2F, (byte)0x78,
+        (byte)0xC7, (byte)0x02, (byte)0x01, (byte)0x02,
+        (byte)0x02, (byte)0x02, (byte)0x02, (byte)0x00,
+        (byte)0x03, (byte)0x81, (byte)0x84, (byte)0x00,
+        (byte)0x02, (byte)0x81, (byte)0x80, (byte)0x2C,
+        (byte)0x40, (byte)0xFA, (byte)0xF6, (byte)0xA6,
+        (byte)0xF8, (byte)0xAC, (byte)0xC2, (byte)0x4F,
+        (byte)0xCD, (byte)0xC7, (byte)0x37, (byte)0x93,
+        (byte)0xE5, (byte)0xE4, (byte)0x5E, (byte)0x18,
+        (byte)0x14, (byte)0xE6, (byte)0x50, (byte)0xDA,
+        (byte)0x55, (byte)0x38, (byte)0x5D, (byte)0x24,
+        (byte)0xF5, (byte)0x42, (byte)0x68, (byte)0x5F,
+        (byte)0xF5, (byte)0x15, (byte)0xC8, (byte)0x9B,
+        (byte)0x5D, (byte)0x06, (byte)0x3D, (byte)0xE1,
+        (byte)0x52, (byte)0x2F, (byte)0x98, (byte)0xFF,
+        (byte)0x37, (byte)0xBB, (byte)0x75, (byte)0x48,
+        (byte)0x48, (byte)0xE9, (byte)0x65, (byte)0x84,
+        (byte)0x37, (byte)0xBB, (byte)0xB3, (byte)0xE9,
+        (byte)0x36, (byte)0x01, (byte)0xB4, (byte)0x6A,
+        (byte)0x1C, (byte)0xB2, (byte)0x11, (byte)0x82,
+        (byte)0xCE, (byte)0x3D, (byte)0x65, (byte)0xE5,
+        (byte)0x3C, (byte)0x89, (byte)0xE9, (byte)0x52,
+        (byte)0x19, (byte)0xBD, (byte)0x58, (byte)0xF6,
+        (byte)0xA2, (byte)0x03, (byte)0xA8, (byte)0xB2,
+        (byte)0xA5, (byte)0xDB, (byte)0xEB, (byte)0xF5,
+        (byte)0x94, (byte)0xF9, (byte)0x46, (byte)0xBE,
+        (byte)0x45, (byte)0x4C, (byte)0x65, (byte)0xD2,
+        (byte)0xD1, (byte)0xCF, (byte)0xFF, (byte)0xFF,
+        (byte)0xFA, (byte)0x38, (byte)0xF1, (byte)0x72,
+        (byte)0xAB, (byte)0xB9, (byte)0x14, (byte)0x4E,
+        (byte)0xF5, (byte)0xF0, (byte)0x7A, (byte)0x8E,
+        (byte)0x45, (byte)0xFD, (byte)0x5B, (byte)0xF9,
+        (byte)0xA2, (byte)0x97, (byte)0x1B, (byte)0xAE,
+        (byte)0x2C, (byte)0x7B, (byte)0x6B, (byte)0x7C,
+        (byte)0x98, (byte)0xFE, (byte)0x58, (byte)0xDD,
+        (byte)0xBE, (byte)0xF6, (byte)0x1C, (byte)0x8E,
+        (byte)0xD0, (byte)0xA1, (byte)0x72
+    };
+
+    private static final byte bobPrivKeyEnc[] = {
+        (byte)0x30, (byte)0x81, (byte)0xE4, (byte)0x02,
+        (byte)0x01, (byte)0x00, (byte)0x30, (byte)0x81,
+        (byte)0x99, (byte)0x06, (byte)0x09, (byte)0x2A,
+        (byte)0x86, (byte)0x48, (byte)0x86, (byte)0xF7,
+        (byte)0x0D, (byte)0x01, (byte)0x03, (byte)0x01,
+        (byte)0x30, (byte)0x81, (byte)0x8B, (byte)0x02,
+        (byte)0x81, (byte)0x81, (byte)0x00, (byte)0xF4,
+        (byte)0x88, (byte)0xFD, (byte)0x58, (byte)0x4E,
+        (byte)0x49, (byte)0xDB, (byte)0xCD, (byte)0x20,
+        (byte)0xB4, (byte)0x9D, (byte)0xE4, (byte)0x91,
+        (byte)0x07, (byte)0x36, (byte)0x6B, (byte)0x33,
+        (byte)0x6C, (byte)0x38, (byte)0x0D, (byte)0x45,
+        (byte)0x1D, (byte)0x0F, (byte)0x7C, (byte)0x88,
+        (byte)0xB3, (byte)0x1C, (byte)0x7C, (byte)0x5B,
+        (byte)0x2D, (byte)0x8E, (byte)0xF6, (byte)0xF3,
+        (byte)0xC9, (byte)0x23, (byte)0xC0, (byte)0x43,
+        (byte)0xF0, (byte)0xA5, (byte)0x5B, (byte)0x18,
+        (byte)0x8D, (byte)0x8E, (byte)0xBB, (byte)0x55,
+        (byte)0x8C, (byte)0xB8, (byte)0x5D, (byte)0x38,
+        (byte)0xD3, (byte)0x34, (byte)0xFD, (byte)0x7C,
+        (byte)0x17, (byte)0x57, (byte)0x43, (byte)0xA3,
+        (byte)0x1D, (byte)0x18, (byte)0x6C, (byte)0xDE,
+        (byte)0x33, (byte)0x21, (byte)0x2C, (byte)0xB5,
+        (byte)0x2A, (byte)0xFF, (byte)0x3C, (byte)0xE1,
+        (byte)0xB1, (byte)0x29, (byte)0x40, (byte)0x18,
+        (byte)0x11, (byte)0x8D, (byte)0x7C, (byte)0x84,
+        (byte)0xA7, (byte)0x0A, (byte)0x72, (byte)0xD6,
+        (byte)0x86, (byte)0xC4, (byte)0x03, (byte)0x19,
+        (byte)0xC8, (byte)0x07, (byte)0x29, (byte)0x7A,
+        (byte)0xCA, (byte)0x95, (byte)0x0C, (byte)0xD9,
+        (byte)0x96, (byte)0x9F, (byte)0xAB, (byte)0xD0,
+        (byte)0x0A, (byte)0x50, (byte)0x9B, (byte)0x02,
+        (byte)0x46, (byte)0xD3, (byte)0x08, (byte)0x3D,
+        (byte)0x66, (byte)0xA4, (byte)0x5D, (byte)0x41,
+        (byte)0x9F, (byte)0x9C, (byte)0x7C, (byte)0xBD,
+        (byte)0x89, (byte)0x4B, (byte)0x22, (byte)0x19,
+        (byte)0x26, (byte)0xBA, (byte)0xAB, (byte)0xA2,
+        (byte)0x5E, (byte)0xC3, (byte)0x55, (byte)0xE9,
+        (byte)0x2F, (byte)0x78, (byte)0xC7, (byte)0x02,
+        (byte)0x01, (byte)0x02, (byte)0x02, (byte)0x02,
+        (byte)0x02, (byte)0x00, (byte)0x04, (byte)0x43,
+        (byte)0x02, (byte)0x41, (byte)0x00, (byte)0xE0,
+        (byte)0x31, (byte)0xE7, (byte)0x77, (byte)0xB8,
+        (byte)0xD0, (byte)0x7E, (byte)0x0A, (byte)0x9B,
+        (byte)0x94, (byte)0xD5, (byte)0x3D, (byte)0x33,
+        (byte)0x62, (byte)0x32, (byte)0x51, (byte)0xCE,
+        (byte)0x74, (byte)0x5C, (byte)0xA5, (byte)0x72,
+        (byte)0xD9, (byte)0x36, (byte)0xF3, (byte)0x8A,
+        (byte)0x3F, (byte)0x8B, (byte)0xC6, (byte)0xFE,
+        (byte)0xEF, (byte)0x94, (byte)0x8B, (byte)0x50,
+        (byte)0x41, (byte)0x9B, (byte)0x14, (byte)0xC8,
+        (byte)0xE9, (byte)0x1F, (byte)0x24, (byte)0x1F,
+        (byte)0x65, (byte)0x8E, (byte)0xD3, (byte)0x85,
+        (byte)0xD0, (byte)0x68, (byte)0x6C, (byte)0xF1,
+        (byte)0x79, (byte)0x45, (byte)0xD0, (byte)0x06,
+        (byte)0xA4, (byte)0xB8, (byte)0xE0, (byte)0x64,
+        (byte)0xF5, (byte)0x38, (byte)0x72, (byte)0x97,
+        (byte)0x00, (byte)0x23, (byte)0x5F
+    };
+}
+
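
Editorial note: the byte arrays above (alicePubKeyEnc, alicePrivKeyEnc, bobPubKeyEnc, bobPrivKeyEnc) appear to be DER-encoded Diffie-Hellman key pairs for the usual Alice/Bob key-agreement scenario; the test class that consumes them is not part of this hunk. The sketch below shows only how such encodings are typically loaded and combined with the standard JCE API; the class and method names are made up for illustration and are not the test's actual code.

    import java.security.KeyFactory;
    import java.security.PrivateKey;
    import java.security.PublicKey;
    import java.security.spec.PKCS8EncodedKeySpec;
    import java.security.spec.X509EncodedKeySpec;
    import javax.crypto.KeyAgreement;

    // Hypothetical helper: derive a shared secret from one side's PKCS#8 private
    // key encoding and the peer's X.509 public key encoding.
    final class DHAgreementSketch {
        static byte[] sharedSecret(final byte[] myPrivEnc, final byte[] peerPubEnc)
                throws Exception {
            final KeyFactory kf = KeyFactory.getInstance("DH");
            final PrivateKey priv = kf.generatePrivate(new PKCS8EncodedKeySpec(myPrivEnc));
            final PublicKey peerPub = kf.generatePublic(new X509EncodedKeySpec(peerPubEnc));

            final KeyAgreement ka = KeyAgreement.getInstance("DH");
            ka.init(priv);              // our own private key
            ka.doPhase(peerPub, true);  // peer's public key, final phase
            return ka.generateSecret(); // both sides compute identical bytes
        }
    }

Alice's side would call sharedSecret(alicePrivKeyEnc, bobPubKeyEnc) and Bob's the mirror image; a test would then assert the two results are equal byte-for-byte.
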
--- a/jdk/test/tools/launcher/MultipleJRE.sh	Mon Jun 10 17:04:18 2013 -0700
+++ b/jdk/test/tools/launcher/MultipleJRE.sh	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,4 @@
+#!/bin/sh
 # @test MultipleJRE.sh
 # @bug 4811102 4953711 4955505 4956301 4991229 4998210 5018605 6387069 6733959
 # @build PrintVersion
--- a/nashorn/.hgtags	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/.hgtags	Wed Jul 05 18:59:05 2017 +0200
@@ -202,3 +202,4 @@
 67ca019e3713dd71c884d753de02fd0021981969 jdk8-b90
 6b9f4120380091b8b1751a825b9f84bf1be224fe jdk8-b91
 dee23cce5235b594a96d3361b65eacc97bd5a583 jdk8-b92
+ddbf41575a2bdb12ccb9f91e169018bf04073038 jdk8-b93
--- a/nashorn/src/jdk/nashorn/api/scripting/ScriptObjectMirror.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/api/scripting/ScriptObjectMirror.java	Wed Jul 05 18:59:05 2017 +0200
@@ -31,7 +31,7 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -48,7 +48,7 @@
  * access ScriptObject via the javax.script.Bindings interface or
  * netscape.javascript.JSObject interface.
  */
-final class ScriptObjectMirror extends JSObject implements Bindings {
+public final class ScriptObjectMirror extends JSObject implements Bindings {
     private final ScriptObject sobj;
     private final ScriptObject global;
 
@@ -217,7 +217,7 @@
         return inGlobal(new Callable<Set<Map.Entry<String, Object>>>() {
             @Override public Set<Map.Entry<String, Object>> call() {
                 final Iterator<String>               iter    = sobj.propertyIterator();
-                final Set<Map.Entry<String, Object>> entries = new HashSet<>();
+                final Set<Map.Entry<String, Object>> entries = new LinkedHashSet<>();
 
                 while (iter.hasNext()) {
                     final String key   = iter.next();
@@ -253,7 +253,7 @@
         return inGlobal(new Callable<Set<String>>() {
             @Override public Set<String> call() {
                 final Iterator<String> iter   = sobj.propertyIterator();
-                final Set<String>      keySet = new HashSet<>();
+                final Set<String>      keySet = new LinkedHashSet<>();
 
                 while (iter.hasNext()) {
                     keySet.add(iter.next());
@@ -302,6 +302,21 @@
         });
     }
 
+    /**
+     * Delete a property from this object.
+     *
+     * @param key the property to be deleted
+     *
+     * @return true if the property was deleted, false otherwise
+     */
+    public boolean delete(final Object key) {
+        return inGlobal(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return sobj.delete(unwrap(key, global));
+            }
+        });
+    }
+
     @Override
     public int size() {
         return inGlobal(new Callable<Integer>() {
@@ -327,20 +342,28 @@
         });
     }
 
-    // package-privates below this.
-    ScriptObject getScriptObject() {
-        return sobj;
-    }
+
+    // These are public only so that Context can access them.
 
-    static Object translateUndefined(Object obj) {
-        return (obj == ScriptRuntime.UNDEFINED)? null : obj;
-    }
-
-    static Object wrap(final Object obj, final ScriptObject homeGlobal) {
+    /**
+     * Make a script object mirror for the given object if needed.
+     *
+     * @param obj object to be wrapped
+     * @param homeGlobal global to which this object belongs
+     * @return wrapped object
+     */
+    public static Object wrap(final Object obj, final ScriptObject homeGlobal) {
         return (obj instanceof ScriptObject) ? new ScriptObjectMirror((ScriptObject)obj, homeGlobal) : obj;
     }
 
-    static Object unwrap(final Object obj, final ScriptObject homeGlobal) {
+    /**
+     * Unwrap a script object mirror if needed.
+     *
+     * @param obj object to be unwrapped
+     * @param homeGlobal global to which this object belongs
+     * @return unwrapped object
+     */
+    public static Object unwrap(final Object obj, final ScriptObject homeGlobal) {
         if (obj instanceof ScriptObjectMirror) {
             final ScriptObjectMirror mirror = (ScriptObjectMirror)obj;
             return (mirror.global == homeGlobal)? mirror.sobj : obj;
@@ -349,7 +372,14 @@
         return obj;
     }
 
-    static Object[] wrapArray(final Object[] args, final ScriptObject homeGlobal) {
+    /**
+     * Wrap an array of objects into script object mirrors if needed.
+     *
+     * @param args array to be wrapped
+     * @param homeGlobal global to which this object belongs
+     * @return wrapped array
+     */
+    public static Object[] wrapArray(final Object[] args, final ScriptObject homeGlobal) {
         if (args == null || args.length == 0) {
             return args;
         }
@@ -363,7 +393,14 @@
         return newArgs;
     }
 
-    static Object[] unwrapArray(final Object[] args, final ScriptObject homeGlobal) {
+    /**
+     * Unwrap an array of script object mirrors if needed.
+     *
+     * @param args array to be unwrapped
+     * @param homeGlobal global to which this object belongs
+     * @return unwrapped array
+     */
+    public static Object[] unwrapArray(final Object[] args, final ScriptObject homeGlobal) {
         if (args == null || args.length == 0) {
             return args;
         }
@@ -376,4 +413,13 @@
         }
         return newArgs;
     }
+
+    // package-privates below this.
+    ScriptObject getScriptObject() {
+        return sobj;
+    }
+
+    static Object translateUndefined(Object obj) {
+        return (obj == ScriptRuntime.UNDEFINED)? null : obj;
+    }
 }
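
With ScriptObjectMirror now public and its entrySet()/keySet() backed by LinkedHashSet, code outside the nashorn packages can iterate properties in insertion order and remove them via the new delete(Object). A minimal sketch against the javax.script API, assuming a JDK where the "nashorn" engine is available; the variable names and the sample script are illustrative only:

    import javax.script.ScriptEngine;
    import javax.script.ScriptEngineManager;
    import jdk.nashorn.api.scripting.ScriptObjectMirror;

    public final class MirrorSketch {
        public static void main(final String[] args) throws Exception {
            // Returns null if no nashorn engine is present on this JDK.
            final ScriptEngine engine =
                new ScriptEngineManager().getEngineByName("nashorn");

            // A script object handed back to Java is wrapped as a ScriptObjectMirror.
            final ScriptObjectMirror obj =
                (ScriptObjectMirror) engine.eval("({ a: 1, b: 2, c: 3 })");

            // keySet() now reflects property insertion order (LinkedHashSet).
            obj.keySet().forEach(System.out::println);   // a, b, c

            // The new public delete(Object) behaves like the JS 'delete' operator.
            obj.delete("b");
            System.out.println(obj.containsKey("b"));    // false
        }
    }
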
--- a/nashorn/src/jdk/nashorn/internal/codegen/Attr.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/codegen/Attr.java	Wed Jul 05 18:59:05 2017 +0200
@@ -84,13 +84,12 @@
 import jdk.nashorn.internal.ir.UnaryNode;
 import jdk.nashorn.internal.ir.VarNode;
 import jdk.nashorn.internal.ir.WithNode;
+import jdk.nashorn.internal.ir.visitor.NodeOperatorVisitor;
 import jdk.nashorn.internal.ir.visitor.NodeVisitor;
-import jdk.nashorn.internal.ir.visitor.NodeOperatorVisitor;
 import jdk.nashorn.internal.parser.TokenType;
 import jdk.nashorn.internal.runtime.Context;
 import jdk.nashorn.internal.runtime.Debug;
 import jdk.nashorn.internal.runtime.DebugLogger;
-import jdk.nashorn.internal.runtime.ECMAException;
 import jdk.nashorn.internal.runtime.JSType;
 import jdk.nashorn.internal.runtime.Property;
 import jdk.nashorn.internal.runtime.PropertyMap;
@@ -1323,7 +1322,7 @@
     @Override
     public Node leaveForNode(final ForNode forNode) {
         if (forNode.isForIn()) {
-            forNode.setIterator(newInternal(lc.getCurrentFunction().uniqueName(ITERATOR_PREFIX.symbolName()), Type.OBJECT)); //NASHORN-73
+            forNode.setIterator(newInternal(lc.getCurrentFunction().uniqueName(ITERATOR_PREFIX.symbolName()), Type.typeFor(ITERATOR_PREFIX.type()))); //NASHORN-73
             /*
              * Iterators return objects, so we need to widen the scope of the
              * init variable if it, for example, has been assigned double type
@@ -1500,7 +1499,7 @@
     }
 
     private Symbol exceptionSymbol() {
-        return newInternal(lc.getCurrentFunction().uniqueName(EXCEPTION_PREFIX.symbolName()), Type.typeFor(ECMAException.class));
+        return newInternal(lc.getCurrentFunction().uniqueName(EXCEPTION_PREFIX.symbolName()), Type.typeFor(EXCEPTION_PREFIX.type()));
     }
 
     /**
--- a/nashorn/src/jdk/nashorn/internal/codegen/CodeGenerator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/codegen/CodeGenerator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -60,7 +60,6 @@
 import java.util.List;
 import java.util.Locale;
 import java.util.TreeMap;
-
 import jdk.nashorn.internal.codegen.ClassEmitter.Flag;
 import jdk.nashorn.internal.codegen.CompilerConstants.Call;
 import jdk.nashorn.internal.codegen.RuntimeCallSite.SpecializedRuntimeNode;
@@ -80,11 +79,11 @@
 import jdk.nashorn.internal.ir.ExecuteNode;
 import jdk.nashorn.internal.ir.ForNode;
 import jdk.nashorn.internal.ir.FunctionNode;
-import jdk.nashorn.internal.ir.LexicalContext;
 import jdk.nashorn.internal.ir.FunctionNode.CompilationState;
 import jdk.nashorn.internal.ir.IdentNode;
 import jdk.nashorn.internal.ir.IfNode;
 import jdk.nashorn.internal.ir.IndexNode;
+import jdk.nashorn.internal.ir.LexicalContext;
 import jdk.nashorn.internal.ir.LexicalContextNode;
 import jdk.nashorn.internal.ir.LiteralNode;
 import jdk.nashorn.internal.ir.LiteralNode.ArrayLiteralNode;
@@ -457,17 +456,18 @@
     }
 
     private void initSymbols(final LinkedList<Symbol> symbols, final Type type) {
-        if (symbols.isEmpty()) {
-            return;
-        }
-
-        method.loadUndefined(type);
-        while (!symbols.isEmpty()) {
-            final Symbol symbol = symbols.removeFirst();
-            if (!symbols.isEmpty()) {
-                method.dup();
-            }
-            method.store(symbol);
+        final Iterator<Symbol> it = symbols.iterator();
+        if (it.hasNext()) {
+            method.loadUndefined(type);
+            boolean hasNext;
+            do {
+                final Symbol symbol = it.next();
+                hasNext = it.hasNext();
+                if (hasNext) {
+                    method.dup();
+                }
+                method.store(symbol);
+            } while (hasNext);
         }
     }
 
@@ -942,11 +942,6 @@
              */
             final FieldObjectCreator<Symbol> foc = new FieldObjectCreator<Symbol>(this, nameList, newSymbols, values, true, hasArguments) {
                 @Override
-                protected Type getValueType(final Symbol value) {
-                    return value.getSymbolType();
-                }
-
-                @Override
                 protected void loadValue(final Symbol value) {
                     method.load(value);
                 }
@@ -1331,8 +1326,7 @@
 
     @Override
     public boolean enterObjectNode(final ObjectNode objectNode) {
-        final List<Node> elements = objectNode.getElements();
-        final int        size     = elements.size();
+        final List<PropertyNode> elements = objectNode.getElements();
 
         final List<String> keys    = new ArrayList<>();
         final List<Symbol> symbols = new ArrayList<>();
@@ -1340,8 +1334,7 @@
 
         boolean hasGettersSetters = false;
 
-        for (int i = 0; i < size; i++) {
-            final PropertyNode propertyNode = (PropertyNode)elements.get(i);
+        for (PropertyNode propertyNode: elements) {
             final Node         value        = propertyNode.getValue();
             final String       key          = propertyNode.getKeyName();
             final Symbol       symbol       = value == null ? null : propertyNode.getSymbol();
@@ -1357,11 +1350,6 @@
 
         new FieldObjectCreator<Node>(this, keys, symbols, values) {
             @Override
-            protected Type getValueType(final Node node) {
-                return node.getType();
-            }
-
-            @Override
             protected void loadValue(final Node node) {
                 load(node);
             }
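
The rewritten initSymbols() above loads the undefined value once and then dup()s it for every symbol except the last, using the iterator's hasNext() as look-ahead instead of destructively polling the list. The same "duplicate for all but the last element" loop shape, stripped of bytecode emission and with made-up names, looks like this:

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    final class DupAllButLast {
        public static void main(final String[] args) {
            final List<String> symbols = Arrays.asList("x", "y", "z");
            final Iterator<String> it = symbols.iterator();
            if (it.hasNext()) {
                System.out.println("loadUndefined");       // pushed once
                boolean hasNext;
                do {
                    final String symbol = it.next();
                    hasNext = it.hasNext();
                    if (hasNext) {
                        System.out.println("dup");          // keep a copy for the next store
                    }
                    System.out.println("store " + symbol);  // the last store consumes the value
                } while (hasNext);
            }
        }
    }
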
--- a/nashorn/src/jdk/nashorn/internal/codegen/CompilerConstants.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/codegen/CompilerConstants.java	Wed Jul 05 18:59:05 2017 +0200
@@ -29,6 +29,7 @@
 
 import java.lang.invoke.MethodHandle;
 import java.lang.invoke.MethodHandles;
+import java.util.Iterator;
 import jdk.nashorn.internal.codegen.types.Type;
 import jdk.nashorn.internal.runtime.ScriptFunction;
 import jdk.nashorn.internal.runtime.ScriptObject;
@@ -105,13 +106,13 @@
     ARGUMENTS("arguments", Object.class, 2),
 
     /** prefix for iterators for for (x in ...) */
-    ITERATOR_PREFIX(":i"),
+    ITERATOR_PREFIX(":i", Iterator.class),
 
     /** prefix for tag variable used for switch evaluation */
     SWITCH_TAG_PREFIX(":s"),
 
     /** prefix for all exceptions */
-    EXCEPTION_PREFIX(":e"),
+    EXCEPTION_PREFIX(":e", Throwable.class),
 
     /** prefix for quick slots generated in Store */
     QUICK_PREFIX(":q"),
--- a/nashorn/src/jdk/nashorn/internal/codegen/FieldObjectCreator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/codegen/FieldObjectCreator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -145,15 +145,6 @@
     protected abstract void loadValue(T value);
 
     /**
-     * Determine the type of a value. Defined by anonymous subclasses in code gen.
-     *
-     * @param value Value to inspect.
-     *
-     * @return Value type.
-     */
-    protected abstract Type getValueType(T value);
-
-    /**
      * Store a value in a field of the generated class object.
      *
      * @param method      Script method.
@@ -165,13 +156,6 @@
         method.dup();
 
         loadValue(value);
-
-        final Type valueType = getValueType(value);
-        // for example when we have a with scope
-        if (valueType.isObject() || valueType.isBoolean()) {
-            method.convert(OBJECT);
-        }
-
         method.convert(OBJECT);
         method.putField(getClassName(), ObjectClassGenerator.getFieldName(fieldIndex, Type.OBJECT), typeDescriptor(Object.class));
     }
--- a/nashorn/src/jdk/nashorn/internal/codegen/Lower.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/codegen/Lower.java	Wed Jul 05 18:59:05 2017 +0200
@@ -32,7 +32,6 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-
 import jdk.nashorn.internal.ir.BaseNode;
 import jdk.nashorn.internal.ir.BinaryNode;
 import jdk.nashorn.internal.ir.Block;
@@ -258,7 +257,7 @@
         return throwNode;
     }
 
-    private static Node ensureUniqueNamesIn(final LexicalContext lc, final Node node) {
+    private static Node ensureUniqueNamesIn(final Node node) {
         return node.accept(new NodeVisitor<LexicalContext>(new LexicalContext()) {
             @Override
             public Node leaveFunctionNode(final FunctionNode functionNode) {
@@ -273,10 +272,10 @@
         });
     }
 
-    private static List<Statement> copyFinally(final LexicalContext lc, final Block finallyBody) {
+    private static List<Statement> copyFinally(final Block finallyBody) {
         final List<Statement> newStatements = new ArrayList<>();
         for (final Statement statement : finallyBody.getStatements()) {
-            newStatements.add((Statement)ensureUniqueNamesIn(lc, statement));
+            newStatements.add((Statement)ensureUniqueNamesIn(statement));
             if (statement.hasTerminalFlags()) {
                 return newStatements;
             }
@@ -340,7 +339,7 @@
             @Override
             public Node leaveThrowNode(final ThrowNode throwNode) {
                 if (rethrows.contains(throwNode)) {
-                    final List<Statement> newStatements = copyFinally(lc, finallyBody);
+                    final List<Statement> newStatements = copyFinally(finallyBody);
                     if (!isTerminal(newStatements)) {
                         newStatements.add(throwNode);
                     }
@@ -374,7 +373,7 @@
                     resultNode = null;
                 }
 
-                newStatements.addAll(copyFinally(lc, finallyBody));
+                newStatements.addAll(copyFinally(finallyBody));
                 if (!isTerminal(newStatements)) {
                     newStatements.add(expr == null ? returnNode : returnNode.setExpression(resultNode));
                 }
@@ -384,7 +383,7 @@
 
             private Node copy(final Statement endpoint, final Node targetNode) {
                 if (!insideTry.contains(targetNode)) {
-                    final List<Statement> newStatements = copyFinally(lc, finallyBody);
+                    final List<Statement> newStatements = copyFinally(finallyBody);
                     if (!isTerminal(newStatements)) {
                         newStatements.add(endpoint);
                     }
@@ -550,7 +549,7 @@
                 final FunctionNode currentFunction = lc.getCurrentFunction();
                 return callNode.setEvalArgs(
                     new CallNode.EvalArgs(
-                        ensureUniqueNamesIn(lc, args.get(0)).accept(this),
+                        ensureUniqueNamesIn(args.get(0)).accept(this),
                         compilerConstant(THIS),
                         evalLocation(callee),
                         currentFunction.isStrict()));
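
copyFinally() clones the finally block's statements (with freshly uniquified internal names) in front of every abrupt exit inside the try body, which is why the LexicalContext parameter could be dropped once ensureUniqueNamesIn() builds its own context. The Java-level picture below is only a conceptual illustration of that lowering for the return case; the real pass rewrites nashorn IR and also covers throw, break and continue exits:

    final class FinallyLoweringSketch {

        // Before lowering: an abrupt return inside try/finally.
        static int before() {
            try {
                return compute();
            } finally {
                cleanup();
            }
        }

        // After lowering (conceptually): the finally body is copied in front of the
        // exit, and the return value is captured first so cleanup() cannot change it.
        static int after() {
            final int result = compute();
            cleanup();          // copied finally statements
            return result;
        }

        static int compute()  { return 42; }
        static void cleanup() { System.out.println("cleanup"); }
    }
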
--- a/nashorn/src/jdk/nashorn/internal/codegen/RuntimeCallSite.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/codegen/RuntimeCallSite.java	Wed Jul 05 18:59:05 2017 +0200
@@ -41,10 +41,10 @@
 import jdk.nashorn.internal.codegen.types.Type;
 import jdk.nashorn.internal.ir.RuntimeNode;
 import jdk.nashorn.internal.ir.RuntimeNode.Request;
+import jdk.nashorn.internal.lookup.Lookup;
+import jdk.nashorn.internal.lookup.MethodHandleFactory;
 import jdk.nashorn.internal.runtime.ScriptRuntime;
 import jdk.nashorn.internal.runtime.linker.Bootstrap;
-import jdk.nashorn.internal.lookup.Lookup;
-import jdk.nashorn.internal.lookup.MethodHandleFactory;
 
 /**
  * Optimistic call site that assumes its Object arguments to be of a boxed type.
@@ -59,12 +59,10 @@
 public final class RuntimeCallSite extends MutableCallSite {
     static final Call BOOTSTRAP = staticCallNoLookup(Bootstrap.class, "runtimeBootstrap", CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class);
 
-    private static final MethodHandle NEXT = findOwnMH("next",  MethodHandle.class);
+    private static final MethodHandle NEXT = findOwnMH("next",  MethodHandle.class, String.class);
 
     private final RuntimeNode.Request request;
 
-    private String name;
-
     /**
      * A specialized runtime node, i.e. on where we know at least one more specific type than object
      */
@@ -203,7 +201,6 @@
      */
     public RuntimeCallSite(final MethodType type, final String name) {
         super(type);
-        this.name    = name;
         this.request = Request.valueOf(name.substring(0, name.indexOf(SpecializedRuntimeNode.REQUEST_SEPARATOR)));
         setTarget(makeMethod(name));
     }
@@ -292,7 +289,7 @@
                 mh = MH.explicitCastArguments(mh, type());
             }
 
-            final MethodHandle fallback = MH.foldArguments(MethodHandles.exactInvoker(type()), MH.bindTo(NEXT, this));
+            final MethodHandle fallback = MH.foldArguments(MethodHandles.exactInvoker(type()), MH.insertArguments(NEXT, 0, this, requestName));
 
             MethodHandle guard;
             if (type().parameterType(0).isPrimitive()) {
@@ -336,20 +333,15 @@
      *
      * Do not call directly
      *
+     * @param name current name (with type) of runtime call at the call site
      * @return next wider specialization method for this RuntimeCallSite
      */
-    public MethodHandle next() {
-        this.name = nextName(name);
-        final MethodHandle next = makeMethod(name);
+    public MethodHandle next(final String name) {
+        final MethodHandle next = makeMethod(nextName(name));
         setTarget(next);
         return next;
     }
 
-    @Override
-    public String toString() {
-        return super.toString() + " " + name;
-    }
-
     /** Method cache */
     private static final Map<String, MethodHandle> METHODS;
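
The RuntimeCallSite change above drops the mutable name field and instead pre-binds the current name into the NEXT fallback handle with MH.insertArguments, so the relinking handle carries its own state. A small self-contained example of the same java.lang.invoke binding idiom; the target method here is invented for illustration and is not nashorn code:

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.MethodType;

    public final class InsertArgumentsSketch {
        // Stand-in for a fallback like RuntimeCallSite.next(String): the state it
        // needs arrives as a bound leading argument rather than from a field.
        public static String next(final String name, final int step) {
            return name + "#" + step;
        }

        public static void main(final String[] args) throws Throwable {
            final MethodHandle next = MethodHandles.lookup().findStatic(
                InsertArgumentsSketch.class, "next",
                MethodType.methodType(String.class, String.class, int.class));

            // Bind the leading String once; the resulting handle only takes the int.
            final MethodHandle bound =
                MethodHandles.insertArguments(next, 0, "EQ_STRICT:object");

            System.out.println((String) bound.invokeExact(7)); // EQ_STRICT:object#7
        }
    }
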
 
--- a/nashorn/src/jdk/nashorn/internal/ir/BlockLexicalContext.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/ir/BlockLexicalContext.java	Wed Jul 05 18:59:05 2017 +0200
@@ -63,7 +63,6 @@
         return sstack.pop();
     }
 
-    @SuppressWarnings("unchecked")
     @Override
     public <T extends LexicalContextNode> T pop(final T node) {
         T expected = node;
--- a/nashorn/src/jdk/nashorn/internal/ir/FunctionNode.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/ir/FunctionNode.java	Wed Jul 05 18:59:05 2017 +0200
@@ -30,7 +30,6 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-
 import jdk.nashorn.internal.codegen.CompileUnit;
 import jdk.nashorn.internal.codegen.Compiler;
 import jdk.nashorn.internal.codegen.CompilerConstants;
@@ -385,7 +384,7 @@
      * @return function node or a new one if state was changed
      */
     public FunctionNode setState(final LexicalContext lc, final CompilationState state) {
-        if (this.compilationState.equals(state)) {
+        if (this.compilationState.contains(state)) {
             return this;
         }
         final EnumSet<CompilationState> newState = EnumSet.copyOf(this.compilationState);
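
The equals-to-contains switch above is a genuine bug fix: compilationState is an EnumSet, and a Set is never equal() to a single enum constant, so the early-out never fired; contains() tests membership, which is what setState() means to check. A quick stand-alone demonstration (the enum constants here are illustrative, not the real CompilationState values):

    import java.util.EnumSet;

    public final class EnumSetContainsDemo {
        enum CompilationState { PARSED, LOWERED, ATTRIBUTED }

        public static void main(final String[] args) {
            final EnumSet<CompilationState> states =
                EnumSet.of(CompilationState.PARSED, CompilationState.LOWERED);

            // A Set compared to a single element is never equal...
            System.out.println(states.equals(CompilationState.LOWERED));   // false

            // ...membership is the check the code actually wants.
            System.out.println(states.contains(CompilationState.LOWERED)); // true
        }
    }
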
--- a/nashorn/src/jdk/nashorn/internal/ir/LiteralNode.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/ir/LiteralNode.java	Wed Jul 05 18:59:05 2017 +0200
@@ -28,7 +28,6 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-
 import jdk.nashorn.internal.codegen.CompileUnit;
 import jdk.nashorn.internal.codegen.types.Type;
 import jdk.nashorn.internal.ir.annotations.Immutable;
@@ -242,8 +241,8 @@
      *
      * @return the new literal node
      */
-    public static LiteralNode<Node> newInstance(final long token, final int finish) {
-        return new NodeLiteralNode(token, finish);
+    public static LiteralNode<Object> newInstance(final long token, final int finish) {
+        return new NullLiteralNode(token, finish);
     }
 
     /**
@@ -253,8 +252,8 @@
      *
      * @return the new literal node
      */
-    public static LiteralNode<?> newInstance(final Node parent) {
-        return new NodeLiteralNode(parent.getToken(), parent.getFinish());
+    public static LiteralNode<Object> newInstance(final Node parent) {
+        return new NullLiteralNode(parent.getToken(), parent.getFinish());
     }
 
     @Immutable
@@ -496,33 +495,15 @@
         return new LexerTokenLiteralNode(parent.getToken(), parent.getFinish(), value);
     }
 
-    private static final class NodeLiteralNode extends LiteralNode<Node> {
-
-        private NodeLiteralNode(final long token, final int finish) {
-            this(token, finish, null);
-        }
+    private static final class NullLiteralNode extends LiteralNode<Object> {
 
-        private NodeLiteralNode(final long token, final int finish, final Node value) {
-            super(Token.recast(token, TokenType.OBJECT), finish, value);
-        }
-
-        private NodeLiteralNode(final LiteralNode<Node> literalNode) {
-            super(literalNode);
-        }
-
-        private NodeLiteralNode(final LiteralNode<Node> literalNode, final Node value) {
-            super(literalNode, value);
+        private NullLiteralNode(final long token, final int finish) {
+            super(Token.recast(token, TokenType.OBJECT), finish, null);
         }
 
         @Override
         public Node accept(final NodeVisitor<? extends LexicalContext> visitor) {
             if (visitor.enterLiteralNode(this)) {
-                if (value != null) {
-                    final Node newValue = value.accept(visitor);
-                    if(value != newValue) {
-                        return visitor.leaveLiteralNode(new NodeLiteralNode(this, newValue));
-                    }
-                }
                 return visitor.leaveLiteralNode(this);
             }
 
@@ -531,38 +512,13 @@
 
         @Override
         public Type getType() {
-            return value == null ? Type.OBJECT : super.getType();
+            return Type.OBJECT;
         }
 
         @Override
         public Type getWidestOperationType() {
-            return value == null ? Type.OBJECT : value.getWidestOperationType();
+            return Type.OBJECT;
         }
-
-    }
-    /**
-     * Create a new node literal for an arbitrary node
-     *
-     * @param token   token
-     * @param finish  finish
-     * @param value   the literal value node
-     *
-     * @return the new literal node
-     */
-    public static LiteralNode<Node> newInstance(final long token, final int finish, final Node value) {
-        return new NodeLiteralNode(token, finish, value);
-    }
-
-    /**
-     * Create a new node literal based on a parent node (source, token, finish)
-     *
-     * @param parent parent node
-     * @param value  node value
-     *
-     * @return the new literal node
-     */
-    public static LiteralNode<?> newInstance(final Node parent, final Node value) {
-        return new NodeLiteralNode(parent.getToken(), parent.getFinish(), value);
     }
 
     /**
--- a/nashorn/src/jdk/nashorn/internal/ir/ObjectNode.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/ir/ObjectNode.java	Wed Jul 05 18:59:05 2017 +0200
@@ -27,7 +27,6 @@
 
 import java.util.Collections;
 import java.util.List;
-
 import jdk.nashorn.internal.ir.annotations.Immutable;
 import jdk.nashorn.internal.ir.visitor.NodeVisitor;
 
@@ -38,7 +37,7 @@
 public final class ObjectNode extends Node {
 
     /** Literal elements. */
-    private final List<Node> elements;
+    private final List<PropertyNode> elements;
 
     /**
      * Constructor
@@ -47,12 +46,12 @@
      * @param finish   finish
      * @param elements the elements used to initialize this ObjectNode
      */
-    public ObjectNode(final long token, final int finish, final List<Node> elements) {
+    public ObjectNode(final long token, final int finish, final List<PropertyNode> elements) {
         super(token, finish);
         this.elements = elements;
     }
 
-    private ObjectNode(final ObjectNode objectNode, final List<Node> elements) {
+    private ObjectNode(final ObjectNode objectNode, final List<PropertyNode> elements) {
         super(objectNode);
         this.elements = elements;
     }
@@ -60,7 +59,7 @@
     @Override
     public Node accept(final NodeVisitor<? extends LexicalContext> visitor) {
         if (visitor.enterObjectNode(this)) {
-            return visitor.leaveObjectNode(setElements(Node.accept(visitor, Node.class, elements)));
+            return visitor.leaveObjectNode(setElements(Node.accept(visitor, PropertyNode.class, elements)));
         }
 
         return this;
@@ -92,11 +91,11 @@
      * Get the elements of this literal node
      * @return a list of elements
      */
-    public List<Node> getElements() {
+    public List<PropertyNode> getElements() {
         return Collections.unmodifiableList(elements);
     }
 
-    private ObjectNode setElements(final List<Node> elements) {
+    private ObjectNode setElements(final List<PropertyNode> elements) {
         if (this.elements == elements) {
             return this;
         }
--- a/nashorn/src/jdk/nashorn/internal/objects/DateParser.java	Mon Jun 10 17:04:18 2013 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,706 +0,0 @@
-/*
- * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.  Oracle designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Oracle in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-
-package jdk.nashorn.internal.objects;
-
-import static java.lang.Character.DECIMAL_DIGIT_NUMBER;
-import static java.lang.Character.LOWERCASE_LETTER;
-import static java.lang.Character.OTHER_PUNCTUATION;
-import static java.lang.Character.SPACE_SEPARATOR;
-import static java.lang.Character.UPPERCASE_LETTER;
-
-import java.util.HashMap;
-import java.util.Locale;
-
-/**
- * JavaScript date parser. This class first tries to parse a date string
- * according to the extended ISO 8601 format specified in ES5 15.9.1.15.
- * If that fails, it falls back to legacy mode in which it accepts a range
- * of different formats.
- *
- * <p>This class is neither thread-safe nor reusable. Calling the
- * <tt>parse()</tt> method more than once will yield undefined results.</p>
- */
-public class DateParser {
-
-    /** Constant for index position of parsed year value. */
-    public final static int YEAR        = 0;
-    /** Constant for index position of parsed month value. */
-    public final static int MONTH       = 1;
-    /** Constant for index position of parsed day value. */
-    public final static int DAY         = 2;
-    /** Constant for index position of parsed hour value. */
-    public final static int HOUR        = 3;
-    /** Constant for index position of parsed minute value. */
-    public final static int MINUTE      = 4;
-    /** Constant for index position of parsed second value. */
-    public final static int SECOND      = 5;
-    /** Constant for index position of parsed millisecond value. */
-    public final static int MILLISECOND = 6;
-    /** Constant for index position of parsed time zone offset value. */
-    public final static int TIMEZONE    = 7;
-
-    private enum Token {
-        UNKNOWN, NUMBER, SEPARATOR, PARENTHESIS, NAME, SIGN, END
-    }
-
-    private final String string;
-    private final int length;
-    private final Integer[] fields;
-    private int pos = 0;
-    private Token token;
-    private int tokenLength;
-    private Name nameValue;
-    private int numValue;
-    private int currentField = YEAR;
-    private int yearSign = 0;
-    private boolean namedMonth = false;
-
-    private final static HashMap<String,Name> names = new HashMap<>();
-
-    static {
-        addName("monday", Name.DAY_OF_WEEK, 0);
-        addName("tuesday", Name.DAY_OF_WEEK, 0);
-        addName("wednesday", Name.DAY_OF_WEEK, 0);
-        addName("thursday", Name.DAY_OF_WEEK, 0);
-        addName("friday", Name.DAY_OF_WEEK, 0);
-        addName("saturday", Name.DAY_OF_WEEK, 0);
-        addName("sunday", Name.DAY_OF_WEEK, 0);
-        addName("january", Name.MONTH_NAME, 1);
-        addName("february", Name.MONTH_NAME, 2);
-        addName("march", Name.MONTH_NAME, 3);
-        addName("april", Name.MONTH_NAME, 4);
-        addName("may", Name.MONTH_NAME, 5);
-        addName("june", Name.MONTH_NAME, 6);
-        addName("july", Name.MONTH_NAME, 7);
-        addName("august", Name.MONTH_NAME, 8);
-        addName("september", Name.MONTH_NAME, 9);
-        addName("october", Name.MONTH_NAME, 10);
-        addName("november", Name.MONTH_NAME, 11);
-        addName("december", Name.MONTH_NAME, 12);
-        addName("am", Name.AM_PM, 0);
-        addName("pm", Name.AM_PM, 12);
-        addName("z", Name.TIMEZONE_ID, 0);
-        addName("gmt", Name.TIMEZONE_ID, 0);
-        addName("ut", Name.TIMEZONE_ID, 0);
-        addName("utc", Name.TIMEZONE_ID, 0);
-        addName("est", Name.TIMEZONE_ID, -5 * 60);
-        addName("edt", Name.TIMEZONE_ID, -4 * 60);
-        addName("cst", Name.TIMEZONE_ID, -6 * 60);
-        addName("cdt", Name.TIMEZONE_ID, -5 * 60);
-        addName("mst", Name.TIMEZONE_ID, -7 * 60);
-        addName("mdt", Name.TIMEZONE_ID, -6 * 60);
-        addName("pst", Name.TIMEZONE_ID, -8 * 60);
-        addName("pdt", Name.TIMEZONE_ID, -7 * 60);
-        addName("t", Name.TIME_SEPARATOR, 0);
-    }
-
-    /**
-     * Construct a new <tt>DateParser</tt> instance for parsing the given string.
-     * @param string the string to be parsed
-     */
-    public DateParser(final String string) {
-        this.string = string;
-        this.length = string.length();
-        this.fields = new Integer[TIMEZONE + 1];
-    }
-
-    /**
-     * Try parsing the given string as date according to the extended ISO 8601 format
-     * specified in ES5 15.9.1.15. Fall back to legacy mode if that fails.
-     * This method returns <tt>true</tt> if the string could be parsed.
-     * @return true if the string could be parsed as date
-     */
-    public boolean parse() {
-        return parseEcmaDate() || parseLegacyDate();
-    }
-
-    /**
-     * Try parsing the date string according to the rules laid out in ES5 15.9.1.15.
-     * The date string must conform to the following format:
-     *
-     * <pre>  [('-'|'+')yy]yyyy[-MM[-dd]][hh:mm[:ss[.sss]][Z|(+|-)hh:mm]] </pre>
-     *
-     * <p>If the string does not contain a time zone offset, the <tt>TIMEZONE</tt> field
-     * is set to <tt>0</tt> (GMT).</p>
-     * @return true if string represents a valid ES5 date string.
-     */
-    public boolean parseEcmaDate() {
-
-        if (token == null) {
-            token = next();
-        }
-
-        while (token != Token.END) {
-
-            switch (token) {
-                case NUMBER:
-                    if (currentField == YEAR && yearSign != 0) {
-                        // 15.9.1.15.1 Extended year must have six digits
-                        if (tokenLength != 6) {
-                            return false;
-                        }
-                        numValue *= yearSign;
-                    } else if (!checkEcmaField(currentField, numValue)) {
-                        return false;
-                    }
-                    if (!skipEcmaDelimiter()) {
-                        return false;
-                    }
-                    if (currentField < TIMEZONE) {
-                        set(currentField++, numValue);
-                    }
-                    break;
-
-                case NAME:
-                    if (nameValue == null) {
-                        return false;
-                    }
-                    switch (nameValue.type) {
-                        case Name.TIME_SEPARATOR:
-                            if (currentField == YEAR || currentField > HOUR) {
-                                return false;
-                            }
-                            currentField = HOUR;
-                            break;
-                        case Name.TIMEZONE_ID:
-                            if (!nameValue.key.equals("z") || !setTimezone(nameValue.value, false)) {
-                                return false;
-                            }
-                            break;
-                        default:
-                            return false;
-                    }
-                    break;
-
-                case SIGN:
-                    if (currentField == YEAR) {
-                        yearSign = numValue;
-                    } else if (currentField < SECOND || !setTimezone(readTimeZoneOffset(), true)) {
-                        // Note: Spidermonkey won't parse timezone unless time includes seconds and milliseconds
-                        return false;
-                    }
-                    break;
-
-                default:
-                    return false;
-            }
-            token = next();
-        }
-
-        return patchResult(true);
-    }
-
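
Editorial note on the removal above: parseEcmaDate()'s javadoc documents the extended ISO 8601 shape from ES5 15.9.1.15 that this parser accepted. To make the field layout concrete, the snippet below decomposes one such string with standard java.time; it is only an illustration, not a replacement for the deleted parser (which, among other differences, defaults a missing time zone to GMT):

    import java.time.OffsetDateTime;

    public final class Es5DateShapeDemo {
        public static void main(final String[] args) {
            final String es5 = "2013-06-10T17:04:18.000+02:00";
            final OffsetDateTime t = OffsetDateTime.parse(es5);
            System.out.println(t.getYear() + "-" + t.getMonthValue() + "-" + t.getDayOfMonth());
            System.out.println(t.getHour() + ":" + t.getMinute() + ":" + t.getSecond());
            System.out.println(t.getOffset()); // +02:00, i.e. a TIMEZONE offset of 120 minutes
        }
    }
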
-    /**
-     * Try parsing the date using a fuzzy algorithm that can handle a variety of formats.
-     *
-     * <p>Numbers separated by <tt>':'</tt> are treated as time values, optionally followed by a
-     * millisecond value separated by <tt>'.'</tt>. Other number values are treated as date values.
-     * The exact sequence of day, month, and year values to apply is determined heuristically.</p>
-     *
-     * <p>English month names and selected time zone names as well as AM/PM markers are recognized
-     * and handled properly. Additionally, numeric time zone offsets such as <tt>(+|-)hh:mm</tt> or
-     * <tt>(+|-)hhmm</tt> are recognized. If the string does not contain a time zone offset
-     * the <tt>TIMEZONE</tt>field is left undefined, meaning the local time zone should be applied.</p>
-     *
-     * <p>English weekday names are recognized but ignored. All text in parentheses is ignored as well.
-     * All other text causes parsing to fail.</p>
-     *
-     * @return true if the string could be parsed
-     */
-    public boolean parseLegacyDate() {
-
-        if (yearSign != 0 || currentField > DAY) {
-            // we don't support signed years in legacy mode
-            return false;
-        }
-        if (token == null) {
-            token = next();
-        }
-
-        while (token != Token.END) {
-
-            switch (token) {
-                case NUMBER:
-                    if (skip(':')) {
-                        // A number followed by ':' is parsed as time
-                        if (!setTimeField(numValue)) {
-                            return false;
-                        }
-                        // consume remaining time tokens
-                        do {
-                            token = next();
-                            if (token != Token.NUMBER || !setTimeField(numValue)) {
-                                return false;
-                            }
-                        } while (skip(isSet(SECOND) ? '.' : ':'));
-
-                    } else {
-                        // Parse as date token
-                        if (!setDateField(numValue)) {
-                            return false;
-                        }
-                        skip('-');
-                    }
-                    break;
-
-                case NAME:
-                    if (nameValue == null) {
-                        return false;
-                    }
-                    switch (nameValue.type) {
-                        case Name.AM_PM:
-                            if (!setAmPm(nameValue.value)) {
-                                return false;
-                            }
-                            break;
-                        case Name.MONTH_NAME:
-                            if (!setMonth(nameValue.value)) {
-                                return false;
-                            }
-                            break;
-                        case Name.TIMEZONE_ID:
-                            if (!setTimezone(nameValue.value, false)) {
-                                return false;
-                            }
-                            break;
-                        case Name.TIME_SEPARATOR:
-                            return false;
-                        default:
-                            break;
-                    }
-                    if (nameValue.type != Name.TIMEZONE_ID) {
-                        skip('-');
-                    }
-                    break;
-
-                case SIGN:
-                    if (!setTimezone(readTimeZoneOffset(), true)) {
-                        return false;
-                    }
-                    break;
-
-                case PARENTHESIS:
-                    if (!skipParentheses()) {
-                        return false;
-                    }
-                    break;
-
-                case SEPARATOR:
-                    break;
-
-                default:
-                    return false;
-            }
-            token = next();
-        }
-
-        return patchResult(false);
-    }
-
-    /**
-     * Get the parsed date and time fields as an array of <tt>Integers</tt>.
-     *
-     * <p>If parsing was successful, all fields are guaranteed to be set except for the
-     * <tt>TIMEZONE</tt> field which may be <tt>null</tt>, meaning that local time zone
-     * offset should be applied.</p>
-     *
-     * @return the parsed date fields
-     */
-    public Integer[] getDateFields() {
-        return fields;
-    }
-
-    private boolean isSet(final int field) {
-        return fields[field] != null;
-    }
-
-    private Integer get(final int field) {
-        return fields[field];
-    }
-
-    private void set(final int field, final int value) {
-        fields[field] = value;
-    }
-
-    private int peek() {
-        return pos < length ? string.charAt(pos) : -1;
-    }
-
-    private boolean skip(final char c) {
-        if (pos < length && string.charAt(pos) == c) {
-            token = null;
-            pos++;
-            return true;
-        }
-        return false;
-    }
-
-    private Token next() {
-        if (pos >= length) {
-            tokenLength = 0;
-            return Token.END;
-        }
-
-        final char c = string.charAt(pos);
-
-        if (c > 0x80) {
-            tokenLength = 1;
-            pos++;
-            return Token.UNKNOWN; // We only deal with ASCII here
-        }
-
-        final int type = Character.getType(c);
-        switch (type) {
-            case DECIMAL_DIGIT_NUMBER:
-                numValue = readNumber(6);
-                return Token.NUMBER;
-            case SPACE_SEPARATOR :
-            case OTHER_PUNCTUATION:
-                tokenLength = 1;
-                pos++;
-                return Token.SEPARATOR;
-            case UPPERCASE_LETTER:
-            case LOWERCASE_LETTER:
-                nameValue = readName();
-                return Token.NAME;
-            default:
-                tokenLength = 1;
-                pos++;
-                switch (c) {
-                    case '(':
-                        return Token.PARENTHESIS;
-                    case '-':
-                    case '+':
-                        numValue = c == '-' ? -1 : 1;
-                        return Token.SIGN;
-                    default:
-                        return Token.UNKNOWN;
-                }
-        }
-    }
-
-    private static boolean checkLegacyField(final int field, final int value) {
-        switch (field) {
-            case HOUR:
-                return isHour(value);
-            case MINUTE:
-            case SECOND:
-                return isMinuteOrSecond(value);
-            case MILLISECOND:
-                return isMillisecond(value);
-            default:
-                // skip validation on other legacy fields as we don't know what's what
-                return true;
-        }
-    }
-
-    private boolean checkEcmaField(final int field, final int value) {
-        switch (field) {
-            case YEAR:
-                return tokenLength == 4;
-            case MONTH:
-                return tokenLength == 2 && isMonth(value);
-            case DAY:
-                return tokenLength == 2 && isDay(value);
-            case HOUR:
-                return tokenLength == 2 && isHour(value);
-            case MINUTE:
-            case SECOND:
-                return tokenLength == 2 && isMinuteOrSecond(value);
-            case MILLISECOND:
-                // we allow millisecond to be less than 3 digits
-                return tokenLength < 4 && isMillisecond(value);
-            default:
-                return true;
-        }
-    }
-
-    private boolean skipEcmaDelimiter() {
-        switch (currentField) {
-            case YEAR:
-            case MONTH:
-                return skip('-') || peek() == 'T' || peek() == -1;
-            case DAY:
-                return peek() == 'T' || peek() == -1;
-            case HOUR:
-            case MINUTE:
-                return skip(':') || endOfTime();
-            case SECOND:
-                return skip('.') || endOfTime();
-            default:
-                return true;
-        }
-    }
-
-    private boolean endOfTime() {
-        final int c = peek();
-        return c == -1 || c == 'Z' || c == '-' || c == '+' || c == ' ';
-    }
-
-    private static boolean isAsciiLetter(final char ch) {
-        return ('A' <= ch && ch <= 'Z') || ('a' <= ch && ch <= 'z');
-    }
-
-    private static boolean isAsciiDigit(final char ch) {
-        return '0' <= ch && ch <= '9';
-    }
-
-    private int readNumber(final int maxDigits) {
-        final int start = pos;
-        int n = 0;
-        final int max = Math.min(length, pos + maxDigits);
-        while (pos < max && isAsciiDigit(string.charAt(pos))) {
-            n = n * 10 + string.charAt(pos++) - '0';
-        }
-        tokenLength = pos - start;
-        return n;
-    }
-
-    private Name readName() {
-        final int start = pos;
-        final int limit = Math.min(pos + 3, length);
-
-        // first read up to the key length
-        while (pos < limit && isAsciiLetter(string.charAt(pos))) {
-            pos++;
-        }
-        final String key = string.substring(start, pos).toLowerCase(Locale.ENGLISH);
-        final Name name = names.get(key);
-        // then advance to end of name
-        while (pos < length && isAsciiLetter(string.charAt(pos))) {
-            pos++;
-        }
-
-        tokenLength = pos - start;
-        // make sure we have the full name or a prefix
-        if (name != null && name.matches(string, start, tokenLength)) {
-            return name;
-        }
-        return null;
-    }
-
-    private int readTimeZoneOffset() {
-        final int sign = string.charAt(pos - 1) == '+' ? 1 : -1;
-        int offset = readNumber(2);
-        skip(':');
-        offset = offset * 60 + readNumber(2);
-        return sign * offset;
-    }
-
-    private boolean skipParentheses() {
-        int parenCount = 1;
-        while (pos < length && parenCount != 0) {
-            final char c = string.charAt(pos++);
-            if (c == '(') {
-                parenCount++;
-            } else if (c == ')') {
-                parenCount--;
-            }
-        }
-        return true;
-    }
-
-    private static int getDefaultValue(final int field) {
-        switch (field) {
-            case MONTH:
-            case DAY:
-                return 1;
-            default:
-                return 0;
-        }
-    }
-
-    private static boolean isDay(final int n) {
-        return 1 <= n && n <= 31;
-    }
-
-    private static boolean isMonth(final int n) {
-        return 1 <= n && n <= 12;
-    }
-
-    private static boolean isHour(final int n) {
-        return 0 <= n && n <= 24;
-    }
-
-    private static boolean isMinuteOrSecond(final int n) {
-        return 0 <= n && n < 60;
-    }
-
-    private static boolean isMillisecond(final int n) {
-        return 0<= n && n < 1000;
-    }
-
-    private boolean setMonth(final int m) {
-        if (!isSet(MONTH)) {
-            namedMonth = true;
-            set(MONTH, m);
-            return true;
-        }
-        return false;
-    }
-
-    private boolean setDateField(final int n) {
-        for (int field = YEAR; field != HOUR; field++) {
-            if (!isSet(field)) {
-                // no validation on legacy date fields
-                set(field, n);
-                return true;
-            }
-        }
-        return false;
-    }
-
-    private boolean setTimeField(final int n) {
-        for (int field = HOUR; field != TIMEZONE; field++) {
-            if (!isSet(field)) {
-                if (checkLegacyField(field, n)) {
-                    set(field, n);
-                    return true;
-                }
-                return false;
-            }
-        }
-        return false;
-    }
-
-    private boolean setTimezone(final int offset, final boolean asNumericOffset) {
-        if (!isSet(TIMEZONE) || (asNumericOffset && get(TIMEZONE) == 0)) {
-            set(TIMEZONE, offset);
-            return true;
-        }
-        return false;
-    }
-
-    private boolean setAmPm(final int offset) {
-        if (!isSet(HOUR)) {
-            return false;
-        }
-        final int hour = get(HOUR);
-        if (hour >= 0 && hour <= 12) {
-            set(HOUR, hour + offset);
-        }
-        return true;
-    }
-
-    private boolean patchResult(final boolean strict) {
-        // sanity checks - make sure we have something
-        if (!isSet(YEAR) && !isSet(HOUR)) {
-            return false;
-        }
-        if (isSet(HOUR) && !isSet(MINUTE)) {
-            return false;
-        }
-        // fill in default values for unset fields except timezone
-        for (int field = YEAR; field <= TIMEZONE; field++) {
-            if (get(field) == null) {
-                if (field == TIMEZONE && !strict) {
-                    // We only use UTC as default timezone for dates parsed complying with
-                    // the format specified in ES5 15.9.1.15. Otherwise the slot is left empty
-                    // and local timezone is used.
-                    continue;
-                }
-                final int value = getDefaultValue(field);
-                set(field, value);
-            }
-        }
-
-        if (!strict) {
-            // swap year, month, and day if it looks like the right thing to do
-            if (isDay(get(YEAR))) {
-                final int d = get(YEAR);
-                set(YEAR, get(DAY));
-                if (namedMonth) {
-                    // d-m-y
-                    set(DAY, d);
-                } else {
-                    // m-d-y
-                    final int d2 = get(MONTH);
-                    set(MONTH, d);
-                    set(DAY, d2);
-                }
-            }
-            // sanity checks now that we know what's what
-            if (!isMonth(get(MONTH)) || !isDay(get(DAY))) {
-                return false;
-            }
-
-            // add 1900 or 2000 to year if it's between 0 and 100
-            final int year = get(YEAR);
-            if (year >= 0 && year < 100) {
-                set(YEAR, year >= 50 ? 1900 + year : 2000 + year);
-            }
-        } else {
-            // 24 hour value is only allowed if all other time values are zero
-            if (get(HOUR) == 24 &&
-                    (get(MINUTE) != 0 || get(SECOND) != 0 || get(MILLISECOND) != 0)) {
-                return false;
-            }
-        }
-
-        // set month to 0-based
-        set(MONTH, get(MONTH) - 1);
-        return true;
-    }
-
-    private static void addName(final String str, final int type, final int value) {
-        final Name name = new Name(str, type, value);
-        names.put(name.key, name);
-    }
-
-    private static class Name {
-        final String name;
-        final String key;
-        final int value;
-        final int type;
-
-        final static int DAY_OF_WEEK    = -1;
-        final static int MONTH_NAME     = 0;
-        final static int AM_PM          = 1;
-        final static int TIMEZONE_ID    = 2;
-        final static int TIME_SEPARATOR = 3;
-
-        Name(final String name, final int type, final int value) {
-            assert name != null;
-            assert name.equals(name.toLowerCase(Locale.ENGLISH));
-
-            this.name = name;
-            // use first three characters as lookup key
-            this.key = name.substring(0, Math.min(3, name.length()));
-            this.type = type;
-            this.value = value;
-        }
-
-        public boolean matches(final String str, final int offset, final int len) {
-            return name.regionMatches(true, 0, str, offset, len);
-        }
-
-        @Override
-        public String toString() {
-            return name;
-        }
-    }
-
-}
--- a/nashorn/src/jdk/nashorn/internal/objects/GenericPropertyDescriptor.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/objects/GenericPropertyDescriptor.java	Wed Jul 05 18:59:05 2017 +0200
@@ -150,11 +150,11 @@
         if (this == obj) {
             return true;
         }
-        if (!(obj instanceof AccessorPropertyDescriptor)) {
+        if (!(obj instanceof GenericPropertyDescriptor)) {
             return false;
         }
 
-        final AccessorPropertyDescriptor other = (AccessorPropertyDescriptor)obj;
+        final GenericPropertyDescriptor other = (GenericPropertyDescriptor)obj;
         return ScriptRuntime.sameValue(configurable, other.configurable) &&
                ScriptRuntime.sameValue(enumerable, other.enumerable);
     }
--- a/nashorn/src/jdk/nashorn/internal/objects/Global.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/objects/Global.java	Wed Jul 05 18:59:05 2017 +0200
@@ -119,6 +119,10 @@
     @Property(attributes = Attribute.NOT_ENUMERABLE)
     public Object load;
 
+    /** Nashorn extension: global.loadWithNewGlobal */
+    @Property(attributes = Attribute.NOT_ENUMERABLE)
+    public Object loadWithNewGlobal;
+
     /** Nashorn extension: global.exit */
     @Property(attributes = Attribute.NOT_ENUMERABLE)
     public Object exit;
@@ -364,11 +368,12 @@
     // Used to store the last RegExp result to support deprecated RegExp constructor properties
     private RegExpResult lastRegExpResult;
 
-    private static final MethodHandle EVAL    = findOwnMH("eval",    Object.class, Object.class, Object.class);
-    private static final MethodHandle PRINT   = findOwnMH("print",   Object.class, Object.class, Object[].class);
-    private static final MethodHandle PRINTLN = findOwnMH("println", Object.class, Object.class, Object[].class);
-    private static final MethodHandle LOAD    = findOwnMH("load",    Object.class, Object.class, Object.class);
-    private static final MethodHandle EXIT    = findOwnMH("exit",    Object.class, Object.class, Object.class);
+    private static final MethodHandle EVAL              = findOwnMH("eval",              Object.class, Object.class, Object.class);
+    private static final MethodHandle PRINT             = findOwnMH("print",             Object.class, Object.class, Object[].class);
+    private static final MethodHandle PRINTLN           = findOwnMH("println",           Object.class, Object.class, Object[].class);
+    private static final MethodHandle LOAD              = findOwnMH("load",              Object.class, Object.class, Object.class);
+    private static final MethodHandle LOADWITHNEWGLOBAL = findOwnMH("loadWithNewGlobal", Object.class, Object.class, Object.class);
+    private static final MethodHandle EXIT              = findOwnMH("exit",              Object.class, Object.class, Object.class);
 
     private final Context context;
 
@@ -743,6 +748,21 @@
     }
 
     /**
+     * Global loadWithNewGlobal implementation - Nashorn extension
+     *
+     * @param self    scope
+     * @param source  source to load
+     *
+     * @return result of load (undefined)
+     *
+     * @throws IOException if source could not be read
+     */
+    public static Object loadWithNewGlobal(final Object self, final Object source) throws IOException {
+        final Global global = Global.instance();
+        return global.context.loadWithNewGlobal(source);
+    }
+
+    /**
      * Global exit and quit implementation - Nashorn extension: perform a {@code System.exit} call from the script
      *
      * @param self  self reference
@@ -1387,6 +1407,7 @@
         this.unescape           = ScriptFunctionImpl.makeFunction("unescape",   GlobalFunctions.UNESCAPE);
         this.print              = ScriptFunctionImpl.makeFunction("print",      env._print_no_newline ? PRINT : PRINTLN);
         this.load               = ScriptFunctionImpl.makeFunction("load",       LOAD);
+        this.loadWithNewGlobal  = ScriptFunctionImpl.makeFunction("loadWithNewGlobal", LOADWITHNEWGLOBAL);
         this.exit               = ScriptFunctionImpl.makeFunction("exit",       EXIT);
         this.quit               = ScriptFunctionImpl.makeFunction("quit",       EXIT);
 
@@ -1628,20 +1649,21 @@
     @SuppressWarnings("resource")
     private static Object printImpl(final boolean newLine, final Object... objects) {
         final PrintWriter out = Global.getEnv().getOut();
+        final StringBuilder sb = new StringBuilder();
 
-        boolean first = true;
         for (final Object object : objects) {
-            if (first) {
-                first = false;
-            } else {
-                out.print(' ');
+            if (sb.length() != 0) {
+                sb.append(' ');
             }
 
-            out.print(JSType.toString(object));
+            sb.append(JSType.toString(object));
         }
 
+        // Print all at once to ensure thread friendly result.
         if (newLine) {
-            out.println();
+            out.println(sb.toString());
+        } else {
+            out.print(sb.toString());
         }
 
         out.flush();
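
The printImpl rewrite above joins all arguments in a StringBuilder and emits them with a single print/println call, so output from concurrently running scripts is not interleaved within a line. A minimal standalone sketch of the same pattern (plain Java, not Nashorn's JSType-based code; the class and method names are invented for the example):

    import java.io.PrintWriter;

    /** Sketch of the "build the whole line, then write once" pattern used by printImpl. */
    public final class BatchedPrintSketch {
        static void print(final PrintWriter out, final boolean newLine, final Object... objects) {
            final StringBuilder sb = new StringBuilder();
            for (final Object object : objects) {
                if (sb.length() != 0) {
                    sb.append(' ');                 // single space between arguments
                }
                sb.append(String.valueOf(object));  // stand-in for JSType.toString
            }
            // One write call instead of one per argument keeps concurrent lines intact.
            if (newLine) {
                out.println(sb.toString());
            } else {
                out.print(sb.toString());
            }
            out.flush();
        }

        public static void main(final String[] args) {
            print(new PrintWriter(System.out), true, "hello", 42, "world");  // prints: hello 42 world
        }
    }
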
--- a/nashorn/src/jdk/nashorn/internal/objects/NativeArray.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/objects/NativeArray.java	Wed Jul 05 18:59:05 2017 +0200
@@ -75,7 +75,7 @@
     private static final MethodHandle FILTER_CALLBACK_INVOKER  = createIteratorCallbackInvoker(boolean.class);
 
     private static final MethodHandle REDUCE_CALLBACK_INVOKER = Bootstrap.createDynamicInvoker("dyn:call", Object.class,
-            Object.class, Undefined.class, Object.class, Object.class, int.class, Object.class);
+            Object.class, Undefined.class, Object.class, Object.class, long.class, Object.class);
     private static final MethodHandle CALL_CMP                = Bootstrap.createDynamicInvoker("dyn:call", double.class,
             ScriptFunction.class, Object.class, Object.class, Object.class);
 
@@ -1086,7 +1086,7 @@
     private static boolean applyEvery(final Object self, final Object callbackfn, final Object thisArg) {
         return new IteratorAction<Boolean>(Global.toObject(self), callbackfn, thisArg, true) {
             @Override
-            protected boolean forEach(final Object val, final int i) throws Throwable {
+            protected boolean forEach(final Object val, final long i) throws Throwable {
                 return (result = (boolean)EVERY_CALLBACK_INVOKER.invokeExact(callbackfn, thisArg, val, i, self));
             }
         }.apply();
@@ -1104,7 +1104,7 @@
     public static Object some(final Object self, final Object callbackfn, final Object thisArg) {
         return new IteratorAction<Boolean>(Global.toObject(self), callbackfn, thisArg, false) {
             @Override
-            protected boolean forEach(final Object val, final int i) throws Throwable {
+            protected boolean forEach(final Object val, final long i) throws Throwable {
                 return !(result = (boolean)SOME_CALLBACK_INVOKER.invokeExact(callbackfn, thisArg, val, i, self));
             }
         }.apply();
@@ -1122,7 +1122,7 @@
     public static Object forEach(final Object self, final Object callbackfn, final Object thisArg) {
         return new IteratorAction<Object>(Global.toObject(self), callbackfn, thisArg, ScriptRuntime.UNDEFINED) {
             @Override
-            protected boolean forEach(final Object val, final int i) throws Throwable {
+            protected boolean forEach(final Object val, final long i) throws Throwable {
                 FOREACH_CALLBACK_INVOKER.invokeExact(callbackfn, thisArg, val, i, self);
                 return true;
             }
@@ -1141,9 +1141,9 @@
     public static Object map(final Object self, final Object callbackfn, final Object thisArg) {
         return new IteratorAction<NativeArray>(Global.toObject(self), callbackfn, thisArg, null) {
             @Override
-            protected boolean forEach(final Object val, final int i) throws Throwable {
+            protected boolean forEach(final Object val, final long i) throws Throwable {
                 final Object r = MAP_CALLBACK_INVOKER.invokeExact(callbackfn, thisArg, val, i, self);
-                result.defineOwnProperty(index, r);
+                result.defineOwnProperty((int)index, r);
                 return true;
             }
 
@@ -1167,12 +1167,12 @@
     @Function(attributes = Attribute.NOT_ENUMERABLE, arity = 1)
     public static Object filter(final Object self, final Object callbackfn, final Object thisArg) {
         return new IteratorAction<NativeArray>(Global.toObject(self), callbackfn, thisArg, new NativeArray()) {
-            private int to = 0;
+            private long to = 0;
 
             @Override
-            protected boolean forEach(final Object val, final int i) throws Throwable {
+            protected boolean forEach(final Object val, final long i) throws Throwable {
                 if ((boolean)FILTER_CALLBACK_INVOKER.invokeExact(callbackfn, thisArg, val, i, self)) {
-                    result.defineOwnProperty(to++, val);
+                    result.defineOwnProperty((int)(to++), val);
                 }
                 return true;
             }
@@ -1200,7 +1200,7 @@
         //if initial value is ScriptRuntime.UNDEFINED - step forward once.
         return new IteratorAction<Object>(Global.toObject(self), callbackfn, ScriptRuntime.UNDEFINED, initialValue, iter) {
             @Override
-            protected boolean forEach(final Object val, final int i) throws Throwable {
+            protected boolean forEach(final Object val, final long i) throws Throwable {
                 // TODO: why can't I declare the second arg as Undefined.class?
                 result = REDUCE_CALLBACK_INVOKER.invokeExact(callbackfn, ScriptRuntime.UNDEFINED, result, val, i, self);
                 return true;
@@ -1258,7 +1258,7 @@
 
     private static MethodHandle createIteratorCallbackInvoker(final Class<?> rtype) {
         return Bootstrap.createDynamicInvoker("dyn:call", rtype, Object.class, Object.class, Object.class,
-                int.class, Object.class);
+                long.class, Object.class);
 
     }
 }
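
The int-to-long switch in the callback invoker signatures above exists because ECMAScript array indices go up to 2^32 - 2, which does not fit in a Java int. A small sketch of the overflow an int-typed index would cause (nothing Nashorn-specific is assumed here):

    public final class ArrayIndexRangeSketch {
        public static void main(final String[] args) {
            final long maxJsArrayIndex = 4_294_967_294L;              // 2^32 - 2, the largest valid array index
            System.out.println(maxJsArrayIndex > Integer.MAX_VALUE);  // true
            System.out.println((int) maxJsArrayIndex);                // -2: what an int callback index would see
        }
    }
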
--- a/nashorn/src/jdk/nashorn/internal/objects/NativeDate.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/objects/NativeDate.java	Wed Jul 05 18:59:05 2017 +0200
@@ -39,6 +39,7 @@
 import jdk.nashorn.internal.objects.annotations.ScriptClass;
 import jdk.nashorn.internal.objects.annotations.SpecializedConstructor;
 import jdk.nashorn.internal.objects.annotations.Where;
+import jdk.nashorn.internal.parser.DateParser;
 import jdk.nashorn.internal.runtime.ConsString;
 import jdk.nashorn.internal.runtime.JSType;
 import jdk.nashorn.internal.runtime.ScriptEnvironment;
--- a/nashorn/src/jdk/nashorn/internal/objects/NativeError.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/objects/NativeError.java	Wed Jul 05 18:59:05 2017 +0200
@@ -32,6 +32,7 @@
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.List;
+import jdk.nashorn.internal.codegen.CompilerConstants;
 import jdk.nashorn.internal.objects.annotations.Attribute;
 import jdk.nashorn.internal.objects.annotations.Constructor;
 import jdk.nashorn.internal.objects.annotations.Function;
@@ -248,7 +249,13 @@
             final List<StackTraceElement> filtered = new ArrayList<>();
             for (final StackTraceElement st : frames) {
                 if (ECMAErrors.isScriptFrame(st)) {
-                    filtered.add(st);
+                    final String className = "<" + st.getFileName() + ">";
+                    String methodName = st.getMethodName();
+                    if (methodName.equals(CompilerConstants.RUN_SCRIPT.symbolName())) {
+                        methodName = "<program>";
+                    }
+                    filtered.add(new StackTraceElement(className, methodName,
+                            st.getFileName(), st.getLineNumber()));
                 }
             }
             res = filtered.toArray();
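
The filter above now rewrites each script frame into a synthetic StackTraceElement whose class name is the script file in angle brackets and whose method name is <program> for top-level code. A quick sketch of how such a rewritten frame renders (the file name and line number are made up for illustration):

    public final class ScriptFrameSketch {
        public static void main(final String[] args) {
            final StackTraceElement frame =
                    new StackTraceElement("<test.js>", "<program>", "test.js", 3);
            System.out.println(frame);   // prints: <test.js>.<program>(test.js:3)
        }
    }
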
--- a/nashorn/src/jdk/nashorn/internal/objects/NativeFunction.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/objects/NativeFunction.java	Wed Jul 05 18:59:05 2017 +0200
@@ -33,10 +33,14 @@
 import jdk.nashorn.internal.objects.annotations.Constructor;
 import jdk.nashorn.internal.objects.annotations.Function;
 import jdk.nashorn.internal.objects.annotations.ScriptClass;
+import jdk.nashorn.internal.parser.Parser;
+import jdk.nashorn.internal.runtime.Context;
 import jdk.nashorn.internal.runtime.JSType;
+import jdk.nashorn.internal.runtime.ParserException;
 import jdk.nashorn.internal.runtime.ScriptFunction;
 import jdk.nashorn.internal.runtime.ScriptObject;
 import jdk.nashorn.internal.runtime.ScriptRuntime;
+import jdk.nashorn.internal.runtime.Source;
 
 /**
  * ECMA 15.3 Function Objects
@@ -187,16 +191,25 @@
 
         sb.append("(function (");
         if (args.length > 0) {
+            final StringBuilder paramListBuf = new StringBuilder();
             for (int i = 0; i < args.length - 1; i++) {
-                sb.append(JSType.toString(args[i]));
+                paramListBuf.append(JSType.toString(args[i]));
                 if (i < args.length - 2) {
-                    sb.append(",");
+                    paramListBuf.append(",");
                 }
             }
+
+            final String paramList = paramListBuf.toString();
+            if (! paramList.isEmpty()) {
+                checkFunctionParameters(paramList);
+                sb.append(paramList);
+            }
         }
         sb.append(") {\n");
         if (args.length > 0) {
-            sb.append(JSType.toString(args[args.length - 1]));
+            final String funcBody = JSType.toString(args[args.length - 1]);
+            checkFunctionBody(funcBody);
+            sb.append(funcBody);
             sb.append('\n');
         }
         sb.append("})");
@@ -205,4 +218,24 @@
 
         return Global.directEval(global, sb.toString(), global, "<function>", Global.isStrict());
     }
+
+    private static void checkFunctionParameters(final String params) {
+        final Source src = new Source("<function>", params);
+        final Parser parser = new Parser(Global.getEnv(), src, new Context.ThrowErrorManager());
+        try {
+            parser.parseFormalParameterList();
+        } catch (final ParserException pe) {
+            pe.throwAsEcmaException();
+        }
+    }
+
+    private static void checkFunctionBody(final String funcBody) {
+        final Source src = new Source("<function>", funcBody);
+        final Parser parser = new Parser(Global.getEnv(), src, new Context.ThrowErrorManager());
+        try {
+            parser.parseFunctionBody();
+        } catch (final ParserException pe) {
+            pe.throwAsEcmaException();
+        }
+    }
 }
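
The new checkFunctionParameters/checkFunctionBody calls above exist because the Function constructor splices caller-supplied strings into a source template, so an unvalidated "parameter list" can escape that template. A standalone illustration of the problem using only string concatenation (the malicious input is invented for the example; the template matches the one built above):

    public final class FunctionSourceSketch {
        public static void main(final String[] args) {
            // Template built by NativeFunction.function(): "(function (" + params + ") {\n" + body + "\n})"
            final String params = "a) { return 42; }) || (function (";   // crafted "parameter list"
            final String body   = "return a;";
            final String source = "(function (" + params + ") {\n" + body + "\n})";
            System.out.println(source);
            // Prints two function expressions joined by ||, not the single function the caller
            // appears to define, which is why the parameter list and body are now parsed
            // (via Parser.parseFormalParameterList and Parser.parseFunctionBody) before eval.
        }
    }
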
--- a/nashorn/src/jdk/nashorn/internal/objects/NativeMath.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/objects/NativeMath.java	Wed Jul 05 18:59:05 2017 +0200
@@ -31,7 +31,6 @@
 import jdk.nashorn.internal.objects.annotations.ScriptClass;
 import jdk.nashorn.internal.objects.annotations.SpecializedFunction;
 import jdk.nashorn.internal.objects.annotations.Where;
-import jdk.nashorn.internal.runtime.GlobalFunctions;
 import jdk.nashorn.internal.runtime.JSType;
 import jdk.nashorn.internal.runtime.ScriptObject;
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/src/jdk/nashorn/internal/parser/DateParser.java	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,716 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package jdk.nashorn.internal.parser;
+
+import static java.lang.Character.DECIMAL_DIGIT_NUMBER;
+import static java.lang.Character.LOWERCASE_LETTER;
+import static java.lang.Character.OTHER_PUNCTUATION;
+import static java.lang.Character.SPACE_SEPARATOR;
+import static java.lang.Character.UPPERCASE_LETTER;
+
+import java.util.HashMap;
+import java.util.Locale;
+
+/**
+ * JavaScript date parser. This class first tries to parse a date string
+ * according to the extended ISO 8601 format specified in ES5 15.9.1.15.
+ * If that fails, it falls back to legacy mode in which it accepts a range
+ * of different formats.
+ *
+ * <p>This class is neither thread-safe nor reusable. Calling the
+ * <tt>parse()</tt> method more than once will yield undefined results.</p>
+ */
+public class DateParser {
+
+    /** Constant for index position of parsed year value. */
+    public final static int YEAR        = 0;
+    /** Constant for index position of parsed month value. */
+    public final static int MONTH       = 1;
+    /** Constant for index position of parsed day value. */
+    public final static int DAY         = 2;
+    /** Constant for index position of parsed hour value. */
+    public final static int HOUR        = 3;
+    /** Constant for index position of parsed minute value. */
+    public final static int MINUTE      = 4;
+    /** Constant for index position of parsed second value. */
+    public final static int SECOND      = 5;
+    /** Constant for index position of parsed millisecond value. */
+    public final static int MILLISECOND = 6;
+    /** Constant for index position of parsed time zone offset value. */
+    public final static int TIMEZONE    = 7;
+
+    private enum Token {
+        UNKNOWN, NUMBER, SEPARATOR, PARENTHESIS, NAME, SIGN, END
+    }
+
+    private final String string;
+    private final int length;
+    private final Integer[] fields;
+    private int pos = 0;
+    private Token token;
+    private int tokenLength;
+    private Name nameValue;
+    private int numValue;
+    private int currentField = YEAR;
+    private int yearSign = 0;
+    private boolean namedMonth = false;
+
+    private final static HashMap<String,Name> names = new HashMap<>();
+
+    static {
+        addName("monday", Name.DAY_OF_WEEK, 0);
+        addName("tuesday", Name.DAY_OF_WEEK, 0);
+        addName("wednesday", Name.DAY_OF_WEEK, 0);
+        addName("thursday", Name.DAY_OF_WEEK, 0);
+        addName("friday", Name.DAY_OF_WEEK, 0);
+        addName("saturday", Name.DAY_OF_WEEK, 0);
+        addName("sunday", Name.DAY_OF_WEEK, 0);
+        addName("january", Name.MONTH_NAME, 1);
+        addName("february", Name.MONTH_NAME, 2);
+        addName("march", Name.MONTH_NAME, 3);
+        addName("april", Name.MONTH_NAME, 4);
+        addName("may", Name.MONTH_NAME, 5);
+        addName("june", Name.MONTH_NAME, 6);
+        addName("july", Name.MONTH_NAME, 7);
+        addName("august", Name.MONTH_NAME, 8);
+        addName("september", Name.MONTH_NAME, 9);
+        addName("october", Name.MONTH_NAME, 10);
+        addName("november", Name.MONTH_NAME, 11);
+        addName("december", Name.MONTH_NAME, 12);
+        addName("am", Name.AM_PM, 0);
+        addName("pm", Name.AM_PM, 12);
+        addName("z", Name.TIMEZONE_ID, 0);
+        addName("gmt", Name.TIMEZONE_ID, 0);
+        addName("ut", Name.TIMEZONE_ID, 0);
+        addName("utc", Name.TIMEZONE_ID, 0);
+        addName("est", Name.TIMEZONE_ID, -5 * 60);
+        addName("edt", Name.TIMEZONE_ID, -4 * 60);
+        addName("cst", Name.TIMEZONE_ID, -6 * 60);
+        addName("cdt", Name.TIMEZONE_ID, -5 * 60);
+        addName("mst", Name.TIMEZONE_ID, -7 * 60);
+        addName("mdt", Name.TIMEZONE_ID, -6 * 60);
+        addName("pst", Name.TIMEZONE_ID, -8 * 60);
+        addName("pdt", Name.TIMEZONE_ID, -7 * 60);
+        addName("t", Name.TIME_SEPARATOR, 0);
+    }
+
+    /**
+     * Construct a new <tt>DateParser</tt> instance for parsing the given string.
+     * @param string the string to be parsed
+     */
+    public DateParser(final String string) {
+        this.string = string;
+        this.length = string.length();
+        this.fields = new Integer[TIMEZONE + 1];
+    }
+
+    /**
+     * Try parsing the given string as date according to the extended ISO 8601 format
+     * specified in ES5 15.9.1.15. Fall back to legacy mode if that fails.
+     * This method returns <tt>true</tt> if the string could be parsed.
+     * @return true if the string could be parsed as date
+     */
+    public boolean parse() {
+        return parseEcmaDate() || parseLegacyDate();
+    }
+
+    /**
+     * Try parsing the date string according to the rules laid out in ES5 15.9.1.15.
+     * The date string must conform to the following format:
+     *
+     * <pre>  [('-'|'+')yy]yyyy[-MM[-dd]][Thh:mm[:ss[.sss]][Z|(+|-)hh:mm]] </pre>
+     *
+     * <p>If the string does not contain a time zone offset, the <tt>TIMEZONE</tt> field
+     * is set to <tt>0</tt> (GMT).</p>
+     * @return true if string represents a valid ES5 date string.
+     */
+    public boolean parseEcmaDate() {
+
+        if (token == null) {
+            token = next();
+        }
+
+        while (token != Token.END) {
+
+            switch (token) {
+                case NUMBER:
+                    if (currentField == YEAR && yearSign != 0) {
+                        // 15.9.1.15.1 Extended year must have six digits
+                        if (tokenLength != 6) {
+                            return false;
+                        }
+                        numValue *= yearSign;
+                    } else if (!checkEcmaField(currentField, numValue)) {
+                        return false;
+                    }
+                    if (!skipEcmaDelimiter()) {
+                        return false;
+                    }
+                    if (currentField < TIMEZONE) {
+                        set(currentField++, numValue);
+                    }
+                    break;
+
+                case NAME:
+                    if (nameValue == null) {
+                        return false;
+                    }
+                    switch (nameValue.type) {
+                        case Name.TIME_SEPARATOR:
+                            if (currentField == YEAR || currentField > HOUR) {
+                                return false;
+                            }
+                            currentField = HOUR;
+                            break;
+                        case Name.TIMEZONE_ID:
+                            if (!nameValue.key.equals("z") || !setTimezone(nameValue.value, false)) {
+                                return false;
+                            }
+                            break;
+                        default:
+                            return false;
+                    }
+                    break;
+
+                case SIGN:
+                    if (peek() == -1) {
+                        // END after sign - wrong!
+                        return false;
+                    }
+
+                    if (currentField == YEAR) {
+                        yearSign = numValue;
+                    } else if (currentField < SECOND || !setTimezone(readTimeZoneOffset(), true)) {
+                        // Note: Spidermonkey won't parse timezone unless time includes seconds and milliseconds
+                        return false;
+                    }
+                    break;
+
+                default:
+                    return false;
+            }
+            token = next();
+        }
+
+        return patchResult(true);
+    }
+
+    /**
+     * Try parsing the date using a fuzzy algorithm that can handle a variety of formats.
+     *
+     * <p>Numbers separated by <tt>':'</tt> are treated as time values, optionally followed by a
+     * millisecond value separated by <tt>'.'</tt>. Other number values are treated as date values.
+     * The exact sequence of day, month, and year values to apply is determined heuristically.</p>
+     *
+     * <p>English month names and selected time zone names as well as AM/PM markers are recognized
+     * and handled properly. Additionally, numeric time zone offsets such as <tt>(+|-)hh:mm</tt> or
+     * <tt>(+|-)hhmm</tt> are recognized. If the string does not contain a time zone offset,
+     * the <tt>TIMEZONE</tt> field is left undefined, meaning the local time zone should be applied.</p>
+     *
+     * <p>English weekday names are recognized but ignored. All text in parentheses is ignored as well.
+     * All other text causes parsing to fail.</p>
+     *
+     * @return true if the string could be parsed
+     */
+    public boolean parseLegacyDate() {
+
+        if (yearSign != 0 || currentField > DAY) {
+            // we don't support signed years in legacy mode
+            return false;
+        }
+        if (token == null) {
+            token = next();
+        }
+
+        while (token != Token.END) {
+
+            switch (token) {
+                case NUMBER:
+                    if (skip(':')) {
+                        // A number followed by ':' is parsed as time
+                        if (!setTimeField(numValue)) {
+                            return false;
+                        }
+                        // consume remaining time tokens
+                        do {
+                            token = next();
+                            if (token != Token.NUMBER || !setTimeField(numValue)) {
+                                return false;
+                            }
+                        } while (skip(isSet(SECOND) ? '.' : ':'));
+
+                    } else {
+                        // Parse as date token
+                        if (!setDateField(numValue)) {
+                            return false;
+                        }
+                        skip('-');
+                    }
+                    break;
+
+                case NAME:
+                    if (nameValue == null) {
+                        return false;
+                    }
+                    switch (nameValue.type) {
+                        case Name.AM_PM:
+                            if (!setAmPm(nameValue.value)) {
+                                return false;
+                            }
+                            break;
+                        case Name.MONTH_NAME:
+                            if (!setMonth(nameValue.value)) {
+                                return false;
+                            }
+                            break;
+                        case Name.TIMEZONE_ID:
+                            if (!setTimezone(nameValue.value, false)) {
+                                return false;
+                            }
+                            break;
+                        case Name.TIME_SEPARATOR:
+                            return false;
+                        default:
+                            break;
+                    }
+                    if (nameValue.type != Name.TIMEZONE_ID) {
+                        skip('-');
+                    }
+                    break;
+
+                case SIGN:
+                    if (peek() == -1) {
+                        // END after sign - wrong!
+                        return false;
+                    }
+
+                    if (!setTimezone(readTimeZoneOffset(), true)) {
+                        return false;
+                    }
+                    break;
+
+                case PARENTHESIS:
+                    if (!skipParentheses()) {
+                        return false;
+                    }
+                    break;
+
+                case SEPARATOR:
+                    break;
+
+                default:
+                    return false;
+            }
+            token = next();
+        }
+
+        return patchResult(false);
+    }
+
+    /**
+     * Get the parsed date and time fields as an array of <tt>Integers</tt>.
+     *
+     * <p>If parsing was successful, all fields are guaranteed to be set except for the
+     * <tt>TIMEZONE</tt> field which may be <tt>null</tt>, meaning that local time zone
+     * offset should be applied.</p>
+     *
+     * @return the parsed date fields
+     */
+    public Integer[] getDateFields() {
+        return fields;
+    }
+
+    private boolean isSet(final int field) {
+        return fields[field] != null;
+    }
+
+    private Integer get(final int field) {
+        return fields[field];
+    }
+
+    private void set(final int field, final int value) {
+        fields[field] = value;
+    }
+
+    private int peek() {
+        return pos < length ? string.charAt(pos) : -1;
+    }
+
+    private boolean skip(final char c) {
+        if (pos < length && string.charAt(pos) == c) {
+            token = null;
+            pos++;
+            return true;
+        }
+        return false;
+    }
+
+    private Token next() {
+        if (pos >= length) {
+            tokenLength = 0;
+            return Token.END;
+        }
+
+        final char c = string.charAt(pos);
+
+        if (c > 0x80) {
+            tokenLength = 1;
+            pos++;
+            return Token.UNKNOWN; // We only deal with ASCII here
+        }
+
+        final int type = Character.getType(c);
+        switch (type) {
+            case DECIMAL_DIGIT_NUMBER:
+                numValue = readNumber(6);
+                return Token.NUMBER;
+            case SPACE_SEPARATOR :
+            case OTHER_PUNCTUATION:
+                tokenLength = 1;
+                pos++;
+                return Token.SEPARATOR;
+            case UPPERCASE_LETTER:
+            case LOWERCASE_LETTER:
+                nameValue = readName();
+                return Token.NAME;
+            default:
+                tokenLength = 1;
+                pos++;
+                switch (c) {
+                    case '(':
+                        return Token.PARENTHESIS;
+                    case '-':
+                    case '+':
+                        numValue = c == '-' ? -1 : 1;
+                        return Token.SIGN;
+                    default:
+                        return Token.UNKNOWN;
+                }
+        }
+    }
+
+    private static boolean checkLegacyField(final int field, final int value) {
+        switch (field) {
+            case HOUR:
+                return isHour(value);
+            case MINUTE:
+            case SECOND:
+                return isMinuteOrSecond(value);
+            case MILLISECOND:
+                return isMillisecond(value);
+            default:
+                // skip validation on other legacy fields as we don't know what's what
+                return true;
+        }
+    }
+
+    private boolean checkEcmaField(final int field, final int value) {
+        switch (field) {
+            case YEAR:
+                return tokenLength == 4;
+            case MONTH:
+                return tokenLength == 2 && isMonth(value);
+            case DAY:
+                return tokenLength == 2 && isDay(value);
+            case HOUR:
+                return tokenLength == 2 && isHour(value);
+            case MINUTE:
+            case SECOND:
+                return tokenLength == 2 && isMinuteOrSecond(value);
+            case MILLISECOND:
+                // we allow the millisecond field to have up to 3 digits
+                return tokenLength < 4 && isMillisecond(value);
+            default:
+                return true;
+        }
+    }
+
+    private boolean skipEcmaDelimiter() {
+        switch (currentField) {
+            case YEAR:
+            case MONTH:
+                return skip('-') || peek() == 'T' || peek() == -1;
+            case DAY:
+                return peek() == 'T' || peek() == -1;
+            case HOUR:
+            case MINUTE:
+                return skip(':') || endOfTime();
+            case SECOND:
+                return skip('.') || endOfTime();
+            default:
+                return true;
+        }
+    }
+
+    private boolean endOfTime() {
+        final int c = peek();
+        return c == -1 || c == 'Z' || c == '-' || c == '+' || c == ' ';
+    }
+
+    private static boolean isAsciiLetter(final char ch) {
+        return ('A' <= ch && ch <= 'Z') || ('a' <= ch && ch <= 'z');
+    }
+
+    private static boolean isAsciiDigit(final char ch) {
+        return '0' <= ch && ch <= '9';
+    }
+
+    private int readNumber(final int maxDigits) {
+        final int start = pos;
+        int n = 0;
+        final int max = Math.min(length, pos + maxDigits);
+        while (pos < max && isAsciiDigit(string.charAt(pos))) {
+            n = n * 10 + string.charAt(pos++) - '0';
+        }
+        tokenLength = pos - start;
+        return n;
+    }
+
+    private Name readName() {
+        final int start = pos;
+        final int limit = Math.min(pos + 3, length);
+
+        // first read up to the key length
+        while (pos < limit && isAsciiLetter(string.charAt(pos))) {
+            pos++;
+        }
+        final String key = string.substring(start, pos).toLowerCase(Locale.ENGLISH);
+        final Name name = names.get(key);
+        // then advance to end of name
+        while (pos < length && isAsciiLetter(string.charAt(pos))) {
+            pos++;
+        }
+
+        tokenLength = pos - start;
+        // make sure we have the full name or a prefix
+        if (name != null && name.matches(string, start, tokenLength)) {
+            return name;
+        }
+        return null;
+    }
+
+    private int readTimeZoneOffset() {
+        final int sign = string.charAt(pos - 1) == '+' ? 1 : -1;
+        int offset = readNumber(2);
+        skip(':');
+        offset = offset * 60 + readNumber(2);
+        return sign * offset;
+    }
+
+    private boolean skipParentheses() {
+        int parenCount = 1;
+        while (pos < length && parenCount != 0) {
+            final char c = string.charAt(pos++);
+            if (c == '(') {
+                parenCount++;
+            } else if (c == ')') {
+                parenCount--;
+            }
+        }
+        return true;
+    }
+
+    private static int getDefaultValue(final int field) {
+        switch (field) {
+            case MONTH:
+            case DAY:
+                return 1;
+            default:
+                return 0;
+        }
+    }
+
+    private static boolean isDay(final int n) {
+        return 1 <= n && n <= 31;
+    }
+
+    private static boolean isMonth(final int n) {
+        return 1 <= n && n <= 12;
+    }
+
+    private static boolean isHour(final int n) {
+        return 0 <= n && n <= 24;
+    }
+
+    private static boolean isMinuteOrSecond(final int n) {
+        return 0 <= n && n < 60;
+    }
+
+    private static boolean isMillisecond(final int n) {
+        return 0 <= n && n < 1000;
+    }
+
+    private boolean setMonth(final int m) {
+        if (!isSet(MONTH)) {
+            namedMonth = true;
+            set(MONTH, m);
+            return true;
+        }
+        return false;
+    }
+
+    private boolean setDateField(final int n) {
+        for (int field = YEAR; field != HOUR; field++) {
+            if (!isSet(field)) {
+                // no validation on legacy date fields
+                set(field, n);
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private boolean setTimeField(final int n) {
+        for (int field = HOUR; field != TIMEZONE; field++) {
+            if (!isSet(field)) {
+                if (checkLegacyField(field, n)) {
+                    set(field, n);
+                    return true;
+                }
+                return false;
+            }
+        }
+        return false;
+    }
+
+    private boolean setTimezone(final int offset, final boolean asNumericOffset) {
+        if (!isSet(TIMEZONE) || (asNumericOffset && get(TIMEZONE) == 0)) {
+            set(TIMEZONE, offset);
+            return true;
+        }
+        return false;
+    }
+
+    private boolean setAmPm(final int offset) {
+        if (!isSet(HOUR)) {
+            return false;
+        }
+        final int hour = get(HOUR);
+        if (hour >= 0 && hour <= 12) {
+            set(HOUR, hour + offset);
+        }
+        return true;
+    }
+
+    private boolean patchResult(final boolean strict) {
+        // sanity checks - make sure we have something
+        if (!isSet(YEAR) && !isSet(HOUR)) {
+            return false;
+        }
+        if (isSet(HOUR) && !isSet(MINUTE)) {
+            return false;
+        }
+        // fill in default values for unset fields except timezone
+        for (int field = YEAR; field <= TIMEZONE; field++) {
+            if (get(field) == null) {
+                if (field == TIMEZONE && !strict) {
+                    // We only use UTC as default timezone for dates parsed complying with
+                    // the format specified in ES5 15.9.1.15. Otherwise the slot is left empty
+                    // and local timezone is used.
+                    continue;
+                }
+                final int value = getDefaultValue(field);
+                set(field, value);
+            }
+        }
+
+        if (!strict) {
+            // swap year, month, and day if it looks like the right thing to do
+            if (isDay(get(YEAR))) {
+                final int d = get(YEAR);
+                set(YEAR, get(DAY));
+                if (namedMonth) {
+                    // d-m-y
+                    set(DAY, d);
+                } else {
+                    // m-d-y
+                    final int d2 = get(MONTH);
+                    set(MONTH, d);
+                    set(DAY, d2);
+                }
+            }
+            // sanity checks now that we know what's what
+            if (!isMonth(get(MONTH)) || !isDay(get(DAY))) {
+                return false;
+            }
+
+            // add 1900 or 2000 to year if it's between 0 and 100
+            final int year = get(YEAR);
+            if (year >= 0 && year < 100) {
+                set(YEAR, year >= 50 ? 1900 + year : 2000 + year);
+            }
+        } else {
+            // 24 hour value is only allowed if all other time values are zero
+            if (get(HOUR) == 24 &&
+                    (get(MINUTE) != 0 || get(SECOND) != 0 || get(MILLISECOND) != 0)) {
+                return false;
+            }
+        }
+
+        // set month to 0-based
+        set(MONTH, get(MONTH) - 1);
+        return true;
+    }
+
+    private static void addName(final String str, final int type, final int value) {
+        final Name name = new Name(str, type, value);
+        names.put(name.key, name);
+    }
+
+    private static class Name {
+        final String name;
+        final String key;
+        final int value;
+        final int type;
+
+        final static int DAY_OF_WEEK    = -1;
+        final static int MONTH_NAME     = 0;
+        final static int AM_PM          = 1;
+        final static int TIMEZONE_ID    = 2;
+        final static int TIME_SEPARATOR = 3;
+
+        Name(final String name, final int type, final int value) {
+            assert name != null;
+            assert name.equals(name.toLowerCase(Locale.ENGLISH));
+
+            this.name = name;
+            // use first three characters as lookup key
+            this.key = name.substring(0, Math.min(3, name.length()));
+            this.type = type;
+            this.value = value;
+        }
+
+        public boolean matches(final String str, final int offset, final int len) {
+            return name.regionMatches(true, 0, str, offset, len);
+        }
+
+        @Override
+        public String toString() {
+            return name;
+        }
+    }
+
+}
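
For reference, a minimal sketch of how the new parser is driven (mirroring what NativeDate does with it; the input strings are illustrative). Note that patchResult() converts the month to 0-based before the fields are handed out:

    import jdk.nashorn.internal.parser.DateParser;

    public final class DateParserSketch {
        public static void main(final String[] args) {
            // ES5 extended ISO 8601 form; TIMEZONE defaults to 0 (GMT) when no offset is given.
            final DateParser iso = new DateParser("2013-06-10T17:04:18.000Z");
            if (iso.parse()) {
                final Integer[] f = iso.getDateFields();
                System.out.println(f[DateParser.YEAR]);      // 2013
                System.out.println(f[DateParser.MONTH]);     // 5 (0-based, i.e. June)
                System.out.println(f[DateParser.TIMEZONE]);  // 0
            }

            // Legacy form; TIMEZONE stays null, meaning the local time zone applies.
            final DateParser legacy = new DateParser("June 10, 2013 17:04:18");
            if (legacy.parse()) {
                System.out.println(legacy.getDateFields()[DateParser.TIMEZONE]);  // null
            }
        }
    }
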
--- a/nashorn/src/jdk/nashorn/internal/parser/JSONParser.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/parser/JSONParser.java	Wed Jul 05 18:59:05 2017 +0200
@@ -282,7 +282,7 @@
         next();
 
         // Prepare to accumulate elements.
-        final List<Node> elements = new ArrayList<>();
+        final List<PropertyNode> elements = new ArrayList<>();
 
         // Create a block for the object literal.
 loop:
@@ -298,7 +298,7 @@
 
             default:
                 // Get and add the next property.
-                final Node property = propertyAssignment();
+                final PropertyNode property = propertyAssignment();
                 elements.add(property);
 
                 // Comma between property assigments is mandatory in JSON.
@@ -317,7 +317,7 @@
      * Parse a property assignment from the token stream
      * @return the property assignment as a Node
      */
-    private Node propertyAssignment() {
+    private PropertyNode propertyAssignment() {
         // Capture firstToken.
         final long propertyToken = token;
         LiteralNode<?> name = null;
--- a/nashorn/src/jdk/nashorn/internal/parser/Parser.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/parser/Parser.java	Wed Jul 05 18:59:05 2017 +0200
@@ -59,7 +59,6 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-
 import jdk.nashorn.internal.codegen.CompilerConstants;
 import jdk.nashorn.internal.codegen.Namespace;
 import jdk.nashorn.internal.ir.AccessNode;
@@ -192,36 +191,110 @@
             // Begin parse.
             return program(scriptName);
         } catch (final Exception e) {
-            // Extract message from exception.  The message will be in error
-            // message format.
-            String message = e.getMessage();
-
-            // If empty message.
-            if (message == null) {
-                message = e.toString();
-            }
-
-            // Issue message.
-            if (e instanceof ParserException) {
-                errors.error((ParserException)e);
-            } else {
-                errors.error(message);
-            }
-
-            if (env._dump_on_error) {
-                e.printStackTrace(env.getErr());
-            }
+            handleParseException(e);
 
             return null;
-         } finally {
-             final String end = this + " end '" + scriptName + "'";
-             if (Timing.isEnabled()) {
-                 Timing.accumulateTime(toString(), System.currentTimeMillis() - t0);
-                 LOG.info(end, "' in ", (System.currentTimeMillis() - t0), " ms");
-             } else {
-                 LOG.info(end);
-             }
-         }
+        } finally {
+            final String end = this + " end '" + scriptName + "'";
+            if (Timing.isEnabled()) {
+                Timing.accumulateTime(toString(), System.currentTimeMillis() - t0);
+                LOG.info(end, "' in ", (System.currentTimeMillis() - t0), " ms");
+            } else {
+                LOG.info(end);
+            }
+        }
+    }
+
+    /**
+     * Parse and return the list of function parameters. A comma-separated list
+     * of function parameter identifiers is expected to be parsed.
+     * Errors will be thrown and the error manager will contain information
+     * if parsing should fail. This method is used to check whether the parameter
+     * Strings passed to the "Function" constructor form a valid parameter list or not.
+     *
+     * @return the list of IdentNodes representing the formal parameter list
+     */
+    public List<IdentNode> parseFormalParameterList() {
+        try {
+            stream = new TokenStream();
+            lexer  = new Lexer(source, stream, scripting && !env._no_syntax_extensions);
+
+            // Set up first token (skips opening EOL.)
+            k = -1;
+            next();
+
+            return formalParameterList(TokenType.EOF);
+        } catch (final Exception e) {
+            handleParseException(e);
+            return null;
+        }
+    }
+
+    /**
+     * Execute parse and return the resulting function node.
+     * Errors will be thrown and the error manager will contain information
+     * if parsing should fail. This method is used to check whether the code String
+     * passed to the "Function" constructor is a valid function body or not.
+     *
+     * @return function node resulting from successful parse
+     */
+    public FunctionNode parseFunctionBody() {
+        try {
+            stream = new TokenStream();
+            lexer  = new Lexer(source, stream, scripting && !env._no_syntax_extensions);
+
+            // Set up first token (skips opening EOL.)
+            k = -1;
+            next();
+
+            // Make a fake token for the function.
+            final long functionToken = Token.toDesc(FUNCTION, 0, source.getLength());
+            // Set up the function to append elements.
+
+            FunctionNode function = newFunctionNode(
+                functionToken,
+                new IdentNode(functionToken, Token.descPosition(functionToken), RUN_SCRIPT.symbolName()),
+                new ArrayList<IdentNode>(),
+                FunctionNode.Kind.NORMAL);
+
+            functionDeclarations = new ArrayList<>();
+            sourceElements();
+            addFunctionDeclarations(function);
+            functionDeclarations = null;
+
+            expect(EOF);
+
+            function.setFinish(source.getLength() - 1);
+
+            function = restoreFunctionNode(function, token); //commit code
+            function = function.setBody(lc, function.getBody().setNeedsScope(lc));
+            return function;
+        } catch (final Exception e) {
+            handleParseException(e);
+            return null;
+        }
+    }
+
+    private void handleParseException(final Exception e) {
+        // Extract message from exception.  The message will be in error
+        // message format.
+        String message = e.getMessage();
+
+        // If empty message.
+        if (message == null) {
+            message = e.toString();
+        }
+
+        // Issue message.
+        if (e instanceof ParserException) {
+            errors.error((ParserException)e);
+        } else {
+            errors.error(message);
+        }
+
+        if (env._dump_on_error) {
+            e.printStackTrace(env.getErr());
+        }
     }
 
     /**
@@ -1954,7 +2027,7 @@
             }
         }
 
-        return new ObjectNode(objectToken, finish, new ArrayList<Node>(map.values()));
+        return new ObjectNode(objectToken, finish, new ArrayList<>(map.values()));
     }
 
     /**
@@ -2424,12 +2497,29 @@
      * @return List of parameter nodes.
      */
     private List<IdentNode> formalParameterList() {
+        return formalParameterList(RPAREN);
+    }
+
+    /**
+     * Same as the other method of the same name - except that the end
+     * token type expected is passed as argument to this method.
+     *
+     * FormalParameterList :
+     *      Identifier
+     *      FormalParameterList , Identifier
+     *
+     * See 13
+     *
+     * Parse function parameter list.
+     * @return List of parameter nodes.
+     */
+    private List<IdentNode> formalParameterList(final TokenType endType) {
         // Prepare to gather parameters.
         final List<IdentNode> parameters = new ArrayList<>();
         // Track commas.
         boolean first = true;
 
-        while (type != RPAREN) {
+        while (type != endType) {
             // Comma prior to every argument except the first.
             if (!first) {
                 expect(COMMARIGHT);
--- a/nashorn/src/jdk/nashorn/internal/runtime/Context.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/Context.java	Wed Jul 05 18:59:05 2017 +0200
@@ -48,6 +48,7 @@
 import java.security.ProtectionDomain;
 import jdk.internal.org.objectweb.asm.ClassReader;
 import jdk.internal.org.objectweb.asm.util.CheckClassAdapter;
+import jdk.nashorn.api.scripting.ScriptObjectMirror;
 import jdk.nashorn.internal.codegen.Compiler;
 import jdk.nashorn.internal.codegen.ObjectClassGenerator;
 import jdk.nashorn.internal.ir.FunctionNode;
@@ -491,6 +492,40 @@
     }
 
     /**
+     * Implementation of {@code loadWithNewGlobal} Nashorn extension. Load a script file from a source
+     * expression, after creating a new global scope.
+     *
+     * @param from source expression for script
+     *
+     * @return return value for load call (undefined)
+     *
+     * @throws IOException if source cannot be found or loaded
+     */
+    public Object loadWithNewGlobal(final Object from) throws IOException {
+        final ScriptObject oldGlobal = getGlobalTrusted();
+        final ScriptObject newGlobal = AccessController.doPrivileged(new PrivilegedAction<ScriptObject>() {
+           @Override
+           public ScriptObject run() {
+               try {
+                   return createGlobal();
+               } catch (final RuntimeException e) {
+                   if (Context.DEBUG) {
+                       e.printStackTrace();
+                   }
+                   throw e;
+               }
+           }
+        });
+        setGlobalTrusted(newGlobal);
+
+        try {
+            return ScriptObjectMirror.wrap(load(newGlobal, from), newGlobal);
+        } finally {
+            setGlobalTrusted(oldGlobal);
+        }
+    }
+
+    /**
      * Load or get a structure class. Structure class names are based on the number of parameter fields
      * and {@link AccessorProperty} fields in them. Structure classes are used to represent ScriptObjects
      *
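
Context.loadWithNewGlobal above follows a save/swap/restore pattern: install a freshly created global, evaluate the source against it, and put the caller's global back in a finally block. A standalone sketch of that pattern with a plain map standing in for the global scope (illustrative names, not Nashorn API):

    import java.util.HashMap;
    import java.util.Map;

    public final class SwapGlobalSketch {
        private static Map<String, Object> currentGlobal = new HashMap<>();

        static Object loadWithNewGlobal(final String script) {
            final Map<String, Object> oldGlobal = currentGlobal;
            currentGlobal = new HashMap<>();   // fresh global the loaded script runs against
            try {
                currentGlobal.put("x", 2);     // stand-in for evaluating the loaded script
                return null;                   // load() returns undefined
            } finally {
                currentGlobal = oldGlobal;     // caller's global is always restored
            }
        }

        public static void main(final String[] args) {
            currentGlobal.put("x", 1);
            loadWithNewGlobal("var x = 2;");
            System.out.println(currentGlobal.get("x"));   // prints 1: caller's scope untouched
        }
    }
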
--- a/nashorn/src/jdk/nashorn/internal/runtime/JSONFunctions.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/JSONFunctions.java	Wed Jul 05 18:59:05 2017 +0200
@@ -25,9 +25,11 @@
 
 package jdk.nashorn.internal.runtime;
 
+import static jdk.nashorn.internal.runtime.arrays.ArrayIndex.getArrayIndexNoThrow;
+import static jdk.nashorn.internal.runtime.arrays.ArrayIndex.isValidArrayIndex;
+
 import java.lang.invoke.MethodHandle;
 import java.util.Iterator;
-import java.util.List;
 import jdk.nashorn.internal.ir.LiteralNode;
 import jdk.nashorn.internal.ir.Node;
 import jdk.nashorn.internal.ir.ObjectNode;
@@ -36,8 +38,6 @@
 import jdk.nashorn.internal.parser.JSONParser;
 import jdk.nashorn.internal.parser.TokenType;
 import jdk.nashorn.internal.runtime.linker.Bootstrap;
-import static jdk.nashorn.internal.runtime.arrays.ArrayIndex.getArrayIndexNoThrow;
-import static jdk.nashorn.internal.runtime.arrays.ArrayIndex.isValidArrayIndex;
 
 /**
  * Utilities used by "JSON" object implementation.
@@ -171,10 +171,8 @@
             final ObjectNode   objNode  = (ObjectNode) node;
             final ScriptObject object   = ((GlobalObject)global).newObject();
             final boolean      strict   = global.isStrictContext();
-            final List<Node>   elements = objNode.getElements();
 
-            for (final Node elem : elements) {
-                final PropertyNode pNode     = (PropertyNode) elem;
+            for (final PropertyNode pNode: objNode.getElements()) {
                 final Node         valueNode = pNode.getValue();
 
                 final String name = pNode.getKeyName();
--- a/nashorn/src/jdk/nashorn/internal/runtime/ListAdapter.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/ListAdapter.java	Wed Jul 05 18:59:05 2017 +0200
@@ -15,10 +15,11 @@
  * as dequeues, it's still slightly more efficient to be able to translate dequeue operations into pushes, pops, shifts,
  * and unshifts, than to blindly translate all list's add/remove operations into splices. Also, it is conceivable that a
  * custom script object that implements an Array-like API can have a background data representation that is optimized
- * for dequeue-like access. Note that with ECMAScript arrays, {@code push} and {@pop} operate at the end of the array,
- * while in Java {@code Deque} they operate on the front of the queue and as such the Java dequeue {@link #push(Object)}
- * and {@link #pop()} operations will translate to {@code unshift} and {@code shift} script operations respectively,
- * while {@link #addLast(Object)} and {@link #removeLast()} will translate to {@code push} and {@code pop}.
+ * for dequeue-like access. Note that with ECMAScript arrays, {@code push} and {@code pop} operate at the end of the
+ * array, while in Java {@code Deque} they operate on the front of the queue and as such the Java {@code Deque}
+ * {@link #push(Object)} and {@link #pop()} operations will translate to {@code unshift} and {@code shift} script
+ * operations respectively, while {@link #addLast(Object)} and {@link #removeLast()} will translate to {@code push} and
+ * {@code pop}.
  */
 public class ListAdapter extends AbstractList<Object> implements RandomAccess, Deque<Object> {
     // These add to the back and front of the list
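
The reworded javadoc describes how java.util.Deque's head-oriented push and pop map onto the script
unshift and shift operations, while addLast and removeLast map onto script push and pop. A self-contained
sketch of those Deque semantics using a plain java.util.ArrayDeque (no Nashorn types involved):

    import java.util.ArrayDeque;
    import java.util.Deque;

    public class DequeSemanticsSketch {
        public static void main(String[] args) {
            final Deque<String> deque = new ArrayDeque<>();
            deque.addLast("a");                     // appends at the tail, like script push
            deque.addLast("b");
            deque.push("front");                    // Deque.push works at the head, like script unshift
            System.out.println(deque);              // [front, a, b]
            System.out.println(deque.pop());        // "front" -- head removal, like script shift
            System.out.println(deque.removeLast()); // "b"     -- tail removal, like script pop
        }
    }
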
--- a/nashorn/src/jdk/nashorn/internal/runtime/ScriptObject.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/ScriptObject.java	Wed Jul 05 18:59:05 2017 +0200
@@ -1512,6 +1512,17 @@
     }
 
     /**
+     * Delete a property from the ScriptObject.
+     * (to help ScriptObjectMirror implementation)
+     *
+     * @param key the key of the property
+     * @return if the delete was successful or not
+     */
+    public boolean delete(final Object key) {
+        return delete(key, getContext()._strict);
+    }
+
+    /**
      * Return the size of the ScriptObject - i.e. the number of properties
      * it contains
      * (java.util.Map-like method to help ScriptObjectMirror implementation)
--- a/nashorn/src/jdk/nashorn/internal/runtime/ScriptRuntime.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/ScriptRuntime.java	Wed Jul 05 18:59:05 2017 +0200
@@ -40,6 +40,7 @@
 import java.util.NoSuchElementException;
 import java.util.Objects;
 import jdk.internal.dynalink.beans.StaticClass;
+import jdk.nashorn.api.scripting.ScriptObjectMirror;
 import jdk.nashorn.internal.codegen.CompilerConstants.Call;
 import jdk.nashorn.internal.ir.debug.JSONWriter;
 import jdk.nashorn.internal.parser.Lexer;
@@ -240,6 +241,10 @@
             };
         }
 
+        if (obj instanceof ScriptObjectMirror) {
+            return ((ScriptObjectMirror)obj).keySet().iterator();
+        }
+
         return Collections.emptyIterator();
     }
 
@@ -280,6 +285,10 @@
             };
         }
 
+        if (obj instanceof ScriptObjectMirror) {
+            return ((ScriptObjectMirror)obj).values().iterator();
+        }
+
         if (obj instanceof Iterable) {
             return ((Iterable<?>)obj).iterator();
         }
@@ -591,6 +600,10 @@
             throw typeError("cant.delete.property", safeToString(property), "null");
         }
 
+        if (obj instanceof ScriptObjectMirror) {
+            return ((ScriptObjectMirror)obj).delete(property);
+        }
+
         if (JSType.isPrimitive(obj)) {
             return ((ScriptObject) JSType.toScriptObject(obj)).delete(property, Boolean.TRUE.equals(strict));
         }
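
With these ScriptRuntime changes, for-in, for-each-in and the delete operator also work on
ScriptObjectMirror instances, i.e. script objects that have crossed an engine boundary. A minimal sketch
from the Java side, using only the ScriptObjectMirror calls the patch itself relies on (keySet, values,
delete); the object literal is illustrative:

    import javax.script.ScriptEngine;
    import javax.script.ScriptEngineManager;
    import jdk.nashorn.api.scripting.ScriptObjectMirror;

    public class MirrorIterationSketch {
        public static void main(String[] args) throws Exception {
            final ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");
            final ScriptObjectMirror obj = (ScriptObjectMirror) engine.eval("({ a: 1, b: 2, c: 3 })");

            for (final String key : obj.keySet()) {    // what for (k in mirror) now iterates over
                System.out.println(key + " = " + obj.get(key));
            }
            for (final Object value : obj.values()) {  // what for each (v in mirror) now iterates over
                System.out.println(value);
            }
            obj.delete("a");                           // what delete mirror.a now forwards to
            System.out.println(obj.containsKey("a"));  // false
        }
    }
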
--- a/nashorn/src/jdk/nashorn/internal/runtime/ScriptingFunctions.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/ScriptingFunctions.java	Wed Jul 05 18:59:05 2017 +0200
@@ -32,9 +32,8 @@
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import java.lang.invoke.MethodHandle;
 import java.lang.invoke.MethodHandles;
 import java.util.Map;
@@ -165,36 +164,61 @@
 
         // Start the process.
         final Process process = processBuilder.start();
+        final IOException[] exception = new IOException[2];
+
+        // Collect output.
+        final StringBuilder outBuffer = new StringBuilder();
+        Thread outThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                char[] buffer = new char[1024];
+                try (final InputStreamReader inputStream = new InputStreamReader(process.getInputStream())) {
+                    for (int length; (length = inputStream.read(buffer, 0, buffer.length)) != -1; ) {
+                        outBuffer.append(buffer, 0, length);
+                    }
+                } catch (IOException ex) {
+                    exception[0] = ex;
+                }
+            }
+        }, "$EXEC output");
+
+        // Collect errors.
+        final StringBuilder errBuffer = new StringBuilder();
+        Thread errThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                char[] buffer = new char[1024];
+                try (final InputStreamReader inputStream = new InputStreamReader(process.getErrorStream())) {
+                    for (int length; (length = inputStream.read(buffer, 0, buffer.length)) != -1; ) {
+                        errBuffer.append(buffer, 0, length);
+                    }
+                } catch (IOException ex) {
+                    exception[1] = ex;
+                }
+            }
+        }, "$EXEC error");
+
+        // Start gathering output.
+        outThread.start();
+        errThread.start();
 
         // If input is present, pass on to process.
-        try (OutputStream outputStream = process.getOutputStream()) {
+        try (OutputStreamWriter outputStream = new OutputStreamWriter(process.getOutputStream())) {
             if (input != UNDEFINED) {
-                outputStream.write(JSType.toString(input).getBytes());
+                String in = JSType.toString(input);
+                outputStream.write(in, 0, in.length());
             }
+        } catch (IOException ex) {
+            // Process was not expecting input.  May be normal state of affairs.
         }
 
         // Wait for the process to complete.
         final int exit = process.waitFor();
+        outThread.join();
+        errThread.join();
 
-        // Collect output.
-        String out;
-         try (InputStream inputStream = process.getInputStream()) {
-            final StringBuilder outBuffer = new StringBuilder();
-            for (int ch; (ch = inputStream.read()) != -1; ) {
-                outBuffer.append((char)ch);
-            }
-            out = outBuffer.toString();
-        }
-
-        // Collect errors.
-        String err;
-        try (InputStream errorStream = process.getErrorStream()) {
-            final StringBuilder errBuffer = new StringBuilder();
-            for (int ch; (ch = errorStream.read()) != -1; ) {
-                errBuffer.append((char)ch);
-            }
-            err = errBuffer.toString();
-        }
+        final String out = outBuffer.toString();
+        final String err = errBuffer.toString();
 
         // Set globals for secondary results.
         final boolean isStrict = global.isStrictContext();
@@ -202,6 +226,13 @@
         global.set(ERR_NAME, err, isStrict);
         global.set(EXIT_NAME, exit, isStrict);
 
+        // Propagate exception if present.
+        for (int i = 0; i < exception.length; i++) {
+            if (exception[i] != null) {
+                throw exception[i];
+            }
+        }
+
         // Return the result from stdout.
         return out;
     }
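
The rewritten $EXEC drains stdout and stderr on dedicated threads while the parent writes stdin, avoiding
the classic deadlock where the child blocks on a full pipe that nobody is reading; any IOException raised
by a gobbler thread is recorded and rethrown after waitFor. A stripped-down sketch of that pattern in
plain Java (the "cat" command and the input text are illustrative only):

    import java.io.InputStreamReader;
    import java.io.OutputStreamWriter;
    import java.io.Reader;
    import java.io.Writer;

    public class ExecSketch {
        public static void main(String[] args) throws Exception {
            final Process process = new ProcessBuilder("cat").start();
            final StringBuilder out = new StringBuilder();
            final StringBuilder err = new StringBuilder();

            final Thread outThread = drain(new InputStreamReader(process.getInputStream()), out);
            final Thread errThread = drain(new InputStreamReader(process.getErrorStream()), err);
            outThread.start();
            errThread.start();

            try (Writer stdin = new OutputStreamWriter(process.getOutputStream())) {
                stdin.write("hello\n");              // forwarded to the child's stdin
            }

            final int exit = process.waitFor();      // wait for the child, then for both gobblers
            outThread.join();
            errThread.join();
            System.out.println("exit=" + exit + " out=" + out + " err=" + err);
        }

        // Copy everything the reader produces into the given buffer on a separate thread.
        private static Thread drain(final Reader reader, final StringBuilder sink) {
            return new Thread(() -> {
                final char[] buffer = new char[1024];
                try (Reader r = reader) {
                    for (int n; (n = r.read(buffer)) != -1; ) {
                        sink.append(buffer, 0, n);
                    }
                } catch (final Exception e) {
                    // ignored in this sketch; the real code stores the IOException and rethrows it
                }
            });
        }
    }
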
--- a/nashorn/src/jdk/nashorn/internal/runtime/arrays/ArrayIterator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/arrays/ArrayIterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -36,7 +36,7 @@
     protected final ScriptObject array;
 
     /** length of array */
-    protected final int length;
+    protected final long length;
 
     /**
      * Constructor
@@ -46,7 +46,7 @@
     protected ArrayIterator(final ScriptObject array, final boolean includeUndefined) {
         super(includeUndefined);
         this.array = array;
-        this.length = (int) array.getArray().length();
+        this.length = array.getArray().length();
     }
 
     /**
@@ -63,7 +63,7 @@
     }
 
     @Override
-    public int getLength() {
+    public long getLength() {
         return length;
     }
 
--- a/nashorn/src/jdk/nashorn/internal/runtime/arrays/ArrayLikeIterator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/arrays/ArrayLikeIterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -38,7 +38,7 @@
 abstract public class ArrayLikeIterator<T> implements Iterator<T> {
 
     /** current element index in iteration */
-    protected int index;
+    protected long index;
 
     /** should undefined elements be included in the iteration? */
     protected final boolean includeUndefined;
@@ -65,7 +65,7 @@
      * Go the the next valid element index of the iterator
      * @return next index
      */
-    protected int bumpIndex() {
+    protected long bumpIndex() {
         return index++;
     }
 
@@ -73,7 +73,7 @@
      * Return the next valid element index of the iterator
      * @return next index
      */
-    public int nextIndex() {
+    public long nextIndex() {
         return index;
     }
 
@@ -86,7 +86,7 @@
      * Get the length of the iteration
      * @return length
      */
-    public abstract int getLength();
+    public abstract long getLength();
 
     /**
      * ArrayLikeIterator factory
--- a/nashorn/src/jdk/nashorn/internal/runtime/arrays/EmptyArrayLikeIterator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/arrays/EmptyArrayLikeIterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -47,7 +47,7 @@
     }
 
     @Override
-    public int getLength() {
+    public long getLength() {
         return 0;
     }
 }
--- a/nashorn/src/jdk/nashorn/internal/runtime/arrays/IteratorAction.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/arrays/IteratorAction.java	Wed Jul 05 18:59:05 2017 +0200
@@ -49,7 +49,7 @@
     protected T result;
 
     /** Current array index of iterator */
-    protected int index;
+    protected long index;
 
     /** Iterator object */
     private final ArrayLikeIterator<Object> iter;
@@ -134,6 +134,6 @@
      *
      * @throws Throwable if invocation throws an exception/error
      */
-    protected abstract boolean forEach(final Object val, final int i) throws Throwable;
+    protected abstract boolean forEach(final Object val, final long i) throws Throwable;
 
 }
--- a/nashorn/src/jdk/nashorn/internal/runtime/arrays/MapIterator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/arrays/MapIterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -49,8 +49,8 @@
     }
 
     @Override
-    public int getLength() {
-        return (int) length;
+    public long getLength() {
+        return length;
     }
 
     @Override
--- a/nashorn/src/jdk/nashorn/internal/runtime/arrays/ReverseArrayIterator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/arrays/ReverseArrayIterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -39,7 +39,7 @@
      */
     public ReverseArrayIterator(final ScriptObject array, final boolean includeUndefined) {
         super(array, includeUndefined);
-        this.index = (int) (array.getArray().length() - 1);
+        this.index = array.getArray().length() - 1;
     }
 
     @Override
@@ -53,7 +53,7 @@
     }
 
     @Override
-    protected int bumpIndex() {
+    protected long bumpIndex() {
         return index--;
     }
 }
--- a/nashorn/src/jdk/nashorn/internal/runtime/arrays/ReverseMapIterator.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/arrays/ReverseMapIterator.java	Wed Jul 05 18:59:05 2017 +0200
@@ -35,7 +35,7 @@
 
     ReverseMapIterator(final ScriptObject obj, final boolean includeUndefined) {
         super(obj, includeUndefined);
-        this.index = JSType.toInt32(obj.getLength()) - 1;
+        this.index = JSType.toUint32(obj.getLength()) - 1;
     }
 
     @Override
@@ -49,7 +49,7 @@
     }
 
     @Override
-    protected int bumpIndex() {
+    protected long bumpIndex() {
         return index--;
     }
 }
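
The iterator index and length move from int to long because ECMAScript array lengths range up to
2^32 - 1, which does not fit in a signed int; ReverseMapIterator accordingly switches from JSType.toInt32
to toUint32 when computing its starting index. A tiny arithmetic sketch of the overflow the int-based
code ran into (plain Java, no Nashorn types):

    public class Uint32LengthSketch {
        public static void main(String[] args) {
            final long length = 0xFFFFFFFFL;       // 4294967295, the maximum ECMAScript array length
            System.out.println((int) length - 1);  // -2: the old int-based reverse index went negative
            System.out.println(length - 1);        // 4294967294: where the long-based iterator now starts
        }
    }
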
--- a/nashorn/src/jdk/nashorn/internal/runtime/regexp/joni/Parser.java	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/regexp/joni/Parser.java	Wed Jul 05 18:59:05 2017 +0200
@@ -23,23 +23,23 @@
 import static jdk.nashorn.internal.runtime.regexp.joni.Option.isDontCaptureGroup;
 import static jdk.nashorn.internal.runtime.regexp.joni.Option.isIgnoreCase;
 
-import jdk.nashorn.internal.runtime.regexp.joni.encoding.CharacterType;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.AnchorNode;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.AnyCharNode;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.BackRefNode;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.CClassNode;
+import jdk.nashorn.internal.runtime.regexp.joni.ast.CClassNode.CCStateArg;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.ConsAltNode;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.EncloseNode;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.Node;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.QuantifierNode;
 import jdk.nashorn.internal.runtime.regexp.joni.ast.StringNode;
-import jdk.nashorn.internal.runtime.regexp.joni.ast.CClassNode.CCStateArg;
 import jdk.nashorn.internal.runtime.regexp.joni.constants.AnchorType;
 import jdk.nashorn.internal.runtime.regexp.joni.constants.CCSTATE;
 import jdk.nashorn.internal.runtime.regexp.joni.constants.CCVALTYPE;
 import jdk.nashorn.internal.runtime.regexp.joni.constants.EncloseType;
 import jdk.nashorn.internal.runtime.regexp.joni.constants.NodeType;
 import jdk.nashorn.internal.runtime.regexp.joni.constants.TokenType;
+import jdk.nashorn.internal.runtime.regexp.joni.encoding.CharacterType;
 
 class Parser extends Lexer {
 
--- a/nashorn/src/jdk/nashorn/internal/runtime/resources/fx/controls.js	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/resources/fx/controls.js	Wed Jul 05 18:59:05 2017 +0200
@@ -70,7 +70,7 @@
 CheckBoxTreeCell                           = Java.type("javafx.scene.control.cell.CheckBoxTreeCell");
 CheckBoxTreeCellBuilder                    = Java.type("javafx.scene.control.cell.CheckBoxTreeCellBuilder");
 CheckBoxTreeTableCell                      = Java.type("javafx.scene.control.cell.CheckBoxTreeTableCell");
-CheckBoxTreeTableCellBuilder               = Java.type("javafx.scene.control.cell.CheckBoxTreeTableCellBuilder");
+//CheckBoxTreeTableCellBuilder               = Java.type("javafx.scene.control.cell.CheckBoxTreeTableCellBuilder");
 ChoiceBoxListCell                          = Java.type("javafx.scene.control.cell.ChoiceBoxListCell");
 ChoiceBoxListCellBuilder                   = Java.type("javafx.scene.control.cell.ChoiceBoxListCellBuilder");
 ChoiceBoxTableCell                         = Java.type("javafx.scene.control.cell.ChoiceBoxTableCell");
@@ -78,7 +78,7 @@
 ChoiceBoxTreeCell                          = Java.type("javafx.scene.control.cell.ChoiceBoxTreeCell");
 ChoiceBoxTreeCellBuilder                   = Java.type("javafx.scene.control.cell.ChoiceBoxTreeCellBuilder");
 ChoiceBoxTreeTableCell                     = Java.type("javafx.scene.control.cell.ChoiceBoxTreeTableCell");
-ChoiceBoxTreeTableCellBuilder              = Java.type("javafx.scene.control.cell.ChoiceBoxTreeTableCellBuilder");
+//ChoiceBoxTreeTableCellBuilder              = Java.type("javafx.scene.control.cell.ChoiceBoxTreeTableCellBuilder");
 ComboBoxListCell                           = Java.type("javafx.scene.control.cell.ComboBoxListCell");
 ComboBoxListCellBuilder                    = Java.type("javafx.scene.control.cell.ComboBoxListCellBuilder");
 ComboBoxTableCell                          = Java.type("javafx.scene.control.cell.ComboBoxTableCell");
@@ -86,7 +86,7 @@
 ComboBoxTreeCell                           = Java.type("javafx.scene.control.cell.ComboBoxTreeCell");
 ComboBoxTreeCellBuilder                    = Java.type("javafx.scene.control.cell.ComboBoxTreeCellBuilder");
 ComboBoxTreeTableCell                      = Java.type("javafx.scene.control.cell.ComboBoxTreeTableCell");
-ComboBoxTreeTableCellBuilder               = Java.type("javafx.scene.control.cell.ComboBoxTreeTableCellBuilder");
+//ComboBoxTreeTableCellBuilder               = Java.type("javafx.scene.control.cell.ComboBoxTreeTableCellBuilder");
 MapValueFactory                            = Java.type("javafx.scene.control.cell.MapValueFactory");
 ProgressBarTableCell                       = Java.type("javafx.scene.control.cell.ProgressBarTableCell");
 ProgressBarTreeTableCell                   = Java.type("javafx.scene.control.cell.ProgressBarTreeTableCell");
@@ -99,9 +99,9 @@
 TextFieldTreeCell                          = Java.type("javafx.scene.control.cell.TextFieldTreeCell");
 TextFieldTreeCellBuilder                   = Java.type("javafx.scene.control.cell.TextFieldTreeCellBuilder");
 TextFieldTreeTableCell                     = Java.type("javafx.scene.control.cell.TextFieldTreeTableCell");
-TextFieldTreeTableCellBuilder              = Java.type("javafx.scene.control.cell.TextFieldTreeTableCellBuilder");
+//TextFieldTreeTableCellBuilder              = Java.type("javafx.scene.control.cell.TextFieldTreeTableCellBuilder");
 TreeItemPropertyValueFactory               = Java.type("javafx.scene.control.cell.TreeItemPropertyValueFactory");
-TreeItemPropertyValueFactoryBuilder        = Java.type("javafx.scene.control.cell.TreeItemPropertyValueFactoryBuilder");
+//TreeItemPropertyValueFactoryBuilder        = Java.type("javafx.scene.control.cell.TreeItemPropertyValueFactoryBuilder");
 CellBuilder                                = Java.type("javafx.scene.control.CellBuilder");
 CheckBox                                   = Java.type("javafx.scene.control.CheckBox");
 CheckBoxBuilder                            = Java.type("javafx.scene.control.CheckBoxBuilder");
@@ -167,7 +167,7 @@
 RadioMenuItem                              = Java.type("javafx.scene.control.RadioMenuItem");
 RadioMenuItemBuilder                       = Java.type("javafx.scene.control.RadioMenuItemBuilder");
 ResizeFeaturesBase                         = Java.type("javafx.scene.control.ResizeFeaturesBase");
-ResizeFeaturesBaseBuilder                  = Java.type("javafx.scene.control.ResizeFeaturesBaseBuilder");
+//ResizeFeaturesBaseBuilder                  = Java.type("javafx.scene.control.ResizeFeaturesBaseBuilder");
 ScrollBar                                  = Java.type("javafx.scene.control.ScrollBar");
 ScrollBarBuilder                           = Java.type("javafx.scene.control.ScrollBarBuilder");
 ScrollPane                                 = Java.type("javafx.scene.control.ScrollPane");
@@ -183,7 +183,7 @@
 SingleSelectionModel                       = Java.type("javafx.scene.control.SingleSelectionModel");
 Skin                                       = Java.type("javafx.scene.control.Skin");
 SkinBase                                   = Java.type("javafx.scene.control.SkinBase");
-SkinBaseBuilder                            = Java.type("javafx.scene.control.SkinBaseBuilder");
+//SkinBaseBuilder                            = Java.type("javafx.scene.control.SkinBaseBuilder");
 Skinnable                                  = Java.type("javafx.scene.control.Skinnable");
 Slider                                     = Java.type("javafx.scene.control.Slider");
 SliderBuilder                              = Java.type("javafx.scene.control.SliderBuilder");
@@ -202,7 +202,7 @@
 TableColumn$CellEditEvent                  = Java.type("javafx.scene.control.TableColumn$CellEditEvent");
 TableColumn$SortType                       = Java.type("javafx.scene.control.TableColumn$SortType");
 TableColumnBase                            = Java.type("javafx.scene.control.TableColumnBase");
-TableColumnBaseBuilder                     = Java.type("javafx.scene.control.TableColumnBaseBuilder");
+//TableColumnBaseBuilder                     = Java.type("javafx.scene.control.TableColumnBaseBuilder");
 TableColumnBuilder                         = Java.type("javafx.scene.control.TableColumnBuilder");
 TableFocusModel                            = Java.type("javafx.scene.control.TableFocusModel");
 TablePosition                              = Java.type("javafx.scene.control.TablePosition");
@@ -210,7 +210,7 @@
 TableRow                                   = Java.type("javafx.scene.control.TableRow");
 TableRowBuilder                            = Java.type("javafx.scene.control.TableRowBuilder");
 TableSelectionModel                        = Java.type("javafx.scene.control.TableSelectionModel");
-TableSelectionModelBuilder                 = Java.type("javafx.scene.control.TableSelectionModelBuilder");
+//TableSelectionModelBuilder                 = Java.type("javafx.scene.control.TableSelectionModelBuilder");
 TableView                                  = Java.type("javafx.scene.control.TableView");
 TableView$ResizeFeatures                   = Java.type("javafx.scene.control.TableView$ResizeFeatures");
 TableView$TableViewFocusModel              = Java.type("javafx.scene.control.TableView$TableViewFocusModel");
@@ -244,21 +244,21 @@
 TreeItemBuilder                            = Java.type("javafx.scene.control.TreeItemBuilder");
 TreeSortMode                               = Java.type("javafx.scene.control.TreeSortMode");
 TreeTableCell                              = Java.type("javafx.scene.control.TreeTableCell");
-TreeTableCellBuilder                       = Java.type("javafx.scene.control.TreeTableCellBuilder");
+//TreeTableCellBuilder                       = Java.type("javafx.scene.control.TreeTableCellBuilder");
 TreeTableColumn                            = Java.type("javafx.scene.control.TreeTableColumn");
 TreeTableColumn$CellDataFeatures           = Java.type("javafx.scene.control.TreeTableColumn$CellDataFeatures");
 TreeTableColumn$CellEditEvent              = Java.type("javafx.scene.control.TreeTableColumn$CellEditEvent");
 TreeTableColumn$SortType                   = Java.type("javafx.scene.control.TreeTableColumn$SortType");
-TreeTableColumnBuilder                     = Java.type("javafx.scene.control.TreeTableColumnBuilder");
+//TreeTableColumnBuilder                     = Java.type("javafx.scene.control.TreeTableColumnBuilder");
 TreeTablePosition                          = Java.type("javafx.scene.control.TreeTablePosition");
 TreeTableRow                               = Java.type("javafx.scene.control.TreeTableRow");
-TreeTableRowBuilder                        = Java.type("javafx.scene.control.TreeTableRowBuilder");
+//TreeTableRowBuilder                        = Java.type("javafx.scene.control.TreeTableRowBuilder");
 TreeTableView                              = Java.type("javafx.scene.control.TreeTableView");
 TreeTableView$EditEvent                    = Java.type("javafx.scene.control.TreeTableView$EditEvent");
 TreeTableView$ResizeFeatures               = Java.type("javafx.scene.control.TreeTableView$ResizeFeatures");
 TreeTableView$TreeTableViewFocusModel      = Java.type("javafx.scene.control.TreeTableView$TreeTableViewFocusModel");
 TreeTableView$TreeTableViewSelectionModel  = Java.type("javafx.scene.control.TreeTableView$TreeTableViewSelectionModel");
-TreeTableViewBuilder                       = Java.type("javafx.scene.control.TreeTableViewBuilder");
+//TreeTableViewBuilder                       = Java.type("javafx.scene.control.TreeTableViewBuilder");
 TreeView                                   = Java.type("javafx.scene.control.TreeView");
 TreeView$EditEvent                         = Java.type("javafx.scene.control.TreeView$EditEvent");
 TreeViewBuilder                            = Java.type("javafx.scene.control.TreeViewBuilder");
--- a/nashorn/src/jdk/nashorn/internal/runtime/resources/fx/graphics.js	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/resources/fx/graphics.js	Wed Jul 05 18:59:05 2017 +0200
@@ -134,10 +134,10 @@
 PrintResolution                        = Java.type("javafx.print.PrintResolution");
 PrintSides                             = Java.type("javafx.print.PrintSides");
 AmbientLight                           = Java.type("javafx.scene.AmbientLight");
-AmbientLightBuilder                    = Java.type("javafx.scene.AmbientLightBuilder");
+//AmbientLightBuilder                    = Java.type("javafx.scene.AmbientLightBuilder");
 CacheHint                              = Java.type("javafx.scene.CacheHint");
 Camera                                 = Java.type("javafx.scene.Camera");
-CameraBuilder                          = Java.type("javafx.scene.CameraBuilder");
+//CameraBuilder                          = Java.type("javafx.scene.CameraBuilder");
 Canvas                                 = Java.type("javafx.scene.canvas.Canvas");
 CanvasBuilder                          = Java.type("javafx.scene.canvas.CanvasBuilder");
 GraphicsContext                        = Java.type("javafx.scene.canvas.GraphicsContext");
@@ -209,12 +209,12 @@
 DragEvent                              = Java.type("javafx.scene.input.DragEvent");
 GestureEvent                           = Java.type("javafx.scene.input.GestureEvent");
 InputEvent                             = Java.type("javafx.scene.input.InputEvent");
-InputEventBuilder                      = Java.type("javafx.scene.input.InputEventBuilder");
+//InputEventBuilder                      = Java.type("javafx.scene.input.InputEventBuilder");
 InputMethodEvent                       = Java.type("javafx.scene.input.InputMethodEvent");
 InputMethodHighlight                   = Java.type("javafx.scene.input.InputMethodHighlight");
 InputMethodRequests                    = Java.type("javafx.scene.input.InputMethodRequests");
 InputMethodTextRun                     = Java.type("javafx.scene.input.InputMethodTextRun");
-InputMethodTextRunBuilder              = Java.type("javafx.scene.input.InputMethodTextRunBuilder");
+//InputMethodTextRunBuilder              = Java.type("javafx.scene.input.InputMethodTextRunBuilder");
 KeyCharacterCombination                = Java.type("javafx.scene.input.KeyCharacterCombination");
 KeyCharacterCombinationBuilder         = Java.type("javafx.scene.input.KeyCharacterCombinationBuilder");
 KeyCode                                = Java.type("javafx.scene.input.KeyCode");
@@ -238,35 +238,35 @@
 TouchEvent                             = Java.type("javafx.scene.input.TouchEvent");
 TouchPoint                             = Java.type("javafx.scene.input.TouchPoint");
 TouchPoint$State                       = Java.type("javafx.scene.input.TouchPoint$State");
-TouchPointBuilder                      = Java.type("javafx.scene.input.TouchPointBuilder");
+//TouchPointBuilder                      = Java.type("javafx.scene.input.TouchPointBuilder");
 TransferMode                           = Java.type("javafx.scene.input.TransferMode");
 ZoomEvent                              = Java.type("javafx.scene.input.ZoomEvent");
 AnchorPane                             = Java.type("javafx.scene.layout.AnchorPane");
 AnchorPaneBuilder                      = Java.type("javafx.scene.layout.AnchorPaneBuilder");
 Background                             = Java.type("javafx.scene.layout.Background");
-BackgroundBuilder                      = Java.type("javafx.scene.layout.BackgroundBuilder");
+//BackgroundBuilder                      = Java.type("javafx.scene.layout.BackgroundBuilder");
 BackgroundFill                         = Java.type("javafx.scene.layout.BackgroundFill");
-BackgroundFillBuilder                  = Java.type("javafx.scene.layout.BackgroundFillBuilder");
+//BackgroundFillBuilder                  = Java.type("javafx.scene.layout.BackgroundFillBuilder");
 BackgroundImage                        = Java.type("javafx.scene.layout.BackgroundImage");
-BackgroundImageBuilder                 = Java.type("javafx.scene.layout.BackgroundImageBuilder");
+//BackgroundImageBuilder                 = Java.type("javafx.scene.layout.BackgroundImageBuilder");
 BackgroundPosition                     = Java.type("javafx.scene.layout.BackgroundPosition");
-BackgroundPositionBuilder              = Java.type("javafx.scene.layout.BackgroundPositionBuilder");
+//BackgroundPositionBuilder              = Java.type("javafx.scene.layout.BackgroundPositionBuilder");
 BackgroundRepeat                       = Java.type("javafx.scene.layout.BackgroundRepeat");
 BackgroundSize                         = Java.type("javafx.scene.layout.BackgroundSize");
-BackgroundSizeBuilder                  = Java.type("javafx.scene.layout.BackgroundSizeBuilder");
+//BackgroundSizeBuilder                  = Java.type("javafx.scene.layout.BackgroundSizeBuilder");
 Border                                 = Java.type("javafx.scene.layout.Border");
-BorderBuilder                          = Java.type("javafx.scene.layout.BorderBuilder");
+//BorderBuilder                          = Java.type("javafx.scene.layout.BorderBuilder");
 BorderImage                            = Java.type("javafx.scene.layout.BorderImage");
-BorderImageBuilder                     = Java.type("javafx.scene.layout.BorderImageBuilder");
+//BorderImageBuilder                     = Java.type("javafx.scene.layout.BorderImageBuilder");
 BorderPane                             = Java.type("javafx.scene.layout.BorderPane");
 BorderPaneBuilder                      = Java.type("javafx.scene.layout.BorderPaneBuilder");
 BorderRepeat                           = Java.type("javafx.scene.layout.BorderRepeat");
 BorderStroke                           = Java.type("javafx.scene.layout.BorderStroke");
-BorderStrokeBuilder                    = Java.type("javafx.scene.layout.BorderStrokeBuilder");
+//BorderStrokeBuilder                    = Java.type("javafx.scene.layout.BorderStrokeBuilder");
 BorderStrokeStyle                      = Java.type("javafx.scene.layout.BorderStrokeStyle");
-BorderStrokeStyleBuilder               = Java.type("javafx.scene.layout.BorderStrokeStyleBuilder");
+//BorderStrokeStyleBuilder               = Java.type("javafx.scene.layout.BorderStrokeStyleBuilder");
 BorderWidths                           = Java.type("javafx.scene.layout.BorderWidths");
-BorderWidthsBuilder                    = Java.type("javafx.scene.layout.BorderWidthsBuilder");
+//BorderWidthsBuilder                    = Java.type("javafx.scene.layout.BorderWidthsBuilder");
 ColumnConstraints                      = Java.type("javafx.scene.layout.ColumnConstraints");
 ColumnConstraintsBuilder               = Java.type("javafx.scene.layout.ColumnConstraintsBuilder");
 ConstraintsBase                        = Java.type("javafx.scene.layout.ConstraintsBase");
@@ -291,7 +291,7 @@
 VBox                                   = Java.type("javafx.scene.layout.VBox");
 VBoxBuilder                            = Java.type("javafx.scene.layout.VBoxBuilder");
 LightBase                              = Java.type("javafx.scene.LightBase");
-LightBaseBuilder                       = Java.type("javafx.scene.LightBaseBuilder");
+//LightBaseBuilder                       = Java.type("javafx.scene.LightBaseBuilder");
 Node                                   = Java.type("javafx.scene.Node");
 NodeBuilder                            = Java.type("javafx.scene.NodeBuilder");
 Color                                  = Java.type("javafx.scene.paint.Color");
@@ -304,19 +304,19 @@
 Material                               = Java.type("javafx.scene.paint.Material");
 Paint                                  = Java.type("javafx.scene.paint.Paint");
 PhongMaterial                          = Java.type("javafx.scene.paint.PhongMaterial");
-PhongMaterialBuilder                   = Java.type("javafx.scene.paint.PhongMaterialBuilder");
+//PhongMaterialBuilder                   = Java.type("javafx.scene.paint.PhongMaterialBuilder");
 RadialGradient                         = Java.type("javafx.scene.paint.RadialGradient");
 RadialGradientBuilder                  = Java.type("javafx.scene.paint.RadialGradientBuilder");
 Stop                                   = Java.type("javafx.scene.paint.Stop");
 StopBuilder                            = Java.type("javafx.scene.paint.StopBuilder");
 ParallelCamera                         = Java.type("javafx.scene.ParallelCamera");
-ParallelCameraBuilder                  = Java.type("javafx.scene.ParallelCameraBuilder");
+//ParallelCameraBuilder                  = Java.type("javafx.scene.ParallelCameraBuilder");
 Parent                                 = Java.type("javafx.scene.Parent");
 ParentBuilder                          = Java.type("javafx.scene.ParentBuilder");
 PerspectiveCamera                      = Java.type("javafx.scene.PerspectiveCamera");
 PerspectiveCameraBuilder               = Java.type("javafx.scene.PerspectiveCameraBuilder");
 PointLight                             = Java.type("javafx.scene.PointLight");
-PointLightBuilder                      = Java.type("javafx.scene.PointLightBuilder");
+//PointLightBuilder                      = Java.type("javafx.scene.PointLightBuilder");
 //Scene                                  = Java.type("javafx.scene.Scene");
 SceneBuilder                           = Java.type("javafx.scene.SceneBuilder");
 Arc                                    = Java.type("javafx.scene.shape.Arc");
@@ -325,7 +325,7 @@
 ArcToBuilder                           = Java.type("javafx.scene.shape.ArcToBuilder");
 ArcType                                = Java.type("javafx.scene.shape.ArcType");
 Box                                    = Java.type("javafx.scene.shape.Box");
-BoxBuilder                             = Java.type("javafx.scene.shape.BoxBuilder");
+//BoxBuilder                             = Java.type("javafx.scene.shape.BoxBuilder");
 Circle                                 = Java.type("javafx.scene.shape.Circle");
 CircleBuilder                          = Java.type("javafx.scene.shape.CircleBuilder");
 ClosePath                              = Java.type("javafx.scene.shape.ClosePath");
@@ -336,7 +336,7 @@
 CubicCurveToBuilder                    = Java.type("javafx.scene.shape.CubicCurveToBuilder");
 CullFace                               = Java.type("javafx.scene.shape.CullFace");
 Cylinder                               = Java.type("javafx.scene.shape.Cylinder");
-CylinderBuilder                        = Java.type("javafx.scene.shape.CylinderBuilder");
+//CylinderBuilder                        = Java.type("javafx.scene.shape.CylinderBuilder");
 DrawMode                               = Java.type("javafx.scene.shape.DrawMode");
 Ellipse                                = Java.type("javafx.scene.shape.Ellipse");
 EllipseBuilder                         = Java.type("javafx.scene.shape.EllipseBuilder");
@@ -349,7 +349,7 @@
 LineToBuilder                          = Java.type("javafx.scene.shape.LineToBuilder");
 Mesh                                   = Java.type("javafx.scene.shape.Mesh");
 MeshView                               = Java.type("javafx.scene.shape.MeshView");
-MeshViewBuilder                        = Java.type("javafx.scene.shape.MeshViewBuilder");
+//MeshViewBuilder                        = Java.type("javafx.scene.shape.MeshViewBuilder");
 MoveTo                                 = Java.type("javafx.scene.shape.MoveTo");
 MoveToBuilder                          = Java.type("javafx.scene.shape.MoveToBuilder");
 Path                                   = Java.type("javafx.scene.shape.Path");
@@ -368,10 +368,10 @@
 RectangleBuilder                       = Java.type("javafx.scene.shape.RectangleBuilder");
 Shape                                  = Java.type("javafx.scene.shape.Shape");
 Shape3D                                = Java.type("javafx.scene.shape.Shape3D");
-Shape3DBuilder                         = Java.type("javafx.scene.shape.Shape3DBuilder");
+//Shape3DBuilder                         = Java.type("javafx.scene.shape.Shape3DBuilder");
 ShapeBuilder                           = Java.type("javafx.scene.shape.ShapeBuilder");
 Sphere                                 = Java.type("javafx.scene.shape.Sphere");
-SphereBuilder                          = Java.type("javafx.scene.shape.SphereBuilder");
+//SphereBuilder                          = Java.type("javafx.scene.shape.SphereBuilder");
 StrokeLineCap                          = Java.type("javafx.scene.shape.StrokeLineCap");
 StrokeLineJoin                         = Java.type("javafx.scene.shape.StrokeLineJoin");
 StrokeType                             = Java.type("javafx.scene.shape.StrokeType");
@@ -384,7 +384,7 @@
 SnapshotParametersBuilder              = Java.type("javafx.scene.SnapshotParametersBuilder");
 SnapshotResult                         = Java.type("javafx.scene.SnapshotResult");
 SubScene                               = Java.type("javafx.scene.SubScene");
-SubSceneBuilder                        = Java.type("javafx.scene.SubSceneBuilder");
+//SubSceneBuilder                        = Java.type("javafx.scene.SubSceneBuilder");
 Font                                   = Java.type("javafx.scene.text.Font");
 FontBuilder                            = Java.type("javafx.scene.text.FontBuilder");
 FontPosture                            = Java.type("javafx.scene.text.FontPosture");
@@ -395,7 +395,7 @@
 TextBoundsType                         = Java.type("javafx.scene.text.TextBoundsType");
 TextBuilder                            = Java.type("javafx.scene.text.TextBuilder");
 TextFlow                               = Java.type("javafx.scene.text.TextFlow");
-TextFlowBuilder                        = Java.type("javafx.scene.text.TextFlowBuilder");
+//TextFlowBuilder                        = Java.type("javafx.scene.text.TextFlowBuilder");
 Affine                                 = Java.type("javafx.scene.transform.Affine");
 AffineBuilder                          = Java.type("javafx.scene.transform.AffineBuilder");
 MatrixType                             = Java.type("javafx.scene.transform.MatrixType");
@@ -407,7 +407,7 @@
 Shear                                  = Java.type("javafx.scene.transform.Shear");
 ShearBuilder                           = Java.type("javafx.scene.transform.ShearBuilder");
 Transform                              = Java.type("javafx.scene.transform.Transform");
-TransformBuilder                       = Java.type("javafx.scene.transform.TransformBuilder");
+//TransformBuilder                       = Java.type("javafx.scene.transform.TransformBuilder");
 TransformChangedEvent                  = Java.type("javafx.scene.transform.TransformChangedEvent");
 Translate                              = Java.type("javafx.scene.transform.Translate");
 TranslateBuilder                       = Java.type("javafx.scene.transform.TranslateBuilder");
--- a/nashorn/src/jdk/nashorn/internal/runtime/resources/fx/swt.js	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/resources/fx/swt.js	Wed Jul 05 18:59:05 2017 +0200
@@ -24,6 +24,6 @@
  */
 
 CustomTransfer        = Java.type("javafx.embed.swt.CustomTransfer");
-CustomTransferBuilder = Java.type("javafx.embed.swt.CustomTransferBuilder");
+//CustomTransferBuilder = Java.type("javafx.embed.swt.CustomTransferBuilder");
 FXCanvas              = Java.type("javafx.embed.swt.FXCanvas");
 SWTFXUtils            = Java.type("javafx.embed.swt.SWTFXUtils");
--- a/nashorn/src/jdk/nashorn/internal/runtime/resources/fx/web.js	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/src/jdk/nashorn/internal/runtime/resources/fx/web.js	Wed Jul 05 18:59:05 2017 +0200
@@ -24,10 +24,10 @@
  */
 
 HTMLEditor        = Java.type("javafx.scene.web.HTMLEditor");
-HTMLEditorBuilder = Java.type("javafx.scene.web.HTMLEditorBuilder");
+//HTMLEditorBuilder = Java.type("javafx.scene.web.HTMLEditorBuilder");
 PopupFeatures     = Java.type("javafx.scene.web.PopupFeatures");
 PromptData        = Java.type("javafx.scene.web.PromptData");
-PromptDataBuilder = Java.type("javafx.scene.web.PromptDataBuilder");
+//PromptDataBuilder = Java.type("javafx.scene.web.PromptDataBuilder");
 WebEngine         = Java.type("javafx.scene.web.WebEngine");
 WebEngineBuilder  = Java.type("javafx.scene.web.WebEngineBuilder");
 WebEvent          = Java.type("javafx.scene.web.WebEvent");
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8012164.js	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ * 
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ * 
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ * 
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ * 
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+
+/**
+ * JDK-8012164: Error.stack needs trimming
+ *
+ * @test
+ * @run
+ */
+
+function func() {
+   error();
+}
+
+function error() {
+  try {
+      throw new Error('foo');
+  } catch (e) {
+      for (i in e.stack) {
+          printFrame(e.stack[i]);
+      }
+  }
+}
+
+func();
+
+// See JDK-8015855: test/script/basic/JDK-8012164.js fails on Windows
+// Replace '\' with '/' in class and file names of StackFrameElement objects
+function printFrame(stack) {
+   var fileName = stack.fileName.replace(/\\/g, '/');
+   var className = stack.className.replace(/\\/g, '/');
+   print(className + '.' + stack.methodName + '(' +
+         fileName + ':' + stack.lineNumber + ')'); 
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8012164.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,3 @@
+<test/script/basic/JDK-8012164.js>.error(test/script/basic/JDK-8012164.js:38)
+<test/script/basic/JDK-8012164.js>.func(test/script/basic/JDK-8012164.js:33)
+<test/script/basic/JDK-8012164.js>.<program>(test/script/basic/JDK-8012164.js:46)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015345.js	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ * 
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ * 
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ * 
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ * 
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * JDK-8015345: Function("}),print('test'),({") should throw SyntaxError
+ *
+ * @test
+ * @run
+ */
+
+function checkFunction(code) {
+    try {
+        Function(code);
+        fail("should have thrown SyntaxError for :" + code);
+    } catch (e) {
+        if (! (e instanceof SyntaxError)) {
+            fail("SyntaxError expected, but got " + e);
+        }
+        print(e);
+    }
+}
+
+// invalid body
+checkFunction("}),print('test'),({");
+
+// invalid param list
+checkFunction("x**y", "print('x')");
+
+// invalid param identifier
+checkFunction("in", "print('hello')");
+//checkFunction("<>", "print('hello')")
+
+// invalid param list and body
+checkFunction("x--y", ")");
+
+// check few valid cases as well
+var f = Function("x", "return x*x");
+print(f(10))
+
+f = Function("x", "y", "return x+y");
+print(f(33, 22));
+
+f = Function("x,y", "return x/y");
+print(f(24, 2));
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015345.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,15 @@
+SyntaxError: <function>:1:0 Expected eof but found }
+}),print('test'),({
+^
+SyntaxError: <function>:1:2 Expected an operand but found *
+x**y
+  ^
+SyntaxError: <function>:1:0 Expected an operand but found in
+in
+^
+SyntaxError: <function>:1:3 Expected ; but found y
+x--y
+   ^
+100
+55
+12
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015350.js	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ * 
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ * 
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ * 
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ * 
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * JDK-8015350: Array.prototype.reduceRight issue with large length and index
+ *
+ * @test
+ * @run
+ */
+
+function reduce(obj) {
+    try {
+        Array.prototype.reduceRight.call(obj, function(acc, v, i, o){
+            print(v + i);
+            throw "stop";
+        }, 0);
+    } catch (error) {
+        print(error);
+    }
+}
+
+// array-like object
+reduce({
+    length:0xffffffff,
+    0xfffffffe: "index: "
+});
+
+// actual sparse array
+var array = [];
+array[0xfffffffe] = "index: ";
+reduce(array);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015350.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,4 @@
+index: 4294967294
+stop
+index: 4294967294
+stop
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015353.js	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ * 
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ * 
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ * 
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ * 
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * JDK-8015353: Date.parse illegal string parsing issues
+ *
+ * @test
+ * @run
+ */
+
+function checkDate(str) {
+    if (! isNaN(Date.parse(str))) {
+        fail(str + " is parsed as legal Date");
+    }
+}
+
+checkDate("2012-01-10T00:00:00.000-");
+checkDate("2012-01-01T00:00+");
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015741.js	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ * 
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ * 
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ * 
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ * 
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * JDK-8015741 : Need a global.load function that starts with a new global scope.
+ *
+ * @test
+ * @run
+ */
+
+var Thread = java.lang.Thread;
+
+myGlobal = "#0";
+var script1 = {name: "script 1", script: 'myGlobal = "#1"; print(myGlobal);'};
+var script2 = {name: "script 2", script: 'myGlobal = "#2"; print(myGlobal);'};
+var script3 = {name: "script 3", script: 'myGlobal = "#3"; print(myGlobal);'};
+var script4 = {name: "script 4", script: 'myGlobal = "#4"; print(myGlobal);'};
+
+print(myGlobal);
+load(script1);
+print(myGlobal);
+
+print(myGlobal);
+var thread1 = new Thread(function() { load(script2); });
+thread1.start();
+thread1.join();
+print(myGlobal);
+
+print(myGlobal);
+loadWithNewGlobal(script3);
+print(myGlobal);
+
+print(myGlobal);
+var thread2 = new Thread(function() { loadWithNewGlobal(script4); });
+thread2.start();
+thread2.join();
+print(myGlobal);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015741.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,12 @@
+#0
+#1
+#1
+#1
+#2
+#2
+#2
+#3
+#2
+#2
+#4
+#2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015830.js	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ * 
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ * 
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ * 
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ * 
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * JDK-8015830: Javascript mapping of ScriptEngine bindings does not expose keys
+ *
+ * @test
+ * @run
+ */
+
+var m = new javax.script.ScriptEngineManager();
+var engine = m.getEngineByName("nashorn");
+
+engine.eval("x = 100; doit = function () { }");
+
+var global = engine.getBindings(javax.script.ScriptContext.ENGINE_SCOPE);
+
+for(k in global){
+    print(k + " = " + global[k]);
+}
+
+for each (k in global) {
+    print(k);
+}
+
+for(k in global) {
+    delete global[k];
+}
+
+for(k in global){
+    print(k + " = " + global[k]);
+}
+
+for each(k in global) {
+    print(k);
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015830.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,4 @@
+x = 100
+doit = function () { }
+100
+function () { }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015945.js	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ * 
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ * 
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ * 
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ * 
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * JDK-8015945: loadWithNewGlobal return value has to be properly wrapped
+ *
+ * @test
+ * @option -scripting
+ * @run
+ */
+
+var global = loadWithNewGlobal({ name: "<code>",
+    script: <<EOF
+
+function squares() {
+    var res = new Array(arguments.length);
+    for (var i in arguments) {
+        res[i] = arguments[i]*arguments[i]
+    }
+    return res;
+}
+
+this;
+
+EOF
+})
+
+print("global an Object? " + (global instanceof Object));
+var res = global.squares(2, 3, 4, 5);
+print("global.squares returns Array? " + (res instanceof Array));
+// still can access array index properties and length
+print("result length " + res.length);
+for (var i in res) {
+    print(i + " = " + res[i]);
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/JDK-8015945.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,7 @@
+global an Object? false
+global.squares returns Array? false
+result length 4
+0 = 4
+1 = 9
+2 = 16
+3 = 25
--- a/nashorn/test/script/basic/NASHORN-108.js.EXPECTED	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/test/script/basic/NASHORN-108.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -1,3 +1,3 @@
-runScript 33
-runScript 32
+<program> 33
+<program> 32
 done
--- a/nashorn/test/script/basic/NASHORN-109.js.EXPECTED	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/test/script/basic/NASHORN-109.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -1,2 +1,2 @@
-runScript 33
+<program> 33
 done
--- a/nashorn/test/script/basic/errorstack.js.EXPECTED	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/test/script/basic/errorstack.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -1,4 +1,4 @@
 func3 : 40
 func2 : 36
 func1 : 32
-runScript : 44
+<program> : 44
--- a/nashorn/test/script/basic/funcconstructor.js.EXPECTED	Mon Jun 10 17:04:18 2013 -0700
+++ b/nashorn/test/script/basic/funcconstructor.js.EXPECTED	Wed Jul 05 18:59:05 2017 +0200
@@ -4,7 +4,7 @@
 print('anon func'); return x*x;
 }
 syntax error? true
-SyntaxError: <function>:2:13 Missing close quote
+SyntaxError: <function>:1:13 Missing close quote
 print('hello)
              ^
 done
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/nashorn/test/script/basic/typedarrays.js	Wed Jul 05 18:59:05 2017 +0200
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ * 
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ * 
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ * 
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ * 
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * typedarray test.
+ *
+ * @test
+ * @run 
+ */
+
+
+var typeDefinitions = [
+Int8Array, 
+Uint8Array, 
+Uint8ClampedArray, 
+Int16Array, 
+Uint16Array, 
+Int32Array, 
+Uint32Array, 
+Float32Array, 
+Float64Array, 
+];
+
+var mem1 = new ArrayBuffer(1024);
+mem1.byteLength;
+mem1.slice(512);
+mem1.slice(512, 748);
+
+var size = 128;
+var arr = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+var arr2 = [99, 89];
+var partial = [];
+var all = [];
+
+typeDefinitions.forEach(function(arrayDef) {
+    var p = arrayDef.prototype;
+    var sub = [];
+    sub.push(new arrayDef(mem1, arrayDef.BYTES_PER_ELEMENT, 3));   
+    sub.push(new arrayDef(size));
+    sub.push(new arrayDef(arr));
+    //push the instances, they will be reused to do instance based construction
+    partial.push({
+        instances:sub, 
+        type:arrayDef
+    });
+    
+    all = all.concat(sub);
+    
+});
+
+partial.forEach(function(inst) {
+    // build new instances with TypeArray instance as parameter.
+    partial.forEach(function(other) {
+        other.instances.forEach(function(otherInstance) {
+            var ii = new inst.type(otherInstance);
+            all.push(ii);
+        });
+    })
+});
+
+all.forEach(function(instance) {
+    // cover instance props and functions
+    var arr = Object.getOwnPropertyNames(instance);
+    arr.forEach(function(p) {
+        var val = instance[p];
+        if(!isNaN(p)){
+            instance[p] = 99;
+        }       
+    });
+        
+    instance.set(instance, 0);
+    instance.set(instance);
+    instance.set(arr2);
+    instance.subarray(5, 9);
+    instance.subarray(5);
+});