# HG changeset patch
# User mfang
# Date 1333063016 25200
# Node ID 4b7653ae5caaf870b76598f2c77520547e2f28a0
# Parent 8f55316ee4ee06a7f8e155dd1b6e14f68b251586
# Parent a6e6d42203e6d35f9e8b31eac25b0021b4dd58ad
Merge

diff -r 8f55316ee4ee -r 4b7653ae5caa .hgtags
--- a/.hgtags Wed Mar 28 16:22:31 2012 -0700
+++ b/.hgtags Thu Mar 29 16:16:56 2012 -0700
@@ -153,3 +153,4 @@
 e070119aa56ee4dc5506c19d2c4d2eecab8ad429 jdk8-b29
 23da7804aca0c9c4e6e86532a1453125a76d95ee jdk8-b30
 bac81e9f7d57b75fba5ab31b571f3fe0dc08af69 jdk8-b31
+2c5208ccb863db936eab523f49450b3fcd230348 jdk8-b32
diff -r 8f55316ee4ee -r 4b7653ae5caa .hgtags-top-repo
--- a/.hgtags-top-repo Wed Mar 28 16:22:31 2012 -0700
+++ b/.hgtags-top-repo Thu Mar 29 16:16:56 2012 -0700
@@ -153,3 +153,4 @@
 41460de042580bc4a4ce3f863779c66f39cb8578 jdk8-b29
 6cea54809b51db92979c22fd8aa8fcb1cb13d12e jdk8-b30
 0b66f43b89a6c0ac1c15d7ec51992c541cdc9089 jdk8-b31
+88176171e940f02916a312c265a34c32552a8376 jdk8-b32
diff -r 8f55316ee4ee -r 4b7653ae5caa corba/.hgtags
--- a/corba/.hgtags Wed Mar 28 16:22:31 2012 -0700
+++ b/corba/.hgtags Thu Mar 29 16:16:56 2012 -0700
@@ -153,3 +153,4 @@
 6117395d422682f89d228347e319fcaac7edc729 jdk8-b29
 4605f8418bf562e78be79b25b6b8a5110281acae jdk8-b30
 1954151dfae8f73db24e396380f7c02bdd47c486 jdk8-b31
+5d820cb6b1afd75b619e7fd69e4f2b0eb1d5d6a1 jdk8-b32
diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/.hgtags
--- a/hotspot/.hgtags Wed Mar 28 16:22:31 2012 -0700
+++ b/hotspot/.hgtags Thu Mar 29 16:16:56 2012 -0700
@@ -234,3 +234,5 @@
 f4767e53d6e0d5da7e3f1775904076cce54247c1 hs24-b04
 0cd147eaa673d1642b2f466f5dc257cf192db524 jdk8-b31
 27863e4586de38be7dd17da4163f542038f4d1d7 hs24-b05
+25410a347ebb0bef166c4338a90d9dea82463a20 jdk8-b32
+cd47da9383cd932cb2b659064057feafa2a91134 hs24-b06
diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/InstanceKlass.java
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/InstanceKlass.java Wed Mar 28 16:22:31 2012 -0700
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/InstanceKlass.java Thu Mar 29 16:16:56 2012 -0700
@@ -359,6 +359,12 @@
     public static final int innerClassNextOffset = 4;
   };
 
+  public static interface EnclosingMethodAttributeOffset {
+    public static final int enclosing_method_class_index_offset = 0;
+    public static final int enclosing_method_method_index_offset = 1;
+    public static final int enclosing_method_attribute_size = 2;
+  };
+
   // refer to compute_modifier_flags in VM code.
   public long computeModifierFlags() {
     long access = getAccessFlags();
@@ -367,9 +373,14 @@
     int length = ( innerClassList == null)? 0 : (int) innerClassList.getLength();
     if (length > 0) {
       if (Assert.ASSERTS_ENABLED) {
-        Assert.that(length % InnerClassAttributeOffset.innerClassNextOffset == 0, "just checking");
+        Assert.that(length % InnerClassAttributeOffset.innerClassNextOffset == 0 ||
+                    length % InnerClassAttributeOffset.innerClassNextOffset == EnclosingMethodAttributeOffset.enclosing_method_attribute_size,
+                    "just checking");
       }
       for (int i = 0; i < length; i += InnerClassAttributeOffset.innerClassNextOffset) {
+        if (i == length - EnclosingMethodAttributeOffset.enclosing_method_attribute_size) {
+          break;
+        }
         int ioff = innerClassList.getShortAt(i +
                        InnerClassAttributeOffset.innerClassInnerClassInfoOffset);
         // 'ioff' can be zero.
@@ -419,9 +430,14 @@
     int length = ( innerClassList == null)?
0 : (int) innerClassList.getLength(); if (length > 0) { if (Assert.ASSERTS_ENABLED) { - Assert.that(length % InnerClassAttributeOffset.innerClassNextOffset == 0, "just checking"); + Assert.that(length % InnerClassAttributeOffset.innerClassNextOffset == 0 || + length % InnerClassAttributeOffset.innerClassNextOffset == EnclosingMethodAttributeOffset.enclosing_method_attribute_size, + "just checking"); } for (int i = 0; i < length; i += InnerClassAttributeOffset.innerClassNextOffset) { + if (i == length - EnclosingMethodAttributeOffset.enclosing_method_attribute_size) { + break; + } int ioff = innerClassList.getShortAt(i + InnerClassAttributeOffset.innerClassInnerClassInfoOffset); // 'ioff' can be zero. diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/make/hotspot_version --- a/hotspot/make/hotspot_version Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/make/hotspot_version Thu Mar 29 16:16:56 2012 -0700 @@ -35,7 +35,7 @@ HS_MAJOR_VER=24 HS_MINOR_VER=0 -HS_BUILD_NUMBER=05 +HS_BUILD_NUMBER=06 JDK_MAJOR_VER=1 JDK_MINOR_VER=8 diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/make/jprt.properties --- a/hotspot/make/jprt.properties Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/make/jprt.properties Thu Mar 29 16:16:56 2012 -0700 @@ -446,6 +446,7 @@ jprt.test.targets.jdk8=${jprt.test.targets.standard} jprt.test.targets.jdk7=${jprt.test.targets.standard} +jprt.test.targets.jdk7u4=${jprt.test.targets.jdk7} jprt.test.targets=${jprt.test.targets.${jprt.tools.default.release}} # The default test/Makefile targets that should be run @@ -505,5 +506,6 @@ jprt.make.rule.test.targets.jdk8=${jprt.make.rule.test.targets.standard} jprt.make.rule.test.targets.jdk7=${jprt.make.rule.test.targets.standard} +jprt.make.rule.test.targets.jdk7u4=${jprt.make.rule.test.targets.jdk7} jprt.make.rule.test.targets=${jprt.make.rule.test.targets.${jprt.tools.default.release}} diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/classfile/classFileParser.cpp --- a/hotspot/src/share/vm/classfile/classFileParser.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/classfile/classFileParser.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -2315,13 +2315,32 @@ #define RECOGNIZED_INNER_CLASS_MODIFIERS (JVM_RECOGNIZED_CLASS_MODIFIERS | JVM_ACC_PRIVATE | JVM_ACC_PROTECTED | JVM_ACC_STATIC) // Return number of classes in the inner classes attribute table -u2 ClassFileParser::parse_classfile_inner_classes_attribute(constantPoolHandle cp, instanceKlassHandle k, TRAPS) { +u2 ClassFileParser::parse_classfile_inner_classes_attribute(u1* inner_classes_attribute_start, + bool parsed_enclosingmethod_attribute, + u2 enclosing_method_class_index, + u2 enclosing_method_method_index, + constantPoolHandle cp, + instanceKlassHandle k, TRAPS) { ClassFileStream* cfs = stream(); - cfs->guarantee_more(2, CHECK_0); // length - u2 length = cfs->get_u2_fast(); - - // 4-tuples of shorts [inner_class_info_index, outer_class_info_index, inner_name_index, inner_class_access_flags] - typeArrayOop ic = oopFactory::new_permanent_shortArray(length*4, CHECK_0); + u1* current_mark = cfs->current(); + u2 length = 0; + if (inner_classes_attribute_start != NULL) { + cfs->set_current(inner_classes_attribute_start); + cfs->guarantee_more(2, CHECK_0); // length + length = cfs->get_u2_fast(); + } + + // 4-tuples of shorts of inner classes data and 2 shorts of enclosing + // method data: + // [inner_class_info_index, + // outer_class_info_index, + // inner_name_index, + // inner_class_access_flags, + // ... 
+ // enclosing_method_class_index, + // enclosing_method_method_index] + int size = length * 4 + (parsed_enclosingmethod_attribute ? 2 : 0); + typeArrayOop ic = oopFactory::new_permanent_shortArray(size, CHECK_0); typeArrayHandle inner_classes(THREAD, ic); int index = 0; int cp_size = cp->length(); @@ -2372,8 +2391,8 @@ // 4347400: make sure there's no duplicate entry in the classes array if (_need_verify && _major_version >= JAVA_1_5_VERSION) { - for(int i = 0; i < inner_classes->length(); i += 4) { - for(int j = i + 4; j < inner_classes->length(); j += 4) { + for(int i = 0; i < length * 4; i += 4) { + for(int j = i + 4; j < length * 4; j += 4) { guarantee_property((inner_classes->ushort_at(i) != inner_classes->ushort_at(j) || inner_classes->ushort_at(i+1) != inner_classes->ushort_at(j+1) || inner_classes->ushort_at(i+2) != inner_classes->ushort_at(j+2) || @@ -2384,8 +2403,19 @@ } } + // Set EnclosingMethod class and method indexes. + if (parsed_enclosingmethod_attribute) { + inner_classes->short_at_put(index++, enclosing_method_class_index); + inner_classes->short_at_put(index++, enclosing_method_method_index); + } + assert(index == size, "wrong size"); + // Update instanceKlass with inner class info. k->set_inner_classes(inner_classes()); + + // Restore buffer's current position. + cfs->set_current(current_mark); + return length; } @@ -2490,6 +2520,10 @@ int runtime_visible_annotations_length = 0; u1* runtime_invisible_annotations = NULL; int runtime_invisible_annotations_length = 0; + u1* inner_classes_attribute_start = NULL; + u4 inner_classes_attribute_length = 0; + u2 enclosing_method_class_index = 0; + u2 enclosing_method_method_index = 0; // Iterate over attributes while (attributes_count--) { cfs->guarantee_more(6, CHECK); // attribute_name_index, attribute_length @@ -2522,11 +2556,9 @@ } else { parsed_innerclasses_attribute = true; } - u2 num_of_classes = parse_classfile_inner_classes_attribute(cp, k, CHECK); - if (_need_verify && _major_version >= JAVA_1_5_VERSION) { - guarantee_property(attribute_length == sizeof(num_of_classes) + 4 * sizeof(u2) * num_of_classes, - "Wrong InnerClasses attribute length in class file %s", CHECK); - } + inner_classes_attribute_start = cfs->get_u1_buffer(); + inner_classes_attribute_length = attribute_length; + cfs->skip_u1(inner_classes_attribute_length, CHECK); } else if (tag == vmSymbols::tag_synthetic()) { // Check for Synthetic tag // Shouldn't we check that the synthetic flags wasn't already set? 
- not required in spec @@ -2568,22 +2600,21 @@ parsed_enclosingmethod_attribute = true; } cfs->guarantee_more(4, CHECK); // class_index, method_index - u2 class_index = cfs->get_u2_fast(); - u2 method_index = cfs->get_u2_fast(); - if (class_index == 0) { + enclosing_method_class_index = cfs->get_u2_fast(); + enclosing_method_method_index = cfs->get_u2_fast(); + if (enclosing_method_class_index == 0) { classfile_parse_error("Invalid class index in EnclosingMethod attribute in class file %s", CHECK); } // Validate the constant pool indices and types - if (!cp->is_within_bounds(class_index) || - !is_klass_reference(cp, class_index)) { + if (!cp->is_within_bounds(enclosing_method_class_index) || + !is_klass_reference(cp, enclosing_method_class_index)) { classfile_parse_error("Invalid or out-of-bounds class index in EnclosingMethod attribute in class file %s", CHECK); } - if (method_index != 0 && - (!cp->is_within_bounds(method_index) || - !cp->tag_at(method_index).is_name_and_type())) { + if (enclosing_method_method_index != 0 && + (!cp->is_within_bounds(enclosing_method_method_index) || + !cp->tag_at(enclosing_method_method_index).is_name_and_type())) { classfile_parse_error("Invalid or out-of-bounds method index in EnclosingMethod attribute in class file %s", CHECK); } - k->set_enclosing_method_indices(class_index, method_index); } else if (tag == vmSymbols::tag_bootstrap_methods() && _major_version >= Verifier::INVOKEDYNAMIC_MAJOR_VERSION) { if (parsed_bootstrap_methods_attribute) @@ -2606,6 +2637,20 @@ CHECK); k->set_class_annotations(annotations()); + if (parsed_innerclasses_attribute || parsed_enclosingmethod_attribute) { + u2 num_of_classes = parse_classfile_inner_classes_attribute( + inner_classes_attribute_start, + parsed_innerclasses_attribute, + enclosing_method_class_index, + enclosing_method_method_index, + cp, k, CHECK); + if (parsed_innerclasses_attribute &&_need_verify && _major_version >= JAVA_1_5_VERSION) { + guarantee_property( + inner_classes_attribute_length == sizeof(num_of_classes) + 4 * sizeof(u2) * num_of_classes, + "Wrong InnerClasses attribute length in class file %s", CHECK); + } + } + if (_max_bootstrap_specifier_index >= 0) { guarantee_property(parsed_bootstrap_methods_attribute, "Missing BootstrapMethods attribute in class file %s", CHECK); diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/classfile/classFileParser.hpp --- a/hotspot/src/share/vm/classfile/classFileParser.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/classfile/classFileParser.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -130,7 +130,11 @@ void parse_classfile_sourcefile_attribute(constantPoolHandle cp, instanceKlassHandle k, TRAPS); void parse_classfile_source_debug_extension_attribute(constantPoolHandle cp, instanceKlassHandle k, int length, TRAPS); - u2 parse_classfile_inner_classes_attribute(constantPoolHandle cp, + u2 parse_classfile_inner_classes_attribute(u1* inner_classes_attribute_start, + bool parsed_enclosingmethod_attribute, + u2 enclosing_method_class_index, + u2 enclosing_method_method_index, + constantPoolHandle cp, instanceKlassHandle k, TRAPS); void parse_classfile_attributes(constantPoolHandle cp, instanceKlassHandle k, TRAPS); void parse_classfile_synthetic_attribute(constantPoolHandle cp, instanceKlassHandle k, TRAPS); diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/g1/collectionSetChooser.cpp --- a/hotspot/src/share/vm/gc_implementation/g1/collectionSetChooser.cpp Wed Mar 28 16:22:31 2012 -0700 +++ 
b/hotspot/src/share/vm/gc_implementation/g1/collectionSetChooser.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -285,7 +285,7 @@ // that the result is the same during all mixed GCs that follow a cycle. const size_t region_num = (size_t) _length; - const size_t gc_num = (size_t) G1MaxMixedGCNum; + const size_t gc_num = (size_t) G1MixedGCCountTarget; size_t result = region_num / gc_num; // emulate ceiling if (result * gc_num < region_num) { diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/g1/concurrentMarkThread.cpp --- a/hotspot/src/share/vm/gc_implementation/g1/concurrentMarkThread.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/g1/concurrentMarkThread.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -155,7 +155,7 @@ CMCheckpointRootsFinalClosure final_cl(_cm); sprintf(verbose_str, "GC remark"); - VM_CGC_Operation op(&final_cl, verbose_str); + VM_CGC_Operation op(&final_cl, verbose_str, true /* needs_pll */); VMThread::execute(&op); } if (cm()->restart_for_overflow() && @@ -189,7 +189,7 @@ CMCleanUp cl_cl(_cm); sprintf(verbose_str, "GC cleanup"); - VM_CGC_Operation op(&cl_cl, verbose_str); + VM_CGC_Operation op(&cl_cl, verbose_str, false /* needs_pll */); VMThread::execute(&op); } else { // We don't want to update the marking status if a GC pause diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp --- a/hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -993,7 +993,7 @@ // iteration (after taking the Heap_lock). result = _mutator_alloc_region.attempt_allocation(word_size, false /* bot_updates */); - if (result != NULL ){ + if (result != NULL) { return result; } @@ -2437,20 +2437,22 @@ true, /* should_initiate_conc_mark */ g1_policy()->max_pause_time_ms(), cause); + VMThread::execute(&op); if (!op.pause_succeeded()) { - // Another GC got scheduled and prevented us from scheduling - // the initial-mark GC. It's unlikely that the GC that - // pre-empted us was also an initial-mark GC. So, we'll retry - // the initial-mark GC. - if (full_gc_count_before == total_full_collections()) { - retry_gc = true; + retry_gc = op.should_retry_gc(); } else { // A Full GC happened while we were trying to schedule the // initial-mark GC. No point in starting a new cycle given // that the whole heap was collected anyway. 
} + + if (retry_gc) { + if (GC_locker::is_active_and_needs_gc()) { + GC_locker::stall_until_clear(); + } + } } } else { if (cause == GCCause::_gc_locker diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/g1/g1CollectorPolicy.cpp --- a/hotspot/src/share/vm/gc_implementation/g1/g1CollectorPolicy.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/g1/g1CollectorPolicy.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -2608,7 +2608,7 @@ size_t reclaimable_bytes = cset_chooser->remainingReclaimableBytes(); size_t capacity_bytes = _g1->capacity(); double perc = (double) reclaimable_bytes * 100.0 / (double) capacity_bytes; - double threshold = (double) G1OldReclaimableThresholdPercent; + double threshold = (double) G1HeapWastePercent; if (perc < threshold) { ergo_verbose4(ErgoMixedGCs, false_action_str, diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/g1/g1CollectorPolicy.hpp --- a/hotspot/src/share/vm/gc_implementation/g1/g1CollectorPolicy.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/g1/g1CollectorPolicy.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -940,10 +940,9 @@ return _bytes_copied_during_gc; } - // Determine whether the next GC should be mixed. Called to determine - // whether to start mixed GCs or whether to carry on doing mixed - // GCs. The two action strings are used in the ergo output when the - // method returns true or false. + // Determine whether there are candidate regions so that the + // next GC should be mixed. The two action strings are used + // in the ergo output when the method returns true or false. bool next_gc_should_be_mixed(const char* true_action_str, const char* false_action_str); diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/g1/g1_globals.hpp --- a/hotspot/src/share/vm/gc_implementation/g1/g1_globals.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/g1/g1_globals.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -299,17 +299,16 @@ "Percentage (0-100) of the heap size to use as maximum " \ "young gen size.") \ \ - develop(uintx, G1OldCSetRegionLiveThresholdPercent, 95, \ + develop(uintx, G1OldCSetRegionLiveThresholdPercent, 90, \ "Threshold for regions to be added to the collection set. " \ "Regions with more live bytes that this will not be collected.") \ \ - develop(uintx, G1OldReclaimableThresholdPercent, 1, \ - "Threshold for the remaining old reclaimable bytes, expressed " \ - "as a percentage of the heap size. 
If the old reclaimable bytes " \ - "are under this we will not collect them with more mixed GCs.") \ + product(uintx, G1HeapWastePercent, 5, \ + "Amount of space, expressed as a percentage of the heap size, " \ + "that G1 is willing not to collect to avoid expensive GCs.") \ \ - develop(uintx, G1MaxMixedGCNum, 4, \ - "The maximum desired number of mixed GCs after a marking cycle.") \ + product(uintx, G1MixedGCCountTarget, 4, \ + "The target number of mixed GCs after a marking cycle.") \ \ develop(uintx, G1OldCSetRegionThresholdPercent, 10, \ "An upper bound for the number of old CSet regions expressed " \ diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/g1/vm_operations_g1.cpp --- a/hotspot/src/share/vm/gc_implementation/g1/vm_operations_g1.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/g1/vm_operations_g1.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -34,7 +34,8 @@ VM_G1CollectForAllocation::VM_G1CollectForAllocation( unsigned int gc_count_before, size_t word_size) - : VM_G1OperationWithAllocRequest(gc_count_before, word_size) { + : VM_G1OperationWithAllocRequest(gc_count_before, word_size, + GCCause::_allocation_failure) { guarantee(word_size > 0, "an allocation should always be requested"); } @@ -57,9 +58,10 @@ bool should_initiate_conc_mark, double target_pause_time_ms, GCCause::Cause gc_cause) - : VM_G1OperationWithAllocRequest(gc_count_before, word_size), + : VM_G1OperationWithAllocRequest(gc_count_before, word_size, gc_cause), _should_initiate_conc_mark(should_initiate_conc_mark), _target_pause_time_ms(target_pause_time_ms), + _should_retry_gc(false), _full_collections_completed_before(0) { guarantee(target_pause_time_ms > 0.0, err_msg("target_pause_time_ms = %1.6lf should be positive", @@ -70,6 +72,22 @@ _gc_cause = gc_cause; } +bool VM_G1IncCollectionPause::doit_prologue() { + bool res = VM_GC_Operation::doit_prologue(); + if (!res) { + if (_should_initiate_conc_mark) { + // The prologue can fail for a couple of reasons. The first is that another GC + // got scheduled and prevented the scheduling of the initial mark GC. The + // second is that the GC locker may be active and the heap can't be expanded. + // In both cases we want to retry the GC so that the initial mark pause is + // actually scheduled. In the second case, however, we should stall until + // until the GC locker is no longer active and then retry the initial mark GC. + _should_retry_gc = true; + } + } + return res; +} + void VM_G1IncCollectionPause::doit() { G1CollectedHeap* g1h = G1CollectedHeap::heap(); assert(!_should_initiate_conc_mark || @@ -106,11 +124,25 @@ // next GC pause to be an initial mark; it returns false if a // marking cycle is already in progress. // - // If a marking cycle is already in progress just return and skip - // the pause - the requesting thread should block in doit_epilogue - // until the marking cycle is complete. + // If a marking cycle is already in progress just return and skip the + // pause below - if the reason for requesting this initial mark pause + // was due to a System.gc() then the requesting thread should block in + // doit_epilogue() until the marking cycle is complete. 
+ // + // If this initial mark pause was requested as part of a humongous + // allocation then we know that the marking cycle must just have + // been started by another thread (possibly also allocating a humongous + // object) as there was no active marking cycle when the requesting + // thread checked before calling collect() in + // attempt_allocation_humongous(). Retrying the GC, in this case, + // will cause the requesting thread to spin inside collect() until the + // just started marking cycle is complete - which may be a while. So + // we do NOT retry the GC. if (!res) { - assert(_word_size == 0, "ExplicitGCInvokesConcurrent shouldn't be allocating"); + assert(_word_size == 0, "Concurrent Full GC/Humongous Object IM shouldn't be allocating"); + if (_gc_cause != GCCause::_g1_humongous_allocation) { + _should_retry_gc = true; + } return; } } @@ -123,6 +155,13 @@ true /* expect_null_cur_alloc_region */); } else { assert(_result == NULL, "invariant"); + if (!_pause_succeeded) { + // Another possible reason reason for the pause to not be successful + // is that, again, the GC locker is active (and has become active + // since the prologue was executed). In this case we should retry + // the pause after waiting for the GC locker to become inactive. + _should_retry_gc = true; + } } } @@ -168,6 +207,7 @@ } void VM_CGC_Operation::acquire_pending_list_lock() { + assert(_needs_pll, "don't call this otherwise"); // The caller may block while communicating // with the SLT thread in order to acquire/release the PLL. ConcurrentMarkThread::slt()-> @@ -175,6 +215,7 @@ } void VM_CGC_Operation::release_and_notify_pending_list_lock() { + assert(_needs_pll, "don't call this otherwise"); // The caller may block while communicating // with the SLT thread in order to acquire/release the PLL. ConcurrentMarkThread::slt()-> @@ -198,7 +239,9 @@ bool VM_CGC_Operation::doit_prologue() { // Note the relative order of the locks must match that in // VM_GC_Operation::doit_prologue() or deadlocks can occur - acquire_pending_list_lock(); + if (_needs_pll) { + acquire_pending_list_lock(); + } Heap_lock->lock(); SharedHeap::heap()->_thread_holds_heap_lock_for_gc = true; @@ -210,5 +253,7 @@ // VM_GC_Operation::doit_epilogue() SharedHeap::heap()->_thread_holds_heap_lock_for_gc = false; Heap_lock->unlock(); - release_and_notify_pending_list_lock(); + if (_needs_pll) { + release_and_notify_pending_list_lock(); + } } diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/g1/vm_operations_g1.hpp --- a/hotspot/src/share/vm/gc_implementation/g1/vm_operations_g1.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/g1/vm_operations_g1.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -1,5 +1,5 @@ /* - * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2001, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
* * This code is free software; you can redistribute it and/or modify it @@ -43,8 +43,9 @@ public: VM_G1OperationWithAllocRequest(unsigned int gc_count_before, - size_t word_size) - : VM_GC_Operation(gc_count_before, GCCause::_allocation_failure), + size_t word_size, + GCCause::Cause gc_cause) + : VM_GC_Operation(gc_count_before, gc_cause), _word_size(word_size), _result(NULL), _pause_succeeded(false) { } HeapWord* result() { return _result; } bool pause_succeeded() { return _pause_succeeded; } @@ -77,6 +78,7 @@ class VM_G1IncCollectionPause: public VM_G1OperationWithAllocRequest { private: bool _should_initiate_conc_mark; + bool _should_retry_gc; double _target_pause_time_ms; unsigned int _full_collections_completed_before; public: @@ -86,11 +88,13 @@ double target_pause_time_ms, GCCause::Cause gc_cause); virtual VMOp_Type type() const { return VMOp_G1IncCollectionPause; } + virtual bool doit_prologue(); virtual void doit(); virtual void doit_epilogue(); virtual const char* name() const { return "garbage-first incremental collection pause"; } + bool should_retry_gc() const { return _should_retry_gc; } }; // Concurrent GC stop-the-world operations such as remark and cleanup; @@ -98,6 +102,7 @@ class VM_CGC_Operation: public VM_Operation { VoidClosure* _cl; const char* _printGCMessage; + bool _needs_pll; protected: // java.lang.ref.Reference support @@ -105,8 +110,8 @@ void release_and_notify_pending_list_lock(); public: - VM_CGC_Operation(VoidClosure* cl, const char *printGCMsg) - : _cl(cl), _printGCMessage(printGCMsg) { } + VM_CGC_Operation(VoidClosure* cl, const char *printGCMsg, bool needs_pll) + : _cl(cl), _printGCMessage(printGCMsg), _needs_pll(needs_pll) { } virtual VMOp_Type type() const { return VMOp_CGC_Operation; } virtual void doit(); virtual bool doit_prologue(); diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/parallelScavenge/psPromotionManager.inline.hpp --- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psPromotionManager.inline.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psPromotionManager.inline.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -25,6 +25,7 @@ #ifndef SHARE_VM_GC_IMPLEMENTATION_PARALLELSCAVENGE_PSPROMOTIONMANAGER_INLINE_HPP #define SHARE_VM_GC_IMPLEMENTATION_PARALLELSCAVENGE_PSPROMOTIONMANAGER_INLINE_HPP +#include "gc_implementation/parallelScavenge/psOldGen.hpp" #include "gc_implementation/parallelScavenge/psPromotionManager.hpp" #include "gc_implementation/parallelScavenge/psScavenge.hpp" diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_implementation/shared/mutableNUMASpace.cpp --- a/hotspot/src/share/vm/gc_implementation/shared/mutableNUMASpace.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_implementation/shared/mutableNUMASpace.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -1,6 +1,6 @@ /* - * Copyright (c) 2006, 2010, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2006, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
* * This code is free software; you can redistribute it and/or modify it @@ -91,29 +91,37 @@ MutableSpace *s = ls->space(); if (s->top() < top()) { // For all spaces preceding the one containing top() if (s->free_in_words() > 0) { - size_t area_touched_words = pointer_delta(s->end(), s->top()); - CollectedHeap::fill_with_object(s->top(), area_touched_words); + intptr_t cur_top = (intptr_t)s->top(); + size_t words_left_to_fill = pointer_delta(s->end(), s->top());; + while (words_left_to_fill > 0) { + size_t words_to_fill = MIN2(words_left_to_fill, CollectedHeap::filler_array_max_size()); + assert(words_to_fill >= CollectedHeap::min_fill_size(), + err_msg("Remaining size ("SIZE_FORMAT ") is too small to fill (based on " SIZE_FORMAT " and " SIZE_FORMAT ")", + words_to_fill, words_left_to_fill, CollectedHeap::filler_array_max_size())); + CollectedHeap::fill_with_object((HeapWord*)cur_top, words_to_fill); + if (!os::numa_has_static_binding()) { + size_t touched_words = words_to_fill; #ifndef ASSERT - if (!ZapUnusedHeapArea) { - area_touched_words = MIN2((size_t)align_object_size(typeArrayOopDesc::header_size(T_INT)), - area_touched_words); - } + if (!ZapUnusedHeapArea) { + touched_words = MIN2((size_t)align_object_size(typeArrayOopDesc::header_size(T_INT)), + touched_words); + } #endif - if (!os::numa_has_static_binding()) { - MemRegion invalid; - HeapWord *crossing_start = (HeapWord*)round_to((intptr_t)s->top(), os::vm_page_size()); - HeapWord *crossing_end = (HeapWord*)round_to((intptr_t)(s->top() + area_touched_words), - os::vm_page_size()); - if (crossing_start != crossing_end) { - // If object header crossed a small page boundary we mark the area - // as invalid rounding it to a page_size(). - HeapWord *start = MAX2((HeapWord*)round_down((intptr_t)s->top(), page_size()), s->bottom()); - HeapWord *end = MIN2((HeapWord*)round_to((intptr_t)(s->top() + area_touched_words), page_size()), - s->end()); - invalid = MemRegion(start, end); + MemRegion invalid; + HeapWord *crossing_start = (HeapWord*)round_to(cur_top, os::vm_page_size()); + HeapWord *crossing_end = (HeapWord*)round_to(cur_top + touched_words, os::vm_page_size()); + if (crossing_start != crossing_end) { + // If object header crossed a small page boundary we mark the area + // as invalid rounding it to a page_size(). 
+ HeapWord *start = MAX2((HeapWord*)round_down(cur_top, page_size()), s->bottom()); + HeapWord *end = MIN2((HeapWord*)round_to(cur_top + touched_words, page_size()), s->end()); + invalid = MemRegion(start, end); + } + + ls->add_invalid_region(invalid); } - - ls->add_invalid_region(invalid); + cur_top = cur_top + (words_to_fill * HeapWordSize); + words_left_to_fill -= words_to_fill; } } } else { diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_interface/collectedHeap.cpp --- a/hotspot/src/share/vm/gc_interface/collectedHeap.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_interface/collectedHeap.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -85,7 +85,7 @@ const size_t max_len = size_t(arrayOopDesc::max_array_length(T_INT)); const size_t elements_per_word = HeapWordSize / sizeof(jint); _filler_array_max_size = align_object_size(filler_array_hdr_size() + - max_len * elements_per_word); + max_len / elements_per_word); _barrier_set = NULL; _is_gc_active = false; @@ -303,10 +303,6 @@ return align_object_size(filler_array_hdr_size()); // align to MinObjAlignment } -size_t CollectedHeap::filler_array_max_size() { - return _filler_array_max_size; -} - #ifdef ASSERT void CollectedHeap::fill_args_check(HeapWord* start, size_t words) { @@ -333,10 +329,11 @@ const size_t payload_size = words - filler_array_hdr_size(); const size_t len = payload_size * HeapWordSize / sizeof(jint); + assert((int)len >= 0, err_msg("size too large " SIZE_FORMAT " becomes %d", words, (int)len)); // Set the length first for concurrent GC. ((arrayOop)start)->set_length((int)len); - post_allocation_setup_common(Universe::intArrayKlassObj(), start, words); + post_allocation_setup_common(Universe::intArrayKlassObj(), start); DEBUG_ONLY(zap_filler_array(start, words, zap);) } @@ -349,8 +346,7 @@ fill_with_array(start, words, zap); } else if (words > 0) { assert(words == min_fill_size(), "unaligned size"); - post_allocation_setup_common(SystemDictionary::Object_klass(), start, - words); + post_allocation_setup_common(SystemDictionary::Object_klass(), start); } } @@ -480,7 +476,7 @@ assert(ScavengeRootsInCode > 0, "must be"); obj = common_mem_allocate_init(size, CHECK_NULL); } - post_allocation_setup_common(klass, obj, size); + post_allocation_setup_common(klass, obj); assert(Universe::is_bootstrapping() || !((oop)obj)->blueprint()->oop_is_array(), "must not be an array"); NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_interface/collectedHeap.hpp --- a/hotspot/src/share/vm/gc_interface/collectedHeap.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_interface/collectedHeap.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -128,7 +128,6 @@ // Reinitialize tlabs before resuming mutators. virtual void resize_all_tlabs(); - protected: // Allocate from the current thread's TLAB, with broken-out slow path. inline static HeapWord* allocate_from_tlab(Thread* thread, size_t size); static HeapWord* allocate_from_tlab_slow(Thread* thread, size_t size); @@ -150,18 +149,14 @@ inline static HeapWord* common_permanent_mem_allocate_init(size_t size, TRAPS); // Helper functions for (VM) allocation. 
- inline static void post_allocation_setup_common(KlassHandle klass, - HeapWord* obj, size_t size); + inline static void post_allocation_setup_common(KlassHandle klass, HeapWord* obj); inline static void post_allocation_setup_no_klass_install(KlassHandle klass, - HeapWord* objPtr, - size_t size); + HeapWord* objPtr); - inline static void post_allocation_setup_obj(KlassHandle klass, - HeapWord* obj, size_t size); + inline static void post_allocation_setup_obj(KlassHandle klass, HeapWord* obj); inline static void post_allocation_setup_array(KlassHandle klass, - HeapWord* obj, size_t size, - int length); + HeapWord* obj, int length); // Clears an allocated object. inline static void init_obj(HeapWord* obj, size_t size); @@ -169,7 +164,6 @@ // Filler object utilities. static inline size_t filler_array_hdr_size(); static inline size_t filler_array_min_size(); - static inline size_t filler_array_max_size(); DEBUG_ONLY(static void fill_args_check(HeapWord* start, size_t words);) DEBUG_ONLY(static void zap_filler_array(HeapWord* start, size_t words, bool zap = true);) @@ -197,6 +191,10 @@ G1CollectedHeap }; + static inline size_t filler_array_max_size() { + return _filler_array_max_size; + } + virtual CollectedHeap::Name kind() const { return CollectedHeap::Abstract; } /** @@ -366,9 +364,7 @@ inline static oop permanent_obj_allocate_no_klass_install(KlassHandle klass, int size, TRAPS); - inline static void post_allocation_install_obj_klass(KlassHandle klass, - oop obj, - int size); + inline static void post_allocation_install_obj_klass(KlassHandle klass, oop obj); inline static oop permanent_array_allocate(KlassHandle klass, int size, int length, TRAPS); // Raw memory allocation facilities @@ -662,9 +658,6 @@ } } - // Allocate GCHeapLog during VM startup - static void initialize_heap_log(); - // Heap verification virtual void verify(bool allow_dirty, bool silent, VerifyOption option) = 0; diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/gc_interface/collectedHeap.inline.hpp --- a/hotspot/src/share/vm/gc_interface/collectedHeap.inline.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/gc_interface/collectedHeap.inline.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -1,5 +1,5 @@ /* - * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2001, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it @@ -50,15 +50,13 @@ // Inline allocation implementations. void CollectedHeap::post_allocation_setup_common(KlassHandle klass, - HeapWord* obj, - size_t size) { - post_allocation_setup_no_klass_install(klass, obj, size); - post_allocation_install_obj_klass(klass, oop(obj), (int) size); + HeapWord* obj) { + post_allocation_setup_no_klass_install(klass, obj); + post_allocation_install_obj_klass(klass, oop(obj)); } void CollectedHeap::post_allocation_setup_no_klass_install(KlassHandle klass, - HeapWord* objPtr, - size_t size) { + HeapWord* objPtr) { oop obj = (oop)objPtr; assert(obj != NULL, "NULL object pointer"); @@ -71,8 +69,7 @@ } void CollectedHeap::post_allocation_install_obj_klass(KlassHandle klass, - oop obj, - int size) { + oop obj) { // These asserts are kind of complicated because of klassKlass // and the beginning of the world. 
assert(klass() != NULL || !Universe::is_fully_initialized(), "NULL klass"); @@ -101,9 +98,8 @@ } void CollectedHeap::post_allocation_setup_obj(KlassHandle klass, - HeapWord* obj, - size_t size) { - post_allocation_setup_common(klass, obj, size); + HeapWord* obj) { + post_allocation_setup_common(klass, obj); assert(Universe::is_bootstrapping() || !((oop)obj)->blueprint()->oop_is_array(), "must not be an array"); // notify jvmti and dtrace @@ -112,14 +108,13 @@ void CollectedHeap::post_allocation_setup_array(KlassHandle klass, HeapWord* obj, - size_t size, int length) { // Set array length before setting the _klass field // in post_allocation_setup_common() because the klass field // indicates that the object is parsable by concurrent GC. assert(length >= 0, "length should be non-negative"); ((arrayOop)obj)->set_length(length); - post_allocation_setup_common(klass, obj, size); + post_allocation_setup_common(klass, obj); assert(((oop)obj)->blueprint()->oop_is_array(), "must be an array"); // notify jvmti and dtrace (must be after length is set for dtrace) post_allocation_notify(klass, (oop)obj); @@ -256,7 +251,7 @@ assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); assert(size >= 0, "int won't convert to size_t"); HeapWord* obj = common_mem_allocate_init(size, CHECK_NULL); - post_allocation_setup_obj(klass, obj, size); + post_allocation_setup_obj(klass, obj); NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); return (oop)obj; } @@ -269,7 +264,7 @@ assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); assert(size >= 0, "int won't convert to size_t"); HeapWord* obj = common_mem_allocate_init(size, CHECK_NULL); - post_allocation_setup_array(klass, obj, size, length); + post_allocation_setup_array(klass, obj, length); NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); return (oop)obj; } @@ -283,7 +278,7 @@ assert(size >= 0, "int won't convert to size_t"); HeapWord* obj = common_mem_allocate_noinit(size, CHECK_NULL); ((oop)obj)->set_klass_gap(0); - post_allocation_setup_array(klass, obj, size, length); + post_allocation_setup_array(klass, obj, length); #ifndef PRODUCT const size_t hs = oopDesc::header_size()+1; Universe::heap()->check_for_non_bad_heap_word_value(obj+hs, size-hs); @@ -293,7 +288,7 @@ oop CollectedHeap::permanent_obj_allocate(KlassHandle klass, int size, TRAPS) { oop obj = permanent_obj_allocate_no_klass_install(klass, size, CHECK_NULL); - post_allocation_install_obj_klass(klass, obj, size); + post_allocation_install_obj_klass(klass, obj); NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value((HeapWord*) obj, size)); return obj; @@ -306,7 +301,7 @@ assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); assert(size >= 0, "int won't convert to size_t"); HeapWord* obj = common_permanent_mem_allocate_init(size, CHECK_NULL); - post_allocation_setup_no_klass_install(klass, obj, size); + post_allocation_setup_no_klass_install(klass, obj); #ifndef PRODUCT const size_t hs = oopDesc::header_size(); Universe::heap()->check_for_bad_heap_word_value(obj+hs, size-hs); @@ -322,7 +317,7 @@ assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); assert(size >= 0, "int won't convert to size_t"); HeapWord* obj = common_permanent_mem_allocate_init(size, CHECK_NULL); - post_allocation_setup_array(klass, obj, size, length); + post_allocation_setup_array(klass, obj, length); NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); return 
(oop)obj; } diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/memory/dump.cpp --- a/hotspot/src/share/vm/memory/dump.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/memory/dump.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -297,16 +297,14 @@ if (obj->blueprint()->oop_is_instanceKlass()) { instanceKlass* ik = instanceKlass::cast((klassOop)obj); - typeArrayOop inner_classes = ik->inner_classes(); - if (inner_classes != NULL) { - constantPoolOop constants = ik->constants(); - int n = inner_classes->length(); - for (int i = 0; i < n; i += instanceKlass::inner_class_next_offset) { - int ioff = i + instanceKlass::inner_class_inner_name_offset; - int index = inner_classes->ushort_at(ioff); - if (index != 0) { - _closure->do_symbol(constants->symbol_at_addr(index)); - } + instanceKlassHandle ik_h((klassOop)obj); + InnerClassesIterator iter(ik_h); + constantPoolOop constants = ik->constants(); + for (; !iter.done(); iter.next()) { + int index = iter.inner_name_index(); + + if (index != 0) { + _closure->do_symbol(constants->symbol_at_addr(index)); } } } diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/oops/instanceKlass.cpp --- a/hotspot/src/share/vm/oops/instanceKlass.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/oops/instanceKlass.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -1133,6 +1133,36 @@ return probe; } +u2 instanceKlass::enclosing_method_data(int offset) { + typeArrayOop inner_class_list = inner_classes(); + if (inner_class_list == NULL) { + return 0; + } + int length = inner_class_list->length(); + if (length % inner_class_next_offset == 0) { + return 0; + } else { + int index = length - enclosing_method_attribute_size; + typeArrayHandle inner_class_list_h(inner_class_list); + assert(offset < enclosing_method_attribute_size, "invalid offset"); + return inner_class_list_h->ushort_at(index + offset); + } +} + +void instanceKlass::set_enclosing_method_indices(u2 class_index, + u2 method_index) { + typeArrayOop inner_class_list = inner_classes(); + assert (inner_class_list != NULL, "_inner_classes list is not set up"); + int length = inner_class_list->length(); + if (length % inner_class_next_offset == enclosing_method_attribute_size) { + int index = length - enclosing_method_attribute_size; + typeArrayHandle inner_class_list_h(inner_class_list); + inner_class_list_h->ushort_at_put( + index + enclosing_method_class_index_offset, class_index); + inner_class_list_h->ushort_at_put( + index + enclosing_method_method_index_offset, method_index); + } +} // Lookup or create a jmethodID. // This code is called by the VMThread and JavaThreads so the @@ -2107,28 +2137,21 @@ jint access = access_flags().as_int(); // But check if it happens to be member class. - typeArrayOop inner_class_list = inner_classes(); - int length = (inner_class_list == NULL) ? 0 : inner_class_list->length(); - assert (length % instanceKlass::inner_class_next_offset == 0, "just checking"); - if (length > 0) { - typeArrayHandle inner_class_list_h(THREAD, inner_class_list); - instanceKlassHandle ik(THREAD, k); - for (int i = 0; i < length; i += instanceKlass::inner_class_next_offset) { - int ioff = inner_class_list_h->ushort_at( - i + instanceKlass::inner_class_inner_class_info_offset); - - // Inner class attribute can be zero, skip it. - // Strange but true: JVM spec. allows null inner class refs. - if (ioff == 0) continue; - - // only look at classes that are already loaded - // since we are looking for the flags for our self. 
- Symbol* inner_name = ik->constants()->klass_name_at(ioff); - if ((ik->name() == inner_name)) { - // This is really a member class. - access = inner_class_list_h->ushort_at(i + instanceKlass::inner_class_access_flags_offset); - break; - } + instanceKlassHandle ik(THREAD, k); + InnerClassesIterator iter(ik); + for (; !iter.done(); iter.next()) { + int ioff = iter.inner_class_info_index(); + // Inner class attribute can be zero, skip it. + // Strange but true: JVM spec. allows null inner class refs. + if (ioff == 0) continue; + + // only look at classes that are already loaded + // since we are looking for the flags for our self. + Symbol* inner_name = ik->constants()->klass_name_at(ioff); + if ((ik->name() == inner_name)) { + // This is really a member class. + access = iter.inner_access_flags(); + break; } } // Remember to strip ACC_SUPER bit diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/oops/instanceKlass.hpp --- a/hotspot/src/share/vm/oops/instanceKlass.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/oops/instanceKlass.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -188,7 +188,17 @@ klassOop _host_klass; // Class signers. objArrayOop _signers; - // inner_classes attribute. + // The InnerClasses attribute and EnclosingMethod attribute. The + // _inner_classes is an array of shorts. If the class has InnerClasses + // attribute, then the _inner_classes array begins with 4-tuples of shorts + // [inner_class_info_index, outer_class_info_index, + // inner_name_index, inner_class_access_flags] for the InnerClasses + // attribute. If the EnclosingMethod attribute exists, it occupies the + // last two shorts [class_index, method_index] of the array. If only + // the InnerClasses attribute exists, the _inner_classes array length is + // number_of_inner_classes * 4. If the class has both InnerClasses + // and EnclosingMethod attributes the _inner_classes array length is + // number_of_inner_classes * 4 + enclosing_method_attribute_size. typeArrayOop _inner_classes; // Implementors of this interface (not valid if it overflows) klassOop _implementors[implementors_limit]; @@ -251,8 +261,6 @@ // Array of interesting part(s) of the previous version(s) of this // instanceKlass. See PreviousVersionWalker below. 
GrowableArray* _previous_versions; - u2 _enclosing_method_class_index; // Constant pool index for class of enclosing method, or 0 if none - u2 _enclosing_method_method_index; // Constant pool index for name and type of enclosing method, or 0 if none // JVMTI fields can be moved to their own structure - see 6315920 unsigned char * _cached_class_file_bytes; // JVMTI: cached class file, before retransformable agent modified it in CFLH jint _cached_class_file_len; // JVMTI: length of above @@ -351,6 +359,12 @@ inner_class_next_offset = 4 }; + enum EnclosingMethodAttributeOffset { + enclosing_method_class_index_offset = 0, + enclosing_method_method_index_offset = 1, + enclosing_method_attribute_size = 2 + }; + // method override check bool is_override(methodHandle super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS); @@ -533,11 +547,15 @@ Symbol* generic_signature() const { return _generic_signature; } void set_generic_signature(Symbol* sig) { _generic_signature = sig; } - u2 enclosing_method_class_index() const { return _enclosing_method_class_index; } - u2 enclosing_method_method_index() const { return _enclosing_method_method_index; } + u2 enclosing_method_data(int offset); + u2 enclosing_method_class_index() { + return enclosing_method_data(enclosing_method_class_index_offset); + } + u2 enclosing_method_method_index() { + return enclosing_method_data(enclosing_method_method_index_offset); + } void set_enclosing_method_indices(u2 class_index, - u2 method_index) { _enclosing_method_class_index = class_index; - _enclosing_method_method_index = method_index; } + u2 method_index); // jmethodID support static jmethodID get_jmethod_id(instanceKlassHandle ik_h, @@ -1053,4 +1071,83 @@ nmethod* get_nmethod() { return _nmethod; } }; +// An iterator that's used to access the inner classes indices in the +// instanceKlass::_inner_classes array. +class InnerClassesIterator : public StackObj { + private: + typeArrayHandle _inner_classes; + int _length; + int _idx; + public: + + InnerClassesIterator(instanceKlassHandle k) { + _inner_classes = k->inner_classes(); + if (k->inner_classes() != NULL) { + _length = _inner_classes->length(); + // The inner class array's length should be the multiple of + // inner_class_next_offset if it only contains the InnerClasses + // attribute data, or it should be + // n*inner_class_next_offset+enclosing_method_attribute_size + // if it also contains the EnclosingMethod data. + assert((_length % instanceKlass::inner_class_next_offset == 0 || + _length % instanceKlass::inner_class_next_offset == instanceKlass::enclosing_method_attribute_size), + "just checking"); + // Remove the enclosing_method portion if exists. 
+ if (_length % instanceKlass::inner_class_next_offset == instanceKlass::enclosing_method_attribute_size) { + _length -= instanceKlass::enclosing_method_attribute_size; + } + } else { + _length = 0; + } + _idx = 0; + } + + int length() const { + return _length; + } + + void next() { + _idx += instanceKlass::inner_class_next_offset; + } + + bool done() const { + return (_idx >= _length); + } + + u2 inner_class_info_index() const { + return _inner_classes->ushort_at( + _idx + instanceKlass::inner_class_inner_class_info_offset); + } + + void set_inner_class_info_index(u2 index) { + _inner_classes->ushort_at_put( + _idx + instanceKlass::inner_class_inner_class_info_offset, index); + } + + u2 outer_class_info_index() const { + return _inner_classes->ushort_at( + _idx + instanceKlass::inner_class_outer_class_info_offset); + } + + void set_outer_class_info_index(u2 index) { + _inner_classes->ushort_at_put( + _idx + instanceKlass::inner_class_outer_class_info_offset, index); + } + + u2 inner_name_index() const { + return _inner_classes->ushort_at( + _idx + instanceKlass::inner_class_inner_name_offset); + } + + void set_inner_name_index(u2 index) { + _inner_classes->ushort_at_put( + _idx + instanceKlass::inner_class_inner_name_offset, index); + } + + u2 inner_access_flags() const { + return _inner_classes->ushort_at( + _idx + instanceKlass::inner_class_access_flags_offset); + } +}; + #endif // SHARE_VM_OOPS_INSTANCEKLASS_HPP diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/oops/instanceKlassKlass.cpp --- a/hotspot/src/share/vm/oops/instanceKlassKlass.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/oops/instanceKlassKlass.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -416,7 +416,6 @@ ik->set_methods_annotations(NULL); ik->set_methods_parameter_annotations(NULL); ik->set_methods_default_annotations(NULL); - ik->set_enclosing_method_indices(0, 0); ik->set_jvmti_cached_class_field_map(NULL); ik->set_initial_method_idnum(0); assert(k()->is_parsable(), "should be parsable here."); diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/oops/klass.cpp --- a/hotspot/src/share/vm/oops/klass.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/oops/klass.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -1,5 +1,5 @@ /* - * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it @@ -174,10 +174,9 @@ } void Klass_vtbl::post_new_init_klass(KlassHandle& klass, - klassOop new_klass, - int size) const { + klassOop new_klass) const { assert(!new_klass->klass_part()->null_vtbl(), "Not a complete klass"); - CollectedHeap::post_allocation_install_obj_klass(klass, new_klass, size); + CollectedHeap::post_allocation_install_obj_klass(klass, new_klass); } void* Klass_vtbl::operator new(size_t ignored, KlassHandle& klass, diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/oops/klass.hpp --- a/hotspot/src/share/vm/oops/klass.hpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/oops/klass.hpp Thu Mar 29 16:16:56 2012 -0700 @@ -1,5 +1,5 @@ /* - * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
* * This code is free software; you can redistribute it and/or modify it @@ -149,7 +149,7 @@ // by the shared "base_create" subroutines. // virtual void* allocate_permanent(KlassHandle& klass, int size, TRAPS) const = 0; - void post_new_init_klass(KlassHandle& klass, klassOop obj, int size) const; + void post_new_init_klass(KlassHandle& klass, klassOop obj) const; // Every subclass on which vtbl_value is called must include this macro. // Delay the installation of the klassKlass pointer until after the @@ -160,7 +160,7 @@ if (HAS_PENDING_EXCEPTION) return NULL; \ klassOop new_klass = ((Klass*) result)->as_klassOop(); \ OrderAccess::storestore(); \ - post_new_init_klass(klass_klass, new_klass, size); \ + post_new_init_klass(klass_klass, new_klass); \ return result; \ } diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/prims/jvm.cpp --- a/hotspot/src/share/vm/prims/jvm.cpp Wed Mar 28 16:22:31 2012 -0700 +++ b/hotspot/src/share/vm/prims/jvm.cpp Thu Mar 29 16:16:56 2012 -0700 @@ -1301,9 +1301,6 @@ // Inner class reflection /////////////////////////////////////////////////////////////////////////////// JVM_ENTRY(jobjectArray, JVM_GetDeclaredClasses(JNIEnv *env, jclass ofClass)) - const int inner_class_info_index = 0; - const int outer_class_info_index = 1; - JvmtiVMObjectAllocEventCollector oam; // ofClass is a reference to a java_lang_Class object. The mirror object // of an instanceKlass @@ -1315,26 +1312,26 @@ } instanceKlassHandle k(thread, java_lang_Class::as_klassOop(JNIHandles::resolve_non_null(ofClass))); - - if (k->inner_classes()->length() == 0) { + InnerClassesIterator iter(k); + + if (iter.length() == 0) { // Neither an inner nor outer class oop result = oopFactory::new_objArray(SystemDictionary::Class_klass(), 0, CHECK_NULL); return (jobjectArray)JNIHandles::make_local(env, result); } // find inner class info - typeArrayHandle icls(thread, k->inner_classes()); constantPoolHandle cp(thread, k->constants()); - int length = icls->length(); + int length = iter.length(); // Allocate temp. 
result array
   objArrayOop r = oopFactory::new_objArray(SystemDictionary::Class_klass(), length/4, CHECK_NULL);
   objArrayHandle result (THREAD, r);
   int members = 0;

-  for(int i = 0; i < length; i += 4) {
-    int ioff = icls->ushort_at(i + inner_class_info_index);
-    int ooff = icls->ushort_at(i + outer_class_info_index);
+  for (; !iter.done(); iter.next()) {
+    int ioff = iter.inner_class_info_index();
+    int ooff = iter.outer_class_info_index();

     if (ioff != 0 && ooff != 0) {
       // Check to see if the name matches the class we're looking for
@@ -1392,17 +1389,13 @@
                                                      bool* inner_is_member,
                                                      TRAPS) {
   Thread* thread = THREAD;
-  const int inner_class_info_index = inner_class_inner_class_info_offset;
-  const int outer_class_info_index = inner_class_outer_class_info_offset;
-
-  if (k->inner_classes()->length() == 0) {
+  InnerClassesIterator iter(k);
+  if (iter.length() == 0) {
     // No inner class info => no declaring class
     return NULL;
   }

-  typeArrayHandle i_icls(thread, k->inner_classes());
   constantPoolHandle i_cp(thread, k->constants());
-  int i_length = i_icls->length();

   bool found = false;
   klassOop ok;
@@ -1410,10 +1403,10 @@
   *inner_is_member = false;

   // Find inner_klass attribute
-  for (int i = 0; i < i_length && !found; i += inner_class_next_offset) {
-    int ioff = i_icls->ushort_at(i + inner_class_info_index);
-    int ooff = i_icls->ushort_at(i + outer_class_info_index);
-    int noff = i_icls->ushort_at(i + inner_class_inner_name_offset);
+  for (; !iter.done() && !found; iter.next()) {
+    int ioff = iter.inner_class_info_index();
+    int ooff = iter.outer_class_info_index();
+    int noff = iter.inner_name_index();
     if (ioff != 0) {
       // Check to see if the name matches the class we're looking for
       // before attempting to find the class.
diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/prims/jvmtiClassFileReconstituter.cpp
--- a/hotspot/src/share/vm/prims/jvmtiClassFileReconstituter.cpp	Wed Mar 28 16:22:31 2012 -0700
+++ b/hotspot/src/share/vm/prims/jvmtiClassFileReconstituter.cpp	Thu Mar 29 16:16:56 2012 -0700
@@ -292,8 +292,8 @@

 // Compute the number of entries in the InnerClasses attribute
 u2 JvmtiClassFileReconstituter::inner_classes_attribute_length() {
-  typeArrayOop inner_class_list = ikh()->inner_classes();
-  return (inner_class_list == NULL) ? 0 : inner_class_list->length();
+  InnerClassesIterator iter(ikh());
+  return iter.length();
 }

 // Write an annotation attribute. The VM stores them in raw form, so all we need
@@ -324,26 +324,20 @@
 // JVMSpec|     } classes[number_of_classes];
 // JVMSpec|   }
 void JvmtiClassFileReconstituter::write_inner_classes_attribute(int length) {
-  typeArrayOop inner_class_list = ikh()->inner_classes();
-  guarantee(inner_class_list != NULL && inner_class_list->length() == length,
+  InnerClassesIterator iter(ikh());
+  guarantee(iter.length() != 0 && iter.length() == length,
             "caller must check");
-  typeArrayHandle inner_class_list_h(thread(), inner_class_list);
-  assert (length % instanceKlass::inner_class_next_offset == 0, "just checking");
   u2 entry_count = length / instanceKlass::inner_class_next_offset;
   u4 size = 2 + entry_count * (2+2+2+2);

   write_attribute_name_index("InnerClasses");
   write_u4(size);
   write_u2(entry_count);
-  for (int i = 0; i < length; i += instanceKlass::inner_class_next_offset) {
-    write_u2(inner_class_list_h->ushort_at(
-                   i + instanceKlass::inner_class_inner_class_info_offset));
-    write_u2(inner_class_list_h->ushort_at(
-                   i + instanceKlass::inner_class_outer_class_info_offset));
-    write_u2(inner_class_list_h->ushort_at(
-                   i + instanceKlass::inner_class_inner_name_offset));
-    write_u2(inner_class_list_h->ushort_at(
-                   i + instanceKlass::inner_class_access_flags_offset));
+  for (; !iter.done(); iter.next()) {
+    write_u2(iter.inner_class_info_index());
+    write_u2(iter.outer_class_info_index());
+    write_u2(iter.inner_name_index());
+    write_u2(iter.inner_access_flags());
   }
 }
diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/prims/jvmtiRedefineClasses.cpp
--- a/hotspot/src/share/vm/prims/jvmtiRedefineClasses.cpp	Wed Mar 28 16:22:31 2012 -0700
+++ b/hotspot/src/share/vm/prims/jvmtiRedefineClasses.cpp	Thu Mar 29 16:16:56 2012 -0700
@@ -2400,44 +2400,33 @@
   // new constant indices as needed. The inner classes info is a
   // quadruple:
   // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
-  typeArrayOop inner_class_list = scratch_class->inner_classes();
-  int icl_length = (inner_class_list == NULL) ? 0 : inner_class_list->length();
-  if (icl_length > 0) {
-    typeArrayHandle inner_class_list_h(THREAD, inner_class_list);
-    for (int i = 0; i < icl_length;
-         i += instanceKlass::inner_class_next_offset) {
-      int cur_index = inner_class_list_h->ushort_at(i
-                        + instanceKlass::inner_class_inner_class_info_offset);
-      if (cur_index == 0) {
-        continue;  // JVM spec. allows null inner class refs so skip it
-      }
-      int new_index = find_new_index(cur_index);
-      if (new_index != 0) {
-        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-          ("inner_class_info change: %d to %d", cur_index, new_index));
-        inner_class_list_h->ushort_at_put(i
-          + instanceKlass::inner_class_inner_class_info_offset, new_index);
-      }
-      cur_index = inner_class_list_h->ushort_at(i
-                    + instanceKlass::inner_class_outer_class_info_offset);
-      new_index = find_new_index(cur_index);
-      if (new_index != 0) {
-        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-          ("outer_class_info change: %d to %d", cur_index, new_index));
-        inner_class_list_h->ushort_at_put(i
-          + instanceKlass::inner_class_outer_class_info_offset, new_index);
-      }
-      cur_index = inner_class_list_h->ushort_at(i
-                    + instanceKlass::inner_class_inner_name_offset);
-      new_index = find_new_index(cur_index);
-      if (new_index != 0) {
-        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-          ("inner_name change: %d to %d", cur_index, new_index));
-        inner_class_list_h->ushort_at_put(i
-          + instanceKlass::inner_class_inner_name_offset, new_index);
-      }
-    } // end for each inner class
-  } // end if we have inner classes
+  InnerClassesIterator iter(scratch_class);
+  for (; !iter.done(); iter.next()) {
+    int cur_index = iter.inner_class_info_index();
+    if (cur_index == 0) {
+      continue;  // JVM spec. allows null inner class refs so skip it
+    }
+    int new_index = find_new_index(cur_index);
+    if (new_index != 0) {
+      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
+        ("inner_class_info change: %d to %d", cur_index, new_index));
+      iter.set_inner_class_info_index(new_index);
+    }
+    cur_index = iter.outer_class_info_index();
+    new_index = find_new_index(cur_index);
+    if (new_index != 0) {
+      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
+        ("outer_class_info change: %d to %d", cur_index, new_index));
+      iter.set_outer_class_info_index(new_index);
+    }
+    cur_index = iter.inner_name_index();
+    new_index = find_new_index(cur_index);
+    if (new_index != 0) {
+      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
+        ("inner_name change: %d to %d", cur_index, new_index));
+      iter.set_inner_name_index(new_index);
+    }
+  } // end for each inner class

   // Attach each method in klass to the new constant pool and update
   // to use new constant pool indices as needed:
diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/src/share/vm/runtime/reflection.cpp
--- a/hotspot/src/share/vm/runtime/reflection.cpp	Wed Mar 28 16:22:31 2012 -0700
+++ b/hotspot/src/share/vm/runtime/reflection.cpp	Thu Mar 29 16:16:56 2012 -0700
@@ -591,14 +591,11 @@

 // Caller is responsible for figuring out in advance which case must be true.
 void Reflection::check_for_inner_class(instanceKlassHandle outer, instanceKlassHandle inner,
                                        bool inner_is_member, TRAPS) {
-  const int inner_class_info_index = 0;
-  const int outer_class_info_index = 1;
-
-  typeArrayHandle icls (THREAD, outer->inner_classes());
+  InnerClassesIterator iter(outer);
   constantPoolHandle cp (THREAD, outer->constants());
-  for(int i = 0; i < icls->length(); i += 4) {
-     int ioff = icls->ushort_at(i + inner_class_info_index);
-     int ooff = icls->ushort_at(i + outer_class_info_index);
+  for (; !iter.done(); iter.next()) {
+     int ioff = iter.inner_class_info_index();
+     int ooff = iter.outer_class_info_index();

     if (inner_is_member && ioff != 0 && ooff != 0) {
       klassOop o = cp->klass_at(ooff, CHECK);
diff -r 8f55316ee4ee -r 4b7653ae5caa hotspot/test/Makefile
--- a/hotspot/test/Makefile	Wed Mar 28 16:22:31 2012 -0700
+++ b/hotspot/test/Makefile	Thu Mar 29 16:16:56 2012 -0700
@@ -26,6 +26,8 @@
 # Makefile to run various jdk tests
 #

+GETMIXEDPATH=echo
+
 # Get OS/ARCH specifics
 OSNAME = $(shell uname -s)
 ifeq ($(OSNAME), SunOS)
@@ -60,7 +62,14 @@
     ARCH = i586
   endif
 endif
-ifeq ($(OSNAME), Windows_NT)
+ifeq ($(PLATFORM),)
+  # detect whether we're running in MKS or cygwin
+  ifeq ($(OSNAME), Windows_NT) # MKS
+    GETMIXEDPATH=dosname -s
+  endif
+  ifeq ($(findstring CYGWIN,$(OSNAME)), CYGWIN)
+    GETMIXEDPATH=cygpath -m -s
+  endif
   PLATFORM = windows
   SLASH_JAVA = J:
   ifeq ($(word 1, $(PROCESSOR_IDENTIFIER)),ia64)
@@ -234,11 +243,11 @@
 	$(JTREG) -a -v:fail,error \
 	  $(JTREG_KEY_OPTION) \
 	  $(EXTRA_JTREG_OPTIONS) \
-	  -r:$(ABS_TEST_OUTPUT_DIR)/JTreport \
-	  -w:$(ABS_TEST_OUTPUT_DIR)/JTwork \
-	  -jdk:$(PRODUCT_HOME) \
+	  -r:$(shell $(GETMIXEDPATH) "$(ABS_TEST_OUTPUT_DIR)")/JTreport \
+	  -w:$(shell $(GETMIXEDPATH) "$(ABS_TEST_OUTPUT_DIR)")/JTwork \
+	  -jdk:$(shell $(GETMIXEDPATH) "$(PRODUCT_HOME)") \
 	  $(JAVA_OPTIONS:%=-vmoption:%) \
-	  $(TEST_ROOT)/sanity \
+	  $(shell $(GETMIXEDPATH) "$(TEST_ROOT)")/sanity \
 	  || $(BUNDLE_UP_FAILED)
 	$(BUNDLE_UP)
diff -r 8f55316ee4ee -r 4b7653ae5caa jaxp/.hgtags
--- a/jaxp/.hgtags	Wed Mar 28 16:22:31 2012 -0700
+++ b/jaxp/.hgtags	Thu Mar 29 16:16:56 2012 -0700
@@ -153,3 +153,4 @@
 25099a745e1a43579b6af86b3e052b2e50958753 jdk8-b29
 3be30c25a8255803652b5c466336055d36e2ba21 jdk8-b30
 94aabe098916440ae7911866311c9617d8481a36 jdk8-b31
+60960fbc75df8be4c1a2504aa69fc1428cc94f93 jdk8-b32
diff -r 8f55316ee4ee -r 4b7653ae5caa jaxws/.hgtags
--- a/jaxws/.hgtags	Wed Mar 28 16:22:31 2012 -0700
+++ b/jaxws/.hgtags	Thu Mar 29 16:16:56 2012 -0700
@@ -153,3 +153,4 @@
 4897d9d2d04838e3479745efa238a99bacd939c9 jdk8-b29
 6882b10e85d6f6ba110dbb50926d6fe2222cc7ad jdk8-b30
 4c41c6d0e15de3b56919a5ba0a0f248a2d07f2b2 jdk8-b31
+017a7dbfaa92f5a8b144e6c890d1cebdaecaf681 jdk8-b32
diff -r 8f55316ee4ee -r 4b7653ae5caa jdk/.hgtags
--- a/jdk/.hgtags	Wed Mar 28 16:22:31 2012 -0700
+++ b/jdk/.hgtags	Thu Mar 29 16:16:56 2012 -0700
@@ -153,3 +153,4 @@
 c5b882dce0fe27e05dc64debc92b1fb9ebf880ec jdk8-b29
 cdbb33303ea344d5e9013e2dd642e7a6e7768db6 jdk8-b30
 27f0c08c427c65fcab6917edf646f59058e59524 jdk8-b31
+ddfe5562f61f54ed2121ac0c73b688b94f3e66b5 jdk8-b32
diff -r 8f55316ee4ee -r 4b7653ae5caa langtools/.hgtags
--- a/langtools/.hgtags	Wed Mar 28 16:22:31 2012 -0700
+++ b/langtools/.hgtags	Thu Mar 29 16:16:56 2012 -0700
@@ -153,3 +153,4 @@
 e974e82abe51ef66dc32bb6ab5d0733753d3c7d7 jdk8-b29
 08a3425f39f829502ca0ddbfb2d051c31710cb19 jdk8-b30
 b28cfbe7e8b196da954bed9a22bfd790e55333aa jdk8-b31
+be069d72dde2bfe6f996c46325a320961ca854c2 jdk8-b32
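
A note for readers following the rewritten call sites in the hunks above: the InnerClassesIterator type they use is introduced elsewhere in this changeset (in the instanceKlass sources, which are not part of this section). The stand-alone C++ sketch below only models the contract those call sites appear to rely on: length(), done(), next(), and paired get/set accessors over 4-tuples of (inner_class_info, outer_class_info, inner_name, inner_access_flags), the quadruple layout named in the jvmtiRedefineClasses.cpp comment. The std::vector<uint16_t> backing store, the class name InnerClassesIteratorSketch, and the main() demo are illustrative assumptions, not HotSpot code.

// Simplified, self-contained model of the iterator contract used above.
// Assumes the InnerClasses attribute is a flat array of u2 values laid out
// as repeating 4-tuples; this is a sketch, not the VM's implementation.
#include <cstdint>
#include <iostream>
#include <vector>

class InnerClassesIteratorSketch {
 private:
  std::vector<uint16_t>& _list;          // flattened 4-tuples of u2 values
  size_t _idx;                           // index of the current tuple's start
  static const size_t tuple_size = 4;    // entries per inner-class record

 public:
  explicit InnerClassesIteratorSketch(std::vector<uint16_t>& list)
      : _list(list), _idx(0) {}

  size_t length() const { return _list.size(); }
  bool done() const     { return _idx >= _list.size(); }
  void next()           { _idx += tuple_size; }

  // Getters and setters mirror the names used by the rewritten call sites.
  uint16_t inner_class_info_index() const { return _list[_idx + 0]; }
  uint16_t outer_class_info_index() const { return _list[_idx + 1]; }
  uint16_t inner_name_index() const       { return _list[_idx + 2]; }
  uint16_t inner_access_flags() const     { return _list[_idx + 3]; }

  void set_inner_class_info_index(uint16_t v) { _list[_idx + 0] = v; }
  void set_outer_class_info_index(uint16_t v) { _list[_idx + 1] = v; }
  void set_inner_name_index(uint16_t v)       { _list[_idx + 2] = v; }
};

int main() {
  // Two fake entries; the constant-pool indices are made up for the demo.
  std::vector<uint16_t> attr = { 5, 7, 9, 0x0008,
                                 11, 0, 13, 0x0001 };

  // Same loop shape as the rewritten HotSpot call sites.
  for (InnerClassesIteratorSketch iter(attr); !iter.done(); iter.next()) {
    std::cout << "inner=" << iter.inner_class_info_index()
              << " outer=" << iter.outer_class_info_index()
              << " name="  << iter.inner_name_index()
              << " flags=" << iter.inner_access_flags() << "\n";
  }
  return 0;
}

The design point visible in the removed lines is that every caller previously repeated the ushort_at(i + instanceKlass::inner_class_*_offset) index arithmetic; routing all access through one iterator keeps knowledge of the attribute layout in a single place.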