hotspot/src/share/vm/gc/parallel/psMarkSweep.cpp
changeset 35061 be6025ebffea
parent 34666 1c7168ea0034
child 35492 c8c0273e6b91
comparing 35060:382d0689141c with 35061:be6025ebffea
@@ -39,15 +39,16 @@
 #include "gc/shared/gcHeapSummary.hpp"
 #include "gc/shared/gcId.hpp"
 #include "gc/shared/gcLocker.inline.hpp"
 #include "gc/shared/gcTimer.hpp"
 #include "gc/shared/gcTrace.hpp"
-#include "gc/shared/gcTraceTime.hpp"
+#include "gc/shared/gcTraceTime.inline.hpp"
 #include "gc/shared/isGCActiveMark.hpp"
 #include "gc/shared/referencePolicy.hpp"
 #include "gc/shared/referenceProcessor.hpp"
 #include "gc/shared/spaceDecorator.hpp"
+#include "logging/log.hpp"
 #include "oops/oop.inline.hpp"
 #include "runtime/biasedLocking.hpp"
 #include "runtime/fprofiler.hpp"
 #include "runtime/safepoint.hpp"
 #include "runtime/vmThread.hpp"
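
The new logging/log.hpp include pulls in HotSpot's unified logging macros, which the rest of this patch switches to. As a minimal sketch, assuming HotSpot's internal headers and the existing gc/heap log tags (the function and message below are illustrative only, not part of this changeset):

    // Illustrative sketch: log at a level with one or more tags.
    static void log_heap_usage_example(size_t used_bytes) {
      // Emitted when the gc tag is enabled at info level (e.g. -Xlog:gc).
      log_info(gc)("heap used: " SIZE_FORMAT "K", used_bytes / K);
      // Guard work that is only worth doing when the level/tags are enabled.
      if (log_is_enabled(Debug, gc, heap)) {
        log_debug(gc, heap)("detailed heap state would be collected here");
      }
    }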
@@ -135,22 +136,20 @@
   heap->record_gen_tops_before_GC();
 
   // We need to track unique mark sweep invocations as well.
   _total_invocations++;
 
-  AdaptiveSizePolicyOutput(size_policy, heap->total_collections());
-
   heap->print_heap_before_gc();
   heap->trace_heap_before_gc(_gc_tracer);
 
   // Fill in TLABs
   heap->accumulate_statistics_all_tlabs();
   heap->ensure_parsability(true);  // retire TLABs
 
   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
     HandleMark hm;  // Discard invalid handles created during verification
-    Universe::verify(" VerifyBeforeGC:");
+    Universe::verify("Before GC");
   }
 
   // Verify object start arrays
   if (VerifyObjectStartArray &&
       VerifyBeforeGC) {
@@ -165,30 +164,26 @@
   bool young_gen_empty;
 
   {
     HandleMark hm;
 
-    TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
-    GCTraceTime t1(GCCauseString("Full GC", gc_cause), PrintGC, !PrintGCDetails, NULL);
+    GCTraceCPUTime tcpu;
+    GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause, true);
     TraceCollectorStats tcs(counters());
     TraceMemoryManagerStats tms(true /* Full GC */,gc_cause);
 
     if (TraceOldGenTime) accumulated_time()->start();
 
     // Let the size policy know we're starting
     size_policy->major_collection_begin();
 
     CodeCache::gc_prologue();
     BiasedLocking::preserve_marks();
 
-    // Capture heap size before collection for printing.
-    size_t prev_used = heap->used();
-
     // Capture metadata size before collection for sizing.
     size_t metadata_prev_used = MetaspaceAux::used_bytes();
 
-    // For PrintGCDetails
     size_t old_gen_prev_used = old_gen->used_in_bytes();
     size_t young_gen_prev_used = young_gen->used_in_bytes();
 
     allocate_stacks();
 
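
For reference, a hedged sketch of how the two scoped timers introduced above are used; my reading of the gcTraceTime.inline.hpp API is a title, an optional GCTimer*, the GC cause, and a flag requesting heap-usage output. The block below is illustrative only, not part of the changeset:

    {
      // Logs user/system/real CPU time for the scope when it exits.
      GCTraceCPUTime tcpu;
      // Logs "Pause Full (<cause>)" with its duration at info level on the gc
      // tag; the trailing 'true' asks for before/after heap usage as well.
      GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause, true);
      // ... collection work happens here ...
    }   // both timers report when they go out of scope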
@@ -264,21 +259,13 @@
     // Let the size policy know we're done
     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 
     if (UseAdaptiveSizePolicy) {
 
-      if (PrintAdaptiveSizePolicy) {
-        gclog_or_tty->print("AdaptiveSizeStart: ");
-        gclog_or_tty->stamp();
-        gclog_or_tty->print_cr(" collection: %d ",
-                       heap->total_collections());
-        if (Verbose) {
-          gclog_or_tty->print("old_gen_capacity: " SIZE_FORMAT
-            " young_gen_capacity: " SIZE_FORMAT,
-            old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
-        }
-      }
+      log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
+      log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
+                          old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
 
       // Don't check if the size_policy is ready here.  Let
       // the size_policy check that internally.
       if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
           AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
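
In the hunk above, the PrintAdaptiveSizePolicy/Verbose gating is replaced by log levels on the gc+ergo tags: debug for what the flag used to print, trace for the Verbose detail. A short sketch of that pattern, with the command-line mapping given as an assumption rather than something stated by this changeset:

    // Illustrative sketch: the log level now provides the gating that the
    // product flags used to provide (assumed mapping, not from this patch):
    //   -XX:+PrintAdaptiveSizePolicy               ->  -Xlog:gc+ergo=debug
    //   -XX:+PrintAdaptiveSizePolicy with Verbose  ->  -Xlog:gc+ergo=trace
    log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
    log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
                        old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());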
@@ -331,14 +318,11 @@
         heap->resize_old_gen(size_policy->calculated_old_free_size_in_bytes());
 
         heap->resize_young_gen(size_policy->calculated_eden_size_in_bytes(),
                                size_policy->calculated_survivor_size_in_bytes());
       }
-      if (PrintAdaptiveSizePolicy) {
-        gclog_or_tty->print_cr("AdaptiveSizeStop: collection: %d ",
-                       heap->total_collections());
-      }
+      log_debug(gc, ergo)("AdaptiveSizeStop: collection: %d ", heap->total_collections());
     }
 
     if (UsePerfData) {
       heap->gc_policy_counters()->update_counters();
       heap->gc_policy_counters()->update_old_capacity(
@@ -352,31 +336,22 @@
     // We collected the heap, recalculate the metaspace capacity
     MetaspaceGC::compute_new_size();
 
     if (TraceOldGenTime) accumulated_time()->stop();
 
-    if (PrintGC) {
-      if (PrintGCDetails) {
-        // Don't print a GC timestamp here.  This is after the GC so
-        // would be confusing.
-        young_gen->print_used_change(young_gen_prev_used);
-        old_gen->print_used_change(old_gen_prev_used);
-      }
-      heap->print_heap_change(prev_used);
-      if (PrintGCDetails) {
-        MetaspaceAux::print_metaspace_change(metadata_prev_used);
-      }
-    }
+    young_gen->print_used_change(young_gen_prev_used);
+    old_gen->print_used_change(old_gen_prev_used);
+    MetaspaceAux::print_metaspace_change(metadata_prev_used);
 
     // Track memory usage and detect low memory
     MemoryService::track_memory_usage();
     heap->update_counters();
   }
 
   if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) {
     HandleMark hm;  // Discard invalid handles created during verification
-    Universe::verify(" VerifyAfterGC:");
+    Universe::verify("After GC");
   }
 
   // Re-verify object start arrays
   if (VerifyObjectStartArray &&
       VerifyAfterGC) {
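
The PrintGC/PrintGCDetails checks above can go away because the print_used_change and print_metaspace_change calls now emit through unified logging and are filtered by its configuration rather than by product flags. Where output is expensive to build, the caller-side idiom looks roughly like the following sketch (the guard and message are illustrative, not this changeset's code):

    // Sketch only: explicit guard for output that is costly to compute.
    if (log_is_enabled(Debug, gc, heap)) {
      log_debug(gc, heap)("old gen used: " SIZE_FORMAT "K -> " SIZE_FORMAT "K",
                          old_gen_prev_used / K, old_gen->used_in_bytes() / K);
    }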
@@ -395,10 +370,12 @@
   heap->post_full_gc_dump(_gc_timer);
 
 #ifdef TRACESPINNING
   ParallelTaskTerminator::print_termination_counts();
 #endif
+
+  AdaptiveSizePolicyOutput::print(size_policy, heap->total_collections());
 
   _gc_timer->register_gc_end();
 
   _gc_tracer->report_gc_end(_gc_timer->gc_end(), _gc_timer->time_partitions());
 
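
AdaptiveSizePolicyOutput also changes shape in this patch: the object constructed near the start of the collection (removed in an earlier hunk) is replaced by an explicit static call after the pause, presumably so the ergonomics summary is emitted once the collection is complete. Purely for illustration, the two call forms that appear in this diff:

    // Old call site (removed near the top of this method):
    //   AdaptiveSizePolicyOutput(size_policy, heap->total_collections());
    // New call site (added above), a plain static call after the pause:
    AdaptiveSizePolicyOutput::print(size_policy, heap->total_collections());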
@@ -441,21 +418,19 @@
   const size_t new_young_size = young_gen->capacity_in_bytes() - absorb_size;
   if (new_young_size < young_gen->min_gen_size()) {
     return false; // Respect young gen minimum size.
   }
 
-  if (TraceAdaptiveGCBoundary && Verbose) {
-    gclog_or_tty->print(" absorbing " SIZE_FORMAT "K:  "
+  log_trace(heap, ergo)(" absorbing " SIZE_FORMAT "K:  "
                         "eden " SIZE_FORMAT "K->" SIZE_FORMAT "K "
                         "from " SIZE_FORMAT "K, to " SIZE_FORMAT "K "
                         "young_gen " SIZE_FORMAT "K->" SIZE_FORMAT "K ",
                         absorb_size / K,
                         eden_capacity / K, (eden_capacity - absorb_size) / K,
                         young_gen->from_space()->used_in_bytes() / K,
                         young_gen->to_space()->used_in_bytes() / K,
                         young_gen->capacity_in_bytes() / K, new_young_size / K);
-  }
 
   // Fill the unused part of the old gen.
   MutableSpace* const old_space = old_gen->object_space();
   HeapWord* const unused_start = old_space->top();
   size_t const unused_words = pointer_delta(old_space->end(), unused_start);
@@ -515,11 +490,11 @@
   _objarray_stack.clear(true);
 }
 
 void PSMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
   // Recursively traverse all live objects and mark them
-  GCTraceTime tm("phase 1", PrintGCDetails && Verbose, true, _gc_timer);
+  GCTraceTime(Trace, gc) tm("Phase 1: Mark live objects", _gc_timer);
 
   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 
   // Need to clear claim bits before the tracing starts.
   ClassLoaderDataGraph::clear_claimed_marks();
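
Each mark-sweep phase in this and the following hunks uses the same pattern: a GCTraceTime(Trace, gc) scoped timer with a descriptive title and the shared _gc_timer, so per-phase timings show up both in the trace-level gc log and in the GCTimer/GCTracer data. A hedged sketch of that pattern with a hypothetical phase name:

    static void example_phase(GCTimer* gc_timer) {
      // Scoped timer: logs the title and duration at trace level on the gc
      // tag, and records the interval in the supplied GCTimer.
      GCTraceTime(Trace, gc) tm("Phase X: Example work", gc_timer);
      // ... phase work ...
    }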
@@ -574,11 +549,11 @@
   _gc_tracer->report_object_count_after_gc(is_alive_closure());
 }
 
 
 void PSMarkSweep::mark_sweep_phase2() {
-  GCTraceTime tm("phase 2", PrintGCDetails && Verbose, true, _gc_timer);
+  GCTraceTime(Trace, gc) tm("Phase 2: Compute new object addresses", _gc_timer);
 
   // Now all live objects are marked, compute the new object addresses.
 
   // It is not required that we traverse spaces in the same order in
   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
@@ -601,11 +576,11 @@
 };
 static PSAlwaysTrueClosure always_true;
 
 void PSMarkSweep::mark_sweep_phase3() {
   // Adjust the pointers to reflect the new locations
-  GCTraceTime tm("phase 3", PrintGCDetails && Verbose, true, _gc_timer);
+  GCTraceTime(Trace, gc) tm("Phase 3: Adjust pointers", _gc_timer);
 
   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
   PSYoungGen* young_gen = heap->young_gen();
   PSOldGen* old_gen = heap->old_gen();
 
@@ -641,11 +616,11 @@
   old_gen->adjust_pointers();
 }
 
 void PSMarkSweep::mark_sweep_phase4() {
   EventMark m("4 compact heap");
-  GCTraceTime tm("phase 4", PrintGCDetails && Verbose, true, _gc_timer);
+  GCTraceTime(Trace, gc) tm("Phase 4: Move objects", _gc_timer);
 
   // All pointers are now adjusted, move objects accordingly
 
   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
   PSYoungGen* young_gen = heap->young_gen();