src/hotspot/share/gc/shenandoah/shenandoahControlThread.cpp
branch:      datagramsocketimpl-branch
changeset:   58678:9cf78a70fa4f
parent:      54623:1126f0607c70
child:       58679:9c3209ff7550
comparison:  58677:13588c901957 vs. 58678:9cf78a70fa4f
@@ -66,10 +66,14 @@
 }
 
 void ShenandoahControlThread::run_service() {
   ShenandoahHeap* heap = ShenandoahHeap::heap();
 
+  GCMode default_mode = heap->is_traversal_mode() ?
+                           concurrent_traversal : concurrent_normal;
+  GCCause::Cause default_cause = heap->is_traversal_mode() ?
+                           GCCause::_shenandoah_traversal_gc : GCCause::_shenandoah_concurrent_gc;
   int sleep = ShenandoahControlIntervalMin;
 
   double last_shrink_time = os::elapsedTime();
   double last_sleep_adjust_time = os::elapsedTime();
 
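The hunk above hoists the mode/cause decision out of the individual request-handling branches: both defaults are now derived once from is_traversal_mode() when the control loop starts. A minimal standalone sketch of that pattern follows, using invented types and names (not the HotSpot API, just an illustration of the idea under that assumption):

#include <cstdio>

// Hypothetical stand-ins for GCMode and GCCause from the diff.
enum GCMode  { concurrent_normal, concurrent_traversal, stw_full };
enum GCCause { shenandoah_concurrent_gc, shenandoah_traversal_gc };

struct Heap {
  bool traversal;
  bool is_traversal_mode() const { return traversal; }  // stands in for ShenandoahHeap::is_traversal_mode()
};

int main() {
  Heap heap{/*traversal=*/true};

  // Derived once at service start, as in the diff, instead of per request site.
  GCMode  default_mode  = heap.is_traversal_mode() ? concurrent_traversal : concurrent_normal;
  GCCause default_cause = heap.is_traversal_mode() ? shenandoah_traversal_gc
                                                   : shenandoah_concurrent_gc;

  // Every later "start a concurrent cycle" site just reuses the defaults.
  GCMode  mode  = default_mode;
  GCCause cause = default_cause;
  std::printf("mode=%d cause=%d\n", static_cast<int>(mode), static_cast<int>(cause));
  return 0;
}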
@@ -121,15 +125,11 @@
 
       heuristics->record_requested_gc();
 
       if (ExplicitGCInvokesConcurrent) {
         policy->record_explicit_to_concurrent();
-        if (heuristics->can_do_traversal_gc()) {
-          mode = concurrent_traversal;
-        } else {
-          mode = concurrent_normal;
-        }
+        mode = default_mode;
         // Unload and clean up everything
         heap->set_process_references(heuristics->can_process_references());
         heap->set_unload_classes(heuristics->can_unload_classes());
       } else {
         policy->record_explicit_to_full();
@@ -141,31 +141,24 @@
 
       heuristics->record_requested_gc();
 
       if (ShenandoahImplicitGCInvokesConcurrent) {
         policy->record_implicit_to_concurrent();
-        if (heuristics->can_do_traversal_gc()) {
-          mode = concurrent_traversal;
-        } else {
-          mode = concurrent_normal;
-        }
+        mode = default_mode;
 
         // Unload and clean up everything
         heap->set_process_references(heuristics->can_process_references());
         heap->set_unload_classes(heuristics->can_unload_classes());
       } else {
         policy->record_implicit_to_full();
         mode = stw_full;
       }
     } else {
       // Potential normal cycle: ask heuristics if it wants to act
-      if (heuristics->should_start_traversal_gc()) {
-        mode = concurrent_traversal;
-        cause = GCCause::_shenandoah_traversal_gc;
-      } else if (heuristics->should_start_normal_gc()) {
-        mode = concurrent_normal;
-        cause = GCCause::_shenandoah_concurrent_gc;
-      }
+      if (heuristics->should_start_gc()) {
+        mode = default_mode;
+        cause = default_cause;
+      }
 
       // Ask policy if this cycle wants to process references or unload classes
       heap->set_process_references(heuristics->should_process_references());
       heap->set_unload_classes(heuristics->should_unload_classes());
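With the explicit, implicit, and heuristics-driven paths all funneling into the precomputed default, the mode selection in run_service() collapses to one decision per trigger. A compressed, hypothetical sketch of that flow (invented helper and flag names, not the real control-thread code):

#include <cstdio>

enum GCMode { none, concurrent_normal, concurrent_traversal, stw_full };

// Invented inputs standing in for the flags and heuristics consulted in run_service().
struct Trigger {
  bool explicit_gc_requested;   // e.g. a user-requested GC
  bool implicit_gc_requested;   // e.g. a VM-internal request
  bool heuristics_want_gc;      // heuristics->should_start_gc()
};

static GCMode choose_mode(const Trigger& t, GCMode default_mode,
                          bool explicit_goes_concurrent, bool implicit_goes_concurrent) {
  if (t.explicit_gc_requested) return explicit_goes_concurrent ? default_mode : stw_full;
  if (t.implicit_gc_requested) return implicit_goes_concurrent ? default_mode : stw_full;
  if (t.heuristics_want_gc)    return default_mode;
  return none;
}

int main() {
  GCMode default_mode = concurrent_normal;  // as if is_traversal_mode() were false
  Trigger t{false, false, true};
  std::printf("chosen mode = %d\n", static_cast<int>(choose_mode(t, default_mode, true, true)));
  return 0;
}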
@@ -375,10 +368,13 @@
   heap->entry_preclean();
 
   // Complete marking under STW, and start evacuation
   heap->vmop_entry_final_mark();
 
+  // Evacuate concurrent roots
+  heap->entry_roots();
+
   // Final mark might have reclaimed some immediate garbage, kick cleanup to reclaim
   // the space. This would be the last action if there is nothing to evacuate.
   heap->entry_cleanup();
 
   {
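The final hunk inserts a concurrent root evacuation step between the STW final mark and the cleanup of immediate garbage. A toy sketch of the resulting phase ordering, with made-up stubs standing in for the heap entry_* calls:

#include <cstdio>

// Made-up stubs; each just logs the phase it represents.
static void entry_preclean() { std::puts("concurrent preclean"); }
static void final_mark_stw() { std::puts("final mark (STW)"); }
static void entry_roots()    { std::puts("concurrent root evacuation"); }  // new step in the diff
static void entry_cleanup()  { std::puts("cleanup of immediate garbage"); }

int main() {
  entry_preclean();
  final_mark_stw();
  entry_roots();    // inserted after final mark, before cleanup
  entry_cleanup();
  return 0;
}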