src/hotspot/share/gc/g1/g1Policy.cpp
changeset 49643 a3453bbd5418
parent 49632 64f9ebc85e67
child 49659 0ed1370f52bb
--- a/src/hotspot/share/gc/g1/g1Policy.cpp	Thu Mar 29 12:56:06 2018 +0200
+++ b/src/hotspot/share/gc/g1/g1Policy.cpp	Thu Mar 29 14:07:59 2018 +0200
@@ -229,7 +229,7 @@
 
   uint young_list_target_length = 0;
   if (adaptive_young_list_length()) {
-    if (collector_state()->gcs_are_young()) {
+    if (collector_state()->in_young_only_phase()) {
       young_list_target_length =
                         calculate_young_list_target_length(rs_lengths,
                                                            base_min_length,
@@ -281,7 +281,7 @@
                                                     uint desired_min_length,
                                                     uint desired_max_length) const {
   assert(adaptive_young_list_length(), "pre-condition");
-  assert(collector_state()->gcs_are_young(), "only call this for young GCs");
+  assert(collector_state()->in_young_only_phase(), "only call this for young GCs");
 
   // In case some edge-condition makes the desired max length too small...
   if (desired_max_length <= desired_min_length) {
@@ -302,7 +302,7 @@
   const double survivor_regions_evac_time = predict_survivor_regions_evac_time();
   const size_t pending_cards = _analytics->predict_pending_cards();
   const size_t adj_rs_lengths = rs_lengths + _analytics->predict_rs_length_diff();
-  const size_t scanned_cards = _analytics->predict_card_num(adj_rs_lengths, /* gcs_are_young */ true);
+  const size_t scanned_cards = _analytics->predict_card_num(adj_rs_lengths, true /* for_young_gc */);
   const double base_time_ms =
     predict_base_elapsed_time_ms(pending_cards, scanned_cards) +
     survivor_regions_evac_time;
@@ -313,7 +313,7 @@
   // Here, we will make sure that the shortest young length that
   // makes sense fits within the target pause time.
 
-  G1YoungLengthPredictor p(collector_state()->during_concurrent_mark(),
+  G1YoungLengthPredictor p(collector_state()->mark_or_rebuild_in_progress(),
                            base_time_ms,
                            base_free_regions,
                            target_pause_time_ms,
@@ -384,7 +384,7 @@
   for (GrowableArrayIterator<HeapRegion*> it = survivor_regions->begin();
        it != survivor_regions->end();
        ++it) {
-    survivor_regions_evac_time += predict_region_elapsed_time_ms(*it, collector_state()->gcs_are_young());
+    survivor_regions_evac_time += predict_region_elapsed_time_ms(*it, collector_state()->in_young_only_phase());
   }
   return survivor_regions_evac_time;
 }
@@ -406,7 +406,7 @@
 }
 
 void G1Policy::update_rs_lengths_prediction(size_t prediction) {
-  if (collector_state()->gcs_are_young() && adaptive_young_list_length()) {
+  if (collector_state()->in_young_only_phase() && adaptive_young_list_length()) {
     _rs_lengths_prediction = prediction;
   }
 }
@@ -414,7 +414,8 @@
 void G1Policy::record_full_collection_start() {
   _full_collection_start_sec = os::elapsedTime();
   // Release the future to-space so that it is available for compaction into.
-  collector_state()->set_full_collection(true);
+  collector_state()->set_in_young_only_phase(false);
+  collector_state()->set_in_full_gc(true);
   cset_chooser()->clear();
 }
 
@@ -427,16 +428,15 @@
 
   _analytics->update_recent_gc_times(end_sec, full_gc_time_ms);
 
-  collector_state()->set_full_collection(false);
+  collector_state()->set_in_full_gc(false);
 
   // "Nuke" the heuristics that control the young/mixed GC
   // transitions and make sure we start with young GCs after the Full GC.
-  collector_state()->set_gcs_are_young(true);
-  collector_state()->set_last_young_gc(false);
+  collector_state()->set_in_young_only_phase(true);
+  collector_state()->set_in_young_gc_before_mixed(false);
   collector_state()->set_initiate_conc_mark_if_possible(need_to_start_conc_mark("end of Full GC", 0));
-  collector_state()->set_during_initial_mark_pause(false);
-  collector_state()->set_in_marking_window(false);
-  collector_state()->set_in_marking_window_im(false);
+  collector_state()->set_in_initial_mark_gc(false);
+  collector_state()->set_mark_or_rebuild_in_progress(false);
 
   _short_lived_surv_rate_group->start_adding_regions();
   // also call this on any additional surv rate groups
@@ -468,8 +468,6 @@
   _collection_set->reset_bytes_used_before();
   _bytes_copied_during_gc = 0;
 
-  collector_state()->set_last_gc_was_young(false);
-
   // do that for any other surv rate groups
   _short_lived_surv_rate_group->stop_adding_regions();
   _survivors_age_table.clear();
@@ -478,14 +476,12 @@
 }
 
 void G1Policy::record_concurrent_mark_init_end(double mark_init_elapsed_time_ms) {
-  collector_state()->set_during_marking(true);
   assert(!collector_state()->initiate_conc_mark_if_possible(), "we should have cleared it by now");
-  collector_state()->set_during_initial_mark_pause(false);
+  collector_state()->set_in_initial_mark_gc(false);
 }
 
 void G1Policy::record_concurrent_mark_remark_start() {
   _mark_remark_start_sec = os::elapsedTime();
-  collector_state()->set_during_marking(false);
 }
 
 void G1Policy::record_concurrent_mark_remark_end() {
@@ -528,7 +524,7 @@
 }
 
 bool G1Policy::about_to_start_mixed_phase() const {
-  return _g1->concurrent_mark()->cm_thread()->during_cycle() || collector_state()->last_young_gc();
+  return _g1->concurrent_mark()->cm_thread()->during_cycle() || collector_state()->in_young_gc_before_mixed();
 }
 
 bool G1Policy::need_to_start_conc_mark(const char* source, size_t alloc_word_size) {
@@ -544,7 +540,7 @@
 
   bool result = false;
   if (marking_request_bytes > marking_initiating_used_threshold) {
-    result = collector_state()->gcs_are_young() && !collector_state()->last_young_gc();
+    result = collector_state()->in_young_only_phase() && !collector_state()->in_young_gc_before_mixed();
     log_debug(gc, ergo, ihop)("%s occupancy: " SIZE_FORMAT "B allocation request: " SIZE_FORMAT "B threshold: " SIZE_FORMAT "B (%1.2f) source: %s",
                               result ? "Request concurrent cycle initiation (occupancy higher than threshold)" : "Do not request concurrent cycle initiation (still doing mixed collections)",
                               cur_used_bytes, alloc_byte_size, marking_initiating_used_threshold, (double) marking_initiating_used_threshold / _g1->capacity() * 100, source);
@@ -561,15 +557,17 @@
 
   size_t cur_used_bytes = _g1->used();
   assert(cur_used_bytes == _g1->recalculate_used(), "It should!");
-  bool last_pause_included_initial_mark = false;
+  bool this_pause_included_initial_mark = false;
+  bool this_pause_was_young_only = collector_state()->in_young_only_phase();
+
   bool update_stats = !_g1->evacuation_failed();
 
   record_pause(young_gc_pause_kind(), end_time_sec - pause_time_ms / 1000.0, end_time_sec);
 
   _collection_pause_end_millis = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
 
-  last_pause_included_initial_mark = collector_state()->during_initial_mark_pause();
-  if (last_pause_included_initial_mark) {
+  this_pause_included_initial_mark = collector_state()->in_initial_mark_gc();
+  if (this_pause_included_initial_mark) {
     record_concurrent_mark_init_end(0.0);
   } else {
     maybe_start_marking();
@@ -602,28 +600,19 @@
     _analytics->compute_pause_time_ratio(interval_ms, pause_time_ms);
   }
 
-  bool new_in_marking_window = collector_state()->in_marking_window();
-  bool new_in_marking_window_im = false;
-  if (last_pause_included_initial_mark) {
-    new_in_marking_window = true;
-    new_in_marking_window_im = true;
-  }
-
-  if (collector_state()->last_young_gc()) {
-    assert(!last_pause_included_initial_mark, "The last young GC is not allowed to be an initial mark GC");
-    // This has been the "last young GC" before we start doing mixed GCs. We already
+  if (collector_state()->in_young_gc_before_mixed()) {
+    assert(!this_pause_included_initial_mark, "The young GC before mixed is not allowed to be an initial mark GC");
+    // This has been the young GC before we start doing mixed GCs. We already
     // decided to start mixed GCs much earlier, so there is nothing to do except
     // advancing the state.
-    collector_state()->set_gcs_are_young(false);
-    collector_state()->set_last_young_gc(false);
-  }
-
-  if (!collector_state()->last_gc_was_young()) {
+    collector_state()->set_in_young_only_phase(false);
+    collector_state()->set_in_young_gc_before_mixed(false);
+  } else if (!this_pause_was_young_only) {
     // This is a mixed GC. Here we decide whether to continue doing more
     // mixed GCs or not.
     if (!next_gc_should_be_mixed("continue mixed GCs",
                                  "do not continue mixed GCs")) {
-      collector_state()->set_gcs_are_young(true);
+      collector_state()->set_in_young_only_phase(true);
 
       clear_collection_set_candidates();
       maybe_start_marking();
@@ -646,13 +635,13 @@
     double cost_per_entry_ms = 0.0;
     if (cards_scanned > 10) {
       cost_per_entry_ms = average_time_ms(G1GCPhaseTimes::ScanRS) / (double) cards_scanned;
-      _analytics->report_cost_per_entry_ms(cost_per_entry_ms, collector_state()->last_gc_was_young());
+      _analytics->report_cost_per_entry_ms(cost_per_entry_ms, this_pause_was_young_only);
     }
 
     if (_max_rs_lengths > 0) {
       double cards_per_entry_ratio =
         (double) cards_scanned / (double) _max_rs_lengths;
-      _analytics->report_cards_per_entry_ratio(cards_per_entry_ratio, collector_state()->last_gc_was_young());
+      _analytics->report_cards_per_entry_ratio(cards_per_entry_ratio, this_pause_was_young_only);
     }
 
     // This is defensive. For a while _max_rs_lengths could get
@@ -681,7 +670,7 @@
 
     if (copied_bytes > 0) {
       cost_per_byte_ms = average_time_ms(G1GCPhaseTimes::ObjCopy) / (double) copied_bytes;
-      _analytics->report_cost_per_byte_ms(cost_per_byte_ms, collector_state()->in_marking_window());
+      _analytics->report_cost_per_byte_ms(cost_per_byte_ms, collector_state()->mark_or_rebuild_in_progress());
     }
 
     if (_collection_set->young_region_length() > 0) {
@@ -700,8 +689,12 @@
     _analytics->report_rs_lengths((double) _max_rs_lengths);
   }
 
-  collector_state()->set_in_marking_window(new_in_marking_window);
-  collector_state()->set_in_marking_window_im(new_in_marking_window_im);
+  assert(!(this_pause_included_initial_mark && collector_state()->mark_or_rebuild_in_progress()),
+         "If the last pause has been an initial mark, we should not have been in the marking window");
+  if (this_pause_included_initial_mark) {
+    collector_state()->set_mark_or_rebuild_in_progress(true);
+  }
+
   _free_regions_at_end_of_collection = _g1->num_free_regions();
   // IHOP control wants to know the expected young gen length if it were not
   // restrained by the heap reserve. Using the actual length would make the
@@ -712,7 +705,8 @@
 
   update_ihop_prediction(app_time_ms / 1000.0,
                          _bytes_allocated_in_old_since_last_gc,
-                         last_unrestrained_young_length * HeapRegion::GrainBytes);
+                         last_unrestrained_young_length * HeapRegion::GrainBytes,
+                         this_pause_was_young_only);
   _bytes_allocated_in_old_since_last_gc = 0;
 
   _ihop_control->send_trace_event(_g1->gc_tracer_stw());
@@ -730,8 +724,8 @@
     update_rs_time_goal_ms -= scan_hcc_time_ms;
   }
   _g1->concurrent_refine()->adjust(average_time_ms(G1GCPhaseTimes::UpdateRS) - scan_hcc_time_ms,
-                                      phase_times()->sum_thread_work_items(G1GCPhaseTimes::UpdateRS),
-                                      update_rs_time_goal_ms);
+                                   phase_times()->sum_thread_work_items(G1GCPhaseTimes::UpdateRS),
+                                   update_rs_time_goal_ms);
 
   cset_chooser()->verify();
 }
@@ -749,7 +743,8 @@
 
 void G1Policy::update_ihop_prediction(double mutator_time_s,
                                       size_t mutator_alloc_bytes,
-                                      size_t young_gen_size) {
+                                      size_t young_gen_size,
+                                      bool this_gc_was_young_only) {
   // Always try to update IHOP prediction. Even evacuation failures give information
   // about e.g. whether to start IHOP earlier next time.
 
@@ -760,7 +755,7 @@
   bool report = false;
 
   double marking_to_mixed_time = -1.0;
-  if (!collector_state()->last_gc_was_young() && _initial_mark_to_mixed.has_result()) {
+  if (!this_gc_was_young_only && _initial_mark_to_mixed.has_result()) {
     marking_to_mixed_time = _initial_mark_to_mixed.last_marking_time();
     assert(marking_to_mixed_time > 0.0,
            "Initial mark to mixed time must be larger than zero but is %.3f",
@@ -775,7 +770,7 @@
   // all of them. In many applications there are only a few if any young gcs during
   // marking, which makes any prediction useless. This increases the accuracy of the
   // prediction.
-  if (collector_state()->last_gc_was_young() && mutator_time_s > min_valid_time) {
+  if (this_gc_was_young_only && mutator_time_s > min_valid_time) {
     _ihop_control->update_allocation_info(mutator_time_s, mutator_alloc_bytes, young_gen_size);
     report = true;
   }
@@ -811,13 +806,13 @@
                                               size_t scanned_cards) const {
   return
     _analytics->predict_rs_update_time_ms(pending_cards) +
-    _analytics->predict_rs_scan_time_ms(scanned_cards, collector_state()->gcs_are_young()) +
+    _analytics->predict_rs_scan_time_ms(scanned_cards, collector_state()->in_young_only_phase()) +
     _analytics->predict_constant_other_time_ms();
 }
 
 double G1Policy::predict_base_elapsed_time_ms(size_t pending_cards) const {
   size_t rs_length = _analytics->predict_rs_lengths() + _analytics->predict_rs_length_diff();
-  size_t card_num = _analytics->predict_card_num(rs_length, collector_state()->gcs_are_young());
+  size_t card_num = _analytics->predict_card_num(rs_length, collector_state()->in_young_only_phase());
   return predict_base_elapsed_time_ms(pending_cards, card_num);
 }
 
@@ -843,8 +838,8 @@
   size_t bytes_to_copy = predict_bytes_to_copy(hr);
 
   double region_elapsed_time_ms =
-    _analytics->predict_rs_scan_time_ms(card_num, collector_state()->gcs_are_young()) +
-    _analytics->predict_object_copy_time_ms(bytes_to_copy, collector_state()->during_concurrent_mark());
+    _analytics->predict_rs_scan_time_ms(card_num, collector_state()->in_young_only_phase()) +
+    _analytics->predict_object_copy_time_ms(bytes_to_copy, collector_state()->mark_or_rebuild_in_progress());
 
   // The prediction of the "other" time for this region is based
   // upon the region type and NOT the GC type.
@@ -927,7 +922,7 @@
 }
 
 void G1Policy::initiate_conc_mark() {
-  collector_state()->set_during_initial_mark_pause(true);
+  collector_state()->set_in_initial_mark_gc(true);
   collector_state()->set_initiate_conc_mark_if_possible(false);
 }
 
@@ -935,26 +930,26 @@
   // We are about to decide on whether this pause will be an
   // initial-mark pause.
 
-  // First, collector_state()->during_initial_mark_pause() should not be already set. We
+  // First, collector_state()->in_initial_mark_gc() should not be already set. We
   // will set it here if we have to. However, it should be cleared by
   // the end of the pause (it's only set for the duration of an
   // initial-mark pause).
-  assert(!collector_state()->during_initial_mark_pause(), "pre-condition");
+  assert(!collector_state()->in_initial_mark_gc(), "pre-condition");
 
   if (collector_state()->initiate_conc_mark_if_possible()) {
     // We had noticed on a previous pause that the heap occupancy has
     // gone over the initiating threshold and we should start a
     // concurrent marking cycle. So we might initiate one.
 
-    if (!about_to_start_mixed_phase() && collector_state()->gcs_are_young()) {
+    if (!about_to_start_mixed_phase() && collector_state()->in_young_only_phase()) {
       // Initiate a new initial mark if there is no marking or reclamation going on.
       initiate_conc_mark();
       log_debug(gc, ergo)("Initiate concurrent cycle (concurrent cycle initiation requested)");
     } else if (_g1->is_user_requested_concurrent_full_gc(_g1->gc_cause())) {
       // Initiate a user requested initial mark. An initial mark must be young only
       // GC, so the collector state must be updated to reflect this.
-      collector_state()->set_gcs_are_young(true);
-      collector_state()->set_last_young_gc(false);
+      collector_state()->set_in_young_only_phase(true);
+      collector_state()->set_in_young_gc_before_mixed(false);
 
       // We might have ended up coming here about to start a mixed phase with a collection set
      // active. The following remark might change the "evacuation efficiency" of
@@ -990,8 +985,8 @@
     clear_collection_set_candidates();
     abort_time_to_mixed_tracking();
   }
-  collector_state()->set_last_young_gc(mixed_gc_pending);
-  collector_state()->set_in_marking_window(false);
+  collector_state()->set_in_young_gc_before_mixed(mixed_gc_pending);
+  collector_state()->set_mark_or_rebuild_in_progress(false);
 
   double end_sec = os::elapsedTime();
   double elapsed_time_ms = (end_sec - _mark_cleanup_start_sec) * 1000.0;
@@ -1030,23 +1025,20 @@
 }
 
 G1Policy::PauseKind G1Policy::young_gc_pause_kind() const {
-  assert(!collector_state()->full_collection(), "must be");
-  if (collector_state()->during_initial_mark_pause()) {
-    assert(collector_state()->last_gc_was_young(), "must be");
-    assert(!collector_state()->last_young_gc(), "must be");
+  assert(!collector_state()->in_full_gc(), "must be");
+  if (collector_state()->in_initial_mark_gc()) {
+    assert(!collector_state()->in_young_gc_before_mixed(), "must be");
     return InitialMarkGC;
-  } else if (collector_state()->last_young_gc()) {
-    assert(!collector_state()->during_initial_mark_pause(), "must be");
-    assert(collector_state()->last_gc_was_young(), "must be");
+  } else if (collector_state()->in_young_gc_before_mixed()) {
+    assert(!collector_state()->in_initial_mark_gc(), "must be");
     return LastYoungGC;
-  } else if (!collector_state()->last_gc_was_young()) {
-    assert(!collector_state()->during_initial_mark_pause(), "must be");
-    assert(!collector_state()->last_young_gc(), "must be");
+  } else if (collector_state()->in_mixed_phase()) {
+    assert(!collector_state()->in_initial_mark_gc(), "must be");
+    assert(!collector_state()->in_young_gc_before_mixed(), "must be");
     return MixedGC;
   } else {
-    assert(collector_state()->last_gc_was_young(), "must be");
-    assert(!collector_state()->during_initial_mark_pause(), "must be");
-    assert(!collector_state()->last_young_gc(), "must be");
+    assert(!collector_state()->in_initial_mark_gc(), "must be");
+    assert(!collector_state()->in_young_gc_before_mixed(), "must be");
     return YoungOnlyGC;
   }
 }
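
For reference, a minimal sketch of how the renamed G1CollectorState flags used throughout this change might be declared. Only the accessor names are taken from the hunks above; the field names, the derivation of in_mixed_phase() and everything else are assumptions, since g1CollectorState.hpp is not part of this excerpt. Note that the old last_gc_was_young and during_marking flags get no direct successors here; the pause-local this_pause_was_young_only variable and the pause kind recorded via record_pause() cover their former uses.

// Hypothetical sketch only -- not the actual g1CollectorState.hpp declarations.
class G1CollectorState {
  bool _in_young_only_phase;          // was: gcs_are_young
  bool _in_young_gc_before_mixed;     // was: last_young_gc
  bool _in_initial_mark_gc;           // was: during_initial_mark_pause
  bool _in_full_gc;                   // was: full_collection
  bool _mark_or_rebuild_in_progress;  // was: in_marking_window / in_marking_window_im
  bool _initiate_conc_mark_if_possible;

public:
  void set_in_young_only_phase(bool v)            { _in_young_only_phase = v; }
  void set_in_young_gc_before_mixed(bool v)       { _in_young_gc_before_mixed = v; }
  void set_in_initial_mark_gc(bool v)             { _in_initial_mark_gc = v; }
  void set_in_full_gc(bool v)                     { _in_full_gc = v; }
  void set_mark_or_rebuild_in_progress(bool v)    { _mark_or_rebuild_in_progress = v; }
  void set_initiate_conc_mark_if_possible(bool v) { _initiate_conc_mark_if_possible = v; }

  bool in_young_only_phase() const            { return _in_young_only_phase; }
  bool in_young_gc_before_mixed() const       { return _in_young_gc_before_mixed; }
  bool in_initial_mark_gc() const             { return _in_initial_mark_gc; }
  bool in_full_gc() const                     { return _in_full_gc; }
  bool mark_or_rebuild_in_progress() const    { return _mark_or_rebuild_in_progress; }
  bool initiate_conc_mark_if_possible() const { return _initiate_conc_mark_if_possible; }

  // Assumed derivation: a pause counts as "mixed" when we are neither in the
  // young-only phase nor in a full GC, matching how young_gc_pause_kind()
  // above falls through to MixedGC.
  bool in_mixed_phase() const { return !in_young_only_phase() && !in_full_gc(); }
};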