src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
branch:    datagramsocketimpl-branch
changeset: 58678:9cf78a70fa4f
parent:    55076:785a12e0f89b
child:     58679:9c3209ff7550

--- shenandoahHeap.cpp	58677:13588c901957
+++ shenandoahHeap.cpp	58678:9cf78a70fa4f
@@ -26 +26 @@
 #include "memory/universe.hpp"
 
 #include "gc/shared/gcArguments.hpp"
 #include "gc/shared/gcTimer.hpp"
 #include "gc/shared/gcTraceTime.inline.hpp"
+#include "gc/shared/locationPrinter.inline.hpp"
 #include "gc/shared/memAllocator.hpp"
-#include "gc/shared/parallelCleaning.hpp"
 #include "gc/shared/plab.hpp"
 
 #include "gc/shenandoah/shenandoahAllocTracker.hpp"
 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
 #include "gc/shenandoah/shenandoahClosures.inline.hpp"
 #include "gc/shenandoah/shenandoahCollectionSet.hpp"
 #include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
 #include "gc/shenandoah/shenandoahConcurrentMark.inline.hpp"
+#include "gc/shenandoah/shenandoahConcurrentRoots.hpp"
 #include "gc/shenandoah/shenandoahControlThread.hpp"
 #include "gc/shenandoah/shenandoahFreeSet.hpp"
 #include "gc/shenandoah/shenandoahPhaseTimings.hpp"
 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
@@ -47 +48 @@
 #include "gc/shenandoah/shenandoahMarkCompact.hpp"
 #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
 #include "gc/shenandoah/shenandoahMemoryPool.hpp"
 #include "gc/shenandoah/shenandoahMetrics.hpp"
 #include "gc/shenandoah/shenandoahMonitoringSupport.hpp"
+#include "gc/shenandoah/shenandoahNormalMode.hpp"
 #include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
 #include "gc/shenandoah/shenandoahPacer.inline.hpp"
+#include "gc/shenandoah/shenandoahParallelCleaning.inline.hpp"
+#include "gc/shenandoah/shenandoahPassiveMode.hpp"
 #include "gc/shenandoah/shenandoahRootProcessor.inline.hpp"
 #include "gc/shenandoah/shenandoahStringDedup.hpp"
 #include "gc/shenandoah/shenandoahTaskqueue.hpp"
+#include "gc/shenandoah/shenandoahTraversalMode.hpp"
 #include "gc/shenandoah/shenandoahUtils.hpp"
 #include "gc/shenandoah/shenandoahVerifier.hpp"
 #include "gc/shenandoah/shenandoahCodeRoots.hpp"
 #include "gc/shenandoah/shenandoahVMOperations.hpp"
 #include "gc/shenandoah/shenandoahWorkGroup.hpp"
 #include "gc/shenandoah/shenandoahWorkerPolicy.hpp"
-#include "gc/shenandoah/heuristics/shenandoahAdaptiveHeuristics.hpp"
-#include "gc/shenandoah/heuristics/shenandoahAggressiveHeuristics.hpp"
-#include "gc/shenandoah/heuristics/shenandoahCompactHeuristics.hpp"
-#include "gc/shenandoah/heuristics/shenandoahPassiveHeuristics.hpp"
-#include "gc/shenandoah/heuristics/shenandoahStaticHeuristics.hpp"
-#include "gc/shenandoah/heuristics/shenandoahTraversalHeuristics.hpp"
 #if INCLUDE_JFR
 #include "gc/shenandoah/shenandoahJfrSupport.hpp"
 #endif
 
 #include "memory/metaspace.hpp"
@@ -178 +177 @@
 
   //
   // Reserve and commit memory for heap
   //
 
-  ReservedSpace heap_rs = Universe::reserve_heap(max_byte_size, heap_alignment);
-  initialize_reserved_region((HeapWord*)heap_rs.base(), (HeapWord*) (heap_rs.base() + heap_rs.size()));
+  ReservedHeapSpace heap_rs = Universe::reserve_heap(max_byte_size, heap_alignment);
+  initialize_reserved_region(heap_rs);
   _heap_region = MemRegion((HeapWord*)heap_rs.base(), heap_rs.size() / HeapWordSize);
   _heap_region_special = heap_rs.special();
 
   assert((((size_t) base()) & ShenandoahHeapRegion::region_size_bytes_mask()) == 0,
          "Misaligned heap: " PTR_FORMAT, p2i(base()));
@@ -342 +341 @@
   for (uint worker = 0; worker < _max_workers; worker++) {
     _liveness_cache[worker] = NEW_C_HEAP_ARRAY(jushort, _num_regions, mtGC);
     Copy::fill_to_bytes(_liveness_cache[worker], _num_regions * sizeof(jushort));
   }
 
-  // The call below uses stuff (the SATB* things) that are in G1, but probably
-  // belong into a shared location.
-  ShenandoahBarrierSet::satb_mark_queue_set().initialize(this,
-                                                         SATB_Q_CBL_mon,
-                                                         20 /* G1SATBProcessCompletedThreshold */,
-                                                         60 /* G1SATBBufferEnqueueingThresholdPercent */);
+  // There should probably be Shenandoah-specific options for these,
+  // just as there are G1-specific options.
+  {
+    ShenandoahSATBMarkQueueSet& satbqs = ShenandoahBarrierSet::satb_mark_queue_set();
+    satbqs.set_process_completed_buffers_threshold(20); // G1SATBProcessCompletedThreshold
+    satbqs.set_buffer_enqueue_threshold_percentage(60); // G1SATBBufferEnqueueingThresholdPercent
+  }
 
   _monitoring_support = new ShenandoahMonitoringSupport(this);
   _phase_timings = new ShenandoahPhaseTimings();
   ShenandoahStringDedup::initialize();
   ShenandoahCodeRoots::initialize();
@@ -365 +365 @@
     _pacer->setup_for_idle();
   } else {
     _pacer = NULL;
   }
 
-  _traversal_gc = heuristics()->can_do_traversal_gc() ?
+  _traversal_gc = strcmp(ShenandoahGCMode, "traversal") == 0 ?
                   new ShenandoahTraversalGC(this, _num_regions) :
                   NULL;
 
   _control_thread = new ShenandoahControlThread();
 
@@ -385 +385 @@
 
   return JNI_OK;
 }
 
 void ShenandoahHeap::initialize_heuristics() {
-  if (ShenandoahGCHeuristics != NULL) {
-    if (strcmp(ShenandoahGCHeuristics, "aggressive") == 0) {
-      _heuristics = new ShenandoahAggressiveHeuristics();
-    } else if (strcmp(ShenandoahGCHeuristics, "static") == 0) {
-      _heuristics = new ShenandoahStaticHeuristics();
-    } else if (strcmp(ShenandoahGCHeuristics, "adaptive") == 0) {
-      _heuristics = new ShenandoahAdaptiveHeuristics();
-    } else if (strcmp(ShenandoahGCHeuristics, "passive") == 0) {
-      _heuristics = new ShenandoahPassiveHeuristics();
-    } else if (strcmp(ShenandoahGCHeuristics, "compact") == 0) {
-      _heuristics = new ShenandoahCompactHeuristics();
-    } else if (strcmp(ShenandoahGCHeuristics, "traversal") == 0) {
-      _heuristics = new ShenandoahTraversalHeuristics();
+  if (ShenandoahGCMode != NULL) {
+    if (strcmp(ShenandoahGCMode, "traversal") == 0) {
+      _gc_mode = new ShenandoahTraversalMode();
+    } else if (strcmp(ShenandoahGCMode, "normal") == 0) {
+      _gc_mode = new ShenandoahNormalMode();
+    } else if (strcmp(ShenandoahGCMode, "passive") == 0) {
+      _gc_mode = new ShenandoahPassiveMode();
     } else {
-      vm_exit_during_initialization("Unknown -XX:ShenandoahGCHeuristics option");
+      vm_exit_during_initialization("Unknown -XX:ShenandoahGCMode option");
     }
-
-    if (_heuristics->is_diagnostic() && !UnlockDiagnosticVMOptions) {
-      vm_exit_during_initialization(
-              err_msg("Heuristics \"%s\" is diagnostic, and must be enabled via -XX:+UnlockDiagnosticVMOptions.",
-                      _heuristics->name()));
-    }
-    if (_heuristics->is_experimental() && !UnlockExperimentalVMOptions) {
-      vm_exit_during_initialization(
-              err_msg("Heuristics \"%s\" is experimental, and must be enabled via -XX:+UnlockExperimentalVMOptions.",
-                      _heuristics->name()));
-    }
-    log_info(gc, init)("Shenandoah heuristics: %s",
-                       _heuristics->name());
   } else {
-      ShouldNotReachHere();
+    ShouldNotReachHere();
   }
-
+
+  _gc_mode->initialize_flags();
+  _heuristics = _gc_mode->initialize_heuristics();
+
+  if (_heuristics->is_diagnostic() && !UnlockDiagnosticVMOptions) {
+    vm_exit_during_initialization(
+            err_msg("Heuristics \"%s\" is diagnostic, and must be enabled via -XX:+UnlockDiagnosticVMOptions.",
+                    _heuristics->name()));
+  }
+  if (_heuristics->is_experimental() && !UnlockExperimentalVMOptions) {
+    vm_exit_during_initialization(
+            err_msg("Heuristics \"%s\" is experimental, and must be enabled via -XX:+UnlockExperimentalVMOptions.",
+                    _heuristics->name()));
+  }
+  log_info(gc, init)("Shenandoah heuristics: %s",
+                     _heuristics->name());
 }
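
The hunk above replaces direct -XX:ShenandoahGCHeuristics dispatch with a -XX:ShenandoahGCMode dispatch, and the heuristics object is now obtained from the chosen mode. Only initialize_flags() and initialize_heuristics() are visible in this file; as a reading aid, the mode classes referenced here plausibly share an interface along the following lines (an inferred sketch, not the actual contents of shenandoahNormalMode.hpp or its base header):

// Inferred sketch only: the shape implied by the calls above, not the real header.
class ShenandoahMode : public CHeapObj<mtGC> {
public:
  virtual void initialize_flags() const = 0;                        // mode-specific ergonomics
  virtual ShenandoahHeuristics* initialize_heuristics() const = 0;  // pick heuristics for this mode
};

class ShenandoahNormalMode : public ShenandoahMode {
public:
  virtual void initialize_flags() const;
  virtual ShenandoahHeuristics* initialize_heuristics() const;
};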
@@ -424 +419 @@
 
 #ifdef _MSC_VER
 #pragma warning( push )
 #pragma warning( disable:4355 ) // 'this' : used in base member initializer list
@@ -525 +520 @@
   _workers->run_task(&task);
 }
 
 void ShenandoahHeap::print_on(outputStream* st) const {
   st->print_cr("Shenandoah Heap");
-  st->print_cr(" " SIZE_FORMAT "K total, " SIZE_FORMAT "K committed, " SIZE_FORMAT "K used",
-               max_capacity() / K, committed() / K, used() / K);
-  st->print_cr(" " SIZE_FORMAT " x " SIZE_FORMAT"K regions",
-               num_regions(), ShenandoahHeapRegion::region_size_bytes() / K);
+  st->print_cr(" " SIZE_FORMAT "%s total, " SIZE_FORMAT "%s committed, " SIZE_FORMAT "%s used",
+               byte_size_in_proper_unit(max_capacity()), proper_unit_for_byte_size(max_capacity()),
+               byte_size_in_proper_unit(committed()),    proper_unit_for_byte_size(committed()),
+               byte_size_in_proper_unit(used()),         proper_unit_for_byte_size(used()));
+  st->print_cr(" " SIZE_FORMAT " x " SIZE_FORMAT"%s regions",
+               num_regions(),
+               byte_size_in_proper_unit(ShenandoahHeapRegion::region_size_bytes()),
+               proper_unit_for_byte_size(ShenandoahHeapRegion::region_size_bytes()));
 
   st->print("Status: ");
   if (has_forwarded_objects())               st->print("has forwarded objects, ");
   if (is_concurrent_mark_in_progress())      st->print("marking, ");
   if (is_evacuation_in_progress())           st->print("evacuating, ");
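
The print_on() hunk above switches from fixed /K scaling to the shared byte_size_in_proper_unit()/proper_unit_for_byte_size() helpers, which pick a human-readable unit per value; each SIZE_FORMAT "%s" pair therefore needs the scaled value and the unit string derived from the same byte count. A minimal usage sketch of that pairing, for illustration only:

// Illustrative only: the scaled value and unit string must come from the same byte count.
size_t bytes = committed();
st->print_cr("committed: " SIZE_FORMAT "%s",
             byte_size_in_proper_unit(bytes),
             proper_unit_for_byte_size(bytes));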
@@ -962 +961 @@
 private:
   void do_work() {
     ShenandoahConcurrentEvacuateRegionObjectClosure cl(_sh);
     ShenandoahHeapRegion* r;
     while ((r =_cs->claim_next()) != NULL) {
-      assert(r->has_live(), "all-garbage regions are reclaimed early");
+      assert(r->has_live(), "Region " SIZE_FORMAT " should have been reclaimed early", r->region_number());
       _sh->marked_object_iterate(r, &cl);
 
       if (ShenandoahPacing) {
         _sh->pacer()->report_evac(r->used() >> LogHeapWordSize);
       }
@@ -1065 +1064 @@
     _rp->roots_do(worker_id, &cl);
   }
 };
 
 void ShenandoahHeap::evacuate_and_update_roots() {
-#if defined(COMPILER2) || INCLUDE_JVMCI
+#if COMPILER2_OR_JVMCI
   DerivedPointerTable::clear();
 #endif
   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Only iterate roots while world is stopped");
-
   {
-    ShenandoahRootEvacuator rp(workers()->active_workers(), ShenandoahPhaseTimings::init_evac);
+    // Include concurrent roots if current cycle can not process those roots concurrently
+    ShenandoahRootEvacuator rp(workers()->active_workers(),
+                               ShenandoahPhaseTimings::init_evac,
+                               !ShenandoahConcurrentRoots::should_do_concurrent_roots());
     ShenandoahEvacuateUpdateRootsTask roots_task(&rp);
     workers()->run_task(&roots_task);
   }
 
-#if defined(COMPILER2) || INCLUDE_JVMCI
+#if COMPILER2_OR_JVMCI
   DerivedPointerTable::update_pointers();
 #endif
 }
 
 // Returns size in bytes
@@ -1137 +1138 @@
 bool ShenandoahHeap::block_is_obj(const HeapWord* addr) const {
   Space* sp = heap_region_containing(addr);
   return sp->block_is_obj(addr);
 }
 
+bool ShenandoahHeap::print_location(outputStream* st, void* addr) const {
+  return BlockLocationPrinter<ShenandoahHeap>::print_location(st, addr);
+}
+
 jlong ShenandoahHeap::millis_since_last_gc() {
   double v = heuristics()->time_since_last_gc() * 1000;
   assert(0 <= v && v <= max_jlong, "value should fit: %f", v);
   return (jlong)v;
 }
@@ -1222 +1227 @@
   template <class T>
   void do_oop_work(T* p) {
     T o = RawAccess<>::oop_load(p);
     if (!CompressedOops::is_null(o)) {
       oop obj = CompressedOops::decode_not_null(o);
-      obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
+      oop fwd = (oop) ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
+      if (fwd == NULL) {
+        // There is an odd interaction with VM_HeapWalkOperation, see jvmtiTagMap.cpp.
+        //
+        // That operation walks the reachable objects on its own, storing the marking
+        // wavefront in the object marks. When it is done, it calls the CollectedHeap
+        // to iterate over all objects to clean up the mess. When it reaches here,
+        // the Shenandoah fwdptr resolution code encounters the marked objects with
+        // NULL forwardee. Trying to act on that would crash the VM. Or fail the
+        // asserts, should we go for resolve_forwarded_pointer(obj).
+        //
+        // Therefore, we have to dodge it by doing the raw access to forwardee, and
+        // assuming the object had no forwardee, if that thing is NULL.
+      } else {
+        obj = fwd;
+      }
       assert(oopDesc::is_oop(obj), "must be a valid oop");
       if (!_bitmap->is_marked((HeapWord*) obj)) {
         _bitmap->mark((HeapWord*) obj);
         _oop_stack->push(obj);
       }
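
The comment block added above carries the key reasoning: during VM_HeapWalkOperation the object marks hold the walk's own marking wavefront, so the raw forwardee lookup can return NULL and must be treated as "not forwarded". As a reading aid only, the new control flow is equivalent to a small helper along these lines (hypothetical name, not part of the changeset):

// Reading aid only: mirrors the fallback logic added above; not a HotSpot API.
static oop resolve_for_heap_walk(oop obj) {
  oop fwd = (oop) ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
  // A NULL raw forwardee can legitimately appear while the JVMTI heap walk
  // has borrowed the object marks; treat the object as not forwarded.
  return (fwd == NULL) ? obj : fwd;
}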
@@ -1274 +1294 @@
   // Reset bitmap
   _aux_bit_map.clear();
 
   Stack<oop,mtGC> oop_stack;
 
-  // First, we process all GC roots. This populates the work stack with initial objects.
-  ShenandoahAllRootScanner rp(1, ShenandoahPhaseTimings::_num_phases);
+  // First, we process GC roots according to current GC cycle. This populates the work stack with initial objects.
+  ShenandoahHeapIterationRootScanner rp;
   ObjectIterateScanRootClosure oops(&_aux_bit_map, &oop_stack);
-  rp.roots_do(0, &oops);
+
+  // If we are unloading classes right now, we should not touch weak roots,
+  // on the off-chance we would evacuate them and make them live accidentally.
+  // In other cases, we have to scan all roots.
+  if (is_evacuation_in_progress() && unload_classes()) {
+    rp.strong_roots_do(&oops);
+  } else {
+    rp.roots_do(&oops);
+  }
 
   // Work through the oop stack to traverse heap.
   while (! oop_stack.is_empty()) {
     oop obj = oop_stack.pop();
     assert(oopDesc::is_oop(obj), "must be a valid oop");
@@ -1463 +1491 @@
       verifier()->verify_roots_no_forwarded();
     }
 
     stop_concurrent_marking();
 
+    // All allocations past TAMS are implicitly live, adjust the region data.
+    // Bitmaps/TAMS are swapped at this point, so we need to poll complete bitmap.
     {
       ShenandoahGCPhase phase(ShenandoahPhaseTimings::complete_liveness);
-
-      // All allocations past TAMS are implicitly live, adjust the region data.
-      // Bitmaps/TAMS are swapped at this point, so we need to poll complete bitmap.
       ShenandoahCompleteLivenessClosure cl;
       parallel_heap_region_iterate(&cl);
     }
 
+    // Force the threads to reacquire their TLABs outside the collection set.
     {
-      ShenandoahGCPhase prepare_evac(ShenandoahPhaseTimings::prepare_evac);
-
+      ShenandoahGCPhase phase(ShenandoahPhaseTimings::retire_tlabs);
       make_parsable(true);
-
+    }
+
+    // Trash the collection set left over from previous cycle, if any.
+    {
+      ShenandoahGCPhase phase(ShenandoahPhaseTimings::trash_cset);
       trash_cset_regions();
-
-      {
-        ShenandoahHeapLocker locker(lock());
-        _collection_set->clear();
-        _free_set->clear();
-
-        heuristics()->choose_collection_set(_collection_set);
-
-        _free_set->rebuild();
-      }
+    }
+
+    {
+      ShenandoahGCPhase phase(ShenandoahPhaseTimings::prepare_evac);
+
+      ShenandoahHeapLocker locker(lock());
+      _collection_set->clear();
+      _free_set->clear();
+
+      heuristics()->choose_collection_set(_collection_set);
+
+      _free_set->rebuild();
     }
 
     // If collection set has candidates, start evacuation.
     // Otherwise, bypass the rest of the cycle.
     if (!collection_set()->is_empty()) {
@@ -1503 +1536 @@
 
       set_evacuation_in_progress(true);
       // From here on, we need to update references.
       set_has_forwarded_objects(true);
 
-      evacuate_and_update_roots();
+      if (!is_degenerated_gc_in_progress()) {
+        evacuate_and_update_roots();
+      }
 
       if (ShenandoahPacing) {
         pacer()->setup_for_evac();
       }
 
       if (ShenandoahVerify) {
-        verifier()->verify_roots_no_forwarded();
+        if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
+          ShenandoahRootVerifier::RootTypes types = ShenandoahRootVerifier::combine(ShenandoahRootVerifier::JNIHandleRoots, ShenandoahRootVerifier::WeakRoots);
+          types = ShenandoahRootVerifier::combine(types, ShenandoahRootVerifier::CLDGRoots);
+          verifier()->verify_roots_no_forwarded_except(types);
+        } else {
+          verifier()->verify_roots_no_forwarded();
+        }
         verifier()->verify_during_evacuation();
       }
     } else {
       if (ShenandoahVerify) {
         verifier()->verify_after_concmark();
@@ -1542 +1583 @@
 void ShenandoahHeap::op_final_evac() {
   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Should be at safepoint");
 
   set_evacuation_in_progress(false);
 
-  retire_and_reset_gclabs();
+  {
+    ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_evac_retire_gclabs);
+    retire_and_reset_gclabs();
+  }
 
   if (ShenandoahVerify) {
     verifier()->verify_after_evacuation();
   }
 
@@ -1569 +1613 @@
   update_heap_references(true);
 }
 
 void ShenandoahHeap::op_cleanup() {
   free_set()->recycle_trash();
+}
+
+class ShenandoahConcurrentRootsEvacUpdateTask : public AbstractGangTask {
+private:
+  ShenandoahVMRoots<true /*concurrent*/>        _vm_roots;
+  ShenandoahWeakRoots<true /*concurrent*/>      _weak_roots;
+  ShenandoahClassLoaderDataRoots<true /*concurrent*/, false /*single threaded*/> _cld_roots;
+
+public:
+  ShenandoahConcurrentRootsEvacUpdateTask() :
+    AbstractGangTask("Shenandoah Evacuate/Update Concurrent Roots Task") {
+  }
+
+  void work(uint worker_id) {
+    ShenandoahEvacOOMScope oom;
+    {
+      // jni_roots and weak_roots are OopStorage backed roots, concurrent iteration
+      // may race against OopStorage::release() calls.
+      ShenandoahEvacUpdateOopStorageRootsClosure cl;
+      _vm_roots.oops_do<ShenandoahEvacUpdateOopStorageRootsClosure>(&cl);
+      _weak_roots.oops_do<ShenandoahEvacUpdateOopStorageRootsClosure>(&cl);
+    }
+
+    {
+      ShenandoahEvacuateUpdateRootsClosure cl;
+      CLDToOopClosure clds(&cl, ClassLoaderData::_claim_strong);
+      _cld_roots.cld_do(&clds);
+    }
+  }
+};
+
+void ShenandoahHeap::op_roots() {
+  if (is_evacuation_in_progress() &&
+      ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
+    ShenandoahConcurrentRootsEvacUpdateTask task;
+    workers()->run_task(&task);
+  }
 }
 
 void ShenandoahHeap::op_reset() {
   reset_mark_bitmap();
 }
@@ -1602 +1683 @@
     ShenandoahGCPhase phase(ShenandoahPhaseTimings::full_gc_resize_tlabs);
     resize_all_tlabs();
   }
 
   metrics.snap_after();
-  metrics.print();
 
-  if (metrics.is_good_progress("Full GC")) {
+  if (metrics.is_good_progress()) {
     _progress_last_gc.set();
   } else {
     // Nothing to do. Tell the allocation path that we have failed to make
     // progress, and it can finally fail.
     _progress_last_gc.unset();
@@ -1658 +1738 @@
       // Note that we can only do this for "outside-cycle" degens, otherwise we would risk
       // changing the cycle parameters mid-cycle during concurrent -> degenerated handover.
       set_process_references(heuristics()->can_process_references());
       set_unload_classes(heuristics()->can_unload_classes());
 
-      if (heuristics()->can_do_traversal_gc()) {
+      if (is_traversal_mode()) {
        // Not possible to degenerate from here, upgrade to Full GC right away.
        cancel_gc(GCCause::_shenandoah_upgrade_to_full_gc);
        op_degenerated_fail();
        return;
       }
@@ -1693 +1773 @@
         // collection set un-evacuated. Restart evacuation from the beginning to
         // capture all objects. For all the objects that are already evacuated,
         // it would be a simple check, which is supposed to be fast. This is also
         // safe to do even without degeneration, as CSet iterator is at beginning
         // in preparation for evacuation anyway.
-        collection_set()->clear_current_index();
+        //
+        // Before doing that, we need to make sure we never had any cset-pinned
+        // regions. This may happen if allocation failure happened when evacuating
+        // the about-to-be-pinned object, oom-evac protocol left the object in
+        // the collection set, and then the pin reached the cset region. If we continue
+        // the cycle here, we would trash the cset and alive objects in it. To avoid
+        // it, we fail degeneration right away and slide into Full GC to recover.
+
+        {
+          collection_set()->clear_current_index();
+
+          ShenandoahHeapRegion* r;
+          while ((r = collection_set()->next()) != NULL) {
+            if (r->is_pinned()) {
+              cancel_gc(GCCause::_shenandoah_upgrade_to_full_gc);
+              op_degenerated_fail();
+              return;
+            }
+          }
+
+          collection_set()->clear_current_index();
+        }
 
         op_stw_evac();
         if (cancelled_gc()) {
           op_degenerated_fail();
           return;
@@ -1737 +1838 @@
   if (VerifyAfterGC) {
     Universe::verify();
   }
 
   metrics.snap_after();
-  metrics.print();
 
   // Check for futility and fail. There is no reason to do several back-to-back Degenerated cycles,
   // because that probably means the heap is overloaded and/or fragmented.
-  if (!metrics.is_good_progress("Degenerated GC")) {
+  if (!metrics.is_good_progress()) {
     _progress_last_gc.unset();
     cancel_gc(GCCause::_shenandoah_upgrade_to_full_gc);
     op_degenerated_futile();
   } else {
     _progress_last_gc.set();
@@ -1808 +1908 @@
   }
   ShenandoahBarrierSet::satb_mark_queue_set().set_active_all_threads(in_progress, !in_progress);
 }
 
 void ShenandoahHeap::set_concurrent_traversal_in_progress(bool in_progress) {
-   set_gc_state_mask(TRAVERSAL | HAS_FORWARDED | UPDATEREFS, in_progress);
+   set_gc_state_mask(TRAVERSAL, in_progress);
    ShenandoahBarrierSet::satb_mark_queue_set().set_active_all_threads(in_progress, !in_progress);
 }
 
 void ShenandoahHeap::set_evacuation_in_progress(bool in_progress) {
   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Only call this at safepoint");
@@ -1848 +1948 @@
     jbyte prev = _cancelled_gc.cmpxchg(CANCELLED, CANCELLABLE);
     if (prev == CANCELLABLE) return true;
     else if (prev == CANCELLED) return false;
     assert(ShenandoahSuspendibleWorkers, "should not get here when not using suspendible workers");
     assert(prev == NOT_CANCELLED, "must be NOT_CANCELLED");
-    {
+    if (Thread::current()->is_Java_thread()) {
       // We need to provide a safepoint here, otherwise we might
       // spin forever if a SP is pending.
       ThreadBlockInVM sp(JavaThread::current());
       SpinPause();
     }
@@ -1892 +1992 @@
   if (ShenandoahStringDedup::is_enabled()) {
     ShenandoahStringDedup::stop();
   }
 }
 
-void ShenandoahHeap::unload_classes_and_cleanup_tables(bool full_gc) {
-  assert(heuristics()->can_unload_classes(), "Class unloading should be enabled");
-
-  ShenandoahGCPhase root_phase(full_gc ?
-                               ShenandoahPhaseTimings::full_gc_purge :
-                               ShenandoahPhaseTimings::purge);
-
-  ShenandoahIsAliveSelector alive;
-  BoolObjectClosure* is_alive = alive.is_alive_closure();
-
+void ShenandoahHeap::stw_unload_classes(bool full_gc) {
+  if (!unload_classes()) return;
   bool purged_class;
 
   // Unload classes and purge SystemDictionary.
   {
     ShenandoahGCPhase phase(full_gc ?
@@ -1916 +2008 @@
 
   {
     ShenandoahGCPhase phase(full_gc ?
                             ShenandoahPhaseTimings::full_gc_purge_par :
                             ShenandoahPhaseTimings::purge_par);
-    uint active = _workers->active_workers();
-    ParallelCleaningTask unlink_task(is_alive, active, purged_class, true);
+    ShenandoahIsAliveSelector is_alive;
+    uint num_workers = _workers->active_workers();
+    ShenandoahClassUnloadingTask unlink_task(is_alive.is_alive_closure(), num_workers, purged_class);
     _workers->run_task(&unlink_task);
   }
 
   {
     ShenandoahGCPhase phase(full_gc ?
-                      ShenandoahPhaseTimings::full_gc_purge_cldg :
-                      ShenandoahPhaseTimings::purge_cldg);
+                            ShenandoahPhaseTimings::full_gc_purge_cldg :
+                            ShenandoahPhaseTimings::purge_cldg);
     ClassLoaderDataGraph::purge();
   }
+  // Resize and verify metaspace
+  MetaspaceGC::compute_new_size();
+  MetaspaceUtils::verify_metrics();
+}
+
+// Process leftover weak oops: update them, if needed or assert they do not
+// need updating otherwise.
+// Weak processor API requires us to visit the oops, even if we are not doing
+// anything to them.
+void ShenandoahHeap::stw_process_weak_roots(bool full_gc) {
+  ShenandoahGCPhase root_phase(full_gc ?
+                               ShenandoahPhaseTimings::full_gc_purge :
+                               ShenandoahPhaseTimings::purge);
+  uint num_workers = _workers->active_workers();
+  ShenandoahPhaseTimings::Phase timing_phase = full_gc ?
+                                               ShenandoahPhaseTimings::full_gc_purge_par :
+                                               ShenandoahPhaseTimings::purge_par;
+  // Cleanup weak roots
+  ShenandoahGCPhase phase(timing_phase);
+  if (has_forwarded_objects()) {
+    ShenandoahForwardedIsAliveClosure is_alive;
+    ShenandoahUpdateRefsClosure keep_alive;
+    ShenandoahParallelWeakRootsCleaningTask<ShenandoahForwardedIsAliveClosure, ShenandoahUpdateRefsClosure>
+      cleaning_task(&is_alive, &keep_alive, num_workers);
+    _workers->run_task(&cleaning_task);
+  } else {
+    ShenandoahIsAliveClosure is_alive;
+#ifdef ASSERT
+  ShenandoahAssertNotForwardedClosure verify_cl;
+  ShenandoahParallelWeakRootsCleaningTask<ShenandoahIsAliveClosure, ShenandoahAssertNotForwardedClosure>
+    cleaning_task(&is_alive, &verify_cl, num_workers);
+#else
+  ShenandoahParallelWeakRootsCleaningTask<ShenandoahIsAliveClosure, DoNothingClosure>
+    cleaning_task(&is_alive, &do_nothing_cl, num_workers);
+#endif
+    _workers->run_task(&cleaning_task);
+  }
+}
+
+void ShenandoahHeap::parallel_cleaning(bool full_gc) {
+  assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
+  stw_process_weak_roots(full_gc);
+  stw_unload_classes(full_gc);
 }
 
 void ShenandoahHeap::set_has_forwarded_objects(bool cond) {
-  set_gc_state_mask(HAS_FORWARDED, cond);
+  if (is_traversal_mode()) {
+    set_gc_state_mask(HAS_FORWARDED | UPDATEREFS, cond);
+  } else {
+    set_gc_state_mask(HAS_FORWARDED, cond);
+  }
+
 }
 
 void ShenandoahHeap::set_process_references(bool pr) {
   _process_references.set_cond(pr);
 }
@@ -2099 +2240 @@
 void ShenandoahHeap::op_init_updaterefs() {
   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
 
   set_evacuation_in_progress(false);
 
-  retire_and_reset_gclabs();
+  {
+    ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_refs_retire_gclabs);
+    retire_and_reset_gclabs();
+  }
 
   if (ShenandoahVerify) {
     if (!is_degenerated_gc_in_progress()) {
       verifier()->verify_roots_no_forwarded_except(ShenandoahRootVerifier::ThreadRoots);
     }
     verifier()->verify_before_updaterefs();
   }
 
   set_update_refs_in_progress(true);
-  make_parsable(true);
-  for (uint i = 0; i < num_regions(); i++) {
-    ShenandoahHeapRegion* r = get_region(i);
-    r->set_concurrent_iteration_safe_limit(r->top());
-  }
-
-  // Reset iterator.
-  _update_refs_iterator.reset();
+
+  {
+    ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_refs_prepare);
+
+    make_parsable(true);
+    for (uint i = 0; i < num_regions(); i++) {
+      ShenandoahHeapRegion* r = get_region(i);
+      r->set_concurrent_iteration_safe_limit(r->top());
+    }
+
+    // Reset iterator.
+    _update_refs_iterator.reset();
+  }
 
   if (ShenandoahPacing) {
     pacer()->setup_for_updaterefs();
   }
 }
@@ -2128 +2277 @@
 void ShenandoahHeap::op_final_updaterefs() {
   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
 
   // Check if there is left-over work, and finish it
   if (_update_refs_iterator.has_next()) {
-    ShenandoahGCPhase final_work(ShenandoahPhaseTimings::final_update_refs_finish_work);
+    ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_refs_finish_work);
 
     // Finish updating references where we left off.
     clear_cancelled_gc();
     update_heap_references(false);
   }
@@ -2152 +2301 @@
     concurrent_mark()->update_roots(ShenandoahPhaseTimings::degen_gc_update_roots);
   } else {
     concurrent_mark()->update_thread_roots(ShenandoahPhaseTimings::final_update_refs_roots);
   }
 
-  ShenandoahGCPhase final_update_refs(ShenandoahPhaseTimings::final_update_refs_recycle);
-
-  trash_cset_regions();
+  // Has to be done before cset is clear
+  if (ShenandoahVerify) {
+    verifier()->verify_roots_in_to_space();
+  }
+
+  {
+    ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_refs_trash_cset);
+    trash_cset_regions();
+  }
+
   set_has_forwarded_objects(false);
   set_update_refs_in_progress(false);
 
   if (ShenandoahVerify) {
-    verifier()->verify_roots_no_forwarded();
     verifier()->verify_after_updaterefs();
   }
 
   if (VerifyAfterGC) {
     Universe::verify();
@@ -2529 +2684 @@
                               "concurrent reference update");
 
   try_inject_alloc_failure();
   op_updaterefs();
 }
+
+void ShenandoahHeap::entry_roots() {
+  ShenandoahGCPhase phase(ShenandoahPhaseTimings::conc_roots);
+
+  static const char* msg = "Concurrent roots processing";
+  GCTraceTime(Info, gc) time(msg, NULL, GCCause::_no_gc, true);
+  EventMark em("%s", msg);
+
+  ShenandoahWorkerScope scope(workers(),
+                              ShenandoahWorkerPolicy::calc_workers_for_conc_root_processing(),
+                              "concurrent root processing");
+
+  try_inject_alloc_failure();
+  op_roots();
+}
+
 void ShenandoahHeap::entry_cleanup() {
   ShenandoahGCPhase phase(ShenandoahPhaseTimings::conc_cleanup);
 
   static const char* msg = "Concurrent cleanup";
   GCTraceTime(Info, gc) time(msg, NULL, GCCause::_no_gc, true);