hotspot/src/share/vm/gc/serial/defNewGeneration.cpp
changeset 35492 c8c0273e6b91
parent 35471 deb38c83d759
child 35862 411842d0c882
comparison of 35491:663c609dfeee and 35492:c8c0273e6b91
   355   // unnecessary.  Also a second call to expand-to-reserve
   356   // value potentially can cause an undue expansion.
   357   // For example if the first expand fail for unknown reasons,
   358   // but the second succeeds and expands the heap to its maximum
   359   // value.
-  360   if (GC_locker::is_active()) {
+  360   if (GCLocker::is_active()) {
   361     log_debug(gc)("Garbage collection disabled, expanded heap instead");
   362   }
   363 
   364   return success;
   365 }
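The change at line 360 is part of the GC_locker-to-GCLocker rename; the surrounding code logs that the heap was grown instead of collected while the GC locker was held. A minimal sketch of that calling pattern, assuming a hypothetical try_expand_instead_of_gc() helper and an assumed grow_by() expansion entry point; only GCLocker::is_active() comes from the changeset:

// Sketch only: a hypothetical caller that grows the young generation while
// collection is disabled by the GC locker.  grow_by() is an assumption, not
// part of the changeset.
static bool try_expand_instead_of_gc(DefNewGeneration* young, size_t bytes) {
  if (GCLocker::is_active()) {
    // A thread is inside a JNI critical region, so a collection cannot run;
    // the only way to satisfy the request is to expand the generation.
    return young->grow_by(bytes);
  }
  return false;  // caller falls back to scheduling a young collection
}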
   525 }
   526 
   527 // The last collection bailed out, we are running out of heap space,
   528 // so we try to allocate the from-space, too.
   529 HeapWord* DefNewGeneration::allocate_from_space(size_t size) {
-  530   bool should_try_alloc = should_allocate_from_space() || GC_locker::is_active_and_needs_gc();
+  530   bool should_try_alloc = should_allocate_from_space() || GCLocker::is_active_and_needs_gc();
   531 
   532   // If the Heap_lock is not locked by this thread, this will be called
   533   // again later with the Heap_lock held.
   534   bool do_alloc = should_try_alloc && (Heap_lock->owned_by_self() || (SafepointSynchronize::is_at_safepoint() && Thread::current()->is_VM_thread()));
   535 
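The unchanged condition at line 534 only permits the from-space allocation when this thread can safely touch the heap: it either owns the Heap_lock or is the VM thread running at a safepoint. Expressed as a standalone predicate for illustration (a sketch; all calls appear in the hunk above):

// Sketch only: the locking precondition behind do_alloc, pulled out as a
// separate predicate.
static bool heap_lock_effectively_held() {
  // Either this thread owns the Heap_lock outright, or it is the VM thread
  // executing at a safepoint, where no Java thread can mutate the heap.
  return Heap_lock->owned_by_self() ||
         (SafepointSynchronize::is_at_safepoint() &&
          Thread::current()->is_VM_thread());
}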
   908 }
   909 
   910 void DefNewGeneration::gc_epilogue(bool full) {
   911   DEBUG_ONLY(static bool seen_incremental_collection_failed = false;)
   912 
-  913   assert(!GC_locker::is_active(), "We should not be executing here");
+  913   assert(!GCLocker::is_active(), "We should not be executing here");
   914   // Check if the heap is approaching full after a collection has
   915   // been done.  Generally the young generation is empty at
   916   // a minimum at the end of a collection.  If it is not, then
   917   // the heap is approaching full.
   918   GenCollectedHeap* gch = GenCollectedHeap::heap();
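The epilogue asserts that the GC locker is no longer active, then checks whether the heap is approaching full, since the young generation is normally empty after a collection. The fragment below is a sketch of the kind of check that comment describes, not the changeset's code; the non-empty-eden test and the set_should_allocate_from_space() call are assumptions for illustration:

// Sketch only (gc_epilogue fragment): treat a young generation that a full
// collection failed to empty as a sign the heap is nearly full.
if (full && !eden()->is_empty()) {
  // Allow the next allocation failure to fall back to from-space.
  set_should_allocate_from_space();  // assumed setter, for illustration
}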