hotspot/src/share/vm/gc/g1/g1ConcurrentMark.inline.hpp
changeset 46750 bcab0128a16f
parent 46502 116a09d8f142
child 46751 d2e0cecdbcb0
--- a/hotspot/src/share/vm/gc/g1/g1ConcurrentMark.inline.hpp	(46749:9db1408787eb)
+++ b/hotspot/src/share/vm/gc/g1/g1ConcurrentMark.inline.hpp	(46750:bcab0128a16f)
@@ -31,67 +31,65 @@
 #include "gc/g1/suspendibleThreadSet.hpp"
 #include "gc/shared/taskqueue.inline.hpp"
 #include "utilities/bitMap.inline.hpp"
 
 inline bool G1ConcurrentMark::par_mark(oop obj) {
-  return _nextMarkBitMap->parMark((HeapWord*)obj);
+  return _nextMarkBitMap->par_mark((HeapWord*)obj);
 }
 
-inline bool G1CMBitMapRO::iterate(BitMapClosure* cl, MemRegion mr) {
-  HeapWord* start_addr = MAX2(startWord(), mr.start());
-  HeapWord* end_addr = MIN2(endWord(), mr.end());
-
-  if (end_addr > start_addr) {
-    // Right-open interval [start-offset, end-offset).
-    BitMap::idx_t start_offset = heapWordToOffset(start_addr);
-    BitMap::idx_t end_offset = heapWordToOffset(end_addr);
+inline bool G1CMBitMap::iterate(G1CMBitMapClosure* cl, MemRegion mr) {
+  assert(!mr.is_empty(), "Does not support empty memregion to iterate over");
+  assert(_covered.contains(mr),
+         "Given MemRegion from " PTR_FORMAT " to " PTR_FORMAT " not contained in heap area",
+         p2i(mr.start()), p2i(mr.end()));
+
+  BitMap::idx_t const end_offset = addr_to_offset(mr.end());
+  BitMap::idx_t offset = _bm.get_next_one_offset(addr_to_offset(mr.start()), end_offset);
 
-    start_offset = _bm.get_next_one_offset(start_offset, end_offset);
-    while (start_offset < end_offset) {
-      if (!cl->do_bit(start_offset)) {
-        return false;
-      }
-      HeapWord* next_addr = MIN2(nextObject(offsetToHeapWord(start_offset)), end_addr);
-      BitMap::idx_t next_offset = heapWordToOffset(next_addr);
-      start_offset = _bm.get_next_one_offset(next_offset, end_offset);
-    }
-  }
+  while (offset < end_offset) {
+    HeapWord* const addr = offset_to_addr(offset);
+    if (!cl->do_addr(addr)) {
+      return false;
+    }
+    size_t const obj_size = (size_t)((oop)addr)->size();
+    offset = _bm.get_next_one_offset(offset + (obj_size >> _shifter), end_offset);
+  }
   return true;
 }
 
-// The argument addr should be the start address of a valid object
-HeapWord* G1CMBitMapRO::nextObject(HeapWord* addr) {
-  oop obj = (oop) addr;
-  HeapWord* res =  addr + obj->size();
-  assert(offsetToHeapWord(heapWordToOffset(res)) == res, "sanity");
-  return res;
-}
-
-#define check_mark(addr)                                                       \
-  assert(_bmStartWord <= (addr) && (addr) < (_bmStartWord + _bmWordSize),      \
-         "outside underlying space?");                                         \
-  assert(G1CollectedHeap::heap()->is_in_exact(addr),                           \
-         "Trying to access not available bitmap " PTR_FORMAT                   \
-         " corresponding to " PTR_FORMAT " (%u)",                              \
-         p2i(this), p2i(addr), G1CollectedHeap::heap()->addr_to_region(addr));
+inline HeapWord* G1CMBitMap::get_next_marked_addr(const HeapWord* addr,
+                                                  const HeapWord* limit) const {
+  assert(limit != NULL, "limit must not be NULL");
+  // Round addr up to a possible object boundary to be safe.
+  size_t const addr_offset = addr_to_offset(align_up(addr, HeapWordSize << _shifter));
+  size_t const limit_offset = addr_to_offset(limit);
+  size_t const nextOffset = _bm.get_next_one_offset(addr_offset, limit_offset);
+  return offset_to_addr(nextOffset);
+}
+
+#ifdef ASSERT
+inline void G1CMBitMap::check_mark(HeapWord* addr) {
+  assert(G1CollectedHeap::heap()->is_in_exact(addr),
+         "Trying to access bitmap " PTR_FORMAT " for address " PTR_FORMAT " not in the heap.",
+         p2i(this), p2i(addr));
+}
+#endif
 
 inline void G1CMBitMap::mark(HeapWord* addr) {
   check_mark(addr);
-  _bm.set_bit(heapWordToOffset(addr));
+  _bm.set_bit(addr_to_offset(addr));
 }
 
 inline void G1CMBitMap::clear(HeapWord* addr) {
   check_mark(addr);
-  _bm.clear_bit(heapWordToOffset(addr));
+  _bm.clear_bit(addr_to_offset(addr));
 }
 
-inline bool G1CMBitMap::parMark(HeapWord* addr) {
+inline bool G1CMBitMap::par_mark(HeapWord* addr) {
   check_mark(addr);
-  return _bm.par_set_bit(heapWordToOffset(addr));
+  return _bm.par_set_bit(addr_to_offset(addr));
 }
 
-
-#undef check_mark
 
 #ifndef PRODUCT
 template<typename Fn>
 inline void G1CMMarkStack::iterate(Fn fn) const {
   assert_at_safepoint(true);
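For orientation (this sketch is not part of the changeset): the reworked G1CMBitMap::iterate() no longer hands the closure a raw bit offset via do_bit(); it passes the heap address of each marked object to G1CMBitMapClosure::do_addr() and then itself skips the remainder of that object, using the object's size scaled by _shifter (the log2 of heap words covered per bitmap bit). A minimal standalone model of that loop, using invented names (ToyBitMap, PrintLive, word indices instead of HeapWord*, one bit per word) rather than the real HotSpot types:

#include <cstddef>
#include <iostream>
#include <vector>

// Toy heap: heap[i] holds the size (in words) of an object starting at word i.
// Toy bitmap: one bit per heap word; a set bit marks an object's first word.
// (The real G1CMBitMap covers several words per bit, hence the _shifter scaling.)
struct ToyBitMap {
  std::vector<bool> bits;
  size_t get_next_one_offset(size_t from, size_t end) const {
    while (from < end && !bits[from]) { ++from; }
    return from;
  }
};

// Plays the role of G1CMBitMapClosure::do_addr(): visit one marked object,
// return false to abort the iteration early.
struct PrintLive {
  bool do_addr(size_t word) {
    std::cout << "live object starts at word " << word << '\n';
    return true;
  }
};

// Same control flow as the new iterate(): find the next set bit, hand the
// object's address to the closure, then skip past the whole object before
// searching for the next mark.
template <typename Closure>
bool iterate(const ToyBitMap& bm, const std::vector<size_t>& heap,
             Closure& cl, size_t start, size_t end) {
  size_t offset = bm.get_next_one_offset(start, end);
  while (offset < end) {
    if (!cl.do_addr(offset)) {
      return false;
    }
    offset = bm.get_next_one_offset(offset + heap[offset], end);
  }
  return true;
}

int main() {
  // Objects start at words 0, 3, 5 and 9; those at 0, 5 and 9 are marked.
  std::vector<size_t> heap = {3, 0, 0, 2, 0, 4, 0, 0, 0, 1};
  ToyBitMap bm{std::vector<bool>(heap.size(), false)};
  bm.bits[0] = true;
  bm.bits[5] = true;
  bm.bits[9] = true;
  PrintLive cl;
  iterate(bm, heap, cl, 0, heap.size());   // prints words 0, 5 and 9
  return 0;
}

Compared with the old do_bit() protocol, the closure no longer has to translate bit offsets back into addresses, and iterate() itself guarantees it never revisits bits inside an object it has already handed to the closure.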
@@ -120,11 +118,11 @@
 inline void G1CMTask::push(G1TaskQueueEntry task_entry) {
   assert(task_entry.is_array_slice() || _g1h->is_in_g1_reserved(task_entry.obj()), "invariant");
   assert(task_entry.is_array_slice() || !_g1h->is_on_master_free_list(
               _g1h->heap_region_containing(task_entry.obj())), "invariant");
   assert(task_entry.is_array_slice() || !_g1h->is_obj_ill(task_entry.obj()), "invariant");  // FIXME!!!
-  assert(task_entry.is_array_slice() || _nextMarkBitMap->isMarked((HeapWord*)task_entry.obj()), "invariant");
+  assert(task_entry.is_array_slice() || _nextMarkBitMap->is_marked((HeapWord*)task_entry.obj()), "invariant");
 
   if (!_task_queue->push(task_entry)) {
     // The local task queue looks full. We need to push some entries
     // to the global stack.
     move_entries_to_global_stack();
@@ -168,11 +166,11 @@
 }
 
 template<bool scan>
 inline void G1CMTask::process_grey_task_entry(G1TaskQueueEntry task_entry) {
   assert(scan || (task_entry.is_oop() && task_entry.obj()->is_typeArray()), "Skipping scan of grey non-typeArray");
-  assert(task_entry.is_array_slice() || _nextMarkBitMap->isMarked((HeapWord*)task_entry.obj()),
+  assert(task_entry.is_array_slice() || _nextMarkBitMap->is_marked((HeapWord*)task_entry.obj()),
          "Any stolen object should be a slice or marked");
 
   if (scan) {
     if (task_entry.is_array_slice()) {
       _words_scanned += _objArray_processor.process_slice(task_entry.slice());
@@ -238,11 +236,11 @@
 
   HeapWord* objAddr = (HeapWord*) obj;
   assert(obj->is_oop_or_null(true /* ignore mark word */), "Expected an oop or NULL at " PTR_FORMAT, p2i(obj));
   if (_g1h->is_in_g1_reserved(objAddr)) {
     assert(obj != NULL, "null check is implicit");
-    if (!_nextMarkBitMap->isMarked(objAddr)) {
+    if (!_nextMarkBitMap->is_marked(objAddr)) {
       // Only get the containing region if the object is not marked on the
       // bitmap (otherwise, it's a waste of time since we won't do
       // anything with it).
       HeapRegion* hr = _g1h->heap_region_containing(obj);
       if (!hr->obj_allocated_since_next_marking(obj)) {
@@ -251,23 +249,17 @@
     }
   }
 }
 
 inline void G1ConcurrentMark::markPrev(oop p) {
-  assert(!_prevMarkBitMap->isMarked((HeapWord*) p), "sanity");
-  // Note we are overriding the read-only view of the prev map here, via
-  // the cast.
-  ((G1CMBitMap*)_prevMarkBitMap)->mark((HeapWord*) p);
+  assert(!_prevMarkBitMap->is_marked((HeapWord*) p), "sanity");
+ _prevMarkBitMap->mark((HeapWord*) p);
 }
 
 bool G1ConcurrentMark::isPrevMarked(oop p) const {
   assert(p != NULL && p->is_oop(), "expected an oop");
-  HeapWord* addr = (HeapWord*)p;
-  assert(addr >= _prevMarkBitMap->startWord() ||
-         addr < _prevMarkBitMap->endWord(), "in a region");
-
-  return _prevMarkBitMap->isMarked(addr);
+  return _prevMarkBitMap->is_marked((HeapWord*)p);
 }
 
 inline void G1ConcurrentMark::grayRoot(oop obj, HeapRegion* hr) {
   assert(obj != NULL, "pre-condition");
   HeapWord* addr = (HeapWord*) obj;
@@ -280,11 +272,11 @@
   // Given that we're looking for a region that contains an object
   // header it's impossible to get back a HC region.
   assert(!hr->is_continues_humongous(), "sanity");
 
   if (addr < hr->next_top_at_mark_start()) {
-    if (!_nextMarkBitMap->isMarked(addr)) {
+    if (!_nextMarkBitMap->is_marked(addr)) {
       par_mark(obj);
     }
   }
 }
 