hotspot/src/share/vm/gc/g1/g1Allocator.cpp
changeset 46619 a3919f5e8d2b
parent 46618 d503911aa948
child 46625 edefffab74e2
@@ -423,32 +423,32 @@
 
 void G1ArchiveAllocator::complete_archive(GrowableArray<MemRegion>* ranges,
                                           size_t end_alignment_in_bytes) {
   assert((end_alignment_in_bytes >> LogHeapWordSize) < HeapRegion::min_region_size_in_words(),
          "alignment " SIZE_FORMAT " too large", end_alignment_in_bytes);
-  assert(is_size_aligned(end_alignment_in_bytes, HeapWordSize),
+  assert(is_aligned(end_alignment_in_bytes, HeapWordSize),
          "alignment " SIZE_FORMAT " is not HeapWord (%u) aligned", end_alignment_in_bytes, HeapWordSize);
 
   // If we've allocated nothing, simply return.
   if (_allocation_region == NULL) {
     return;
   }
 
   // If an end alignment was requested, insert filler objects.
   if (end_alignment_in_bytes != 0) {
     HeapWord* currtop = _allocation_region->top();
-    HeapWord* newtop = align_ptr_up(currtop, end_alignment_in_bytes);
+    HeapWord* newtop = align_up(currtop, end_alignment_in_bytes);
     size_t fill_size = pointer_delta(newtop, currtop);
     if (fill_size != 0) {
       if (fill_size < CollectedHeap::min_fill_size()) {
         // If the required fill is smaller than we can represent,
         // bump up to the next aligned address. We know we won't exceed the current
         // region boundary because the max supported alignment is smaller than the min
         // region size, and because the allocation code never leaves space smaller than
         // the min_fill_size at the top of the current allocation region.
-        newtop = align_ptr_up(currtop + CollectedHeap::min_fill_size(),
-                              end_alignment_in_bytes);
+        newtop = align_up(currtop + CollectedHeap::min_fill_size(),
+                          end_alignment_in_bytes);
         fill_size = pointer_delta(newtop, currtop);
       }
       HeapWord* fill = archive_mem_allocate(fill_size);
       CollectedHeap::fill_with_objects(fill, fill_size);
     }
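
The hunk above replaces the old alignment helpers is_size_aligned and align_ptr_up with is_aligned and align_up; the continuation line of the second align_up call is re-indented to match the shorter name, and the surrounding logic is unchanged. As a rough stand-alone sketch of the semantics these helpers provide (not HotSpot's implementation; the implementations and the main() example below are illustrative and assume the alignment is a power of two, as the HeapWord-aligned values asserted above are):

#include <cassert>
#include <cstddef>
#include <cstdint>

// Minimal stand-ins for the renamed helpers; assumes power-of-two alignments.
static inline bool is_aligned(uintptr_t value, size_t alignment) {
  return (value & (alignment - 1)) == 0;
}

template <typename T>
static inline T* align_up(T* ptr, size_t alignment) {
  uintptr_t p = reinterpret_cast<uintptr_t>(ptr);
  return reinterpret_cast<T*>((p + alignment - 1) & ~(uintptr_t)(alignment - 1));
}

int main() {
  char buffer[64];
  char* currtop = buffer + 3;
  // Round currtop up to the next 16-byte boundary, mirroring what
  // complete_archive() does with end_alignment_in_bytes.
  char* newtop = align_up(currtop, 16);
  assert(is_aligned(reinterpret_cast<uintptr_t>(newtop), 16));
  assert(newtop >= currtop && static_cast<size_t>(newtop - currtop) < 16);
  return 0;
}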