8151436: Leaner ArrayAllocator and BitMaps
author stefank
Wed, 09 Mar 2016 12:44:12 +0100
changeset 37057 03b3e1870228
parent 37056 109f610020fa
child 37058 47faedc8085b
8151436: Leaner ArrayAllocator and BitMaps
Reviewed-by: tschatzl, pliden, kbarrett
hotspot/src/share/vm/gc/g1/g1HotCardCache.cpp
hotspot/src/share/vm/gc/g1/g1HotCardCache.hpp
hotspot/src/share/vm/gc/shared/taskqueue.hpp
hotspot/src/share/vm/gc/shared/taskqueue.inline.hpp
hotspot/src/share/vm/memory/allocation.hpp
hotspot/src/share/vm/memory/allocation.inline.hpp
hotspot/src/share/vm/utilities/bitMap.cpp
hotspot/src/share/vm/utilities/bitMap.hpp
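
The patch turns ArrayAllocator from a stateful value object (which remembered the address, size and malloc-vs-mmap choice of one allocation) into an AllStatic class, so every owner of an array drops its allocator member and instead passes the pointer and length back when freeing. The following is a minimal sketch of that calling-convention change, not HotSpot code: plain malloc/free and invented names (ArrayAllocatorSketch, HotCacheSketch) stand in for the real classes.

// Sketch only: a stand-in for HotSpot's ArrayAllocator<E, F>, reduced to
// plain malloc/free so the calling-convention change is visible in isolation.
// The real class also chooses between malloc and mmap (see allocation.inline.hpp below).
#include <cstddef>
#include <cstdlib>

template <class E>
class ArrayAllocatorSketch /* : public AllStatic in HotSpot */ {
 public:
  static E* allocate(size_t length) {
    return (E*)std::malloc(length * sizeof(E));
  }
  static void free(E* addr, size_t /*length*/) {
    std::free(addr);
  }
};

struct HotCacheSketch {
  // Before the patch the owning object carried an ArrayAllocator member
  // (e.g. _hot_cache_memory) that remembered the address, size and
  // malloc-vs-mmap decision. After the patch only the raw array remains,
  // and the length is handed back explicitly when freeing.
  long** _hot_cache;      // long** stands in for jbyte**
  size_t _hot_cache_size;

  void setup(size_t size) {
    _hot_cache_size = size;
    _hot_cache = ArrayAllocatorSketch<long*>::allocate(size);       // was: _hot_cache_memory.allocate(size)
  }
  void teardown() {
    ArrayAllocatorSketch<long*>::free(_hot_cache, _hot_cache_size); // was: _hot_cache_memory.free()
    _hot_cache = NULL;
  }
};

int main() {
  HotCacheSketch c;
  c.setup(1024);
  c.teardown();
  return 0;
}
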
--- a/hotspot/src/share/vm/gc/g1/g1HotCardCache.cpp	Thu Feb 25 13:08:19 2016 +0100
+++ b/hotspot/src/share/vm/gc/g1/g1HotCardCache.cpp	Wed Mar 09 12:44:12 2016 +0100
@@ -36,7 +36,7 @@
     _use_cache = true;
 
     _hot_cache_size = (size_t)1 << G1ConcRSLogCacheSize;
-    _hot_cache = _hot_cache_memory.allocate(_hot_cache_size);
+    _hot_cache = ArrayAllocator<jbyte*, mtGC>::allocate(_hot_cache_size);
 
     reset_hot_cache_internal();
 
@@ -51,7 +51,7 @@
 G1HotCardCache::~G1HotCardCache() {
   if (default_use_cache()) {
     assert(_hot_cache != NULL, "Logic");
-    _hot_cache_memory.free();
+    ArrayAllocator<jbyte*, mtGC>::free(_hot_cache, _hot_cache_size);
     _hot_cache = NULL;
   }
 }
--- a/hotspot/src/share/vm/gc/g1/g1HotCardCache.hpp	Thu Feb 25 13:08:19 2016 +0100
+++ b/hotspot/src/share/vm/gc/g1/g1HotCardCache.hpp	Wed Mar 09 12:44:12 2016 +0100
@@ -61,7 +61,6 @@
 
   G1CardCounts      _card_counts;
 
-  ArrayAllocator<jbyte*, mtGC> _hot_cache_memory;
 
   // The card cache table
   jbyte**           _hot_cache;
--- a/hotspot/src/share/vm/gc/shared/taskqueue.hpp	Thu Feb 25 13:08:19 2016 +0100
+++ b/hotspot/src/share/vm/gc/shared/taskqueue.hpp	Wed Mar 09 12:44:12 2016 +0100
@@ -248,7 +248,6 @@
 
 template <class E, MEMFLAGS F, unsigned int N = TASKQUEUE_SIZE>
 class GenericTaskQueue: public TaskQueueSuper<N, F> {
-  ArrayAllocator<E, F> _array_allocator;
 protected:
   typedef typename TaskQueueSuper<N, F>::Age Age;
   typedef typename TaskQueueSuper<N, F>::idx_t idx_t;
--- a/hotspot/src/share/vm/gc/shared/taskqueue.inline.hpp	Thu Feb 25 13:08:19 2016 +0100
+++ b/hotspot/src/share/vm/gc/shared/taskqueue.inline.hpp	Wed Mar 09 12:44:12 2016 +0100
@@ -44,12 +44,13 @@
 
 template<class E, MEMFLAGS F, unsigned int N>
 inline void GenericTaskQueue<E, F, N>::initialize() {
-  _elems = _array_allocator.allocate(N);
+  _elems = ArrayAllocator<E, F>::allocate(N);
 }
 
 template<class E, MEMFLAGS F, unsigned int N>
 inline GenericTaskQueue<E, F, N>::~GenericTaskQueue() {
-  FREE_C_HEAP_ARRAY(E, _elems);
+  assert(false, "This code is currently never called");
+  ArrayAllocator<E, F>::free(const_cast<E*>(_elems), N);
 }
 
 template<class E, MEMFLAGS F, unsigned int N>
--- a/hotspot/src/share/vm/memory/allocation.hpp	Thu Feb 25 13:08:19 2016 +0100
+++ b/hotspot/src/share/vm/memory/allocation.hpp	Wed Mar 09 12:44:12 2016 +0100
@@ -724,30 +724,23 @@
 // is set so that we always use malloc except for Solaris where we set the
 // limit to get mapped memory.
 template <class E, MEMFLAGS F>
-class ArrayAllocator VALUE_OBJ_CLASS_SPEC {
-  char* _addr;
-  bool _use_malloc;
-  size_t _size;
-  bool _free_in_destructor;
+class ArrayAllocator : public AllStatic {
+ private:
+  static bool should_use_malloc(size_t length);
+
+  static size_t size_for_malloc(size_t length);
+  static size_t size_for_mmap(size_t length);
 
-  static bool should_use_malloc(size_t size) {
-    return size < ArrayAllocatorMallocLimit;
-  }
+  static E* allocate_malloc(size_t length);
+  static E* allocate_mmap(size_t length);
 
-  static char* allocate_inner(size_t& size, bool& use_malloc);
+  static void free_malloc(E* addr, size_t length);
+  static void free_mmap(E* addr, size_t length);
+
  public:
-  ArrayAllocator(bool free_in_destructor = true) :
-    _addr(NULL), _use_malloc(false), _size(0), _free_in_destructor(free_in_destructor) { }
-
-  ~ArrayAllocator() {
-    if (_free_in_destructor) {
-      free();
-    }
-  }
-
-  E* allocate(size_t length);
-  E* reallocate(size_t new_length);
-  void free();
+  static E* allocate(size_t length);
+  static E* reallocate(E* old_addr, size_t old_length, size_t new_length);
+  static void free(E* addr, size_t length);
 };
 
 #endif // SHARE_VM_MEMORY_ALLOCATION_HPP
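
The comment kept above the class (malloc below ArrayAllocatorMallocLimit, mapped memory above it, with the limit tuned on Solaris) is now implemented entirely through static helpers, and free() re-derives the malloc-vs-mmap decision from the length alone. Below is a standalone, Linux-only sketch of that threshold pattern; MallocLimitSketch is an assumed value, raw mmap/munmap stand in for os::reserve_memory/os::commit_memory_or_exit/os::release_memory, and NMT tracking plus the out-of-memory exits are omitted.

// POSIX/Linux-only sketch of the threshold pattern the new static ArrayAllocator
// implements: small arrays come from malloc, large ones from anonymous mmap,
// and free() recomputes which path was taken from the length alone.
#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <sys/mman.h>
#include <unistd.h>

static const size_t MallocLimitSketch = 64 * 1024;  // assumed value, not HotSpot's default

template <class E>
class StaticArrayAllocatorSketch {
 private:
  static size_t size_for_malloc(size_t length) { return length * sizeof(E); }

  static size_t size_for_mmap(size_t length) {
    size_t size = length * sizeof(E);
    size_t granularity = (size_t)sysconf(_SC_PAGESIZE);    // plays the role of os::vm_allocation_granularity()
    return (size + granularity - 1) & ~(granularity - 1);  // align_size_up
  }

  static bool should_use_malloc(size_t length) {
    return size_for_malloc(length) < MallocLimitSketch;
  }

 public:
  static E* allocate(size_t length) {
    if (should_use_malloc(length)) {
      return (E*)std::malloc(size_for_malloc(length));
    }
    void* addr = mmap(NULL, size_for_mmap(length), PROT_READ | PROT_WRITE,
                      MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    return (addr == MAP_FAILED) ? NULL : (E*)addr;
  }

  static void free(E* addr, size_t length) {
    if (addr == NULL) return;
    if (should_use_malloc(length)) {
      std::free(addr);
    } else {
      munmap(addr, size_for_mmap(length));
    }
  }
};

int main() {
  int* small = StaticArrayAllocatorSketch<int>::allocate(16);       // malloc path
  int* large = StaticArrayAllocatorSketch<int>::allocate(1 << 20);  // mmap path
  std::printf("small=%p large=%p\n", (void*)small, (void*)large);
  StaticArrayAllocatorSketch<int>::free(small, 16);
  StaticArrayAllocatorSketch<int>::free(large, 1 << 20);
  return 0;
}
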
--- a/hotspot/src/share/vm/memory/allocation.inline.hpp	Thu Feb 25 13:08:19 2016 +0100
+++ b/hotspot/src/share/vm/memory/allocation.inline.hpp	Wed Mar 09 12:44:12 2016 +0100
@@ -151,66 +151,87 @@
 }
 
 template <class E, MEMFLAGS F>
-char* ArrayAllocator<E, F>::allocate_inner(size_t &size, bool &use_malloc) {
-  char* addr = NULL;
+size_t ArrayAllocator<E, F>::size_for_malloc(size_t length) {
+  return length * sizeof(E);
+}
+
+template <class E, MEMFLAGS F>
+size_t ArrayAllocator<E, F>::size_for_mmap(size_t length) {
+  size_t size = length * sizeof(E);
+  int alignment = os::vm_allocation_granularity();
+  return align_size_up(size, alignment);
+}
 
-  if (use_malloc) {
-    addr = AllocateHeap(size, F);
-    if (addr == NULL && size >= (size_t)os::vm_allocation_granularity()) {
-      // malloc failed let's try with mmap instead
-      use_malloc = false;
-    } else {
-      return addr;
-    }
-  }
+template <class E, MEMFLAGS F>
+bool ArrayAllocator<E, F>::should_use_malloc(size_t length) {
+  return size_for_malloc(length) < ArrayAllocatorMallocLimit;
+}
 
+template <class E, MEMFLAGS F>
+E* ArrayAllocator<E, F>::allocate_malloc(size_t length) {
+  return (E*)AllocateHeap(size_for_malloc(length), F);
+}
+
+template <class E, MEMFLAGS F>
+E* ArrayAllocator<E, F>::allocate_mmap(size_t length) {
+  size_t size = size_for_mmap(length);
   int alignment = os::vm_allocation_granularity();
-  size = align_size_up(size, alignment);
 
-  addr = os::reserve_memory(size, NULL, alignment, F);
+  char* addr = os::reserve_memory(size, NULL, alignment, F);
   if (addr == NULL) {
     vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
   }
 
   os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
-  return addr;
+
+  return (E*)addr;
 }
 
 template <class E, MEMFLAGS F>
 E* ArrayAllocator<E, F>::allocate(size_t length) {
-  assert(_addr == NULL, "Already in use");
+  if (should_use_malloc(length)) {
+    return allocate_malloc(length);
+  }
 
-  _size = sizeof(E) * length;
-  _use_malloc = should_use_malloc(_size);
-  _addr = allocate_inner(_size, _use_malloc);
-
-  return (E*)_addr;
+  return allocate_mmap(length);
 }
 
 template <class E, MEMFLAGS F>
-E* ArrayAllocator<E, F>::reallocate(size_t new_length) {
-  size_t new_size = sizeof(E) * new_length;
-  bool use_malloc = should_use_malloc(new_size);
-  char* new_addr = allocate_inner(new_size, use_malloc);
+E* ArrayAllocator<E, F>::reallocate(E* old_addr, size_t old_length, size_t new_length) {
+  E* new_addr = (new_length > 0)
+      ? allocate(new_length)
+      : NULL;
 
-  memcpy(new_addr, _addr, MIN2(new_size, _size));
+  if (new_addr != NULL && old_addr != NULL) {
+    memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
+  }
 
-  free();
-  _size = new_size;
-  _use_malloc = use_malloc;
-  _addr = new_addr;
-  return (E*)new_addr;
+  if (old_addr != NULL) {
+    free(old_addr, old_length);
+  }
+
+  return new_addr;
 }
 
 template<class E, MEMFLAGS F>
-void ArrayAllocator<E, F>::free() {
-  if (_addr != NULL) {
-    if (_use_malloc) {
-      FreeHeap(_addr);
+void ArrayAllocator<E, F>::free_malloc(E* addr, size_t /*length*/) {
+  FreeHeap(addr);
+}
+
+template<class E, MEMFLAGS F>
+void ArrayAllocator<E, F>::free_mmap(E* addr, size_t length) {
+  bool result = os::release_memory((char*)addr, size_for_mmap(length));
+  assert(result, "Failed to release memory");
+}
+
+template<class E, MEMFLAGS F>
+void ArrayAllocator<E, F>::free(E* addr, size_t length) {
+  if (addr != NULL) {
+    if (should_use_malloc(length)) {
+      free_malloc(addr, length);
     } else {
-      os::release_memory(_addr, _size);
+      free_mmap(addr, length);
     }
-    _addr = NULL;
   }
 }
 
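reallocate() now takes the old address plus both lengths and is simply allocate-copy-free: the new array is allocated first, MIN2(old_length, new_length) elements are copied, and the old array is freed, so a new_length of zero releases the old array and returns NULL. The self-contained sketch below exercises that contract using new[]/delete[] instead of the malloc/mmap machinery above; reallocate_sketch is an invented name.

// Standalone sketch of the new reallocate(old_addr, old_length, new_length)
// contract: allocate the new array first, copy MIN2(old, new) elements, then
// free the old one; new_length == 0 frees the old array and returns NULL.
#include <cassert>
#include <cstddef>
#include <cstring>

template <class E>
static E* reallocate_sketch(E* old_addr, size_t old_length, size_t new_length) {
  E* new_addr = (new_length > 0) ? new E[new_length] : NULL;

  if (new_addr != NULL && old_addr != NULL) {
    size_t copy = (old_length < new_length) ? old_length : new_length;  // MIN2
    std::memcpy(new_addr, old_addr, copy * sizeof(E));                  // fine for POD element types
  }

  if (old_addr != NULL) {
    delete[] old_addr;  // the real code re-derives malloc vs mmap from old_length
  }
  return new_addr;
}

int main() {
  int* a = new int[4];
  for (int i = 0; i < 4; i++) a[i] = i;

  a = reallocate_sketch(a, 4, 8);   // grow: first four elements preserved
  assert(a[3] == 3);

  a = reallocate_sketch(a, 8, 0);   // shrink to zero: old array freed, NULL returned
  assert(a == NULL);
  return 0;
}
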
--- a/hotspot/src/share/vm/utilities/bitMap.cpp	Thu Feb 25 13:08:19 2016 +0100
+++ b/hotspot/src/share/vm/utilities/bitMap.cpp	Wed Mar 09 12:44:12 2016 +0100
@@ -30,14 +30,14 @@
 #include "utilities/copy.hpp"
 
 BitMap::BitMap(bm_word_t* map, idx_t size_in_bits) :
-  _map(map), _size(size_in_bits), _map_allocator(false)
+  _map(map), _size(size_in_bits)
 {
   assert(sizeof(bm_word_t) == BytesPerWord, "Implementation assumption.");
 }
 
 
 BitMap::BitMap(idx_t size_in_bits, bool in_resource_area) :
-  _map(NULL), _size(0), _map_allocator(false)
+  _map(NULL), _size(0)
 {
   assert(sizeof(bm_word_t) == BytesPerWord, "Implementation assumption.");
   resize(size_in_bits, in_resource_area);
@@ -54,7 +54,7 @@
     Copy::disjoint_words((HeapWord*)old_map, (HeapWord*) _map,
                          MIN2(old_size_in_words, new_size_in_words));
   } else {
-    _map = _map_allocator.reallocate(new_size_in_words);
+    _map = ArrayAllocator<bm_word_t, mtInternal>::reallocate(old_map, old_size_in_words, new_size_in_words);
   }
 
   if (new_size_in_words > old_size_in_words) {
--- a/hotspot/src/share/vm/utilities/bitMap.hpp	Thu Feb 25 13:08:19 2016 +0100
+++ b/hotspot/src/share/vm/utilities/bitMap.hpp	Wed Mar 09 12:44:12 2016 +0100
@@ -48,7 +48,6 @@
   } RangeSizeHint;
 
  private:
-  ArrayAllocator<bm_word_t, mtInternal> _map_allocator;
   bm_word_t* _map;     // First word in bitmap
   idx_t      _size;    // Size of bitmap (in bits)
 
@@ -114,7 +113,7 @@
  public:
 
   // Constructs a bitmap with no map, and size 0.
-  BitMap() : _map(NULL), _size(0), _map_allocator(false) {}
+  BitMap() : _map(NULL), _size(0) {}
 
   // Constructs a bitmap with the given map and size.
   BitMap(bm_word_t* map, idx_t size_in_bits);
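
Dropping the embedded allocator members is where the "leaner" in the title comes from: every BitMap (and every GenericTaskQueue) no longer carries the old _addr/_use_malloc/_size/_free_in_destructor state. The small illustration below shows the per-instance footprint difference; the member layout is invented for the comparison and is not HotSpot's actual layout.

// Illustration of the footprint effect of removing the per-instance allocator.
#include <cstddef>
#include <cstdio>

struct OldStyleBitMapSketch {
  // embedded allocator state (illustrative, mimicking the removed members)
  char*  _alloc_addr;
  bool   _alloc_use_malloc;
  size_t _alloc_size;
  bool   _alloc_free_in_destructor;
  // the bitmap itself
  unsigned long* _map;
  size_t         _size;
};

struct NewStyleBitMapSketch {
  unsigned long* _map;   // first word in bitmap
  size_t         _size;  // size of bitmap (in bits)
};

int main() {
  std::printf("old: %zu bytes, new: %zu bytes per instance\n",
              sizeof(OldStyleBitMapSketch), sizeof(NewStyleBitMapSketch));
  return 0;
}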