src/hotspot/share/gc/shared/threadLocalAllocBuffer.inline.hpp
changeset 49945 9425445633cf
parent 47216 71c04702a3d5
child 50070 c036b84c6bbf
comparing 49944:4690a2871b44 with 49945:9425445633cf
@@ -52,27 +52,24 @@
   }
   return NULL;
 }
 
 inline size_t ThreadLocalAllocBuffer::compute_size(size_t obj_size) {
-  const size_t aligned_obj_size = align_object_size(obj_size);
-
   // Compute the size for the new TLAB.
   // The "last" tlab may be smaller to reduce fragmentation.
   // unsafe_max_tlab_alloc is just a hint.
   const size_t available_size = Universe::heap()->unsafe_max_tlab_alloc(myThread()) /
                                                   HeapWordSize;
-  size_t new_tlab_size = MIN3(available_size, desired_size() + aligned_obj_size, max_size());
+  size_t new_tlab_size = MIN3(available_size, desired_size() + align_object_size(obj_size), max_size());
 
   // Make sure there's enough room for object and filler int[].
-  const size_t obj_plus_filler_size = aligned_obj_size + alignment_reserve();
-  if (new_tlab_size < obj_plus_filler_size) {
+  if (new_tlab_size < compute_min_size(obj_size)) {
     // If there isn't enough room for the allocation, return failure.
     log_trace(gc, tlab)("ThreadLocalAllocBuffer::compute_size(" SIZE_FORMAT ") returns failure",
                         obj_size);
     return 0;
   }
   log_trace(gc, tlab)("ThreadLocalAllocBuffer::compute_size(" SIZE_FORMAT ") returns " SIZE_FORMAT,
                       obj_size, new_tlab_size);
   return new_tlab_size;
 }
 
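For context on the clamp above: the new TLAB size is the smallest of three bounds. A minimal standalone sketch of that arithmetic, assuming illustrative word counts (MIN3 is HotSpot's three-way minimum macro; the real inputs come from the heap and the per-thread TLAB statistics):

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    // Standalone sketch of the MIN3 clamp in compute_size(); all values
    // are illustrative and given in heap words.
    static size_t min3(size_t a, size_t b, size_t c) {
      return std::min(a, std::min(b, c));
    }

    int main() {
      const size_t available_size = 8192;   // unsafe_max_tlab_alloc() / HeapWordSize (a hint)
      const size_t desired_size   = 4096;   // adaptive per-thread desired TLAB size
      const size_t max_size       = 16384;  // hard upper bound on TLAB size
      const size_t aligned_obj    = 6;      // align_object_size(obj_size) for this request

      // New TLAB: the smallest of what the heap can hand out, the desired
      // size plus the triggering object, and the hard cap.
      const size_t new_tlab_size = min3(available_size, desired_size + aligned_obj, max_size);
      printf("new_tlab_size = %zu words\n", new_tlab_size);  // prints 4102
      return 0;
    }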
    75 
       
@@ -79,5 +76,10 @@
+inline size_t ThreadLocalAllocBuffer::compute_min_size(size_t obj_size) {
+  const size_t aligned_obj_size = align_object_size(obj_size);
+  const size_t size_with_reserve = aligned_obj_size + alignment_reserve();
+  return MAX2(size_with_reserve, MinTLABSize);
+}
 
 void ThreadLocalAllocBuffer::record_slow_allocation(size_t obj_size) {
   // Raise size required to bypass TLAB next time. Why? Else there's
   // a risk that a thread that repeatedly allocates objects of one
   // size will get stuck on this slow path.
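The extracted compute_min_size() gives the requirement a hard floor at MinTLABSize. A standalone sketch of the MAX2 arithmetic, with illustrative values standing in for alignment_reserve() and the MinTLABSize flag:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    // Standalone sketch of compute_min_size(): the smallest useful TLAB
    // must hold the aligned object plus the reserve kept for the filler
    // int[], and may never drop below MinTLABSize. Values are illustrative.
    int main() {
      const size_t aligned_obj_size  = 6;    // align_object_size(obj_size), in words
      const size_t alignment_reserve = 2;    // room kept for the filler array header
      const size_t min_tlab_size     = 256;  // MinTLABSize, in words

      const size_t size_with_reserve = aligned_obj_size + alignment_reserve;
      const size_t min_size = std::max(size_with_reserve, min_tlab_size);
      printf("compute_min_size -> %zu words\n", min_size);  // prints 256: the floor wins
      return 0;
    }

record_slow_allocation()'s body is cut off in this excerpt. As a purely hypothetical sketch of the comment's point (the struct and field names below are invented): raising a waste threshold on every slow-path hit means a thread that keeps allocating one over-sized value eventually retires and refills its TLAB instead of looping on the slow path.

    #include <cstddef>
    #include <cstdio>

    // Hypothetical sketch, names invented: a TLAB is only retired and
    // refilled when its remaining free space is small enough to waste.
    // Bumping that threshold on each slow-path allocation bounds how many
    // times one thread can take the slow path in a row.
    struct TlabSketch {
      size_t free_words         = 500;  // space left in the current TLAB
      size_t refill_waste_limit = 100;  // free space we are willing to discard
      size_t waste_increment    = 200;  // bump applied per slow-path allocation

      bool allocate_oversized() {
        if (free_words > refill_waste_limit) {
          refill_waste_limit += waste_increment;  // the record_slow_allocation() idea
          return false;                           // object went outside the TLAB
        }
        return true;                              // retire the TLAB and refill
      }
    };

    int main() {
      TlabSketch t;
      int slow_hits = 0;
      while (!t.allocate_oversized()) slow_hits++;
      printf("refilled after %d slow allocations\n", slow_hits);  // prints 2
      return 0;
    }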