hotspot/src/share/vm/services/memReporter.cpp
changeset 25946:1572c9f03fb9
parent    24424:2658d7834c6e
child     46489:40abcea5a9d5
comparison 25902:7e9ffb1fe1df -> 25946:1572c9f03fb9
--- a/hotspot/src/share/vm/services/memReporter.cpp	25902:7e9ffb1fe1df
+++ b/hotspot/src/share/vm/services/memReporter.cpp	25946:1572c9f03fb9
@@ -20,620 +20,597 @@
  * or visit www.oracle.com if you need additional information or have any
  * questions.
  *
  */
 #include "precompiled.hpp"
-#include "classfile/systemDictionary.hpp"
-#include "runtime/os.hpp"
-#include "services/memReporter.hpp"
-#include "services/memPtrArray.hpp"
-#include "services/memTracker.hpp"
-
-PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
-
-const char* BaselineOutputer::memory_unit(size_t scale) {
-  switch(scale) {
-    case K: return "KB";
-    case M: return "MB";
-    case G: return "GB";
-  }
-  ShouldNotReachHere();
-  return NULL;
-}
-
-
-void BaselineReporter::report_baseline(const MemBaseline& baseline, bool summary_only) {
-  assert(MemTracker::is_on(), "Native memory tracking is off");
-  _outputer.start(scale());
-  _outputer.total_usage(
-    amount_in_current_scale(baseline.total_malloc_amount() + baseline.total_reserved_amount()),
-    amount_in_current_scale(baseline.total_malloc_amount() + baseline.total_committed_amount()));
-
-  _outputer.num_of_classes(baseline.number_of_classes());
-  _outputer.num_of_threads(baseline.number_of_threads());
-
-  report_summaries(baseline);
-  if (!summary_only && MemTracker::track_callsite()) {
-    report_virtual_memory_map(baseline);
-    report_callsites(baseline);
-  }
-  _outputer.done();
-}
-
-void BaselineReporter::report_summaries(const MemBaseline& baseline) {
-  _outputer.start_category_summary();
-  MEMFLAGS type;
-
-  for (int index = 0; index < NUMBER_OF_MEMORY_TYPE; index ++) {
-    type = MemBaseline::MemType2NameMap[index]._flag;
-    _outputer.category_summary(type,
-      amount_in_current_scale(baseline.reserved_amount(type)),
-      amount_in_current_scale(baseline.committed_amount(type)),
-      amount_in_current_scale(baseline.malloc_amount(type)),
-      baseline.malloc_count(type),
-      amount_in_current_scale(baseline.arena_amount(type)),
-      baseline.arena_count(type));
-  }
-
-  _outputer.done_category_summary();
-}
-
-void BaselineReporter::report_virtual_memory_map(const MemBaseline& baseline) {
-  _outputer.start_virtual_memory_map();
-  MemBaseline* pBL = const_cast<MemBaseline*>(&baseline);
-  MemPointerArrayIteratorImpl itr = MemPointerArrayIteratorImpl(pBL->_vm_map);
-  VMMemRegionEx* rgn = (VMMemRegionEx*)itr.current();
-  while (rgn != NULL) {
-    if (rgn->is_reserved_region()) {
-      _outputer.reserved_memory_region(FLAGS_TO_MEMORY_TYPE(rgn->flags()),
-        rgn->base(), rgn->base() + rgn->size(), amount_in_current_scale(rgn->size()), rgn->pc());
-    } else {
-      _outputer.committed_memory_region(rgn->base(), rgn->base() + rgn->size(),
-        amount_in_current_scale(rgn->size()), rgn->pc());
-    }
-    rgn = (VMMemRegionEx*)itr.next();
-  }
-
-  _outputer.done_virtual_memory_map();
-}
-
-void BaselineReporter::report_callsites(const MemBaseline& baseline) {
-  _outputer.start_callsite();
-  MemBaseline* pBL = const_cast<MemBaseline*>(&baseline);
-
-  pBL->_malloc_cs->sort((FN_SORT)MemBaseline::bl_malloc_sort_by_size);
-  pBL->_vm_cs->sort((FN_SORT)MemBaseline::bl_vm_sort_by_size);
-
-  // walk malloc callsites
-  MemPointerArrayIteratorImpl malloc_itr(pBL->_malloc_cs);
-  MallocCallsitePointer*      malloc_callsite =
-                  (MallocCallsitePointer*)malloc_itr.current();
-  while (malloc_callsite != NULL) {
-    _outputer.malloc_callsite(malloc_callsite->addr(),
-        amount_in_current_scale(malloc_callsite->amount()), malloc_callsite->count());
-    malloc_callsite = (MallocCallsitePointer*)malloc_itr.next();
-  }
-
-  // walk virtual memory callsite
-  MemPointerArrayIteratorImpl vm_itr(pBL->_vm_cs);
-  VMCallsitePointer*          vm_callsite = (VMCallsitePointer*)vm_itr.current();
-  while (vm_callsite != NULL) {
-    _outputer.virtual_memory_callsite(vm_callsite->addr(),
-      amount_in_current_scale(vm_callsite->reserved_amount()),
-      amount_in_current_scale(vm_callsite->committed_amount()));
-    vm_callsite = (VMCallsitePointer*)vm_itr.next();
-  }
-  pBL->_malloc_cs->sort((FN_SORT)MemBaseline::bl_malloc_sort_by_pc);
-  pBL->_vm_cs->sort((FN_SORT)MemBaseline::bl_vm_sort_by_pc);
-  _outputer.done_callsite();
-}
-
-void BaselineReporter::diff_baselines(const MemBaseline& cur, const MemBaseline& prev,
-  bool summary_only) {
-  assert(MemTracker::is_on(), "Native memory tracking is off");
-  _outputer.start(scale());
-  size_t total_reserved = cur.total_malloc_amount() + cur.total_reserved_amount();
-  size_t total_committed = cur.total_malloc_amount() + cur.total_committed_amount();
-
-  _outputer.diff_total_usage(
-    amount_in_current_scale(total_reserved), amount_in_current_scale(total_committed),
-    diff_in_current_scale(total_reserved,  (prev.total_malloc_amount() + prev.total_reserved_amount())),
-    diff_in_current_scale(total_committed, (prev.total_committed_amount() + prev.total_malloc_amount())));
-
-  _outputer.diff_num_of_classes(cur.number_of_classes(),
-       diff(cur.number_of_classes(), prev.number_of_classes()));
-  _outputer.diff_num_of_threads(cur.number_of_threads(),
-       diff(cur.number_of_threads(), prev.number_of_threads()));
-
-  diff_summaries(cur, prev);
-  if (!summary_only && MemTracker::track_callsite()) {
-    diff_callsites(cur, prev);
-  }
-  _outputer.done();
-}
-
-void BaselineReporter::diff_summaries(const MemBaseline& cur, const MemBaseline& prev) {
-  _outputer.start_category_summary();
-  MEMFLAGS type;
-
-  for (int index = 0; index < NUMBER_OF_MEMORY_TYPE; index ++) {
-    type = MemBaseline::MemType2NameMap[index]._flag;
-    _outputer.diff_category_summary(type,
-      amount_in_current_scale(cur.reserved_amount(type)),
-      amount_in_current_scale(cur.committed_amount(type)),
-      amount_in_current_scale(cur.malloc_amount(type)),
-      cur.malloc_count(type),
-      amount_in_current_scale(cur.arena_amount(type)),
-      cur.arena_count(type),
-      diff_in_current_scale(cur.reserved_amount(type), prev.reserved_amount(type)),
-      diff_in_current_scale(cur.committed_amount(type), prev.committed_amount(type)),
-      diff_in_current_scale(cur.malloc_amount(type), prev.malloc_amount(type)),
-      diff(cur.malloc_count(type), prev.malloc_count(type)),
-      diff_in_current_scale(cur.arena_amount(type), prev.arena_amount(type)),
-      diff(cur.arena_count(type), prev.arena_count(type)));
-  }
-
-  _outputer.done_category_summary();
-}
-
-void BaselineReporter::diff_callsites(const MemBaseline& cur, const MemBaseline& prev) {
-  _outputer.start_callsite();
-  MemBaseline* pBL_cur = const_cast<MemBaseline*>(&cur);
-  MemBaseline* pBL_prev = const_cast<MemBaseline*>(&prev);
-
-  // walk malloc callsites
-  MemPointerArrayIteratorImpl cur_malloc_itr(pBL_cur->_malloc_cs);
-  MemPointerArrayIteratorImpl prev_malloc_itr(pBL_prev->_malloc_cs);
-
-  MallocCallsitePointer*      cur_malloc_callsite =
-                  (MallocCallsitePointer*)cur_malloc_itr.current();
-  MallocCallsitePointer*      prev_malloc_callsite =
-                  (MallocCallsitePointer*)prev_malloc_itr.current();
-
-  while (cur_malloc_callsite != NULL || prev_malloc_callsite != NULL) {
-    if (prev_malloc_callsite == NULL) {
-      assert(cur_malloc_callsite != NULL, "sanity check");
-      // this is a new callsite
-      _outputer.diff_malloc_callsite(cur_malloc_callsite->addr(),
-        amount_in_current_scale(cur_malloc_callsite->amount()),
-        cur_malloc_callsite->count(),
-        diff_in_current_scale(cur_malloc_callsite->amount(), 0),
-        diff(cur_malloc_callsite->count(), 0));
-      cur_malloc_callsite = (MallocCallsitePointer*)cur_malloc_itr.next();
-    } else if (cur_malloc_callsite == NULL) {
-      assert(prev_malloc_callsite != NULL, "Sanity check");
-      // this callsite is already gone
-      _outputer.diff_malloc_callsite(prev_malloc_callsite->addr(),
-        0, 0,
-        diff_in_current_scale(0, prev_malloc_callsite->amount()),
-        diff(0, prev_malloc_callsite->count()));
-      prev_malloc_callsite = (MallocCallsitePointer*)prev_malloc_itr.next();
-    } else {
-      assert(cur_malloc_callsite  != NULL,  "Sanity check");
-      assert(prev_malloc_callsite != NULL,  "Sanity check");
-      if (cur_malloc_callsite->addr() < prev_malloc_callsite->addr()) {
-        // this is a new callsite
-        _outputer.diff_malloc_callsite(cur_malloc_callsite->addr(),
-          amount_in_current_scale(cur_malloc_callsite->amount()),
-          cur_malloc_callsite->count(),
-          diff_in_current_scale(cur_malloc_callsite->amount(), 0),
-          diff(cur_malloc_callsite->count(), 0));
-          cur_malloc_callsite = (MallocCallsitePointer*)cur_malloc_itr.next();
-      } else if (cur_malloc_callsite->addr() > prev_malloc_callsite->addr()) {
-        // this callsite is already gone
-        _outputer.diff_malloc_callsite(prev_malloc_callsite->addr(),
-          0, 0,
-          diff_in_current_scale(0, prev_malloc_callsite->amount()),
-          diff(0, prev_malloc_callsite->count()));
-        prev_malloc_callsite = (MallocCallsitePointer*)prev_malloc_itr.next();
-      } else {
-        // the same callsite
-        _outputer.diff_malloc_callsite(cur_malloc_callsite->addr(),
-          amount_in_current_scale(cur_malloc_callsite->amount()),
-          cur_malloc_callsite->count(),
-          diff_in_current_scale(cur_malloc_callsite->amount(), prev_malloc_callsite->amount()),
-          diff(cur_malloc_callsite->count(), prev_malloc_callsite->count()));
-        cur_malloc_callsite = (MallocCallsitePointer*)cur_malloc_itr.next();
-        prev_malloc_callsite = (MallocCallsitePointer*)prev_malloc_itr.next();
-      }
-    }
-  }
-
-  // walk virtual memory callsite
-  MemPointerArrayIteratorImpl cur_vm_itr(pBL_cur->_vm_cs);
-  MemPointerArrayIteratorImpl prev_vm_itr(pBL_prev->_vm_cs);
-  VMCallsitePointer*          cur_vm_callsite = (VMCallsitePointer*)cur_vm_itr.current();
-  VMCallsitePointer*          prev_vm_callsite = (VMCallsitePointer*)prev_vm_itr.current();
-  while (cur_vm_callsite != NULL || prev_vm_callsite != NULL) {
-    if (prev_vm_callsite == NULL || cur_vm_callsite->addr() < prev_vm_callsite->addr()) {
-      // this is a new callsite
-      _outputer.diff_virtual_memory_callsite(cur_vm_callsite->addr(),
-        amount_in_current_scale(cur_vm_callsite->reserved_amount()),
-        amount_in_current_scale(cur_vm_callsite->committed_amount()),
-        diff_in_current_scale(cur_vm_callsite->reserved_amount(), 0),
-        diff_in_current_scale(cur_vm_callsite->committed_amount(), 0));
-      cur_vm_callsite = (VMCallsitePointer*)cur_vm_itr.next();
-    } else if (cur_vm_callsite == NULL || cur_vm_callsite->addr() > prev_vm_callsite->addr()) {
-      // this callsite is already gone
-      _outputer.diff_virtual_memory_callsite(prev_vm_callsite->addr(),
-        amount_in_current_scale(0),
-        amount_in_current_scale(0),
-        diff_in_current_scale(0, prev_vm_callsite->reserved_amount()),
-        diff_in_current_scale(0, prev_vm_callsite->committed_amount()));
-      prev_vm_callsite = (VMCallsitePointer*)prev_vm_itr.next();
-    } else { // the same callsite
-      _outputer.diff_virtual_memory_callsite(cur_vm_callsite->addr(),
-        amount_in_current_scale(cur_vm_callsite->reserved_amount()),
-        amount_in_current_scale(cur_vm_callsite->committed_amount()),
-        diff_in_current_scale(cur_vm_callsite->reserved_amount(), prev_vm_callsite->reserved_amount()),
-        diff_in_current_scale(cur_vm_callsite->committed_amount(), prev_vm_callsite->committed_amount()));
-      cur_vm_callsite  = (VMCallsitePointer*)cur_vm_itr.next();
-      prev_vm_callsite = (VMCallsitePointer*)prev_vm_itr.next();
-    }
-  }
-
-  _outputer.done_callsite();
-}
-
-size_t BaselineReporter::amount_in_current_scale(size_t amt) const {
-  return (size_t)(((float)amt/(float)_scale) + 0.5);
-}
-
-int BaselineReporter::diff_in_current_scale(size_t value1, size_t value2) const {
-  return (int)(((float)value1 - (float)value2)/((float)_scale) + 0.5);
-}
-
-int BaselineReporter::diff(size_t value1, size_t value2) const {
-  return ((int)value1 - (int)value2);
-}
-
-void BaselineTTYOutputer::start(size_t scale, bool report_diff) {
-  _scale = scale;
-  _output->print_cr(" ");
-  _output->print_cr("Native Memory Tracking:");
-  _output->print_cr(" ");
-}
-
-void BaselineTTYOutputer::done() {
-
-}
-
-void BaselineTTYOutputer::total_usage(size_t total_reserved, size_t total_committed) {
-  const char* unit = memory_unit(_scale);
-  _output->print_cr("Total:  reserved=%d%s,  committed=%d%s",
-    total_reserved, unit, total_committed, unit);
-}
-
-void BaselineTTYOutputer::start_category_summary() {
-  _output->print_cr(" ");
-}
-
-/**
- * report a summary of memory type
- */
-void BaselineTTYOutputer::category_summary(MEMFLAGS type,
-  size_t reserved_amt, size_t committed_amt, size_t malloc_amt,
-  size_t malloc_count, size_t arena_amt, size_t arena_count) {
-
-  // we report mtThreadStack under mtThread category
-  if (type == mtThreadStack) {
-    assert(malloc_amt == 0 && malloc_count == 0 && arena_amt == 0,
-      "Just check");
-    _thread_stack_reserved = reserved_amt;
-    _thread_stack_committed = committed_amt;
-  } else {
-    const char* unit = memory_unit(_scale);
-    size_t total_reserved = (reserved_amt + malloc_amt + arena_amt);
-    size_t total_committed = (committed_amt + malloc_amt + arena_amt);
-    if (type == mtThread) {
-      total_reserved += _thread_stack_reserved;
-      total_committed += _thread_stack_committed;
-    }
-
-    if (total_reserved > 0) {
-      _output->print_cr("-%26s (reserved=%d%s, committed=%d%s)",
-        MemBaseline::type2name(type), total_reserved, unit,
-        total_committed, unit);
-
-      if (type == mtClass) {
-        _output->print_cr("%27s (classes #%d)", " ", _num_of_classes);
-      } else if (type == mtThread) {
-        _output->print_cr("%27s (thread #%d)", " ", _num_of_threads);
-        _output->print_cr("%27s (stack: reserved=%d%s, committed=%d%s)", " ",
-          _thread_stack_reserved, unit, _thread_stack_committed, unit);
-      }
-
-      if (malloc_amt > 0) {
-        if (type != mtChunk) {
-          _output->print_cr("%27s (malloc=%d%s, #%d)", " ", malloc_amt, unit,
-            malloc_count);
-        } else {
-          _output->print_cr("%27s (malloc=%d%s)", " ", malloc_amt, unit);
-        }
-      }
-
-      if (reserved_amt > 0) {
-        _output->print_cr("%27s (mmap: reserved=%d%s, committed=%d%s)",
-          " ", reserved_amt, unit, committed_amt, unit);
-      }
-
-      if (arena_amt > 0) {
-        _output->print_cr("%27s (arena=%d%s, #%d)", " ", arena_amt, unit, arena_count);
-      }
-
-      _output->print_cr(" ");
-    }
-  }
-}
-
-void BaselineTTYOutputer::done_category_summary() {
-  _output->print_cr(" ");
-}
-
-
-void BaselineTTYOutputer::start_virtual_memory_map() {
-  _output->print_cr("Virtual memory map:");
-}
-
-void BaselineTTYOutputer::reserved_memory_region(MEMFLAGS type, address base, address end,
-                                                 size_t size, address pc) {
-  const char* unit = memory_unit(_scale);
-  char buf[128];
-  int  offset;
-  _output->print_cr(" ");
-  _output->print_cr("[" PTR_FORMAT " - " PTR_FORMAT "] reserved %d%s for %s", base, end, size, unit,
-            MemBaseline::type2name(type));
-  if (os::dll_address_to_function_name(pc, buf, sizeof(buf), &offset)) {
-      _output->print_cr("\t\tfrom [%s+0x%x]", buf, offset);
-  }
-}
-
-void BaselineTTYOutputer::committed_memory_region(address base, address end, size_t size, address pc) {
-  const char* unit = memory_unit(_scale);
-  char buf[128];
-  int  offset;
-  _output->print("\t[" PTR_FORMAT " - " PTR_FORMAT "] committed %d%s", base, end, size, unit);
-  if (os::dll_address_to_function_name(pc, buf, sizeof(buf), &offset)) {
-      _output->print_cr(" from [%s+0x%x]", buf, offset);
-  }
-}
-
-void BaselineTTYOutputer::done_virtual_memory_map() {
-  _output->print_cr(" ");
-}
-
-
-
-void BaselineTTYOutputer::start_callsite() {
-  _output->print_cr("Details:");
-  _output->print_cr(" ");
-}
-
-void BaselineTTYOutputer::done_callsite() {
-  _output->print_cr(" ");
-}
-
-void BaselineTTYOutputer::malloc_callsite(address pc, size_t malloc_amt,
-  size_t malloc_count) {
-  if (malloc_amt > 0) {
-    const char* unit = memory_unit(_scale);
-    char buf[128];
-    int  offset;
-    if (pc == 0) {
-      _output->print("[BOOTSTRAP]%18s", " ");
-    } else if (os::dll_address_to_function_name(pc, buf, sizeof(buf), &offset)) {
-      _output->print_cr("[" PTR_FORMAT "] %s+0x%x", pc, buf, offset);
-      _output->print("%28s", " ");
-    } else {
-      _output->print("[" PTR_FORMAT "]%18s", pc, " ");
-    }
-
-    _output->print_cr("(malloc=%d%s #%d)", malloc_amt, unit, malloc_count);
-    _output->print_cr(" ");
-  }
-}
-
-void BaselineTTYOutputer::virtual_memory_callsite(address pc, size_t reserved_amt,
-  size_t committed_amt) {
-  if (reserved_amt > 0) {
-    const char* unit = memory_unit(_scale);
-    char buf[128];
-    int  offset;
-    if (pc == 0) {
-      _output->print("[BOOTSTRAP]%18s", " ");
-    } else if (os::dll_address_to_function_name(pc, buf, sizeof(buf), &offset)) {
-      _output->print_cr("[" PTR_FORMAT "] %s+0x%x", pc, buf, offset);
-      _output->print("%28s", " ");
-    } else {
-      _output->print("[" PTR_FORMAT "]%18s", pc, " ");
-    }
-
-    _output->print_cr("(mmap: reserved=%d%s, committed=%d%s)",
-      reserved_amt, unit, committed_amt, unit);
-    _output->print_cr(" ");
-  }
-}
-
-void BaselineTTYOutputer::diff_total_usage(size_t total_reserved,
-  size_t total_committed, int reserved_diff, int committed_diff) {
-  const char* unit = memory_unit(_scale);
-  _output->print_cr("Total:  reserved=%d%s  %+d%s, committed=%d%s %+d%s",
-    total_reserved, unit, reserved_diff, unit, total_committed, unit,
-    committed_diff, unit);
-}
-
-void BaselineTTYOutputer::diff_category_summary(MEMFLAGS type,
-  size_t cur_reserved_amt, size_t cur_committed_amt,
-  size_t cur_malloc_amt, size_t cur_malloc_count,
-  size_t cur_arena_amt, size_t cur_arena_count,
-  int reserved_diff, int committed_diff, int malloc_diff,
-  int malloc_count_diff, int arena_diff, int arena_count_diff) {
-
-  if (type == mtThreadStack) {
-    assert(cur_malloc_amt == 0 && cur_malloc_count == 0 &&
-      cur_arena_amt == 0, "Just check");
-    _thread_stack_reserved = cur_reserved_amt;
-    _thread_stack_committed = cur_committed_amt;
-    _thread_stack_reserved_diff = reserved_diff;
-    _thread_stack_committed_diff = committed_diff;
-  } else {
-    const char* unit = memory_unit(_scale);
-    size_t total_reserved = (cur_reserved_amt + cur_malloc_amt + cur_arena_amt);
-    // nothing to report in this category
-    if (total_reserved == 0) {
-      return;
-    }
-    int    diff_reserved = (reserved_diff + malloc_diff + arena_diff);
-
-    // category summary
-    _output->print("-%26s (reserved=%d%s", MemBaseline::type2name(type),
-      total_reserved, unit);
-
-    if (diff_reserved != 0) {
-      _output->print(" %+d%s", diff_reserved, unit);
-    }
-
-    size_t total_committed = cur_committed_amt + cur_malloc_amt + cur_arena_amt;
-    _output->print(", committed=%d%s", total_committed, unit);
-
-    int total_committed_diff = committed_diff + malloc_diff + arena_diff;
-    if (total_committed_diff != 0) {
-      _output->print(" %+d%s", total_committed_diff, unit);
-    }
-
-    _output->print_cr(")");
-
-    // special cases
-    if (type == mtClass) {
-      _output->print("%27s (classes #%d", " ", _num_of_classes);
-      if (_num_of_classes_diff != 0) {
-        _output->print(" %+d", _num_of_classes_diff);
-      }
-      _output->print_cr(")");
-    } else if (type == mtThread) {
-      // thread count
-      _output->print("%27s (thread #%d", " ", _num_of_threads);
-      if (_num_of_threads_diff != 0) {
-        _output->print_cr(" %+d)", _num_of_threads_diff);
-      } else {
-        _output->print_cr(")");
-      }
-      _output->print("%27s (stack: reserved=%d%s", " ", _thread_stack_reserved, unit);
-      if (_thread_stack_reserved_diff != 0) {
-        _output->print(" %+d%s", _thread_stack_reserved_diff, unit);
-      }
-
-      _output->print(", committed=%d%s", _thread_stack_committed, unit);
-      if (_thread_stack_committed_diff != 0) {
-        _output->print(" %+d%s",_thread_stack_committed_diff, unit);
-      }
-
-      _output->print_cr(")");
-    }
-
-    // malloc'd memory
-    if (cur_malloc_amt > 0) {
-      _output->print("%27s (malloc=%d%s", " ", cur_malloc_amt, unit);
-      if (malloc_diff != 0) {
-        _output->print(" %+d%s", malloc_diff, unit);
-      }
-      if (type != mtChunk) {
-        _output->print(", #%d", cur_malloc_count);
-        if (malloc_count_diff) {
-          _output->print(" %+d", malloc_count_diff);
-        }
-      }
-      _output->print_cr(")");
-    }
-
-    // mmap'd memory
-    if (cur_reserved_amt > 0) {
-      _output->print("%27s (mmap: reserved=%d%s", " ", cur_reserved_amt, unit);
-      if (reserved_diff != 0) {
-        _output->print(" %+d%s", reserved_diff, unit);
-      }
-
-      _output->print(", committed=%d%s", cur_committed_amt, unit);
-      if (committed_diff != 0) {
-        _output->print(" %+d%s", committed_diff, unit);
-      }
-      _output->print_cr(")");
-    }
-
-    // arena memory
-    if (cur_arena_amt > 0) {
-      _output->print("%27s (arena=%d%s", " ", cur_arena_amt, unit);
-      if (arena_diff != 0) {
-        _output->print(" %+d%s", arena_diff, unit);
-      }
-      _output->print(", #%d", cur_arena_count);
-      if (arena_count_diff != 0) {
-        _output->print(" %+d", arena_count_diff);
-      }
-      _output->print_cr(")");
-    }
-
-    _output->print_cr(" ");
-  }
-}
-
-void BaselineTTYOutputer::diff_malloc_callsite(address pc,
-    size_t cur_malloc_amt, size_t cur_malloc_count,
-    int malloc_diff, int malloc_count_diff) {
-  if (malloc_diff != 0) {
-    const char* unit = memory_unit(_scale);
-    char buf[128];
-    int  offset;
-    if (pc == 0) {
-      _output->print_cr("[BOOTSTRAP]%18s", " ");
-    } else {
-      if (os::dll_address_to_function_name(pc, buf, sizeof(buf), &offset)) {
-        _output->print_cr("[" PTR_FORMAT "] %s+0x%x", pc, buf, offset);
-        _output->print("%28s", " ");
-      } else {
-        _output->print("[" PTR_FORMAT "]%18s", pc, " ");
-      }
-    }
-
-    _output->print("(malloc=%d%s", cur_malloc_amt, unit);
-    if (malloc_diff != 0) {
-      _output->print(" %+d%s", malloc_diff, unit);
-    }
-    _output->print(", #%d", cur_malloc_count);
-    if (malloc_count_diff != 0) {
-      _output->print(" %+d", malloc_count_diff);
-    }
-    _output->print_cr(")");
-    _output->print_cr(" ");
-  }
-}
-
-void BaselineTTYOutputer::diff_virtual_memory_callsite(address pc,
-    size_t cur_reserved_amt, size_t cur_committed_amt,
-    int reserved_diff, int committed_diff) {
-  if (reserved_diff != 0 || committed_diff != 0) {
-    const char* unit = memory_unit(_scale);
-    char buf[64];
-    int  offset;
-    if (pc == 0) {
-      _output->print_cr("[BOOSTRAP]%18s", " ");
-    } else {
-      if (os::dll_address_to_function_name(pc, buf, sizeof(buf), &offset)) {
-        _output->print_cr("[" PTR_FORMAT "] %s+0x%x", pc, buf, offset);
-        _output->print("%28s", " ");
-      } else {
-        _output->print("[" PTR_FORMAT "]%18s", pc, " ");
-      }
-    }
-
-    _output->print("(mmap: reserved=%d%s", cur_reserved_amt, unit);
-    if (reserved_diff != 0) {
-      _output->print(" %+d%s", reserved_diff, unit);
-    }
-    _output->print(", committed=%d%s", cur_committed_amt, unit);
-    if (committed_diff != 0) {
-      _output->print(" %+d%s", committed_diff, unit);
-    }
-    _output->print_cr(")");
-    _output->print_cr(" ");
-  }
-}
+
+#include "memory/allocation.hpp"
+#include "services/mallocTracker.hpp"
+#include "services/memReporter.hpp"
+#include "services/virtualMemoryTracker.hpp"
+#include "utilities/globalDefinitions.hpp"
+
+size_t MemReporterBase::reserved_total(const MallocMemory* malloc, const VirtualMemory* vm) const {
+  return malloc->malloc_size() + malloc->arena_size() + vm->reserved();
+}
+
+size_t MemReporterBase::committed_total(const MallocMemory* malloc, const VirtualMemory* vm) const {
+  return malloc->malloc_size() + malloc->arena_size() + vm->committed();
+}
+
+void MemReporterBase::print_total(size_t reserved, size_t committed) const {
+  const char* scale = current_scale();
+  output()->print("reserved=" SIZE_FORMAT "%s, committed=" SIZE_FORMAT "%s",
+    amount_in_current_scale(reserved), scale, amount_in_current_scale(committed), scale);
+}
+
+void MemReporterBase::print_malloc(size_t amount, size_t count) const {
+  const char* scale = current_scale();
+  outputStream* out = output();
+  out->print("(malloc=" SIZE_FORMAT "%s",
+    amount_in_current_scale(amount), scale);
+
+  if (count > 0) {
+    out->print(" #" SIZE_FORMAT "", count);
+  }
+
+  out->print(")");
+}
+
+void MemReporterBase::print_virtual_memory(size_t reserved, size_t committed) const {
+  const char* scale = current_scale();
+  output()->print("(mmap: reserved=" SIZE_FORMAT "%s, committed=" SIZE_FORMAT "%s)",
+    amount_in_current_scale(reserved), scale, amount_in_current_scale(committed), scale);
+}
+
+void MemReporterBase::print_malloc_line(size_t amount, size_t count) const {
+  output()->print("%28s", " ");
+  print_malloc(amount, count);
+  output()->print_cr(" ");
+}
+
+void MemReporterBase::print_virtual_memory_line(size_t reserved, size_t committed) const {
+  output()->print("%28s", " ");
+  print_virtual_memory(reserved, committed);
+  output()->print_cr(" ");
+}
+
+void MemReporterBase::print_arena_line(size_t amount, size_t count) const {
+  const char* scale = current_scale();
+  output()->print_cr("%27s (arena=" SIZE_FORMAT "%s #" SIZE_FORMAT ")", " ",
+    amount_in_current_scale(amount), scale, count);
+}
+
+void MemReporterBase::print_virtual_memory_region(const char* type, address base, size_t size) const {
+  const char* scale = current_scale();
+  output()->print("[" PTR_FORMAT " - " PTR_FORMAT "] %s " SIZE_FORMAT "%s",
+    p2i(base), p2i(base + size), type, amount_in_current_scale(size), scale);
+}
+
+
+void MemSummaryReporter::report() {
+  const char* scale = current_scale();
+  outputStream* out = output();
+  size_t total_reserved_amount = _malloc_snapshot->total() +
+    _vm_snapshot->total_reserved();
+  size_t total_committed_amount = _malloc_snapshot->total() +
+    _vm_snapshot->total_committed();
+
+  // Overall total
+  out->print_cr("\nNative Memory Tracking:\n");
+  out->print("Total: ");
+  print_total(total_reserved_amount, total_committed_amount);
+  out->print("\n");
+
+  // Summary by memory type
+  for (int index = 0; index < mt_number_of_types; index ++) {
+    MEMFLAGS flag = NMTUtil::index_to_flag(index);
+    // thread stack is reported as part of thread category
+    if (flag == mtThreadStack) continue;
+    MallocMemory* malloc_memory = _malloc_snapshot->by_type(flag);
+    VirtualMemory* virtual_memory = _vm_snapshot->by_type(flag);
+
+    report_summary_of_type(flag, malloc_memory, virtual_memory);
+  }
+}
+
+void MemSummaryReporter::report_summary_of_type(MEMFLAGS flag,
+  MallocMemory*  malloc_memory, VirtualMemory* virtual_memory) {
+
+  size_t reserved_amount  = reserved_total (malloc_memory, virtual_memory);
+  size_t committed_amount = committed_total(malloc_memory, virtual_memory);
+
+  // Count thread's native stack in "Thread" category
+  if (flag == mtThread) {
+    const VirtualMemory* thread_stack_usage =
+      (const VirtualMemory*)_vm_snapshot->by_type(mtThreadStack);
+    reserved_amount  += thread_stack_usage->reserved();
+    committed_amount += thread_stack_usage->committed();
+  } else if (flag == mtNMT) {
+    // Count malloc headers in "NMT" category
+    reserved_amount  += _malloc_snapshot->malloc_overhead()->size();
+    committed_amount += _malloc_snapshot->malloc_overhead()->size();
+  }
+
+  if (amount_in_current_scale(reserved_amount) > 0) {
+    outputStream* out   = output();
+    const char*   scale = current_scale();
+    out->print("-%26s (", NMTUtil::flag_to_name(flag));
+    print_total(reserved_amount, committed_amount);
+    out->print_cr(")");
+
+    if (flag == mtClass) {
+      // report class count
+      out->print_cr("%27s (classes #" SIZE_FORMAT ")", " ", _class_count);
+    } else if (flag == mtThread) {
+      // report thread count
+      out->print_cr("%27s (thread #" SIZE_FORMAT ")", " ", _malloc_snapshot->thread_count());
+      const VirtualMemory* thread_stack_usage =
+       _vm_snapshot->by_type(mtThreadStack);
+      out->print("%27s (stack: ", " ");
+      print_total(thread_stack_usage->reserved(), thread_stack_usage->committed());
+      out->print_cr(")");
+    }
+
+     // report malloc'd memory
+    if (amount_in_current_scale(malloc_memory->malloc_size()) > 0) {
+      // We don't know how many arena chunks are in used, so don't report the count
+      size_t count = (flag == mtChunk) ? 0 : malloc_memory->malloc_count();
+      print_malloc_line(malloc_memory->malloc_size(), count);
+    }
+
+    if (amount_in_current_scale(virtual_memory->reserved()) > 0) {
+      print_virtual_memory_line(virtual_memory->reserved(), virtual_memory->committed());
+    }
+
+    if (amount_in_current_scale(malloc_memory->arena_size()) > 0) {
+      print_arena_line(malloc_memory->arena_size(), malloc_memory->arena_count());
+    }
+
+    if (flag == mtNMT &&
+      amount_in_current_scale(_malloc_snapshot->malloc_overhead()->size()) > 0) {
+      out->print_cr("%27s (tracking overhead=" SIZE_FORMAT "%s)", " ",
+        amount_in_current_scale(_malloc_snapshot->malloc_overhead()->size()), scale);
+    }

+    out->print_cr(" ");
+  }
+}
+
+void MemDetailReporter::report_detail() {
+  // Start detail report
+  outputStream* out = output();
+  out->print_cr("Details:\n");
+
+  report_malloc_sites();
+  report_virtual_memory_allocation_sites();
+}
+
+void MemDetailReporter::report_malloc_sites() {
+  MallocSiteIterator         malloc_itr = _baseline.malloc_sites(MemBaseline::by_size);
+  if (malloc_itr.is_empty()) return;
+
+  outputStream* out = output();
+
+  const MallocSite* malloc_site;
+  while ((malloc_site = malloc_itr.next()) != NULL) {
+    // Don't report if size is too small
+    if (amount_in_current_scale(malloc_site->size()) == 0)
+      continue;
+
+    const NativeCallStack* stack = malloc_site->call_stack();
+    stack->print_on(out);
+    out->print("%29s", " ");
+    print_malloc(malloc_site->size(), malloc_site->count());
+    out->print_cr("\n");
+  }
+}
+
+void MemDetailReporter::report_virtual_memory_allocation_sites()  {
+  VirtualMemorySiteIterator  virtual_memory_itr =
+    _baseline.virtual_memory_sites(MemBaseline::by_size);
+
+  if (virtual_memory_itr.is_empty()) return;
+
+  outputStream* out = output();
+  const VirtualMemoryAllocationSite*  virtual_memory_site;
+
+  while ((virtual_memory_site = virtual_memory_itr.next()) != NULL) {
+    // Don't report if size is too small
+    if (amount_in_current_scale(virtual_memory_site->reserved()) == 0)
+      continue;
+
+    const NativeCallStack* stack = virtual_memory_site->call_stack();
+    stack->print_on(out);
+    out->print("%28s (", " ");
+    print_total(virtual_memory_site->reserved(), virtual_memory_site->committed());
+    out->print_cr(")\n");
+  }
+}
+
+
+void MemDetailReporter::report_virtual_memory_map() {
+  // Virtual memory map always in base address order
+  VirtualMemoryAllocationIterator itr = _baseline.virtual_memory_allocations();
+  const ReservedMemoryRegion* rgn;
+
+  output()->print_cr("Virtual memory map:");
+  while ((rgn = itr.next()) != NULL) {
+    report_virtual_memory_region(rgn);
+  }
+}
+
+void MemDetailReporter::report_virtual_memory_region(const ReservedMemoryRegion* reserved_rgn) {
+  assert(reserved_rgn != NULL, "NULL pointer");
+
+  // Don't report if size is too small
+  if (amount_in_current_scale(reserved_rgn->size()) == 0) return;
+
+  outputStream* out = output();
+  const char* scale = current_scale();
+  const NativeCallStack*  stack = reserved_rgn->call_stack();
+  bool all_committed = reserved_rgn->all_committed();
+  const char* region_type = (all_committed ? "reserved and committed" : "reserved");
+  out->print_cr(" ");
+  print_virtual_memory_region(region_type, reserved_rgn->base(), reserved_rgn->size());
+  out->print(" for %s", NMTUtil::flag_to_name(reserved_rgn->flag()));
+  if (stack->is_empty()) {
+    out->print_cr(" ");
+  } else {
+    out->print_cr(" from");
+    stack->print_on(out, 4);
+  }
+
+  if (all_committed) return;
+
+  CommittedRegionIterator itr = reserved_rgn->iterate_committed_regions();
+  const CommittedMemoryRegion* committed_rgn;
+  while ((committed_rgn = itr.next()) != NULL) {
+    // Don't report if size is too small
+    if (amount_in_current_scale(committed_rgn->size()) == 0) continue;
+    stack = committed_rgn->call_stack();
+    out->print("\n\t");
+    print_virtual_memory_region("committed", committed_rgn->base(), committed_rgn->size());
+    if (stack->is_empty()) {
+      out->print_cr(" ");
+    } else {
+      out->print_cr(" from");
+      stack->print_on(out, 12);
+    }
+  }
+}
+
+void MemSummaryDiffReporter::report_diff() {
+  const char* scale = current_scale();
+  outputStream* out = output();
+  out->print_cr("\nNative Memory Tracking:\n");
+
+  // Overall diff
+  out->print("Total: ");
+  print_virtual_memory_diff(_current_baseline.total_reserved_memory(),
+    _current_baseline.total_committed_memory(), _early_baseline.total_reserved_memory(),
+    _early_baseline.total_committed_memory());
+
+  out->print_cr("\n");
+
+  // Summary diff by memory type
+  for (int index = 0; index < mt_number_of_types; index ++) {
+    MEMFLAGS flag = NMTUtil::index_to_flag(index);
+    // thread stack is reported as part of thread category
+    if (flag == mtThreadStack) continue;
+    diff_summary_of_type(flag, _early_baseline.malloc_memory(flag),
+      _early_baseline.virtual_memory(flag), _current_baseline.malloc_memory(flag),
+      _current_baseline.virtual_memory(flag));
+  }
+}
+
+void MemSummaryDiffReporter::print_malloc_diff(size_t current_amount, size_t current_count,
+    size_t early_amount, size_t early_count) const {
+  const char* scale = current_scale();
+  outputStream* out = output();
+
+  out->print("malloc=" SIZE_FORMAT "%s", amount_in_current_scale(current_amount), scale);
+  long amount_diff = diff_in_current_scale(current_amount, early_amount);
+  if (amount_diff != 0) {
+    out->print(" %+ld%s", amount_diff, scale);
+  }
+  if (current_count > 0) {
+    out->print(" #" SIZE_FORMAT "", current_count);
+    if (current_count != early_count) {
+      out->print(" %+d", (int)(current_count - early_count));
+    }
+  }
+}
+
+void MemSummaryDiffReporter::print_arena_diff(size_t current_amount, size_t current_count,
+  size_t early_amount, size_t early_count) const {
+  const char* scale = current_scale();
+  outputStream* out = output();
+  out->print("arena=" SIZE_FORMAT "%s", amount_in_current_scale(current_amount), scale);
+  if (diff_in_current_scale(current_amount, early_amount) != 0) {
+    out->print(" %+ld", diff_in_current_scale(current_amount, early_amount));
+  }
+
+  out->print(" #" SIZE_FORMAT "", current_count);
+  if (current_count != early_count) {
+    out->print(" %+d", (int)(current_count - early_count));
+  }
+}
+
+void MemSummaryDiffReporter::print_virtual_memory_diff(size_t current_reserved, size_t current_committed,
+    size_t early_reserved, size_t early_committed) const {
+  const char* scale = current_scale();
+  outputStream* out = output();
+  out->print("reserved=" SIZE_FORMAT "%s", amount_in_current_scale(current_reserved), scale);
+  long reserved_diff = diff_in_current_scale(current_reserved, early_reserved);
+  if (reserved_diff != 0) {
+    out->print(" %+ld%s", reserved_diff, scale);
+  }
+
+  out->print(", committed=" SIZE_FORMAT "%s", amount_in_current_scale(current_committed), scale);
+  long committed_diff = diff_in_current_scale(current_committed, early_committed);
+  if (committed_diff != 0) {
+    out->print(" %+ld%s", committed_diff, scale);
+  }
+}
+
+
+void MemSummaryDiffReporter::diff_summary_of_type(MEMFLAGS flag, const MallocMemory* early_malloc,
+  const VirtualMemory* early_vm, const MallocMemory* current_malloc,
+  const VirtualMemory* current_vm) const {
+
+  outputStream* out = output();
+  const char* scale = current_scale();
+
+  // Total reserved and committed memory in current baseline
+  size_t current_reserved_amount  = reserved_total (current_malloc, current_vm);
+  size_t current_committed_amount = committed_total(current_malloc, current_vm);
+
+  // Total reserved and committed memory in early baseline
+  size_t early_reserved_amount  = reserved_total(early_malloc, early_vm);
+  size_t early_committed_amount = committed_total(early_malloc, early_vm);
+
+  // Adjust virtual memory total
+  if (flag == mtThread) {
+    const VirtualMemory* early_thread_stack_usage =
+      _early_baseline.virtual_memory(mtThreadStack);
+    const VirtualMemory* current_thread_stack_usage =
+      _current_baseline.virtual_memory(mtThreadStack);
+
+    early_reserved_amount  += early_thread_stack_usage->reserved();
+    early_committed_amount += early_thread_stack_usage->committed();
+
+    current_reserved_amount  += current_thread_stack_usage->reserved();
+    current_committed_amount += current_thread_stack_usage->committed();
+  } else if (flag == mtNMT) {
+    early_reserved_amount  += _early_baseline.malloc_tracking_overhead();
+    early_committed_amount += _early_baseline.malloc_tracking_overhead();
+
+    current_reserved_amount  += _current_baseline.malloc_tracking_overhead();
+    current_committed_amount += _current_baseline.malloc_tracking_overhead();
+  }
+
+  if (amount_in_current_scale(current_reserved_amount) > 0 ||
+      diff_in_current_scale(current_reserved_amount, early_reserved_amount) != 0) {
+
+    // print summary line
+    out->print("-%26s (", NMTUtil::flag_to_name(flag));
+    print_virtual_memory_diff(current_reserved_amount, current_committed_amount,
+      early_reserved_amount, early_committed_amount);
+    out->print_cr(")");
+
+    // detail lines
+    if (flag == mtClass) {
+      // report class count
+      out->print("%27s (classes #" SIZE_FORMAT "", " ", _current_baseline.class_count());
+      int class_count_diff = (int)(_current_baseline.class_count() -
+        _early_baseline.class_count());
+      if (_current_baseline.class_count() != _early_baseline.class_count()) {
+        out->print(" %+d", (int)(_current_baseline.class_count() - _early_baseline.class_count()));
+      }
+      out->print_cr(")");
+    } else if (flag == mtThread) {
+      // report thread count
+      out->print("%27s (thread #" SIZE_FORMAT "", " ", _current_baseline.thread_count());
+      int thread_count_diff = (int)(_current_baseline.thread_count() -
+          _early_baseline.thread_count());
+      if (thread_count_diff != 0) {
+        out->print(" %+d", thread_count_diff);
+      }
+      out->print_cr(")");
+
+      // report thread stack
+      const VirtualMemory* current_thread_stack =
+          _current_baseline.virtual_memory(mtThreadStack);
+      const VirtualMemory* early_thread_stack =
+        _early_baseline.virtual_memory(mtThreadStack);
+
+      out->print("%27s (stack: ", " ");
+      print_virtual_memory_diff(current_thread_stack->reserved(), current_thread_stack->committed(),
+        early_thread_stack->reserved(), early_thread_stack->committed());
+      out->print_cr(")");
+    }
+
+    // Report malloc'd memory
+    size_t current_malloc_amount = current_malloc->malloc_size();
+    size_t early_malloc_amount   = early_malloc->malloc_size();
+    if (amount_in_current_scale(current_malloc_amount) > 0 ||
+        diff_in_current_scale(current_malloc_amount, early_malloc_amount) != 0) {
+      out->print("%28s(", " ");
+      print_malloc_diff(current_malloc_amount, (flag == mtChunk) ? 0 : current_malloc->malloc_count(),
+        early_malloc_amount, early_malloc->malloc_count());
+      out->print_cr(")");
+    }
+
+    // Report virtual memory
+    if (amount_in_current_scale(current_vm->reserved()) > 0 ||
+        diff_in_current_scale(current_vm->reserved(), early_vm->reserved()) != 0) {
+      out->print("%27s (mmap: ", " ");
+      print_virtual_memory_diff(current_vm->reserved(), current_vm->committed(),
+        early_vm->reserved(), early_vm->committed());
+      out->print_cr(")");
+    }
+
+    // Report arena memory
+    if (amount_in_current_scale(current_malloc->arena_size()) > 0 ||
+        diff_in_current_scale(current_malloc->arena_size(), early_malloc->arena_size()) != 0) {
+      out->print("%28s(", " ");
+      print_arena_diff(current_malloc->arena_size(), current_malloc->arena_count(),
+        early_malloc->arena_size(), early_malloc->arena_count());
+      out->print_cr(")");
+    }
+
+    // Report native memory tracking overhead
+    if (flag == mtNMT) {
+      size_t current_tracking_overhead = amount_in_current_scale(_current_baseline.malloc_tracking_overhead());
+      size_t early_tracking_overhead   = amount_in_current_scale(_early_baseline.malloc_tracking_overhead());
+
+      out->print("%27s (tracking overhead=" SIZE_FORMAT "%s", " ",
+        amount_in_current_scale(_current_baseline.malloc_tracking_overhead()), scale);
+
+      long overhead_diff = diff_in_current_scale(_current_baseline.malloc_tracking_overhead(),
+           _early_baseline.malloc_tracking_overhead());
+      if (overhead_diff != 0) {
+        out->print(" %+ld%s", overhead_diff, scale);
+      }
+      out->print_cr(")");
+    }
+    out->print_cr(" ");
+  }
+}
+
+void MemDetailDiffReporter::report_diff() {
+  MemSummaryDiffReporter::report_diff();
+  diff_malloc_sites();
+  diff_virtual_memory_sites();
+}
+
+void MemDetailDiffReporter::diff_malloc_sites() const {
+  MallocSiteIterator early_itr = _early_baseline.malloc_sites(MemBaseline::by_site);
+  MallocSiteIterator current_itr = _current_baseline.malloc_sites(MemBaseline::by_site);
+
+  const MallocSite* early_site   = early_itr.next();
+  const MallocSite* current_site = current_itr.next();
+
+  while (early_site != NULL || current_site != NULL) {
+    if (early_site == NULL) {
+      new_malloc_site(current_site);
+      current_site = current_itr.next();
+    } else if (current_site == NULL) {
+      old_malloc_site(early_site);
+      early_site = early_itr.next();
+    } else {
+      int compVal = current_site->call_stack()->compare(*early_site->call_stack());
+      if (compVal < 0) {
+        new_malloc_site(current_site);
+        current_site = current_itr.next();
+      } else if (compVal > 0) {
+        old_malloc_site(early_site);
+        early_site = early_itr.next();
+      } else {
+        diff_malloc_site(early_site, current_site);
+        early_site   = early_itr.next();
+        current_site = current_itr.next();
+      }
+    }
+  }
+}
+
+void MemDetailDiffReporter::diff_virtual_memory_sites() const {
+  VirtualMemorySiteIterator early_itr = _early_baseline.virtual_memory_sites(MemBaseline::by_site);
+  VirtualMemorySiteIterator current_itr = _current_baseline.virtual_memory_sites(MemBaseline::by_site);
+
+  const VirtualMemoryAllocationSite* early_site   = early_itr.next();
+  const VirtualMemoryAllocationSite* current_site = current_itr.next();
+
+  while (early_site != NULL || current_site != NULL) {
+    if (early_site == NULL) {
+      new_virtual_memory_site(current_site);
+      current_site = current_itr.next();
+    } else if (current_site == NULL) {
+      old_virtual_memory_site(early_site);
+      early_site = early_itr.next();
+    } else {
+      int compVal = current_site->call_stack()->compare(*early_site->call_stack());
+      if (compVal < 0) {
+        new_virtual_memory_site(current_site);
+        current_site = current_itr.next();
+      } else if (compVal > 0) {
+        old_virtual_memory_site(early_site);
+        early_site = early_itr.next();
+      } else {
+        diff_virtual_memory_site(early_site, current_site);
+        early_site   = early_itr.next();
+        current_site = current_itr.next();
+      }
+    }
+  }
+}
+
+
+void MemDetailDiffReporter::new_malloc_site(const MallocSite* malloc_site) const {
+  diff_malloc_site(malloc_site->call_stack(), malloc_site->size(), malloc_site->count(),
+    0, 0);
+}
+
+void MemDetailDiffReporter::old_malloc_site(const MallocSite* malloc_site) const {
+  diff_malloc_site(malloc_site->call_stack(), 0, 0, malloc_site->size(),
+    malloc_site->count());
+}
+
+void MemDetailDiffReporter::diff_malloc_site(const MallocSite* early,
+  const MallocSite* current)  const {
+  diff_malloc_site(current->call_stack(), current->size(), current->count(),
+    early->size(), early->count());
+}
+
+void MemDetailDiffReporter::diff_malloc_site(const NativeCallStack* stack, size_t current_size,
+  size_t current_count, size_t early_size, size_t early_count) const {
+  outputStream* out = output();
+
+  assert(stack != NULL, "NULL stack");
+
+  if (diff_in_current_scale(current_size, early_size) == 0) {
+      return;
+  }
+
+  stack->print_on(out);
+  out->print("%28s (", " ");
+  print_malloc_diff(current_size, current_count,
+    early_size, early_count);
+
+  out->print_cr(")\n");
+}
+
+
+void MemDetailDiffReporter::new_virtual_memory_site(const VirtualMemoryAllocationSite* site) const {
+  diff_virtual_memory_site(site->call_stack(), site->reserved(), site->committed(), 0, 0);
+}
+
+void MemDetailDiffReporter::old_virtual_memory_site(const VirtualMemoryAllocationSite* site) const {
+  diff_virtual_memory_site(site->call_stack(), 0, 0, site->reserved(), site->committed());
+}
+
+void MemDetailDiffReporter::diff_virtual_memory_site(const VirtualMemoryAllocationSite* early,
+  const VirtualMemoryAllocationSite* current) const {
+  diff_virtual_memory_site(current->call_stack(), current->reserved(), current->committed(),
+    early->reserved(), early->committed());
+}
+
+void MemDetailDiffReporter::diff_virtual_memory_site(const NativeCallStack* stack, size_t current_reserved,
+  size_t current_committed, size_t early_reserved, size_t early_committed) const  {
+  outputStream* out = output();
+
+  // no change
+  if (diff_in_current_scale(current_reserved, early_reserved) == 0 &&
+      diff_in_current_scale(current_committed, early_committed) == 0) {
+    return;
+  }
+
+  stack->print_on(out);
+  out->print("%28s (mmap: ", " ");
+  print_virtual_memory_diff(current_reserved, current_committed,
+    early_reserved, early_committed);
+
+  out->print_cr(")\n");
+ }
+
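A brief usage sketch of the reporters added on the new side of this change. It is illustrative only: the constructor shape (MemBaseline&, outputStream*, optional scale) and MemBaseline::baseline(bool) are assumed from services/memReporter.hpp and services/memBaseline.hpp, which are not shown in this file.

    // Illustrative sketch only -- constructor and baseline() signatures are
    // assumed from the accompanying headers, not from this .cpp.
    MemBaseline baseline;
    if (baseline.baseline(true /* summary only */)) {
      // Print a summary report to the VM's default output stream at the
      // reporter's default scale.
      MemSummaryReporter rptr(baseline, tty);
      rptr.report();
    }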