hotspot/src/share/vm/gc/parallel/mutableNUMASpace.cpp
changeset 46620 750c6edff33b
parent 46312 385a8b027e7d
child 46625 edefffab74e2
comparing 46619:a3919f5e8d2b (-) with 46620:750c6edff33b (+)
   104               touched_words = MIN2((size_t)align_object_size(typeArrayOopDesc::header_size(T_INT)),
   105                 touched_words);
   106             }
   107 #endif
   108             MemRegion invalid;
-  109             HeapWord *crossing_start = (HeapWord*)round_to(cur_top, os::vm_page_size());
+  109             HeapWord *crossing_start = align_up((HeapWord*)cur_top, os::vm_page_size());
-  110             HeapWord *crossing_end = (HeapWord*)round_to(cur_top + touched_words, os::vm_page_size());
+  110             HeapWord *crossing_end = align_down((HeapWord*)(cur_top + touched_words), os::vm_page_size());
   111             if (crossing_start != crossing_end) {
   112               // If object header crossed a small page boundary we mark the area
   113               // as invalid rounding it to a page_size().
-  114               HeapWord *start = MAX2((HeapWord*)round_down(cur_top, page_size()), s->bottom());
+  114               HeapWord *start = MAX2(align_down((HeapWord*)cur_top, page_size()), s->bottom());
-  115               HeapWord *end = MIN2((HeapWord*)round_to(cur_top + touched_words, page_size()), s->end());
+  115               HeapWord *end = MIN2(align_up((HeapWord*)(cur_top + touched_words), page_size()), s->end());
   116               invalid = MemRegion(start, end);
   117             }
   118 
   119             ls->add_invalid_region(invalid);
   120           }
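
Lines 114-115 round the dirtied area outward to whole small pages and clamp it to the space bounds. A minimal standalone sketch of that computation, with hypothetical align_up_/align_down_ stand-ins for HotSpot's helpers, invented addresses, and a power-of-two page size:

#include <algorithm>
#include <cstdint>
#include <cstdio>

static uintptr_t align_down_(uintptr_t v, uintptr_t a) { return v & ~(a - 1); }
static uintptr_t align_up_(uintptr_t v, uintptr_t a)   { return align_down_(v + a - 1, a); }

int main() {
  const uintptr_t page   = 4096;
  const uintptr_t bottom = 8192, end = 40960;    // space bounds, page aligned, invented
  uintptr_t cur_top = 12300, touched_bytes = 64; // area dirtied by a filler header, invented
  // Round the dirtied area out to whole pages, but never past the space bounds
  // (the MAX2/MIN2 clamp against s->bottom() and s->end() above).
  uintptr_t lo = std::max(align_down_(cur_top, page), bottom);
  uintptr_t hi = std::min(align_up_(cur_top + touched_bytes, page), end);
  printf("invalid region: [%lu, %lu)\n",
         (unsigned long)lo, (unsigned long)hi);  // prints [12288, 16384)
  return 0;
}
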
   295   return false;
   296 }
   297 
   298 // Bias region towards the first-touching lgrp. Set the right page sizes.
   299 void MutableNUMASpace::bias_region(MemRegion mr, int lgrp_id) {
-  300   HeapWord *start = (HeapWord*)round_to((intptr_t)mr.start(), page_size());
+  300   HeapWord *start = align_up(mr.start(), page_size());
-  301   HeapWord *end = (HeapWord*)round_down((intptr_t)mr.end(), page_size());
+  301   HeapWord *end = align_down(mr.end(), page_size());
   302   if (end > start) {
   303     MemRegion aligned_region(start, end);
   304     assert((intptr_t)aligned_region.start()     % page_size() == 0 &&
   305            (intptr_t)aligned_region.byte_size() % page_size() == 0, "Bad alignment");
   306     assert(region().contains(aligned_region), "Sanity");
   314   }
   315 }
   316 
   317 // Free all pages in the region.
   318 void MutableNUMASpace::free_region(MemRegion mr) {
-  319   HeapWord *start = (HeapWord*)round_to((intptr_t)mr.start(), page_size());
+  319   HeapWord *start = align_up(mr.start(), page_size());
-  320   HeapWord *end = (HeapWord*)round_down((intptr_t)mr.end(), page_size());
+  320   HeapWord *end = align_down(mr.end(), page_size());
   321   if (end > start) {
   322     MemRegion aligned_region(start, end);
   323     assert((intptr_t)aligned_region.start()     % page_size() == 0 &&
   324            (intptr_t)aligned_region.byte_size() % page_size() == 0, "Bad alignment");
   325     assert(region().contains(aligned_region), "Sanity");
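
bias_region() and free_region() trim in the opposite direction: the start is rounded up and the end rounded down, so only pages lying entirely inside mr are touched, and the result may be empty. A sketch under the same assumptions (hypothetical helpers, invented addresses):

#include <cstdint>
#include <cstdio>

static uintptr_t align_down_(uintptr_t v, uintptr_t a) { return v & ~(a - 1); }
static uintptr_t align_up_(uintptr_t v, uintptr_t a)   { return align_down_(v + a - 1, a); }

int main() {
  const uintptr_t page = 4096;
  uintptr_t start = align_up_(5000, page);      // mr.start() rounded up: 8192
  uintptr_t end   = align_down_(20000, page);   // mr.end() rounded down: 16384
  if (end > start) {                            // same guard as in the functions above
    printf("whole-page subregion: [%lu, %lu)\n",
           (unsigned long)start, (unsigned long)end);
  }
  return 0;
}
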
   435 // This function is expected to be called on sequence of i's from 0 to
   436 // lgrp_spaces()->length().
   437 size_t MutableNUMASpace::adaptive_chunk_size(int i, size_t limit) {
   438   size_t pages_available = base_space_size();
   439   for (int j = 0; j < i; j++) {
-  440     pages_available -= round_down(current_chunk_size(j), page_size()) / page_size();
+  440     pages_available -= align_down(current_chunk_size(j), page_size()) / page_size();
   441   }
   442   pages_available -= lgrp_spaces()->length() - i - 1;
   443   assert(pages_available > 0, "No pages left");
   444   float alloc_rate = 0;
   445   for (int j = i; j < lgrp_spaces()->length(); j++) {
   451     chunk_size = (size_t)(ls->alloc_rate()->average() / alloc_rate * pages_available) * page_size();
   452   }
   453   chunk_size = MAX2(chunk_size, page_size());
   454 
   455   if (limit > 0) {
-  456     limit = round_down(limit, page_size());
+  456     limit = align_down(limit, page_size());
   457     if (chunk_size > current_chunk_size(i)) {
   458       size_t upper_bound = pages_available * page_size();
   459       if (upper_bound > limit &&
   460           current_chunk_size(i) < upper_bound - limit) {
   461         // The resulting upper bound should not exceed the available
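
adaptive_chunk_size() hands each lgrp a share of the remaining pages proportional to its allocation rate, never less than one page. A worked sketch of that split with invented rates and page counts (the real code reads the rates from ls->alloc_rate()):

#include <cstddef>
#include <cstdio>

int main() {
  const size_t page_size = 4096;
  size_t pages_available = 100;                 // invented; the real code derives this
  double rates[] = { 3.0, 1.0, 1.0 };           // per-lgrp allocation rates, invented
  double rate_sum = rates[0] + rates[1] + rates[2];
  // Same shape as line 451 above: this lgrp's share of the free pages.
  size_t chunk_size = (size_t)(rates[0] / rate_sum * pages_available) * page_size;
  if (chunk_size < page_size) chunk_size = page_size;  // MAX2(chunk_size, page_size())
  printf("lgrp 0 chunk: %zu bytes (%zu pages)\n",
         chunk_size, chunk_size / page_size);   // 245760 bytes (60 pages)
  return 0;
}
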
   483                                     MemRegion* bottom_region, MemRegion *top_region) {
   484   // Is there bottom?
   485   if (new_region.start() < intersection.start()) { // Yes
   486     // Try to coalesce small pages into a large one.
   487     if (UseLargePages && page_size() >= alignment()) {
-  488       HeapWord* p = (HeapWord*)round_to((intptr_t) intersection.start(), alignment());
+  488       HeapWord* p = align_up(intersection.start(), alignment());
   489       if (new_region.contains(p)
   490           && pointer_delta(p, new_region.start(), sizeof(char)) >= alignment()) {
   491         if (intersection.contains(p)) {
   492           intersection = MemRegion(p, intersection.end());
   493         } else {
   502 
   503   // Is there top?
   504   if (intersection.end() < new_region.end()) { // Yes
   505     // Try to coalesce small pages into a large one.
   506     if (UseLargePages && page_size() >= alignment()) {
-  507       HeapWord* p = (HeapWord*)round_down((intptr_t) intersection.end(), alignment());
+  507       HeapWord* p = align_down(intersection.end(), alignment());
   508       if (new_region.contains(p)
   509           && pointer_delta(new_region.end(), p, sizeof(char)) >= alignment()) {
   510         if (intersection.contains(p)) {
   511           intersection = MemRegion(intersection.start(), p);
   512         } else {
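
Both branches push the boundary of the kept intersection to the nearest large-page boundary inside new_region, so the pages given up on either side can coalesce into large ones. A simplified sketch of the bottom case, with a hypothetical align_up_ helper and invented addresses (the real test also checks new_region.contains(p)):

#include <cstdint>
#include <cstdio>

static uintptr_t align_up_(uintptr_t v, uintptr_t a) { return (v + a - 1) & ~(a - 1); }

int main() {
  const uintptr_t large_page = 2u * 1024 * 1024;    // alignment(), invented
  uintptr_t new_start   = 0;                        // new_region.start(), invented
  uintptr_t isect_start = 3u * 1024 * 1024;         // intersection.start(), invented
  uintptr_t p = align_up_(isect_start, large_page); // next 2 MB boundary: 4 MB
  // Keep the boundary move only if a whole large page fits below it, mirroring
  // the pointer_delta(p, new_region.start(), sizeof(char)) >= alignment() test.
  if (p - new_start >= large_page) {
    isect_start = p;
  }
  printf("intersection now starts at %lu MB\n",
         (unsigned long)(isect_start >> 20));       // prints 4
  return 0;
}
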
   544         if (intersection->contains(invalid_region)) {
   545             // That's the only case we have to make an additional bias_region() call.
   546             HeapWord* start = invalid_region->start();
   547             HeapWord* end = invalid_region->end();
   548             if (UseLargePages && page_size() >= alignment()) {
-  549               HeapWord *p = (HeapWord*)round_down((intptr_t) start, alignment());
+  549               HeapWord *p = align_down(start, alignment());
   550               if (new_region.contains(p)) {
   551                 start = p;
   552               }
-  553               p = (HeapWord*)round_to((intptr_t) end, alignment());
+  553               p = align_up(end, alignment());
   554               if (new_region.contains(end)) {
   555                 end = p;
   556               }
   557             }
   558             if (intersection->start() > start) {
   579   clear(SpaceDecorator::DontMangle);
   580 
   581   // Compute chunk sizes
   582   size_t prev_page_size = page_size();
   583   set_page_size(UseLargePages ? alignment() : os::vm_page_size());
-  584   HeapWord* rounded_bottom = (HeapWord*)round_to((intptr_t) bottom(), page_size());
+  584   HeapWord* rounded_bottom = align_up(bottom(), page_size());
-  585   HeapWord* rounded_end = (HeapWord*)round_down((intptr_t) end(), page_size());
+  585   HeapWord* rounded_end = align_down(end(), page_size());
   586   size_t base_space_size_pages = pointer_delta(rounded_end, rounded_bottom, sizeof(char)) / page_size();
   587 
   588   // Try small pages if the chunk size is too small
   589   if (base_space_size_pages / lgrp_spaces()->length() == 0
   590       && page_size() > (size_t)os::vm_page_size()) {
   591     // Changing the page size below can lead to freeing of memory. So we fail initialization.
   592     if (_must_use_large_pages) {
   593       vm_exit_during_initialization("Failed initializing NUMA with large pages. Too small heap size");
   594     }
   595     set_page_size(os::vm_page_size());
-  596     rounded_bottom = (HeapWord*)round_to((intptr_t) bottom(), page_size());
+  596     rounded_bottom = align_up(bottom(), page_size());
-  597     rounded_end = (HeapWord*)round_down((intptr_t) end(), page_size());
+  597     rounded_end = align_down(end(), page_size());
   598     base_space_size_pages = pointer_delta(rounded_end, rounded_bottom, sizeof(char)) / page_size();
   599   }
   600   guarantee(base_space_size_pages / lgrp_spaces()->length() > 0, "Space too small");
   601   set_base_space_size(base_space_size_pages);
   602 
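
The fallback above fires when the space cannot give every lgrp at least one large page. A worked example of the arithmetic with invented sizes (the real code first rounds bottom() and end() to page boundaries):

#include <cstddef>
#include <cstdio>

int main() {
  const size_t large_page = 2u * 1024 * 1024, small_page = 4096;
  const size_t space_bytes = 6u * 1024 * 1024;     // invented heap-space size
  const int lgrps = 4;                             // invented lgrp count
  size_t pages = space_bytes / large_page;         // 3 large pages
  if (pages / lgrps == 0) {                        // not one page per lgrp: fall back
    pages = space_bytes / small_page;              // 1536 small pages
  }
  printf("pages per lgrp: %zu\n", pages / (size_t)lgrps);  // prints 384
  return 0;
}
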
   723 void MutableNUMASpace::set_top(HeapWord* value) {
   724   bool found_top = false;
   725   for (int i = 0; i < lgrp_spaces()->length();) {
   726     LGRPSpace *ls = lgrp_spaces()->at(i);
   727     MutableSpace *s = ls->space();
-  728     HeapWord *top = MAX2((HeapWord*)round_down((intptr_t)s->top(), page_size()), s->bottom());
+  728     HeapWord *top = MAX2(align_down(s->top(), page_size()), s->bottom());
   729 
   730     if (s->contains(value)) {
   731       // Check if setting the chunk's top to a given value would create a hole less than
   732       // a minimal object; assuming that's not the last chunk in which case we don't care.
   733       if (i < lgrp_spaces()->length() - 1) {
   924 }
   925 
   926 // Scan pages and gather stats about page placement and size.
   927 void MutableNUMASpace::LGRPSpace::accumulate_statistics(size_t page_size) {
   928   clear_space_stats();
-  929   char *start = (char*)round_to((intptr_t) space()->bottom(), page_size);
+  929   char *start = (char*)align_up(space()->bottom(), page_size);
-  930   char* end = (char*)round_down((intptr_t) space()->end(), page_size);
+  930   char* end = (char*)align_down(space()->end(), page_size);
   931   if (start < end) {
   932     for (char *p = start; p < end;) {
   933       os::page_info info;
   934       if (os::get_page_info(p, &info)) {
   935         if (info.size > 0) {
   961 // Scan page_count pages and verify if they have the right size and right placement.
   962 // If invalid pages are found they are freed in hope that subsequent reallocation
   963 // will be more successful.
   964 void MutableNUMASpace::LGRPSpace::scan_pages(size_t page_size, size_t page_count)
   965 {
-  966   char* range_start = (char*)round_to((intptr_t) space()->bottom(), page_size);
+  966   char* range_start = (char*)align_up(space()->bottom(), page_size);
-  967   char* range_end = (char*)round_down((intptr_t) space()->end(), page_size);
+  967   char* range_end = (char*)align_down(space()->end(), page_size);
   968 
   969   if (range_start > last_page_scanned() || last_page_scanned() >= range_end) {
   970     set_last_page_scanned(range_start);
   971   }
   972 
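
Across every hunk the changeset applies the same substitution: round_to(x, a) becomes align_up(x, a), round_down(x, a) becomes align_down(x, a), and because the new helpers are typed, the (intptr_t)/(HeapWord*) cast pairs visible in the old lines disappear. A self-contained sketch of that equivalence, using hypothetical local helpers rather than HotSpot's own definitions:

#include <cassert>
#include <cstdint>

// Hypothetical local equivalents of the helpers the changeset adopts;
// both assume a power-of-two alignment, as the HotSpot versions assert.
template <typename T>
static T align_down_(T v, uintptr_t a) {
  assert((a & (a - 1)) == 0 && "power-of-two alignment");
  return (T)((uintptr_t)v & ~(a - 1));
}
template <typename T>
static T align_up_(T v, uintptr_t a) {
  return align_down_((T)((uintptr_t)v + a - 1), a);
}

int main() {
  char* p = (char*)0x1234;
  assert((uintptr_t)align_up_(p, 0x1000) == 0x2000);   // old: (char*)round_to((intptr_t)p, 0x1000)
  assert((uintptr_t)align_down_(p, 0x1000) == 0x1000); // old: (char*)round_down((intptr_t)p, 0x1000)
  return 0;
}
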