hotspot/src/share/vm/memory/universe.cpp
changeset 22828 17ecb098bc1e
parent 22827 07d991d45a51
parent 19546 f6b7c9e96ea3
child 22838 82c7497fbad4
@@ -103,14 +103,13 @@
 oop Universe::_system_thread_group                    = NULL;
 objArrayOop Universe::_the_empty_class_klass_array    = NULL;
 Array<Klass*>* Universe::_the_array_interfaces_array = NULL;
 oop Universe::_the_null_string                        = NULL;
 oop Universe::_the_min_jint_string                   = NULL;
-LatestMethodOopCache* Universe::_finalizer_register_cache = NULL;
-LatestMethodOopCache* Universe::_loader_addClass_cache    = NULL;
-LatestMethodOopCache* Universe::_pd_implies_cache         = NULL;
-ActiveMethodOopsCache* Universe::_reflect_invoke_cache    = NULL;
+LatestMethodCache* Universe::_finalizer_register_cache = NULL;
+LatestMethodCache* Universe::_loader_addClass_cache    = NULL;
+LatestMethodCache* Universe::_pd_implies_cache         = NULL;
 oop Universe::_out_of_memory_error_java_heap          = NULL;
 oop Universe::_out_of_memory_error_metaspace          = NULL;
 oop Universe::_out_of_memory_error_class_metaspace    = NULL;
 oop Universe::_out_of_memory_error_array_size         = NULL;
 oop Universe::_out_of_memory_error_gc_overhead_limit  = NULL;
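
The rename LatestMethodOopCache -> LatestMethodCache (plus the outright removal of
ActiveMethodOopsCache) is the thread running through this changeset. As a reading
aid, a minimal sketch of the surviving cache's shape, reconstructed only from the
uses visible in this diff -- the base class and the serialize() body are
assumptions, not the actual universe.hpp declaration:

    // Sketch only -- not the real declaration in universe.hpp.
    class LatestMethodCache : public CHeapObj<mtClass> {  // base class assumed
      Klass* _klass;        // holder klass; left unset when UseSharedSpaces (see init() below)
      int    _method_idnum; // stable method id; survives RedefineClasses

     public:
      void    init(Klass* k, Method* m);      // no TRAPS -- nothing here can throw anymore
      Method* get_method();                   // re-resolves via method_with_idnum()
      Klass*  klass() const        { return _klass; }
      int     method_idnum() const { return _method_idnum; }
      void    serialize(SerializeClosure* f) { f->do_ptr((void**)&_klass); } // body assumed
    };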
@@ -144,12 +143,10 @@
 
 NarrowPtrStruct Universe::_narrow_oop = { NULL, 0, true };
 NarrowPtrStruct Universe::_narrow_klass = { NULL, 0, true };
 address Universe::_narrow_ptrs_base;
 
-size_t          Universe::_class_metaspace_size;
-
 void Universe::basic_type_classes_do(void f(Klass*)) {
   f(boolArrayKlassObj());
   f(byteArrayKlassObj());
   f(charArrayKlassObj());
   f(intArrayKlassObj());
@@ -223,11 +220,10 @@
   f->do_ptr((void**)&_the_empty_short_array);
   f->do_ptr((void**)&_the_empty_method_array);
   f->do_ptr((void**)&_the_empty_klass_array);
   _finalizer_register_cache->serialize(f);
   _loader_addClass_cache->serialize(f);
-  _reflect_invoke_cache->serialize(f);
   _pd_implies_cache->serialize(f);
 }
 
 void Universe::check_alignment(uintx size, uintx alignment, const char* name) {
   if (size < alignment || size % alignment != 0) {
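
The guard in Universe::check_alignment() is cut off at the hunk boundary, but the
predicate itself is worth spelling out. A standalone restatement (plain C++, not
the VM code, which reports the failure via its name argument and exits):

    #include <cstdint>
    #include <cstdio>

    typedef uintptr_t uintx;  // HotSpot's uintx is pointer-width

    // Same predicate as the hunk above: size must be at least one
    // alignment unit and an exact multiple of it.
    static bool is_misaligned(uintx size, uintx alignment) {
      return size < alignment || size % alignment != 0;
    }

    int main() {
      const uintx K = 1024;
      printf("%d\n", is_misaligned( 64 * K, 128 * K)); // 1: smaller than alignment
      printf("%d\n", is_misaligned(192 * K, 128 * K)); // 1: not a multiple
      printf("%d\n", is_misaligned(256 * K, 128 * K)); // 0: aligned
      return 0;
    }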
@@ -641,20 +637,21 @@
   jint status = Universe::initialize_heap();
   if (status != JNI_OK) {
     return status;
   }
 
+  Metaspace::global_initialize();
+
   // Create memory for metadata.  Must be after initializing heap for
   // DumpSharedSpaces.
   ClassLoaderData::init_null_class_loader_data();
 
   // We have a heap so create the Method* caches before
   // Metaspace::initialize_shared_spaces() tries to populate them.
-  Universe::_finalizer_register_cache = new LatestMethodOopCache();
-  Universe::_loader_addClass_cache    = new LatestMethodOopCache();
-  Universe::_pd_implies_cache         = new LatestMethodOopCache();
-  Universe::_reflect_invoke_cache     = new ActiveMethodOopsCache();
+  Universe::_finalizer_register_cache = new LatestMethodCache();
+  Universe::_loader_addClass_cache    = new LatestMethodCache();
+  Universe::_pd_implies_cache         = new LatestMethodCache();
 
   if (UseSharedSpaces) {
     // Read the data structures supporting the shared spaces (shared
     // system dictionary, symbol table, etc.).  After that, access to
     // the file (other than the mapped regions) is no longer needed, and
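
Context for the inserted call: with class metaspace no longer carved out of the
heap reservation (see reserve_heap() below), metaspace now has to be initialized
explicitly at this point. A condensed restatement of the resulting order, assuming
only what this hunk shows:

    // Order in universe_init() after this change (condensed sketch).
    jint universe_init_order() {
      jint status = Universe::initialize_heap();       // 1. reserve and set up Java heap
      if (status != JNI_OK) return status;
      Metaspace::global_initialize();                  // 2. new: metaspace, incl. class space
      ClassLoaderData::init_null_class_loader_data();  // 3. boot CLD allocates in metaspace
      // 4. create the Method* caches, then map shared spaces if UseSharedSpaces
      return JNI_OK;
    }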
@@ -682,29 +679,31 @@
 // 4Gb
 static const uint64_t NarrowOopHeapMax = (uint64_t(max_juint) + 1);
 // 32Gb
 // OopEncodingHeapMax == NarrowOopHeapMax << LogMinObjAlignmentInBytes;
 
-char* Universe::preferred_heap_base(size_t heap_size, NARROW_OOP_MODE mode) {
+char* Universe::preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode) {
+  assert(is_size_aligned((size_t)OopEncodingHeapMax, alignment), "Must be");
+  assert(is_size_aligned((size_t)NarrowOopHeapMax, alignment), "Must be");
+  assert(is_size_aligned(heap_size, alignment), "Must be");
+
+  uintx heap_base_min_address_aligned = align_size_up(HeapBaseMinAddress, alignment);
+
   size_t base = 0;
 #ifdef _LP64
   if (UseCompressedOops) {
     assert(mode == UnscaledNarrowOop  ||
            mode == ZeroBasedNarrowOop ||
            mode == HeapBasedNarrowOop, "mode is invalid");
-    const size_t total_size = heap_size + HeapBaseMinAddress;
+    const size_t total_size = heap_size + heap_base_min_address_aligned;
     // Return specified base for the first request.
     if (!FLAG_IS_DEFAULT(HeapBaseMinAddress) && (mode == UnscaledNarrowOop)) {
-      base = HeapBaseMinAddress;
+      base = heap_base_min_address_aligned;
 
-    // If the total size and the metaspace size are small enough to allow
-    // UnscaledNarrowOop then just use UnscaledNarrowOop.
-    } else if ((total_size <= OopEncodingHeapMax) && (mode != HeapBasedNarrowOop) &&
-        (!UseCompressedKlassPointers ||
-          (((OopEncodingHeapMax - heap_size) + Universe::class_metaspace_size()) <= KlassEncodingMetaspaceMax))) {
-      // We don't need to check the metaspace size here because it is always smaller
-      // than total_size.
+    // If the total size is small enough to allow UnscaledNarrowOop then
+    // just use UnscaledNarrowOop.
+    } else if ((total_size <= OopEncodingHeapMax) && (mode != HeapBasedNarrowOop)) {
       if ((total_size <= NarrowOopHeapMax) && (mode == UnscaledNarrowOop) &&
           (Universe::narrow_oop_shift() == 0)) {
         // Use 32-bits oops without encoding and
         // place heap's top on the 4Gb boundary
         base = (NarrowOopHeapMax - heap_size);
@@ -717,28 +716,20 @@
           // place heap's top on the 32Gb boundary in case
           // total_size > 4Gb or failed to reserve below 4Gb.
           base = (OopEncodingHeapMax - heap_size);
         }
       }
-
-    // See if ZeroBaseNarrowOop encoding will work for a heap based at
-    // (KlassEncodingMetaspaceMax - class_metaspace_size()).
-    } else if (UseCompressedKlassPointers && (mode != HeapBasedNarrowOop) &&
-        (Universe::class_metaspace_size() + HeapBaseMinAddress <= KlassEncodingMetaspaceMax) &&
-        (KlassEncodingMetaspaceMax + heap_size - Universe::class_metaspace_size() <= OopEncodingHeapMax)) {
-      base = (KlassEncodingMetaspaceMax - Universe::class_metaspace_size());
     } else {
       // UnscaledNarrowOop encoding didn't work, and no base was found for ZeroBasedOops or
       // HeapBasedNarrowOop encoding was requested.  So, can't reserve below 32Gb.
       Universe::set_narrow_oop_shift(LogMinObjAlignmentInBytes);
     }
 
     // Set narrow_oop_base and narrow_oop_use_implicit_null_checks
     // used in ReservedHeapSpace() constructors.
     // The final values will be set in initialize_heap() below.
-    if ((base != 0) && ((base + heap_size) <= OopEncodingHeapMax) &&
-        (!UseCompressedKlassPointers || (base + Universe::class_metaspace_size()) <= KlassEncodingMetaspaceMax)) {
+    if ((base != 0) && ((base + heap_size) <= OopEncodingHeapMax)) {
       // Use zero based compressed oops
       Universe::set_narrow_oop_base(NULL);
       // Don't need guard page for implicit checks in indexed
       // addressing mode with zero based Compressed Oops.
       Universe::set_narrow_oop_use_implicit_null_checks(true);
@@ -755,8 +746,10 @@
       }
 #endif //  _WIN64
     }
   }
 #endif
+
+  assert(is_ptr_aligned((char*)base, alignment), "Must be");
   return (char*)base; // also return NULL (don't care) for 32-bit VM
 }
 
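With the class-metaspace terms gone, the remaining base arithmetic is plain. A
standalone illustration of the two formulas above plus the new HeapBaseMinAddress
rounding (numbers assume the usual 8-byte object alignment, i.e. shift 3; this is
not VM code, and the illustrative values are arbitrary):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint64_t M = 1024 * 1024, G = 1024 * M;
      const uint64_t NarrowOopHeapMax   = 4 * G;   // oops fit in 32 bits unshifted
      const uint64_t OopEncodingHeapMax = 32 * G;  // with shift 3 (8-byte alignment)

      uint64_t heap_size = 2 * G;

      // UnscaledNarrowOop: park the heap so its *top* is the 4Gb boundary;
      // an oop is then a raw 32-bit address (no base, no shift).
      printf("unscaled base:   0x%llx\n",
             (unsigned long long)(NarrowOopHeapMax - heap_size));   // 0x80000000

      // ZeroBasedNarrowOop: top at the 32Gb boundary; an oop is address >> 3,
      // still with a NULL base.
      printf("zero-based base: 0x%llx\n",
             (unsigned long long)(OopEncodingHeapMax - heap_size)); // 0x780000000

      // New in this changeset: HeapBaseMinAddress is rounded *up* to the heap
      // alignment before entering total_size.
      uint64_t min_addr = 5 * M, alignment = 4 * M;
      uint64_t aligned  = (min_addr + alignment - 1) & ~(alignment - 1);
      printf("aligned min:     0x%llx\n", (unsigned long long)aligned); // 0x800000 (8Mb)
      return 0;
    }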
@@ -763,2 +756,2 @@
 jint Universe::initialize_heap() {
 
@@ -817,13 +810,11 @@
     if (verbose) {
       tty->cr();
       tty->print("heap address: " PTR_FORMAT ", size: " SIZE_FORMAT " MB",
                  Universe::heap()->base(), Universe::heap()->reserved_region().byte_size()/M);
     }
-    if (((uint64_t)Universe::heap()->reserved_region().end() > OopEncodingHeapMax) ||
-        (UseCompressedKlassPointers &&
-        ((uint64_t)Universe::heap()->base() + Universe::class_metaspace_size() > KlassEncodingMetaspaceMax))) {
+    if (((uint64_t)Universe::heap()->reserved_region().end() > OopEncodingHeapMax)) {
       // Can't reserve heap below 32Gb.
       // keep the Universe::narrow_oop_base() set in Universe::reserve_heap()
       Universe::set_narrow_oop_shift(LogMinObjAlignmentInBytes);
 #ifdef AIX
       // There is no protected page before the heap. This assures all oops
@@ -855,24 +846,20 @@
         if (verbose) {
           tty->print(", %s", narrow_oop_mode_to_string(UnscaledNarrowOop));
         }
       }
     }
+
     if (verbose) {
       tty->cr();
       tty->cr();
     }
-    if (UseCompressedKlassPointers) {
-      Universe::set_narrow_klass_base(Universe::narrow_oop_base());
-      Universe::set_narrow_klass_shift(MIN2(Universe::narrow_oop_shift(), LogKlassAlignmentInBytes));
-    }
     Universe::set_narrow_ptrs_base(Universe::narrow_oop_base());
   }
-  // Universe::narrow_oop_base() is one page below the metaspace
-  // base. The actual metaspace base depends on alignment constraints
-  // so we don't know its exact location here.
-  assert((intptr_t)Universe::narrow_oop_base() <= (intptr_t)(Universe::heap()->base() - os::vm_page_size() - ClassMetaspaceSize) ||
+  // Universe::narrow_oop_base() is one page below the heap.
+  assert((intptr_t)Universe::narrow_oop_base() <= (intptr_t)(Universe::heap()->base() -
+         os::vm_page_size()) ||
          Universe::narrow_oop_base() == NULL, "invalid value");
   assert(Universe::narrow_oop_shift() == LogMinObjAlignmentInBytes ||
          Universe::narrow_oop_shift() == 0, "invalid value");
 #endif
 
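Two things happen in the hunk above: the narrow-klass base/shift are no longer
derived from the narrow-oop values (class pointers get their own setup now), and
the post-reservation fixup simplifies. A compact restatement of that fixup, keyed
on where the reserved heap actually landed (not VM code; assumes 8-byte object
alignment):

    #include <cstdint>

    // Where the reserved heap *ends* decides the encoding:
    //   end >  32Gb -> heap-based  (keep base from reserve_heap(), shift 3)
    //   end <= 32Gb -> zero-based  (base = NULL, shift 3)
    //   end <=  4Gb -> unscaled    (base = NULL, shift 0)
    struct NarrowOopSetting { const char* mode; bool null_base; int shift; };

    static NarrowOopSetting mode_for_heap_end(uint64_t heap_end) {
      const uint64_t NarrowOopHeapMax   = 4ULL  << 30;
      const uint64_t OopEncodingHeapMax = 32ULL << 30;
      if (heap_end > OopEncodingHeapMax) return NarrowOopSetting{ "HeapBased", false, 3 };
      if (heap_end > NarrowOopHeapMax)   return NarrowOopSetting{ "ZeroBased", true,  3 };
      return NarrowOopSetting{ "Unscaled", true, 0 };
    }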
@@ -888,39 +875,40 @@
 }
 
 
 // Reserve the Java heap, which is now the same for all GCs.
 ReservedSpace Universe::reserve_heap(size_t heap_size, size_t alignment) {
-  // Add in the class metaspace area so the classes in the headers can
-  // be compressed the same as instances.
-  // Need to round class space size up because it's below the heap and
-  // the actual alignment depends on its size.
-  Universe::set_class_metaspace_size(align_size_up(ClassMetaspaceSize, alignment));
-  size_t total_reserved = align_size_up(heap_size + Universe::class_metaspace_size(), alignment);
+  size_t total_reserved = align_size_up(heap_size, alignment);
   assert(!UseCompressedOops || (total_reserved <= (OopEncodingHeapMax - os::vm_page_size())),
       "heap size is too big for compressed oops");
-  char* addr = Universe::preferred_heap_base(total_reserved, Universe::UnscaledNarrowOop);
 
-  ReservedHeapSpace total_rs(total_reserved, alignment, UseLargePages, addr);
+  bool use_large_pages = UseLargePages && is_size_aligned(alignment, os::large_page_size());
+  assert(!UseLargePages
+      || UseParallelOldGC
+      || use_large_pages, "Wrong alignment to use large pages");
+
+  char* addr = Universe::preferred_heap_base(total_reserved, alignment, Universe::UnscaledNarrowOop);
+
+  ReservedHeapSpace total_rs(total_reserved, alignment, use_large_pages, addr);
 
   if (UseCompressedOops) {
     if (addr != NULL && !total_rs.is_reserved()) {
       // Failed to reserve at specified address - the requested memory
       // region is taken already, for example, by 'java' launcher.
       // Try again to reserver heap higher.
-      addr = Universe::preferred_heap_base(total_reserved, Universe::ZeroBasedNarrowOop);
+      addr = Universe::preferred_heap_base(total_reserved, alignment, Universe::ZeroBasedNarrowOop);
 
       ReservedHeapSpace total_rs0(total_reserved, alignment,
-                                  UseLargePages, addr);
+          use_large_pages, addr);
 
       if (addr != NULL && !total_rs0.is_reserved()) {
         // Failed to reserve at specified address again - give up.
-        addr = Universe::preferred_heap_base(total_reserved, Universe::HeapBasedNarrowOop);
+        addr = Universe::preferred_heap_base(total_reserved, alignment, Universe::HeapBasedNarrowOop);
         assert(addr == NULL, "");
 
         ReservedHeapSpace total_rs1(total_reserved, alignment,
-                                    UseLargePages, addr);
+            use_large_pages, addr);
         total_rs = total_rs1;
       } else {
         total_rs = total_rs0;
       }
     }
@@ -928,26 +916,15 @@
 
   if (!total_rs.is_reserved()) {
     vm_exit_during_initialization(err_msg("Could not reserve enough space for " SIZE_FORMAT "KB object heap", total_reserved/K));
     return total_rs;
   }
-
-  // Split the reserved space into main Java heap and a space for
-  // classes so that they can be compressed using the same algorithm
-  // as compressed oops. If compress oops and compress klass ptrs are
-  // used we need the meta space first: if the alignment used for
-  // compressed oops is greater than the one used for compressed klass
-  // ptrs, a metadata space on top of the heap could become
-  // unreachable.
-  ReservedSpace class_rs = total_rs.first_part(Universe::class_metaspace_size());
-  ReservedSpace heap_rs = total_rs.last_part(Universe::class_metaspace_size(), alignment);
-  Metaspace::initialize_class_space(class_rs);
 
   if (UseCompressedOops) {
     // Universe::initialize_heap() will reset this to NULL if unscaled
     // or zero-based narrow oops are actually used.
     address base = (address)(total_rs.base() - os::vm_page_size());
     Universe::set_narrow_oop_base(base);
   }
-  return heap_rs;
+  return total_rs;
 }
 
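reserve_heap() above retries the reservation with progressively weaker encodings.
A condensed sketch of that ladder; preferred_base() and try_reserve() are
hypothetical stand-ins (declarations only) for Universe::preferred_heap_base() and
a successful ReservedHeapSpace construction:

    #include <cstddef>

    enum NarrowOopMode { UnscaledNarrowOop, ZeroBasedNarrowOop, HeapBasedNarrowOop };

    char* preferred_base(size_t size, size_t align, NarrowOopMode mode); // stand-in
    bool  try_reserve(char* addr);                                       // stand-in

    char* reserve_with_fallback(size_t total_reserved, size_t alignment) {
      // 1st choice: unscaled -- heap entirely below 4Gb, oops are raw 32-bit addresses.
      char* addr = preferred_base(total_reserved, alignment, UnscaledNarrowOop);
      if (addr != NULL && !try_reserve(addr)) {
        // 2nd choice: zero-based -- below 32Gb, oop decodes as narrow_oop << shift.
        addr = preferred_base(total_reserved, alignment, ZeroBasedNarrowOop);
        if (addr != NULL && !try_reserve(addr)) {
          // Last resort: heap-based -- preferred_base() returns NULL, the OS picks
          // the address, and decoding needs both base and shift.
          addr = preferred_base(total_reserved, alignment, HeapBasedNarrowOop);
          try_reserve(addr);
        }
      }
      return addr;
    }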
@@ -954,7 +931,7 @@
 
-// It's the caller's repsonsibility to ensure glitch-freedom
+// It's the caller's responsibility to ensure glitch-freedom
 // (if required).
 void Universe::update_heap_info_at_gc() {
   _heap_capacity_at_last_gc = heap()->capacity();
   _heap_used_at_last_gc     = heap()->used();
 }
@@ -1091,39 +1068,25 @@
   InstanceKlass::cast(SystemDictionary::Finalizer_klass())->link_class(CHECK_false);
   Method* m = InstanceKlass::cast(SystemDictionary::Finalizer_klass())->find_method(
                                   vmSymbols::register_method_name(),
                                   vmSymbols::register_method_signature());
   if (m == NULL || !m->is_static()) {
-    THROW_MSG_(vmSymbols::java_lang_NoSuchMethodException(),
-      "java.lang.ref.Finalizer.register", false);
+    tty->print_cr("Unable to link/verify Finalizer.register method");
+    return false; // initialization failed (cannot throw exception yet)
   }
   Universe::_finalizer_register_cache->init(
-    SystemDictionary::Finalizer_klass(), m, CHECK_false);
-
-  // Resolve on first use and initialize class.
-  // Note: No race-condition here, since a resolve will always return the same result
-
-  // Setup method for security checks
-  k = SystemDictionary::resolve_or_fail(vmSymbols::java_lang_reflect_Method(), true, CHECK_false);
-  k_h = instanceKlassHandle(THREAD, k);
-  k_h->link_class(CHECK_false);
-  m = k_h->find_method(vmSymbols::invoke_name(), vmSymbols::object_object_array_object_signature());
-  if (m == NULL || m->is_static()) {
-    THROW_MSG_(vmSymbols::java_lang_NoSuchMethodException(),
-      "java.lang.reflect.Method.invoke", false);
-  }
-  Universe::_reflect_invoke_cache->init(k_h(), m, CHECK_false);
+    SystemDictionary::Finalizer_klass(), m);
 
   // Setup method for registering loaded classes in class loader vector
   InstanceKlass::cast(SystemDictionary::ClassLoader_klass())->link_class(CHECK_false);
   m = InstanceKlass::cast(SystemDictionary::ClassLoader_klass())->find_method(vmSymbols::addClass_name(), vmSymbols::class_void_signature());
   if (m == NULL || m->is_static()) {
-    THROW_MSG_(vmSymbols::java_lang_NoSuchMethodException(),
-      "java.lang.ClassLoader.addClass", false);
+    tty->print_cr("Unable to link/verify ClassLoader.addClass method");
+    return false; // initialization failed (cannot throw exception yet)
   }
   Universe::_loader_addClass_cache->init(
-    SystemDictionary::ClassLoader_klass(), m, CHECK_false);
+    SystemDictionary::ClassLoader_klass(), m);
 
   // Setup method for checking protection domain
   InstanceKlass::cast(SystemDictionary::ProtectionDomain_klass())->link_class(CHECK_false);
   m = InstanceKlass::cast(SystemDictionary::ProtectionDomain_klass())->
             find_method(vmSymbols::impliesCreateAccessControlContext_name(),
@@ -1135,11 +1098,11 @@
       // <init> function before java_lang_Class is linked. Print error and exit.
       tty->print_cr("ProtectionDomain.impliesCreateAccessControlContext() has the wrong linkage");
       return false; // initialization failed
     }
     Universe::_pd_implies_cache->init(
-      SystemDictionary::ProtectionDomain_klass(), m, CHECK_false);;
+      SystemDictionary::ProtectionDomain_klass(), m);;
   }
 
   // The folowing is initializing converter functions for serialization in
   // JVM.cpp. If we clean up the StrictMath code above we may want to find
   // a better solution for this as well.
  1155   // ("weak") refs processing infrastructure initialization
  1118   // ("weak") refs processing infrastructure initialization
  1156   Universe::heap()->post_initialize();
  1119   Universe::heap()->post_initialize();
  1157 
  1120 
  1158   // Initialize performance counters for metaspaces
  1121   // Initialize performance counters for metaspaces
  1159   MetaspaceCounters::initialize_performance_counters();
  1122   MetaspaceCounters::initialize_performance_counters();
       
  1123   CompressedClassSpaceCounters::initialize_performance_counters();
       
  1124 
  1160   MemoryService::add_metaspace_memory_pools();
  1125   MemoryService::add_metaspace_memory_pools();
  1161 
  1126 
  1162   GC_locker::unlock();  // allow gc after bootstrapping
  1127   GC_locker::unlock();  // allow gc after bootstrapping
  1163 
  1128 
  1164   MemoryService::set_universe_heap(Universe::_collectedHeap);
  1129   MemoryService::set_universe_heap(Universe::_collectedHeap);
@@ -1458,11 +1423,11 @@
   verify_mark_mask();
   verify_mark_bits();
 }
 
 
-void CommonMethodOopCache::init(Klass* k, Method* m, TRAPS) {
+void LatestMethodCache::init(Klass* k, Method* m) {
   if (!UseSharedSpaces) {
     _klass = k;
   }
 #ifndef PRODUCT
   else {
@@ -1474,92 +1439,11 @@
   _method_idnum = m->method_idnum();
   assert(_method_idnum >= 0, "sanity check");
 }
 
 
-ActiveMethodOopsCache::~ActiveMethodOopsCache() {
-  if (_prev_methods != NULL) {
-    delete _prev_methods;
-    _prev_methods = NULL;
-  }
-}
-
-
-void ActiveMethodOopsCache::add_previous_version(Method* method) {
-  assert(Thread::current()->is_VM_thread(),
-    "only VMThread can add previous versions");
-
-  // Only append the previous method if it is executing on the stack.
-  if (method->on_stack()) {
-
-    if (_prev_methods == NULL) {
-      // This is the first previous version so make some space.
-      // Start with 2 elements under the assumption that the class
-      // won't be redefined much.
-      _prev_methods = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Method*>(2, true);
-    }
-
-    // RC_TRACE macro has an embedded ResourceMark
-    RC_TRACE(0x00000100,
-      ("add: %s(%s): adding prev version ref for cached method @%d",
-        method->name()->as_C_string(), method->signature()->as_C_string(),
-        _prev_methods->length()));
-
-    _prev_methods->append(method);
-  }
-
-
-  // Since the caller is the VMThread and we are at a safepoint, this is a good
-  // time to clear out unused method references.
-
-  if (_prev_methods == NULL) return;
-
-  for (int i = _prev_methods->length() - 1; i >= 0; i--) {
-    Method* method = _prev_methods->at(i);
-    assert(method != NULL, "weak method ref was unexpectedly cleared");
-
-    if (!method->on_stack()) {
-      // This method isn't running anymore so remove it
-      _prev_methods->remove_at(i);
-      MetadataFactory::free_metadata(method->method_holder()->class_loader_data(), method);
-    } else {
-      // RC_TRACE macro has an embedded ResourceMark
-      RC_TRACE(0x00000400,
-        ("add: %s(%s): previous cached method @%d is alive",
-         method->name()->as_C_string(), method->signature()->as_C_string(), i));
-    }
-  }
-} // end add_previous_version()
-
-
-bool ActiveMethodOopsCache::is_same_method(const Method* method) const {
-  InstanceKlass* ik = InstanceKlass::cast(klass());
-  const Method* check_method = ik->method_with_idnum(method_idnum());
-  assert(check_method != NULL, "sanity check");
-  if (check_method == method) {
-    // done with the easy case
-    return true;
-  }
-
-  if (_prev_methods != NULL) {
-    // The cached method has been redefined at least once so search
-    // the previous versions for a match.
-    for (int i = 0; i < _prev_methods->length(); i++) {
-      check_method = _prev_methods->at(i);
-      if (check_method == method) {
-        // a previous version matches
-        return true;
-      }
-    }
-  }
-
-  // either no previous versions or no previous version matched
-  return false;
-}
-
-
-Method* LatestMethodOopCache::get_Method() {
+Method* LatestMethodCache::get_method() {
   if (klass() == NULL) return NULL;
   InstanceKlass* ik = InstanceKlass::cast(klass());
   Method* m = ik->method_with_idnum(method_idnum());
   assert(m != NULL, "sanity check");
   return m;
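
What survives is deliberately small: the cache keeps a klass plus a method idnum
rather than a Method*, so get_method() always re-resolves to the latest version
even after RedefineClasses -- which is why the bookkeeping deleted above is no
longer needed. A hedged sketch of the accessor idiom built on it (the wrapper
names follow universe.hpp of this vintage, but are assumptions here):

    // Presumed accessors in Universe (sketch, not verified against universe.hpp):
    static Method* finalizer_register_method() {
      return Universe::_finalizer_register_cache->get_method();
    }
    static Method* loader_addClass_method() {
      return Universe::_loader_addClass_cache->get_method();
    }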