685 } |
685 } |
686 |
686 |
687 return JNI_OK; |
687 return JNI_OK; |
688 } |
688 } |
689 |
689 |
|
690 template <class Heap, class Policy> |
|
691 jint Universe::create_heap() { |
|
692 assert(_collectedHeap == NULL, "Heap already created"); |
|
693 Policy* policy = new Policy(); |
|
694 policy->initialize_all(); |
|
695 _collectedHeap = new Heap(policy); |
|
696 return _collectedHeap->initialize(); |
|
697 } |
|
698 |
// Choose the heap base address and oop encoding mode
// when compressed oops are used:
//    Unscaled  - Use 32-bits oops without encoding when
//                NarrowOopHeapBaseMin + heap_size < 4Gb
//    ZeroBased - Use zero based compressed oops with encoding when
//                NarrowOopHeapBaseMin + heap_size < 32Gb
//    HeapBased - Use compressed oops with heap base + encoding.
697 |
706 |
698 jint Universe::initialize_heap() { |
707 jint Universe::initialize_heap() { |
699 |
708 jint status = JNI_ERR; |
|
709 |
|
710 #if !INCLUDE_ALL_GCS |
700 if (UseParallelGC) { |
711 if (UseParallelGC) { |
701 #if INCLUDE_ALL_GCS |
|
702 Universe::_collectedHeap = new ParallelScavengeHeap(); |
|
703 #else // INCLUDE_ALL_GCS |
|
704 fatal("UseParallelGC not supported in this VM."); |
712 fatal("UseParallelGC not supported in this VM."); |
705 #endif // INCLUDE_ALL_GCS |
|
706 |
|
707 } else if (UseG1GC) { |
713 } else if (UseG1GC) { |
708 #if INCLUDE_ALL_GCS |
714 fatal("UseG1GC not supported in this VM."); |
709 G1CollectorPolicyExt* g1p = new G1CollectorPolicyExt(); |
715 } else if (UseConcMarkSweepGC) { |
710 g1p->initialize_all(); |
716 fatal("UseConcMarkSweepGC not supported in this VM."); |
711 G1CollectedHeap* g1h = new G1CollectedHeap(g1p); |
717 } |
712 Universe::_collectedHeap = g1h; |
718 #else |
713 #else // INCLUDE_ALL_GCS |
719 if (UseParallelGC) { |
714 fatal("UseG1GC not supported in java kernel vm."); |
720 status = Universe::create_heap<ParallelScavengeHeap, GenerationSizer>(); |
715 #endif // INCLUDE_ALL_GCS |
721 } else if (UseG1GC) { |
716 |
722 status = Universe::create_heap<G1CollectedHeap, G1CollectorPolicyExt>(); |
717 } else { |
723 } else if (UseConcMarkSweepGC) { |
718 GenCollectorPolicy *gc_policy; |
724 status = Universe::create_heap<GenCollectedHeap, ConcurrentMarkSweepPolicy>(); |
719 |
725 } |
720 if (UseSerialGC) { |
726 #endif |
721 gc_policy = new MarkSweepPolicy(); |
727 else { // UseSerialGC |
722 } else if (UseConcMarkSweepGC) { |
728 // Don't assert that UseSerialGC is set here because there are cases |
723 #if INCLUDE_ALL_GCS |
729 // where no GC it set and we then fall back to using SerialGC. |
724 gc_policy = new ConcurrentMarkSweepPolicy(); |
730 status = Universe::create_heap<GenCollectedHeap, MarkSweepPolicy>(); |
725 #else // INCLUDE_ALL_GCS |
731 } |
726 fatal("UseConcMarkSweepGC not supported in this VM."); |
732 |
727 #endif // INCLUDE_ALL_GCS |
|
728 } else { // default old generation |
|
729 gc_policy = new MarkSweepPolicy(); |
|
730 } |
|
731 gc_policy->initialize_all(); |
|
732 |
|
733 Universe::_collectedHeap = new GenCollectedHeap(gc_policy); |
|
734 } |
|
735 |
|
736 ThreadLocalAllocBuffer::set_max_size(Universe::heap()->max_tlab_size()); |
|
737 |
|
738 jint status = Universe::heap()->initialize(); |
|
739 if (status != JNI_OK) { |
733 if (status != JNI_OK) { |
740 return status; |
734 return status; |
741 } |
735 } |
|
736 |
|
737 ThreadLocalAllocBuffer::set_max_size(Universe::heap()->max_tlab_size()); |
742 |
738 |
743 #ifdef _LP64 |
739 #ifdef _LP64 |
744 if (UseCompressedOops) { |
740 if (UseCompressedOops) { |
745 // Subtract a page because something can get allocated at heap base. |
741 // Subtract a page because something can get allocated at heap base. |
746 // This also makes implicit null checking work, because the |
742 // This also makes implicit null checking work, because the |