--- a/hotspot/make/jprt.gmk Wed Jul 05 17:48:14 2017 +0200
+++ b/hotspot/make/jprt.gmk Sat Jul 23 10:42:20 2011 -0400
@@ -34,13 +34,13 @@
endif
jprt_build_productEmb:
- $(MAKE) JAVASE_EMBEDDED=true jprt_build_product
+ $(MAKE) JAVASE_EMBEDDED=true MINIMIZE_RAM_USAGE=true jprt_build_product
jprt_build_debugEmb:
- $(MAKE) JAVASE_EMBEDDED=true jprt_build_debug
+ $(MAKE) JAVASE_EMBEDDED=true MINIMIZE_RAM_USAGE=true jprt_build_debug
jprt_build_fastdebugEmb:
- $(MAKE) JAVASE_EMBEDDED=true jprt_build_fastdebug
+ $(MAKE) JAVASE_EMBEDDED=true MINIMIZE_RAM_USAGE=true jprt_build_fastdebug
jprt_build_productOpen:
$(MAKE) OPENJDK=true jprt_build_product
--- a/hotspot/src/share/vm/classfile/javaClasses.cpp Wed Jul 05 17:48:14 2017 +0200
+++ b/hotspot/src/share/vm/classfile/javaClasses.cpp Sat Jul 23 10:42:20 2011 -0400
@@ -1019,6 +1019,16 @@
compute_offset(_ngroups_offset, k, vmSymbols::ngroups_name(), vmSymbols::int_signature());
}
+oop java_lang_Throwable::unassigned_stacktrace() {
+ instanceKlass* ik = instanceKlass::cast(SystemDictionary::Throwable_klass());
+ address addr = ik->static_field_addr(static_unassigned_stacktrace_offset);
+ if (UseCompressedOops) {
+ return oopDesc::load_decode_heap_oop((narrowOop *)addr);
+ } else {
+ return oopDesc::load_decode_heap_oop((oop*)addr);
+ }
+}
+
oop java_lang_Throwable::backtrace(oop throwable) {
return throwable->obj_field_acquire(backtrace_offset);
}
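
The new accessor reads a static oop field and has to decode it differently depending on whether compressed oops are enabled. Below is a minimal, self-contained sketch of the idea behind the UseCompressedOops branch; the heap base, shift and type names are illustrative stand-ins, not HotSpot's actual declarations.

    #include <cstdint>
    #include <cstddef>

    typedef uint32_t narrow_oop_t;   // compressed reference: 32-bit offset from the heap base
    typedef void*    oop_t;          // uncompressed reference: a plain machine pointer

    static char*          g_heap_base = NULL;  // assumed narrow-oop base
    static const unsigned g_oop_shift = 3;     // assumed 8-byte object alignment

    // Decode a compressed reference loaded from a field address.
    inline oop_t decode_heap_oop(narrow_oop_t v) {
      return (v == 0) ? NULL : (oop_t)(g_heap_base + ((uintptr_t)v << g_oop_shift));
    }

    // Mirrors the shape of unassigned_stacktrace(): the same field address is read
    // either as a 32-bit compressed slot or as a full pointer slot.
    inline oop_t load_oop_field(void* addr, bool use_compressed_oops) {
      if (use_compressed_oops) {
        return decode_heap_oop(*(narrow_oop_t*)addr);
      } else {
        return *(oop_t*)addr;
      }
    }
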
@@ -1044,9 +1054,13 @@
}
+void java_lang_Throwable::set_stacktrace(oop throwable, oop st_element_array) {
+ throwable->obj_field_put(stackTrace_offset, st_element_array);
+}
+
void java_lang_Throwable::clear_stacktrace(oop throwable) {
assert(JDK_Version::is_gte_jdk14x_version(), "should only be called in >= 1.4");
- throwable->obj_field_put(stackTrace_offset, NULL);
+ set_stacktrace(throwable, NULL);
}
@@ -1340,6 +1354,7 @@
if (JDK_Version::is_gte_jdk14x_version()) {
// New since 1.4, clear lazily constructed Java level stacktrace if
// refilling occurs
+ // This is unnecessary in 1.7+ but harmless
clear_stacktrace(throwable());
}
@@ -1541,6 +1556,15 @@
// Bail-out for deep stacks
if (chunk_count >= max_chunks) break;
}
+
+ // For Java 7+ we support the Throwable immutability protocol defined for Java 7. This support
+ // was missing in 7u0, so 7u0 carries a workaround in the Throwable class. That workaround
+ // can be removed in a JDK that uses this JVM version.
+ if (JDK_Version::is_gte_jdk17x_version()) {
+ java_lang_Throwable::set_stacktrace(throwable(), java_lang_Throwable::unassigned_stacktrace());
+ assert(java_lang_Throwable::unassigned_stacktrace() != NULL, "not initialized");
+ }
+
}
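
For context, the value written here is a shared sentinel array (the Java-side UNASSIGNED_STACK) rather than null, so library code can distinguish "not yet filled in" from "stack trace disabled". A rough C++ sketch of that three-state protocol follows; the names are hypothetical stand-ins for the Java-side fields, not the JDK's code.

    #include <vector>

    struct StackTraceElementSketch { int bci; };

    // One shared, immutable sentinel meaning "stack trace not yet computed".
    static const std::vector<StackTraceElementSketch> UNASSIGNED_STACK;

    struct ThrowableSketch {
      // Three states: &UNASSIGNED_STACK (fillable), null (traces disabled), other (already filled).
      const std::vector<StackTraceElementSketch>* stack_trace;

      ThrowableSketch() : stack_trace(&UNASSIGNED_STACK) {}

      bool may_fill_in() const { return stack_trace == &UNASSIGNED_STACK; }

      void fill_in(const std::vector<StackTraceElementSketch>* computed) {
        if (may_fill_in()) {
          stack_trace = computed;   // filled in at most once; treated as immutable afterwards
        }
      }
    };
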
@@ -2770,6 +2794,7 @@
int java_lang_Throwable::detailMessage_offset;
int java_lang_Throwable::cause_offset;
int java_lang_Throwable::stackTrace_offset;
+int java_lang_Throwable::static_unassigned_stacktrace_offset;
int java_lang_reflect_AccessibleObject::override_offset;
int java_lang_reflect_Method::clazz_offset;
int java_lang_reflect_Method::name_offset;
@@ -2947,6 +2972,7 @@
java_lang_Throwable::detailMessage_offset = java_lang_Throwable::hc_detailMessage_offset * x + header;
java_lang_Throwable::cause_offset = java_lang_Throwable::hc_cause_offset * x + header;
java_lang_Throwable::stackTrace_offset = java_lang_Throwable::hc_stackTrace_offset * x + header;
+ java_lang_Throwable::static_unassigned_stacktrace_offset = java_lang_Throwable::hc_static_unassigned_stacktrace_offset * x;
// java_lang_boxing_object
java_lang_boxing_object::value_offset = java_lang_boxing_object::hc_value_offset + header;
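
Note the asymmetry in the offset computation: instance field offsets are scaled by the oop size and biased by the object header, while the new static field offset is only scaled, because it is later resolved against instanceKlass::static_field_addr(). A small illustration of the two formulas (the function names are made up for the sketch):

    // x = heap oop size in bytes, header = object header size in bytes
    inline int instance_field_offset_sketch(int hc_index, int x, int header) {
      return hc_index * x + header;   // e.g. stackTrace_offset above
    }

    inline int static_field_offset_sketch(int hc_index, int x) {
      return hc_index * x;            // e.g. static_unassigned_stacktrace_offset above
    }
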
--- a/hotspot/src/share/vm/classfile/javaClasses.hpp Wed Jul 05 17:48:14 2017 +0200
+++ b/hotspot/src/share/vm/classfile/javaClasses.hpp Sat Jul 23 10:42:20 2011 -0400
@@ -393,6 +393,9 @@
hc_cause_offset = 2, // New since 1.4
hc_stackTrace_offset = 3 // New since 1.4
};
+ enum {
+ hc_static_unassigned_stacktrace_offset = 0 // New since 1.7
+ };
// Trace constants
enum {
trace_methods_offset = 0,
@@ -406,6 +409,7 @@
static int detailMessage_offset;
static int cause_offset;
static int stackTrace_offset;
+ static int static_unassigned_stacktrace_offset;
// Printing
static char* print_stack_element_to_buffer(methodOop method, int bci);
@@ -414,6 +418,9 @@
static void clear_stacktrace(oop throwable);
// No stack trace available
static const char* no_stack_trace_message();
+ // Stacktrace (post JDK 1.7.0 to allow immutability protocol to be followed)
+ static void set_stacktrace(oop throwable, oop st_element_array);
+ static oop unassigned_stacktrace();
public:
// Backtrace
@@ -438,7 +445,6 @@
static void allocate_backtrace(Handle throwable, TRAPS);
// Fill in current stack trace for throwable with preallocated backtrace (no GC)
static void fill_in_stack_trace_of_preallocated_backtrace(Handle throwable);
-
// Fill in current stack trace, can cause GC
static void fill_in_stack_trace(Handle throwable, methodHandle method, TRAPS);
static void fill_in_stack_trace(Handle throwable, methodHandle method = methodHandle());
--- a/hotspot/src/share/vm/prims/jvmtiTagMap.cpp Wed Jul 05 17:48:14 2017 +0200
+++ b/hotspot/src/share/vm/prims/jvmtiTagMap.cpp Sat Jul 23 10:42:20 2011 -0400
@@ -1647,6 +1647,7 @@
// saved headers
static GrowableArray<oop>* _saved_oop_stack;
static GrowableArray<markOop>* _saved_mark_stack;
+ static bool _needs_reset; // do we need to reset mark bits?
public:
static void init(); // initialize
@@ -1654,10 +1655,14 @@
static inline void mark(oop o); // mark an object
static inline bool visited(oop o); // check if object has been visited
+
+ static inline bool needs_reset() { return _needs_reset; }
+ static inline void set_needs_reset(bool v) { _needs_reset = v; }
};
GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
+bool ObjectMarker::_needs_reset = true; // need to reset mark bits by default
// initialize ObjectMarker - prepares for object marking
void ObjectMarker::init() {
@@ -1680,7 +1685,13 @@
// iterate over all objects and restore the mark bits to
// their initial value
RestoreMarksClosure blk;
- Universe::heap()->object_iterate(&blk);
+ if (needs_reset()) {
+ Universe::heap()->object_iterate(&blk);
+ } else {
+ // We don't need to reset mark bits on this call, but reset the
+ // flag to the default for the next call.
+ set_needs_reset(true);
+ }
// When sharing is enabled we need to restore the headers of the objects
// in the readwrite space too.
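
The _needs_reset flag lets ObjectMarker::done() skip a full-heap iteration when the walk bailed out before any object headers were displaced. A condensed, stand-alone sketch of that handshake, with class and method names simplified from the patch:

    class ObjectMarkerSketch {
      static bool _needs_reset;
     public:
      static bool needs_reset()           { return _needs_reset; }
      static void set_needs_reset(bool v) { _needs_reset = v; }

      static void done() {
        if (needs_reset()) {
          // ... iterate the whole heap and restore displaced mark words ...
        } else {
          // Nothing was marked this cycle; just restore the default so the
          // next heap walk resets unless it too proves it doesn't need to.
          set_needs_reset(true);
        }
      }
    };

    bool ObjectMarkerSketch::_needs_reset = true;  // reset by default, as in the patch
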
@@ -3023,7 +3034,8 @@
}
-// collects all simple (non-stack) roots.
+// Collects all simple (non-stack) roots except for threads;
+// threads are handled in collect_stack_roots() as an optimization.
// if there's a heap root callback provided then the callback is
// invoked for each simple root.
// if an object reference callback is provided then all simple
@@ -3054,16 +3066,7 @@
return false;
}
- // Threads
- for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
- oop threadObj = thread->threadObj();
- if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
- bool cont = CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD, threadObj);
- if (!cont) {
- return false;
- }
- }
- }
+ // threads are now handled in collect_stack_roots()
// Other kinds of roots maintained by HotSpot
// Many of these won't be visible but others (such as instances of important
@@ -3175,13 +3178,20 @@
}
-// collects all stack roots - for each thread it walks the execution
+// Collects the simple roots for all threads and collects all
+// stack roots - for each thread it walks the execution
// stack to find all references and local JNI refs.
inline bool VM_HeapWalkOperation::collect_stack_roots() {
JNILocalRootsClosure blk;
for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
oop threadObj = thread->threadObj();
if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
+ // Collect the simple root for this thread before we
+ // collect its stack roots
+ if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
+ threadObj)) {
+ return false;
+ }
if (!collect_stack_roots(thread, &blk)) {
return false;
}
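
With thread roots folded into this pass, an agent that only cares about references rooted in thread stacks receives everything it needs up front and can abort the rest of the walk. A hedged sketch of such a JVMTI FollowReferences callback (agent-side code, not part of this patch; error handling omitted):

    #include <jvmti.h>

    // Heap-reference callback for jvmtiHeapCallbacks.heap_reference_callback.
    // It tags stack-rooted objects and aborts the traversal as soon as a
    // non-stack root kind shows up, which - with the new root ordering -
    // means all stack roots have already been delivered.
    static jint JNICALL stackRootsOnlyCallback(jvmtiHeapReferenceKind kind,
                                               const jvmtiHeapReferenceInfo* info,
                                               jlong class_tag,
                                               jlong referrer_class_tag,
                                               jlong size,
                                               jlong* tag_ptr,
                                               jlong* referrer_tag_ptr,
                                               jint length,
                                               void* user_data) {
      switch (kind) {
        case JVMTI_HEAP_REFERENCE_THREAD:
        case JVMTI_HEAP_REFERENCE_STACK_LOCAL:
        case JVMTI_HEAP_REFERENCE_JNI_LOCAL:
          *tag_ptr = 1;               // tag the referenced object for later lookup
          return 0;                   // don't follow its outgoing references
        default:
          return JVMTI_VISIT_ABORT;   // stack roots are exhausted; stop the walk
      }
    }
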
@@ -3235,8 +3245,20 @@
// the heap walk starts with an initial object or the heap roots
if (initial_object().is_null()) {
+ // If either collect_stack_roots() or collect_simple_roots()
+ // returns false at this point, then there are no mark bits
+ // to reset.
+ ObjectMarker::set_needs_reset(false);
+
+ // Calling collect_stack_roots() before collect_simple_roots()
+ // can result in a big performance boost for an agent that is
+ // focused on analyzing references in the thread stacks.
+ if (!collect_stack_roots()) return;
+
if (!collect_simple_roots()) return;
- if (!collect_stack_roots()) return;
+
+ // no early return so enable heap traversal to reset the mark bits
+ ObjectMarker::set_needs_reset(true);
} else {
visit_stack()->push(initial_object()());
}
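
Pulling the pieces together, the revised root-collection order in doit() interacts with the needs_reset flag as sketched below. This is control flow only; the helpers are placeholders for the VM_HeapWalkOperation members and ObjectMarker used in the patch.

    // Minimal stand-ins so the control flow compiles on its own.
    static bool marker_needs_reset = true;
    static bool collect_stack_roots_sketch()  { return true; }  // placeholder phase
    static bool collect_simple_roots_sketch() { return true; }  // placeholder phase

    // Condensed view of the revised root-collection order.
    static void walk_roots_sketch(bool have_initial_object) {
      if (!have_initial_object) {
        // Any early return below means nothing was marked, so the expensive
        // full-heap mark-bit restore can be skipped by ObjectMarker::done().
        marker_needs_reset = false;

        // Stack roots first: cheaper early termination for stack-focused agents.
        if (!collect_stack_roots_sketch())  return;
        if (!collect_simple_roots_sketch()) return;

        // Both phases completed, so a full reset is needed after traversal.
        marker_needs_reset = true;
      }
      // ... push the initial object / iterate the visit stack ...
    }
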