# HG changeset patch
# User rkennke
# Date 1574776084 -3600
# Node ID d5a1c6545a2bd4baaa36944943f8582ba3f72069
# Parent  ac6f7738a0eef31ca11d9807e461950ee09d8e28
8234768: Shenandoah: Streamline enqueueing runtime barriers
Reviewed-by: zgu

diff -r ac6f7738a0ee -r d5a1c6545a2b src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.cpp
--- a/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.cpp	Tue Nov 26 09:27:16 2019 -0500
+++ b/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.cpp	Tue Nov 26 14:48:04 2019 +0100
@@ -101,36 +101,6 @@
   return (on_weak_ref || unknown) && (keep_alive || is_traversal_mode);
 }
 
-template <class T>
-inline void ShenandoahBarrierSet::inline_write_ref_field_pre(T* field, oop new_val) {
-  shenandoah_assert_not_in_cset_loc_except(field, _heap->cancelled_gc());
-  if (_heap->is_concurrent_mark_in_progress()) {
-    T heap_oop = RawAccess<>::oop_load(field);
-    if (!CompressedOops::is_null(heap_oop)) {
-      enqueue(CompressedOops::decode(heap_oop));
-    }
-  }
-}
-
-// These are the more general virtual versions.
-void ShenandoahBarrierSet::write_ref_field_pre_work(oop* field, oop new_val) {
-  inline_write_ref_field_pre(field, new_val);
-}
-
-void ShenandoahBarrierSet::write_ref_field_pre_work(narrowOop* field, oop new_val) {
-  inline_write_ref_field_pre(field, new_val);
-}
-
-void ShenandoahBarrierSet::write_ref_field_pre_work(void* field, oop new_val) {
-  guarantee(false, "Not needed");
-}
-
-void ShenandoahBarrierSet::write_ref_field_work(void* v, oop o, bool release) {
-  shenandoah_assert_not_in_cset_loc_except(v, _heap->cancelled_gc());
-  shenandoah_assert_not_forwarded_except  (v, o, o == NULL || _heap->cancelled_gc() || !_heap->is_concurrent_mark_in_progress());
-  shenandoah_assert_not_in_cset_except    (v, o, o == NULL || _heap->cancelled_gc() || !_heap->is_concurrent_mark_in_progress());
-}
-
 oop ShenandoahBarrierSet::load_reference_barrier_not_null(oop obj) {
   if (ShenandoahLoadRefBarrier && _heap->has_forwarded_objects()) {
     return load_reference_barrier_impl(obj);
@@ -234,30 +204,6 @@
   }
 }
 
-void ShenandoahBarrierSet::storeval_barrier(oop obj) {
-  if (ShenandoahStoreValEnqueueBarrier && !CompressedOops::is_null(obj) && _heap->is_concurrent_traversal_in_progress()) {
-    enqueue(obj);
-  }
-}
-
-void ShenandoahBarrierSet::keep_alive_barrier(oop obj) {
-  if (ShenandoahKeepAliveBarrier && _heap->is_concurrent_mark_in_progress()) {
-    enqueue(obj);
-  }
-}
-
-void ShenandoahBarrierSet::enqueue(oop obj) {
-  shenandoah_assert_not_forwarded_if(NULL, obj, _heap->is_concurrent_traversal_in_progress());
-  assert(_satb_mark_queue_set.is_active(), "only get here when SATB active");
-
-  // Filter marked objects before hitting the SATB queues. The same predicate would
-  // be used by SATBMQ::filter to eliminate already marked objects downstream, but
-  // filtering here helps to avoid wasteful SATB queueing work to begin with.
-  if (!_heap->requires_marking(obj)) return;
-
-  ShenandoahThreadLocalData::satb_mark_queue(Thread::current()).enqueue_known_active(obj);
-}
-
 void ShenandoahBarrierSet::on_thread_create(Thread* thread) {
   // Create thread local data
   ShenandoahThreadLocalData::create(thread);
diff -r ac6f7738a0ee -r d5a1c6545a2b src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.hpp
--- a/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.hpp	Tue Nov 26 09:27:16 2019 -0500
+++ b/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.hpp	Tue Nov 26 14:48:04 2019 +0100
@@ -77,17 +77,6 @@
   inline void clone_barrier(oop src);
   void clone_barrier_runtime(oop src);
 
-  // We export this to make it available in cases where the static
-  // type of the barrier set is known. Note that it is non-virtual.
-  template <class T> inline void inline_write_ref_field_pre(T* field, oop new_val);
-
-  // These are the more general virtual versions.
-  void write_ref_field_pre_work(oop* field, oop new_val);
-  void write_ref_field_pre_work(narrowOop* field, oop new_val);
-  void write_ref_field_pre_work(void* field, oop new_val);
-
-  void write_ref_field_work(void* v, oop o, bool release = false);
-
   virtual void on_thread_create(Thread* thread);
   virtual void on_thread_destroy(Thread* thread);
   virtual void on_thread_attach(Thread* thread);
@@ -96,8 +85,17 @@
   static inline oop resolve_forwarded_not_null(oop p);
   static inline oop resolve_forwarded(oop p);
 
-  void storeval_barrier(oop obj);
-  void keep_alive_barrier(oop obj);
+  template <DecoratorSet decorators, typename T>
+  inline void satb_barrier(T* field);
+  inline void satb_enqueue(oop value);
+  inline void storeval_barrier(oop obj);
+
+  template <DecoratorSet decorators>
+  inline void keep_alive_if_weak(oop value);
+  inline void keep_alive_if_weak(DecoratorSet decorators, oop value);
+  inline void keep_alive_barrier(oop value);
+
+  inline void enqueue(oop obj);
 
   oop load_reference_barrier(oop obj);
   oop load_reference_barrier_not_null(oop obj);
@@ -111,8 +109,6 @@
   oop load_reference_barrier_native(oop obj, oop* load_addr);
   oop load_reference_barrier_native(oop obj, narrowOop* load_addr);
 
-  void enqueue(oop obj);
-
 private:
   template <class T>
   inline void arraycopy_pre_work(T* src, T* dst, size_t count);
@@ -126,27 +122,12 @@
   template <class T>
   oop load_reference_barrier_native_impl(oop obj, T* load_addr);
 
-  static void keep_alive_if_weak(DecoratorSet decorators, oop value) {
-    assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
-    const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
-    const bool peek              = (decorators & AS_NO_KEEPALIVE) != 0;
-    if (!peek && !on_strong_oop_ref && value != NULL) {
-      ShenandoahBarrierSet::barrier_set()->keep_alive_barrier(value);
-    }
-  }
-
 public:
   // Callbacks for runtime accesses.
   template <DecoratorSet decorators, typename BarrierSetT>
   class AccessBarrier: public BarrierSet::AccessBarrier<decorators, BarrierSetT> {
     typedef BarrierSet::AccessBarrier<decorators, BarrierSetT> Raw;
 
-    template <typename T>
-    static oop oop_atomic_cmpxchg_in_heap_impl(T* addr, oop compare_value, oop new_value);
-
-    template <typename T>
-    static oop oop_atomic_xchg_in_heap_impl(T* addr, oop new_value);
-
   public:
     // Heap oop accesses. These accessors get resolved when
     // IN_HEAP is set (e.g. when using the HeapAccess API), it is
diff -r ac6f7738a0ee -r d5a1c6545a2b src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp
--- a/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp	Tue Nov 26 09:27:16 2019 -0500
+++ b/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp	Tue Nov 26 14:48:04 2019 +0100
@@ -48,41 +48,124 @@
   }
 }
 
+inline void ShenandoahBarrierSet::enqueue(oop obj) {
+  shenandoah_assert_not_forwarded_if(NULL, obj, _heap->is_concurrent_traversal_in_progress());
+  assert(_satb_mark_queue_set.is_active(), "only get here when SATB active");
+
+  // Filter marked objects before hitting the SATB queues. The same predicate would
+  // be used by SATBMQ::filter to eliminate already marked objects downstream, but
+  // filtering here helps to avoid wasteful SATB queueing work to begin with.
+  if (!_heap->requires_marking(obj)) return;
+
+  ShenandoahThreadLocalData::satb_mark_queue(Thread::current()).enqueue_known_active(obj);
+}
+
+template <DecoratorSet decorators, typename T>
+inline void ShenandoahBarrierSet::satb_barrier(T *field) {
+  if (HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value ||
+      HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
+    return;
+  }
+  if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
+    T heap_oop = RawAccess<>::oop_load(field);
+    if (!CompressedOops::is_null(heap_oop)) {
+      enqueue(CompressedOops::decode(heap_oop));
+    }
+  }
+}
+
+inline void ShenandoahBarrierSet::satb_enqueue(oop value) {
+  assert(value != NULL, "checked before");
+  if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
+    enqueue(value);
+  }
+}
+
+inline void ShenandoahBarrierSet::storeval_barrier(oop obj) {
+  if (obj != NULL && ShenandoahStoreValEnqueueBarrier && _heap->is_concurrent_traversal_in_progress()) {
+    enqueue(obj);
+  }
+}
+
+inline void ShenandoahBarrierSet::keep_alive_barrier(oop value) {
+  assert(value != NULL, "checked before");
+  if (ShenandoahKeepAliveBarrier && _heap->is_concurrent_mark_in_progress()) {
+    enqueue(value);
+  }
+}
+
+inline void ShenandoahBarrierSet::keep_alive_if_weak(DecoratorSet decorators, oop value) {
+  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
+  const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
+  const bool peek              = (decorators & AS_NO_KEEPALIVE) != 0;
+  if (!peek && !on_strong_oop_ref) {
+    keep_alive_barrier(value);
+  }
+}
+
+template <DecoratorSet decorators>
+inline void ShenandoahBarrierSet::keep_alive_if_weak(oop value) {
+  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
+  if (!HasDecorator<decorators, ON_STRONG_OOP_REF>::value &&
+      !HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
+    keep_alive_barrier(value);
+  }
+}
+
+template <DecoratorSet decorators, typename BarrierSetT>
+template <typename T>
+inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(T* addr) {
+  oop value = Raw::oop_load_not_in_heap(addr);
+  if (value != NULL) {
+    ShenandoahBarrierSet *const bs = ShenandoahBarrierSet::barrier_set();
+    value = bs->load_reference_barrier_native(value, addr);
+    bs->keep_alive_if_weak<decorators>(value);
+  }
+  return value;
+}
+
 template <DecoratorSet decorators, typename BarrierSetT>
 template <typename T>
 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap(T* addr) {
   oop value = Raw::oop_load_in_heap(addr);
-  value = ShenandoahBarrierSet::barrier_set()->load_reference_barrier(value);
-  keep_alive_if_weak(decorators, value);
+  if (value != NULL) {
+    ShenandoahBarrierSet *const bs = ShenandoahBarrierSet::barrier_set();
+    value = bs->load_reference_barrier_not_null(value);
+    bs->keep_alive_if_weak<decorators>(value);
+  }
   return value;
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap_at(oop base, ptrdiff_t offset) {
   oop value = Raw::oop_load_in_heap_at(base, offset);
-  value = ShenandoahBarrierSet::barrier_set()->load_reference_barrier(value);
-  keep_alive_if_weak(AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset), value);
+  if (value != NULL) {
+    ShenandoahBarrierSet *const bs = ShenandoahBarrierSet::barrier_set();
+    value = bs->load_reference_barrier_not_null(value);
+    bs->keep_alive_if_weak(AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset),
+                           value);
+  }
   return value;
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
 template <typename T>
-inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(T* addr) {
-  oop value = Raw::oop_load_not_in_heap(addr);
-  value = ShenandoahBarrierSet::barrier_set()->load_reference_barrier_native(value, addr);
-  keep_alive_if_weak(decorators, value);
-  return value;
+inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(T* addr, oop value) {
+  shenandoah_assert_marked_if(NULL, value, !CompressedOops::is_null(value) && ShenandoahHeap::heap()->is_evacuation_in_progress());
+  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
+  bs->storeval_barrier(value);
+  bs->satb_barrier<decorators>(addr);
+  Raw::oop_store(addr, value);
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
 template <typename T>
 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap(T* addr, oop value) {
-  ShenandoahBarrierSet::barrier_set()->storeval_barrier(value);
-  const bool keep_alive = (decorators & AS_NO_KEEPALIVE) == 0;
-  if (keep_alive) {
-    ShenandoahBarrierSet::barrier_set()->write_ref_field_pre_work(addr, value);
-  }
-  Raw::oop_store_in_heap(addr, value);
+  shenandoah_assert_not_in_cset_loc_except(addr, ShenandoahHeap::heap()->cancelled_gc());
+  shenandoah_assert_not_forwarded_except  (addr, value, value == NULL || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());
+  shenandoah_assert_not_in_cset_except    (addr, value, value == NULL || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());
+
+  oop_store_not_in_heap(addr, value);
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
@@ -92,14 +175,10 @@
 
 template <DecoratorSet decorators, typename BarrierSetT>
 template <typename T>
-inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(T* addr, oop value) {
-  shenandoah_assert_marked_if(NULL, value, !CompressedOops::is_null(value) && ShenandoahHeap::heap()->is_evacuation_in_progress());
-  Raw::oop_store(addr, value);
-}
+inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_not_in_heap(T* addr, oop compare_value, oop new_value) {
+  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
+  bs->storeval_barrier(new_value);
 
-template <DecoratorSet decorators, typename BarrierSetT>
-template <typename T>
-inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_not_in_heap(T* addr, oop compare_value, oop new_value) {
   oop res;
   oop expected = compare_value;
   do {
@@ -107,79 +186,53 @@
     res = Raw::oop_atomic_cmpxchg(addr, compare_value, new_value);
     expected = res;
   } while ((compare_value != expected) && (resolve_forwarded(compare_value) == resolve_forwarded(expected)));
+
+  // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,
+  // because it must be the previous value.
   if (res != NULL) {
-    return ShenandoahBarrierSet::barrier_set()->load_reference_barrier_not_null(res);
-  } else {
-    return res;
+    res = ShenandoahBarrierSet::barrier_set()->load_reference_barrier_not_null(res);
+    bs->satb_enqueue(res);
   }
-}
-
-template <DecoratorSet decorators, typename BarrierSetT>
-template <typename T>
-inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_impl(T* addr, oop compare_value, oop new_value) {
-  ShenandoahBarrierSet::barrier_set()->storeval_barrier(new_value);
-  oop result = oop_atomic_cmpxchg_not_in_heap(addr, compare_value, new_value);
-  const bool keep_alive = (decorators & AS_NO_KEEPALIVE) == 0;
-  if (keep_alive && ShenandoahSATBBarrier && !CompressedOops::is_null(result) &&
-      (result == compare_value) &&
-      ShenandoahHeap::heap()->is_concurrent_mark_in_progress()) {
-    ShenandoahBarrierSet::barrier_set()->enqueue(result);
-  }
-  return result;
+  return res;
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
 template <typename T>
 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap(T* addr, oop compare_value, oop new_value) {
-  oop result = oop_atomic_cmpxchg_in_heap_impl(addr, compare_value, new_value);
-  keep_alive_if_weak(decorators, result);
-  return result;
+  return oop_atomic_cmpxchg_not_in_heap(addr, compare_value, new_value);
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_at(oop base, ptrdiff_t offset, oop compare_value, oop new_value) {
-  oop result = oop_atomic_cmpxchg_in_heap_impl(AccessInternal::oop_field_addr<decorators>(base, offset), compare_value, new_value);
-  keep_alive_if_weak(AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset), result);
-  return result;
+  return oop_atomic_cmpxchg_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), compare_value, new_value);
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
 template <typename T>
 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_not_in_heap(T* addr, oop new_value) {
+  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
+  bs->storeval_barrier(new_value);
+
   oop previous = Raw::oop_atomic_xchg(addr, new_value);
-  if (previous != NULL) {
-    return ShenandoahBarrierSet::barrier_set()->load_reference_barrier_not_null(previous);
-  } else {
-    return previous;
-  }
-}
 
-template <DecoratorSet decorators, typename BarrierSetT>
-template <typename T>
-inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_impl(T* addr, oop new_value) {
-  ShenandoahBarrierSet::barrier_set()->storeval_barrier(new_value);
-  oop result = oop_atomic_xchg_not_in_heap(addr, new_value);
-  const bool keep_alive = (decorators & AS_NO_KEEPALIVE) == 0;
-  if (keep_alive && ShenandoahSATBBarrier && !CompressedOops::is_null(result) &&
-      ShenandoahHeap::heap()->is_concurrent_mark_in_progress()) {
-    ShenandoahBarrierSet::barrier_set()->enqueue(result);
+  // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,
+  // because it must be the previous value.
+  if (previous != NULL) {
+    previous = ShenandoahBarrierSet::barrier_set()->load_reference_barrier_not_null(previous);
+    bs->satb_enqueue(previous);
   }
-  return result;
+  return previous;
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
 template <typename T>
 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap(T* addr, oop new_value) {
-  oop result = oop_atomic_xchg_in_heap_impl(addr, new_value);
-  keep_alive_if_weak(addr, result);
-  return result;
+  return oop_atomic_xchg_not_in_heap(addr, new_value);
 }
 
 template <DecoratorSet decorators, typename BarrierSetT>
 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
-  oop result = oop_atomic_xchg_in_heap_impl(AccessInternal::oop_field_addr<decorators>(base, offset), new_value);
-  keep_alive_if_weak(AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset), result);
-  return result;
+  return oop_atomic_xchg_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), new_value);
 }
 
 // Clone barrier support
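
Note (not part of the changeset): the sketch below is an illustrative, self-contained C++ mock-up of the store path that the patched oop_store_not_in_heap()/oop_store_in_heap() now share, i.e. storeval barrier on the new value, SATB enqueue of the previous field value, then the raw store. All names in it (FakeOop, concurrent_mark_in_progress, and so on) are hypothetical stand-ins for this sketch only, not HotSpot APIs.

    // Illustrative sketch only -- NOT HotSpot code. Simplified stand-ins so the
    // ordering of the streamlined store barrier can be compiled in isolation.
    #include <cstdio>

    struct FakeOop { void* p; };                           // stand-in for HotSpot's oop
    static bool concurrent_mark_in_progress      = true;   // assumed GC phase flags
    static bool concurrent_traversal_in_progress = false;

    // Stand-in for satb_enqueue(): enqueue the previous value for SATB marking
    // while concurrent mark is running.
    static void satb_enqueue(FakeOop prev) {
      if (prev.p != nullptr && concurrent_mark_in_progress) {
        std::printf("SATB-enqueue previous value %p\n", prev.p);
      }
    }

    // Stand-in for storeval_barrier(): enqueue the new value in traversal mode.
    static void storeval_barrier(FakeOop new_value) {
      if (new_value.p != nullptr && concurrent_traversal_in_progress) {
        std::printf("storeval-enqueue new value %p\n", new_value.p);
      }
    }

    // Mirrors the order in the patched oop_store_not_in_heap():
    //   1. storeval barrier on the new value
    //   2. SATB barrier on the previous field value (satb_barrier(addr))
    //   3. the raw store (Raw::oop_store(addr, value))
    static void oop_store(FakeOop* field, FakeOop new_value) {
      storeval_barrier(new_value);
      satb_enqueue(*field);
      *field = new_value;
    }

    int main() {
      int a = 1, b = 2;
      FakeOop field{&a};
      oop_store(&field, FakeOop{&b});
      return 0;
    }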