8202676: AArch64: Missing enter/leave around barrier leads to infinite loop
Reviewed-by: aph, eosterlund
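
A minimal sketch of the pattern behind the fix, not taken from the patch itself:
on AArch64 any call clobbers LR, so a barrier stub that calls into the runtime
must bracket the call with enter()/leave() to preserve rfp and lr. If the pair
is missing, the stub's real return address is lost and execution keeps
re-entering the barrier code, hence the infinite loop. Assuming a
MacroAssembler* masm and a runtime entry address already loaded into rscratch1:

  __ enter();          // push rfp/lr and set up a frame
  __ blr(rscratch1);   // call into the runtime; blr overwrites lr
  __ leave();          // restore rfp/lr so the stub returns to its real caller
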
/*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "gc/shared/barrierSetAssembler.hpp"
#include "runtime/jniHandles.hpp"
#define __ masm->
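// HotSpot convention: '__' expands to 'masm->', so each '__ <instr>(...)' line
// below emits code through the MacroAssembler passed into these stubs.
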
void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                  Register dst, Address src, Register tmp1, Register tmp_thread) {
  // LR is live. It must be saved around calls.

  bool on_heap = (decorators & IN_HEAP) != 0;
  bool on_root = (decorators & IN_ROOT) != 0;
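
  // IN_HEAP: the value is an oop field in the Java heap and may be stored as a
  // compressed (narrow) oop, so load_heap_oop decodes it as needed.
  // IN_ROOT: the value is a VM/JNI root outside the heap, always a full-width
  // oop, so a plain ldr is enough.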
  switch (type) {
  case T_OBJECT:
  case T_ARRAY: {
    if (on_heap) {
      __ load_heap_oop(dst, src);
    } else {
      assert(on_root, "why else?");
      __ ldr(dst, src);
    }
    break;
  }
  default: Unimplemented();
  }
}

void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   Address dst, Register val, Register tmp1, Register tmp2) {
  bool on_heap = (decorators & IN_HEAP) != 0;
  bool on_root = (decorators & IN_ROOT) != 0;
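
  // tmp1 and tmp2 are unused here; they are scratch registers for GC-specific
  // subclasses that override store_at to emit write barriers.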
  switch (type) {
  case T_OBJECT:
  case T_ARRAY: {
    if (on_heap) {
      __ store_heap_oop(dst, val);
    } else {
      assert(on_root, "why else?");
      __ str(val, dst);
    }
    break;
  }
  default: Unimplemented();
  }
}

void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
                                                        Register obj, Register tmp, Label& slowpath) {
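  // Generic fast path for resolving a jobject from native code: strip the weak
  // tag bit and load the oop directly.  This base implementation never branches
  // to slowpath; collectors that need special handling of jweak references
  // override this method.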
  // If mask changes we need to ensure that the inverse is still encodable as an immediate
  STATIC_ASSERT(JNIHandles::weak_tag_mask == 1);
  __ andr(obj, obj, ~JNIHandles::weak_tag_mask);
  __ ldr(obj, Address(obj, 0));   // *obj
}