--- a/src/hotspot/cpu/x86/templateTable_x86.cpp Fri May 17 09:00:39 2019 +0100
+++ b/src/hotspot/cpu/x86/templateTable_x86.cpp Fri May 17 11:44:44 2019 +0200
@@ -2986,7 +2986,8 @@
__ jcc(Assembler::notEqual, notDouble);
#endif
// dtos
- __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
+ // MO_RELAXED: for a volatile field this adds no extra work in the underlying implementation
+ __ access_load_at(T_DOUBLE, IN_HEAP | MO_RELAXED, noreg /* dtos */, field, noreg, noreg);
__ push(dtos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
@@ -3255,7 +3256,8 @@
{
__ pop(ltos);
if (!is_static) pop_and_check_object(obj);
- __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos*/, noreg, noreg);
+ // MO_RELAXED: generate an atomic store for a volatile field (important for x86_32)
+ __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos*/, noreg, noreg);
#ifdef _LP64
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
@@ -3290,7 +3292,8 @@
{
__ pop(dtos);
if (!is_static) pop_and_check_object(obj);
- __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
+ // MO_RELAXED: for a volatile field this adds no extra work in the underlying implementation
+ __ access_store_at(T_DOUBLE, IN_HEAP | MO_RELAXED, field, noreg /* dtos */, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
}
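
The decorator change above is the substance of the patch: volatile long/double accesses must be atomic, and on x86_32 a plain two-word move of a 64-bit value can tear. MO_RELAXED asks the barrier-set assembler for atomicity without any ordering. The following is a minimal standalone C++ sketch (not HotSpot code; plain_store and relaxed_store are hypothetical names chosen for illustration) of that distinction:

// Sketch only: illustrates atomicity-without-ordering, which is what the
// MO_RELAXED decorator expresses for the interpreter's ltos/dtos accesses.
#include <atomic>
#include <cstdint>
#include <cstdio>

// Non-atomic 64-bit store: on a 32-bit target this may compile to two
// 32-bit writes, so a concurrent reader can observe a torn value.
void plain_store(volatile uint64_t* p, uint64_t v) {
  *p = v;
}

// Relaxed atomic store: no ordering guarantees, but the write is
// indivisible, analogous to what MO_RELAXED requests for a 64-bit field.
void relaxed_store(std::atomic<uint64_t>* p, uint64_t v) {
  p->store(v, std::memory_order_relaxed);
}

int main() {
  std::atomic<uint64_t> a{0};
  relaxed_store(&a, 0x1122334455667788ULL);
  std::printf("%llx\n",
              (unsigned long long)a.load(std::memory_order_relaxed));
  return 0;
}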