# HG changeset patch
# User kbarrett
# Date 1496704067 14400
# Node ID cbcc0ebaa04456ad9a552431c5eedc193b0f64fa
# Parent 86b13b03a053ee53d6bd617a56d65a4477f96c49
8166651: OrderAccess::load_acquire &etc should have const parameters
Summary: Added const qualifiers to load/load_acquire source parameters.
Reviewed-by: dholmes, coleenp, adinn, eosterlund

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/aix_ppc/vm/atomic_aix_ppc.hpp
--- a/hotspot/src/os_cpu/aix_ppc/vm/atomic_aix_ppc.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/aix_ppc/vm/atomic_aix_ppc.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2014 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -46,7 +46,7 @@
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; }
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
//
// machine barrier instructions:

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/aix_ppc/vm/orderAccess_aix_ppc.inline.hpp
--- a/hotspot/src/os_cpu/aix_ppc/vm/orderAccess_aix_ppc.inline.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/aix_ppc/vm/orderAccess_aix_ppc.inline.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2014 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -78,10 +78,10 @@
inline void OrderAccess::release() { inlasm_lwsync(); }
inline void OrderAccess::fence() { inlasm_sync(); }
-template<> inline jbyte OrderAccess::specialized_load_acquire (volatile jbyte* p) { register jbyte t = load(p); inlasm_acquire_reg(t); return t; }
-template<> inline jshort OrderAccess::specialized_load_acquire(volatile jshort* p) { register jshort t = load(p); inlasm_acquire_reg(t); return t; }
-template<> inline jint OrderAccess::specialized_load_acquire (volatile jint* p) { register jint t = load(p); inlasm_acquire_reg(t); return t; }
-template<> inline jlong OrderAccess::specialized_load_acquire (volatile jlong* p) { register jlong t = load(p); inlasm_acquire_reg(t); return t; }
+template<> inline jbyte OrderAccess::specialized_load_acquire (const volatile jbyte* p) { register jbyte t = load(p); inlasm_acquire_reg(t); return t; }
+template<> inline jshort OrderAccess::specialized_load_acquire(const volatile jshort* p) { register jshort t = load(p); inlasm_acquire_reg(t); return t; }
+template<> inline jint OrderAccess::specialized_load_acquire (const volatile jint* p) { register jint t = load(p); inlasm_acquire_reg(t); return t; }
+template<> inline jlong OrderAccess::specialized_load_acquire (const volatile jlong* p) { register jlong t = load(p); inlasm_acquire_reg(t); return t; }
#undef inlasm_sync
#undef inlasm_lwsync

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/bsd_x86/vm/atomic_bsd_x86.hpp
--- a/hotspot/src/os_cpu/bsd_x86/vm/atomic_bsd_x86.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/bsd_x86/vm/atomic_bsd_x86.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -153,7 +153,7 @@
return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
}
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
#else // !AMD64
@@ -181,7 +181,7 @@
extern "C" {
// defined in bsd_x86.s
jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
- void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
+ void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
}
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
@@ -196,7 +196,7 @@
return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}
-inline jlong Atomic::load(volatile jlong* src) {
+inline jlong Atomic::load(const volatile jlong* src) {
volatile jlong dest;
_Atomic_move_long(src, &dest);
return dest;

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/bsd_x86/vm/bsd_x86_32.s
--- a/hotspot/src/os_cpu/bsd_x86/vm/bsd_x86_32.s Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/bsd_x86/vm/bsd_x86_32.s Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2004, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2004, 2017, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -659,7 +659,7 @@
# Support for jlong Atomic::load and Atomic::store.
- # void _Atomic_move_long(volatile jlong* src, volatile jlong* dst)
+ # void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst)
.p2align 4,,15
ELF_TYPE(_Atomic_move_long,@function)
SYMBOL(_Atomic_move_long):

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/bsd_zero/vm/atomic_bsd_zero.hpp
--- a/hotspot/src/os_cpu/bsd_zero/vm/atomic_bsd_zero.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/bsd_zero/vm/atomic_bsd_zero.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright 2007, 2008, 2011, 2015, Red Hat, Inc.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -316,7 +316,7 @@
order);
}
-inline jlong Atomic::load(volatile jlong* src) {
+inline jlong Atomic::load(const volatile jlong* src) {
volatile jlong dest;
os::atomic_copy64(src, &dest);
return dest;

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/bsd_zero/vm/os_bsd_zero.hpp
--- a/hotspot/src/os_cpu/bsd_zero/vm/os_bsd_zero.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/bsd_zero/vm/os_bsd_zero.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright 2007, 2008, 2010 Red Hat, Inc.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -35,7 +35,7 @@
static bool register_code_area(char *low, char *high) { return true; }
// Atomically copy 64 bits of data
- static void atomic_copy64(volatile void *src, volatile void *dst) {
+ static void atomic_copy64(const volatile void *src, volatile void *dst) {
#if defined(PPC32)
double tmp;
asm volatile ("lfd %0, 0(%1)\n"
@@ -49,7 +49,7 @@
: "=r"(tmp)
: "a"(src), "a"(dst));
#else
- *(jlong *) dst = *(jlong *) src;
+ *(jlong *) dst = *(const jlong *) src;
#endif
}

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_aarch64/vm/atomic_linux_aarch64.hpp
--- a/hotspot/src/os_cpu/linux_aarch64/vm/atomic_linux_aarch64.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_aarch64/vm/atomic_linux_aarch64.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -157,6 +157,6 @@
order);
}
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
#endif // OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_HPP

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_aarch64/vm/orderAccess_linux_aarch64.inline.hpp
--- a/hotspot/src/os_cpu/linux_aarch64/vm/orderAccess_linux_aarch64.inline.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_aarch64/vm/orderAccess_linux_aarch64.inline.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -50,30 +50,28 @@
FULL_MEM_BARRIER;
}
-inline jbyte OrderAccess::load_acquire(volatile jbyte* p)
+inline jbyte OrderAccess::load_acquire(const volatile jbyte* p)
{ jbyte data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline jshort OrderAccess::load_acquire(volatile jshort* p)
+inline jshort OrderAccess::load_acquire(const volatile jshort* p)
{ jshort data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline jint OrderAccess::load_acquire(volatile jint* p)
+inline jint OrderAccess::load_acquire(const volatile jint* p)
{ jint data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline jlong OrderAccess::load_acquire(volatile jlong* p)
+inline jlong OrderAccess::load_acquire(const volatile jlong* p)
{ jlong data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline jubyte OrderAccess::load_acquire(volatile jubyte* p)
+inline jubyte OrderAccess::load_acquire(const volatile jubyte* p)
{ jubyte data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline jushort OrderAccess::load_acquire(volatile jushort* p)
+inline jushort OrderAccess::load_acquire(const volatile jushort* p)
{ jushort data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline juint OrderAccess::load_acquire(volatile juint* p)
+inline juint OrderAccess::load_acquire(const volatile juint* p)
{ juint data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline julong OrderAccess::load_acquire(volatile julong* p)
+inline julong OrderAccess::load_acquire(const volatile julong* p)
{ julong data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline jfloat OrderAccess::load_acquire(volatile jfloat* p)
+inline jfloat OrderAccess::load_acquire(const volatile jfloat* p)
{ jfloat data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline jdouble OrderAccess::load_acquire(volatile jdouble* p)
+inline jdouble OrderAccess::load_acquire(const volatile jdouble* p)
{ jdouble data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline intptr_t OrderAccess::load_ptr_acquire(volatile intptr_t* p)
+inline intptr_t OrderAccess::load_ptr_acquire(const volatile intptr_t* p)
{ intptr_t data; __atomic_load(p, &data, __ATOMIC_ACQUIRE); return data; }
-inline void* OrderAccess::load_ptr_acquire(volatile void* p)
-{ void* data; __atomic_load((void* volatile *)p, &data, __ATOMIC_ACQUIRE); return data; }
inline void* OrderAccess::load_ptr_acquire(const volatile void* p)
{ void* data; __atomic_load((void* const volatile *)p, &data, __ATOMIC_ACQUIRE); return data; }

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_aarch64/vm/os_linux_aarch64.hpp
--- a/hotspot/src/os_cpu/linux_aarch64/vm/os_linux_aarch64.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_aarch64/vm/os_linux_aarch64.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -38,8 +38,8 @@
static bool register_code_area(char *low, char *high) { return true; }
// Atomically copy 64 bits of data
- static void atomic_copy64(volatile void *src, volatile void *dst) {
- *(jlong *) dst = *(jlong *) src;
+ static void atomic_copy64(const volatile void *src, volatile void *dst) {
+ *(jlong *) dst = *(const jlong *) src;
}
#endif // OS_CPU_LINUX_AARCH64_VM_OS_LINUX_AARCH64_HPP

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_arm/vm/atomic_linux_arm.hpp
--- a/hotspot/src/os_cpu/linux_arm/vm/atomic_linux_arm.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_arm/vm/atomic_linux_arm.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -56,7 +56,7 @@
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; }
-inline jlong Atomic::load (volatile jlong* src) {
+inline jlong Atomic::load (const volatile jlong* src) {
assert(((intx)src & (sizeof(jlong)-1)) == 0, "Atomic load jlong mis-aligned");
#ifdef AARCH64
return *src;

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_arm/vm/orderAccess_linux_arm.inline.hpp
--- a/hotspot/src/os_cpu/linux_arm/vm/orderAccess_linux_arm.inline.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_arm/vm/orderAccess_linux_arm.inline.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -131,7 +131,7 @@
#ifdef AARCH64
-template<> inline jbyte OrderAccess::specialized_load_acquire(volatile jbyte* p) {
+template<> inline jbyte OrderAccess::specialized_load_acquire(const volatile jbyte* p) {
volatile jbyte result;
__asm__ volatile(
"ldarb %w[res], [%[ptr]]"
@@ -141,7 +141,7 @@
return result;
}
-template<> inline jshort OrderAccess::specialized_load_acquire(volatile jshort* p) {
+template<> inline jshort OrderAccess::specialized_load_acquire(const volatile jshort* p) {
volatile jshort result;
__asm__ volatile(
"ldarh %w[res], [%[ptr]]"
@@ -151,7 +151,7 @@
return result;
}
-template<> inline jint OrderAccess::specialized_load_acquire(volatile jint* p) {
+template<> inline jint OrderAccess::specialized_load_acquire(const volatile jint* p) {
volatile jint result;
__asm__ volatile(
"ldar %w[res], [%[ptr]]"
@@ -161,16 +161,16 @@
return result;
}
-template<> inline jfloat OrderAccess::specialized_load_acquire(volatile jfloat* p) {
- return jfloat_cast(specialized_load_acquire((volatile jint*)p));
+template<> inline jfloat OrderAccess::specialized_load_acquire(const volatile jfloat* p) {
+ return jfloat_cast(specialized_load_acquire((const volatile jint*)p));
}
// This is implicit as jlong and intptr_t are both "long int"
-//template<> inline jlong OrderAccess::specialized_load_acquire(volatile jlong* p) {
-// return (volatile jlong)specialized_load_acquire((volatile intptr_t*)p);
+//template<> inline jlong OrderAccess::specialized_load_acquire(const volatile jlong* p) {
+// return (volatile jlong)specialized_load_acquire((const volatile intptr_t*)p);
//}
-template<> inline intptr_t OrderAccess::specialized_load_acquire(volatile intptr_t* p) {
+template<> inline intptr_t OrderAccess::specialized_load_acquire(const volatile intptr_t* p) {
volatile intptr_t result;
__asm__ volatile(
"ldar %[res], [%[ptr]]"
@@ -180,8 +180,8 @@
return result;
}
-template<> inline jdouble OrderAccess::specialized_load_acquire(volatile jdouble* p) {
- return jdouble_cast(specialized_load_acquire((volatile intptr_t*)p));
+template<> inline jdouble OrderAccess::specialized_load_acquire(const volatile jdouble* p) {
+ return jdouble_cast(specialized_load_acquire((const volatile intptr_t*)p));
}

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_arm/vm/os_linux_arm.cpp
--- a/hotspot/src/os_cpu/linux_arm/vm/os_linux_arm.cpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_arm/vm/os_linux_arm.cpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -618,11 +618,11 @@
*dest = exchange_value;
return old_value;
}
-typedef jlong load_long_func_t(volatile jlong*);
+typedef jlong load_long_func_t(const volatile jlong*);
load_long_func_t* os::atomic_load_long_func = os::atomic_load_long_bootstrap;
-jlong os::atomic_load_long_bootstrap(volatile jlong* src) {
+jlong os::atomic_load_long_bootstrap(const volatile jlong* src) {
// try to use the stub:
load_long_func_t* func = CAST_TO_FN_PTR(load_long_func_t*, StubRoutines::atomic_load_long_entry());

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_arm/vm/os_linux_arm.hpp
--- a/hotspot/src/os_cpu/linux_arm/vm/os_linux_arm.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_arm/vm/os_linux_arm.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008, 2015, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -49,7 +49,7 @@
jlong exchange_value, volatile jlong *dest);
- static jlong (*atomic_load_long_func)(volatile jlong*);
+ static jlong (*atomic_load_long_func)(const volatile jlong*);
static void (*atomic_store_long_func)(jlong, volatile jlong*);
@@ -63,7 +63,7 @@
static jlong atomic_cmpxchg_long_bootstrap(jlong, jlong, volatile jlong*);
- static jlong atomic_load_long_bootstrap(volatile jlong*);
+ static jlong atomic_load_long_bootstrap(const volatile jlong*);
static void atomic_store_long_bootstrap(jlong, volatile jlong*);

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_ppc/vm/atomic_linux_ppc.hpp
--- a/hotspot/src/os_cpu/linux_ppc/vm/atomic_linux_ppc.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_ppc/vm/atomic_linux_ppc.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2014 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -46,7 +46,7 @@
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; }
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
//
// machine barrier instructions:

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_ppc/vm/orderAccess_linux_ppc.inline.hpp
--- a/hotspot/src/os_cpu/linux_ppc/vm/orderAccess_linux_ppc.inline.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_ppc/vm/orderAccess_linux_ppc.inline.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2014 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -80,10 +80,10 @@
inline void OrderAccess::release() { inlasm_lwsync(); }
inline void OrderAccess::fence() { inlasm_sync(); }
-template<> inline jbyte OrderAccess::specialized_load_acquire (volatile jbyte* p) { register jbyte t = load(p); inlasm_acquire_reg(t); return t; }
-template<> inline jshort OrderAccess::specialized_load_acquire(volatile jshort* p) { register jshort t = load(p); inlasm_acquire_reg(t); return t; }
-template<> inline jint OrderAccess::specialized_load_acquire (volatile jint* p) { register jint t = load(p); inlasm_acquire_reg(t); return t; }
-template<> inline jlong OrderAccess::specialized_load_acquire (volatile jlong* p) { register jlong t = load(p); inlasm_acquire_reg(t); return t; }
+template<> inline jbyte OrderAccess::specialized_load_acquire (const volatile jbyte* p) { register jbyte t = load(p); inlasm_acquire_reg(t); return t; }
+template<> inline jshort OrderAccess::specialized_load_acquire(const volatile jshort* p) { register jshort t = load(p); inlasm_acquire_reg(t); return t; }
+template<> inline jint OrderAccess::specialized_load_acquire (const volatile jint* p) { register jint t = load(p); inlasm_acquire_reg(t); return t; }
+template<> inline jlong OrderAccess::specialized_load_acquire (const volatile jlong* p) { register jlong t = load(p); inlasm_acquire_reg(t); return t; }
#undef inlasm_sync
#undef inlasm_lwsync

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_s390/vm/atomic_linux_s390.hpp
--- a/hotspot/src/os_cpu/linux_s390/vm/atomic_linux_s390.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_s390/vm/atomic_linux_s390.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -522,6 +522,6 @@
return (intptr_t)cmpxchg((jlong)xchg_val, (volatile jlong*)dest, (jlong)cmp_val, unused);
}
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
#endif // OS_CPU_LINUX_S390_VM_ATOMIC_LINUX_S390_INLINE_HPP

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_s390/vm/orderAccess_linux_s390.inline.hpp
--- a/hotspot/src/os_cpu/linux_s390/vm/orderAccess_linux_s390.inline.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_s390/vm/orderAccess_linux_s390.inline.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -74,10 +74,10 @@
inline void OrderAccess::release() { inlasm_zarch_release(); }
inline void OrderAccess::fence() { inlasm_zarch_sync(); }
-template<> inline jbyte OrderAccess::specialized_load_acquire (volatile jbyte* p) { register jbyte t = *p; inlasm_zarch_acquire(); return t; }
-template<> inline jshort OrderAccess::specialized_load_acquire(volatile jshort* p) { register jshort t = *p; inlasm_zarch_acquire(); return t; }
-template<> inline jint OrderAccess::specialized_load_acquire (volatile jint* p) { register jint t = *p; inlasm_zarch_acquire(); return t; }
-template<> inline jlong OrderAccess::specialized_load_acquire (volatile jlong* p) { register jlong t = *p; inlasm_zarch_acquire(); return t; }
+template<> inline jbyte OrderAccess::specialized_load_acquire (const volatile jbyte* p) { register jbyte t = *p; inlasm_zarch_acquire(); return t; }
+template<> inline jshort OrderAccess::specialized_load_acquire(const volatile jshort* p) { register jshort t = *p; inlasm_zarch_acquire(); return t; }
+template<> inline jint OrderAccess::specialized_load_acquire (const volatile jint* p) { register jint t = *p; inlasm_zarch_acquire(); return t; }
+template<> inline jlong OrderAccess::specialized_load_acquire (const volatile jlong* p) { register jlong t = *p; inlasm_zarch_acquire(); return t; }
#undef inlasm_compiler_barrier
#undef inlasm_zarch_sync

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_sparc/vm/atomic_linux_sparc.inline.hpp
--- a/hotspot/src/os_cpu/linux_sparc/vm/atomic_linux_sparc.inline.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_sparc/vm/atomic_linux_sparc.inline.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -49,7 +49,7 @@
inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); }
inline void Atomic::dec_ptr(volatile void* dest) { (void)add_ptr(-1, dest); }
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
inline jint Atomic::add (jint add_value, volatile jint* dest) {
intptr_t rv;

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_x86/vm/atomic_linux_x86.hpp
--- a/hotspot/src/os_cpu/linux_x86/vm/atomic_linux_x86.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_x86/vm/atomic_linux_x86.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -153,7 +153,7 @@
return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
}
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
#else // !AMD64
@@ -181,7 +181,7 @@
extern "C" {
// defined in linux_x86.s
jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong);
- void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
+ void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
}
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
@@ -196,7 +196,7 @@
return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}
-inline jlong Atomic::load(volatile jlong* src) {
+inline jlong Atomic::load(const volatile jlong* src) {
volatile jlong dest;
_Atomic_move_long(src, &dest);
return dest;

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_x86/vm/linux_x86_32.s
--- a/hotspot/src/os_cpu/linux_x86/vm/linux_x86_32.s Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_x86/vm/linux_x86_32.s Mon Jun 05 19:07:47 2017 -0400
@@ -634,7 +634,7 @@
# Support for jlong Atomic::load and Atomic::store.
- # void _Atomic_move_long(volatile jlong* src, volatile jlong* dst)
+ # void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst)
.p2align 4,,15
.type _Atomic_move_long,@function
_Atomic_move_long:

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_zero/vm/atomic_linux_zero.hpp
--- a/hotspot/src/os_cpu/linux_zero/vm/atomic_linux_zero.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_zero/vm/atomic_linux_zero.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright 2007, 2008, 2011, 2015, Red Hat, Inc.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -310,7 +310,7 @@
order);
}
-inline jlong Atomic::load(volatile jlong* src) {
+inline jlong Atomic::load(const volatile jlong* src) {
volatile jlong dest;
os::atomic_copy64(src, &dest);
return dest;

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/linux_zero/vm/os_linux_zero.hpp
--- a/hotspot/src/os_cpu/linux_zero/vm/os_linux_zero.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/linux_zero/vm/os_linux_zero.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright 2007, 2008, 2010 Red Hat, Inc.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -35,7 +35,7 @@
static bool register_code_area(char *low, char *high) { return true; }
// Atomically copy 64 bits of data
- static void atomic_copy64(volatile void *src, volatile void *dst) {
+ static void atomic_copy64(const volatile void *src, volatile void *dst) {
#if defined(PPC32)
double tmp;
asm volatile ("lfd %0, 0(%1)\n"
@@ -49,7 +49,7 @@
: "=r"(tmp)
: "a"(src), "a"(dst));
#else
- *(jlong *) dst = *(jlong *) src;
+ *(jlong *) dst = *(const jlong *) src;
#endif
}

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/solaris_sparc/vm/atomic_solaris_sparc.hpp
--- a/hotspot/src/os_cpu/solaris_sparc/vm/atomic_solaris_sparc.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/solaris_sparc/vm/atomic_solaris_sparc.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -52,7 +52,7 @@
inline void Atomic::store(jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store(jlong store_value, volatile jlong* dest) { *dest = store_value; }
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
#ifdef _GNU_SOURCE

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/solaris_x86/vm/atomic_solaris_x86.hpp
--- a/hotspot/src/os_cpu/solaris_x86/vm/atomic_solaris_x86.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/solaris_x86/vm/atomic_solaris_x86.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -115,7 +115,7 @@
return (void*)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
#else // !AMD64
@@ -143,9 +143,9 @@
return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}
-extern "C" void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
+extern "C" void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
-inline jlong Atomic::load(volatile jlong* src) {
+inline jlong Atomic::load(const volatile jlong* src) {
volatile jlong dest;
_Atomic_move_long(src, &dest);
return dest;

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/solaris_x86/vm/solaris_x86_32.il
--- a/hotspot/src/os_cpu/solaris_x86/vm/solaris_x86_32.il Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/solaris_x86/vm/solaris_x86_32.il Mon Jun 05 19:07:47 2017 -0400
@@ -107,7 +107,7 @@
.end
// Support for jlong Atomic::load and Atomic::store.
- // void _Atomic_move_long(volatile jlong* src, volatile jlong* dst)
+ // void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst)
.inline _Atomic_move_long,2
movl 0(%esp), %eax // src
fildll (%eax)

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/os_cpu/windows_x86/vm/atomic_windows_x86.hpp
--- a/hotspot/src/os_cpu/windows_x86/vm/atomic_windows_x86.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/os_cpu/windows_x86/vm/atomic_windows_x86.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -130,7 +130,7 @@
return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
}
-inline jlong Atomic::load(volatile jlong* src) { return *src; }
+inline jlong Atomic::load(const volatile jlong* src) { return *src; }
#else // !AMD64
@@ -249,7 +249,7 @@
return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}
-inline jlong Atomic::load(volatile jlong* src) {
+inline jlong Atomic::load(const volatile jlong* src) {
volatile jlong dest;
volatile jlong* pdest = &dest;
__asm {

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/share/vm/runtime/atomic.hpp
--- a/hotspot/src/share/vm/runtime/atomic.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/share/vm/runtime/atomic.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -72,7 +72,7 @@
inline static void store_ptr(void* store_value, volatile void* dest);
// See comment above about using jlong atomics on 32-bit platforms
- inline static jlong load(volatile jlong* src);
+ inline static jlong load(const volatile jlong* src);
// Atomically add to a location. Returns updated value. add*() provide:
// <fence> add-value-to-dest <membar StoreLoad|StoreStore>

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/share/vm/runtime/orderAccess.hpp
--- a/hotspot/src/share/vm/runtime/orderAccess.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/share/vm/runtime/orderAccess.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2015, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -264,20 +264,19 @@
static void release();
static void fence();
- static jbyte load_acquire(volatile jbyte* p);
- static jshort load_acquire(volatile jshort* p);
- static jint load_acquire(volatile jint* p);
- static jlong load_acquire(volatile jlong* p);
- static jubyte load_acquire(volatile jubyte* p);
- static jushort load_acquire(volatile jushort* p);
- static juint load_acquire(volatile juint* p);
- static julong load_acquire(volatile julong* p);
- static jfloat load_acquire(volatile jfloat* p);
- static jdouble load_acquire(volatile jdouble* p);
+ static jbyte load_acquire(const volatile jbyte* p);
+ static jshort load_acquire(const volatile jshort* p);
+ static jint load_acquire(const volatile jint* p);
+ static jlong load_acquire(const volatile jlong* p);
+ static jubyte load_acquire(const volatile jubyte* p);
+ static jushort load_acquire(const volatile jushort* p);
+ static juint load_acquire(const volatile juint* p);
+ static julong load_acquire(const volatile julong* p);
+ static jfloat load_acquire(const volatile jfloat* p);
+ static jdouble load_acquire(const volatile jdouble* p);
- static intptr_t load_ptr_acquire(volatile intptr_t* p);
- static void* load_ptr_acquire(volatile void* p);
- static void* load_ptr_acquire(const volatile void* p);
+ static intptr_t load_ptr_acquire(const volatile intptr_t* p);
+ static void* load_ptr_acquire(const volatile void* p);
static void release_store(volatile jbyte* p, jbyte v);
static void release_store(volatile jshort* p, jshort v);
@@ -314,7 +313,7 @@
static void StubRoutines_fence();
// Give platforms a variation point to specialize.
- template<typename T> static T specialized_load_acquire (volatile T* p );
+ template<typename T> static T specialized_load_acquire (const volatile T* p);
template<typename T> static void specialized_release_store (volatile T* p, T v);
template<typename T> static void specialized_release_store_fence(volatile T* p, T v);
@@ -322,7 +321,7 @@
static void ordered_store(volatile FieldType* p, FieldType v);
template<typename FieldType, ScopedFenceType FenceType>
- static FieldType ordered_load(volatile FieldType* p);
+ static FieldType ordered_load(const volatile FieldType* p);
static void store(volatile jbyte* p, jbyte v);
static void store(volatile jshort* p, jshort v);
@@ -331,12 +330,12 @@
static void store(volatile jdouble* p, jdouble v);
static void store(volatile jfloat* p, jfloat v);
- static jbyte load (volatile jbyte* p);
- static jshort load (volatile jshort* p);
- static jint load (volatile jint* p);
- static jlong load (volatile jlong* p);
- static jdouble load (volatile jdouble* p);
- static jfloat load (volatile jfloat* p);
+ static jbyte load(const volatile jbyte* p);
+ static jshort load(const volatile jshort* p);
+ static jint load(const volatile jint* p);
+ static jlong load(const volatile jlong* p);
+ static jdouble load(const volatile jdouble* p);
+ static jfloat load(const volatile jfloat* p);
// The following store_fence methods are deprecated and will be removed
// when all repos conform to the new generalized OrderAccess.

diff -r 86b13b03a053 -r cbcc0ebaa044 hotspot/src/share/vm/runtime/orderAccess.inline.hpp
--- a/hotspot/src/share/vm/runtime/orderAccess.inline.hpp Mon Jun 05 13:13:38 2017 -0400
+++ b/hotspot/src/share/vm/runtime/orderAccess.inline.hpp Mon Jun 05 19:07:47 2017 -0400
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2014, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2014, 2017, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2016 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -47,25 +47,24 @@
}
template <typename FieldType, ScopedFenceType FenceType>
-inline FieldType OrderAccess::ordered_load(volatile FieldType* p) {
+inline FieldType OrderAccess::ordered_load(const volatile FieldType* p) {
ScopedFence<FenceType> f((void*)p);
return load(p);
}
-inline jbyte OrderAccess::load_acquire(volatile jbyte* p) { return specialized_load_acquire(p); }
-inline jshort OrderAccess::load_acquire(volatile jshort* p) { return specialized_load_acquire(p); }
-inline jint OrderAccess::load_acquire(volatile jint* p) { return specialized_load_acquire(p); }
-inline jlong OrderAccess::load_acquire(volatile jlong* p) { return specialized_load_acquire(p); }
-inline jfloat OrderAccess::load_acquire(volatile jfloat* p) { return specialized_load_acquire(p); }
-inline jdouble OrderAccess::load_acquire(volatile jdouble* p) { return specialized_load_acquire(p); }
-inline jubyte OrderAccess::load_acquire(volatile jubyte* p) { return (jubyte) specialized_load_acquire((volatile jbyte*)p); }
-inline jushort OrderAccess::load_acquire(volatile jushort* p) { return (jushort)specialized_load_acquire((volatile jshort*)p); }
-inline juint OrderAccess::load_acquire(volatile juint* p) { return (juint) specialized_load_acquire((volatile jint*)p); }
-inline julong OrderAccess::load_acquire(volatile julong* p) { return (julong) specialized_load_acquire((volatile jlong*)p); }
+inline jbyte OrderAccess::load_acquire(const volatile jbyte* p) { return specialized_load_acquire(p); }
+inline jshort OrderAccess::load_acquire(const volatile jshort* p) { return specialized_load_acquire(p); }
+inline jint OrderAccess::load_acquire(const volatile jint* p) { return specialized_load_acquire(p); }
+inline jlong OrderAccess::load_acquire(const volatile jlong* p) { return specialized_load_acquire(p); }
+inline jfloat OrderAccess::load_acquire(const volatile jfloat* p) { return specialized_load_acquire(p); }
+inline jdouble OrderAccess::load_acquire(const volatile jdouble* p) { return specialized_load_acquire(p); }
+inline jubyte OrderAccess::load_acquire(const volatile jubyte* p) { return (jubyte) specialized_load_acquire((const volatile jbyte*)p); }
+inline jushort OrderAccess::load_acquire(const volatile jushort* p) { return (jushort)specialized_load_acquire((const volatile jshort*)p); }
+inline juint OrderAccess::load_acquire(const volatile juint* p) { return (juint) specialized_load_acquire((const volatile jint*)p); }
+inline julong OrderAccess::load_acquire(const volatile julong* p) { return (julong) specialized_load_acquire((const volatile jlong*)p); }
-inline intptr_t OrderAccess::load_ptr_acquire(volatile intptr_t* p) { return (intptr_t)specialized_load_acquire(p); }
-inline void* OrderAccess::load_ptr_acquire(volatile void* p) { return (void*)specialized_load_acquire((volatile intptr_t*)p); }
-inline void* OrderAccess::load_ptr_acquire(const volatile void* p) { return (void*)specialized_load_acquire((volatile intptr_t*)p); }
+inline intptr_t OrderAccess::load_ptr_acquire(const volatile intptr_t* p) { return (intptr_t)specialized_load_acquire(p); }
+inline void* OrderAccess::load_ptr_acquire(const volatile void* p) { return (void*)specialized_load_acquire((const volatile intptr_t*)p); }
inline void OrderAccess::release_store(volatile jbyte* p, jbyte v) { specialized_release_store(p, v); }
inline void OrderAccess::release_store(volatile jshort* p, jshort v) { specialized_release_store(p, v); }
@@ -98,7 +97,7 @@
// The following methods can be specialized using simple template specialization
specialization // in the platform specific files for optimization purposes. Otherwise the // generalized variant is used. -template inline T OrderAccess::specialized_load_acquire (volatile T* p) { return ordered_load(p); } +template inline T OrderAccess::specialized_load_acquire (const volatile T* p) { return ordered_load(p); } template inline void OrderAccess::specialized_release_store (volatile T* p, T v) { ordered_store(p, v); } template inline void OrderAccess::specialized_release_store_fence(volatile T* p, T v) { ordered_store(p, v); } @@ -111,12 +110,12 @@ inline void OrderAccess::store(volatile jdouble* p, jdouble v) { Atomic::store(jlong_cast(v), (volatile jlong*)p); } inline void OrderAccess::store(volatile jfloat* p, jfloat v) { *p = v; } -inline jbyte OrderAccess::load(volatile jbyte* p) { return *p; } -inline jshort OrderAccess::load(volatile jshort* p) { return *p; } -inline jint OrderAccess::load(volatile jint* p) { return *p; } -inline jlong OrderAccess::load(volatile jlong* p) { return Atomic::load(p); } -inline jdouble OrderAccess::load(volatile jdouble* p) { return jdouble_cast(Atomic::load((volatile jlong*)p)); } -inline jfloat OrderAccess::load(volatile jfloat* p) { return *p; } +inline jbyte OrderAccess::load(const volatile jbyte* p) { return *p; } +inline jshort OrderAccess::load(const volatile jshort* p) { return *p; } +inline jint OrderAccess::load(const volatile jint* p) { return *p; } +inline jlong OrderAccess::load(const volatile jlong* p) { return Atomic::load(p); } +inline jdouble OrderAccess::load(const volatile jdouble* p) { return jdouble_cast(Atomic::load((const volatile jlong*)p)); } +inline jfloat OrderAccess::load(const volatile jfloat* p) { return *p; } #endif // VM_HAS_GENERALIZED_ORDER_ACCESS