1 /* |
1 /* |
2 * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved. |
2 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved. |
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 * |
4 * |
5 * This code is free software; you can redistribute it and/or modify it |
5 * This code is free software; you can redistribute it and/or modify it |
6 * under the terms of the GNU General Public License version 2 only, as |
6 * under the terms of the GNU General Public License version 2 only, as |
7 * published by the Free Software Foundation. |
7 * published by the Free Software Foundation. |
164 // "Program Execution". This means that all barrier implementations, |
164 // "Program Execution". This means that all barrier implementations, |
165 // including standalone loadload, storestore, loadstore, storeload, acquire |
165 // including standalone loadload, storestore, loadstore, storeload, acquire |
166 // and release must include a sequence point, usually via a volatile memory |
166 // and release must include a sequence point, usually via a volatile memory |
167 // access. Other ways to guarantee a sequence point are, e.g., use of |
167 // access. Other ways to guarantee a sequence point are, e.g., use of |
168 // indirect calls and linux's __asm__ volatile. |
168 // indirect calls and linux's __asm__ volatile. |
|
169 // Note: as of 6973570, we have replaced the originally static "dummy" field |
|
170 // (see above) by a volatile store to the stack. All of the versions of the |
|
171 // compilers that we currently use (SunStudio, gcc and VC++) respect the |
|
172 // semantics of volatile here. If you build HotSpot using other |
|
173 // compilers, you may need to verify that no compiler reordering occurs |
|
174 // across the sequence point represented by the volatile access. |
169 // |
175 // |
170 // |
176 // |
171 // os::is_MP Considered Redundant |
177 // os::is_MP Considered Redundant |
172 // |
178 // |
173 // Callers of this interface do not need to test os::is_MP() before |
179 // Callers of this interface do not need to test os::is_MP() before |
295 static void release_store_fence(volatile jdouble* p, jdouble v); |
301 static void release_store_fence(volatile jdouble* p, jdouble v); |
296 |
302 |
297 static void release_store_ptr_fence(volatile intptr_t* p, intptr_t v); |
303 static void release_store_ptr_fence(volatile intptr_t* p, intptr_t v); |
298 static void release_store_ptr_fence(volatile void* p, void* v); |
304 static void release_store_ptr_fence(volatile void* p, void* v); |
299 |
305 |
300 // In order to force a memory access, implementations may |
|
301 // need a volatile externally visible dummy variable. |
|
302 static volatile intptr_t dummy; |
|
303 |
|
304 private: |
306 private: |
305 // This is a helper that invokes the StubRoutines::fence_entry() |
307 // This is a helper that invokes the StubRoutines::fence_entry() |
306 // routine if it exists. It should only be used by platforms that |
308 // routine if it exists. It should only be used by platforms that |
307 // don't have another way to do the inline assembly. |
309 // don't have another way to do the inline assembly. |
308 static void StubRoutines_fence(); |
310 static void StubRoutines_fence(); |