176 v = oopDesc::decode_heap_oop(n); \ |
176 v = oopDesc::decode_heap_oop(n); \ |
177 } else { \ |
177 } else { \ |
178 v = *(oop*)index_oop_from_field_offset_long(p, offset); \ |
178 v = *(oop*)index_oop_from_field_offset_long(p, offset); \ |
179 } |
179 } |
180 |
180 |
// Volatile variant of GET_OOP_FIELD: declares a local oop 'v' and loads it
// from the field of 'obj' at byte offset 'offset' with acquire semantics.
// When UseCompressedOops is set the field holds a narrowOop, which is loaded
// through a volatile pointer and decoded to a full oop; otherwise the oop is
// loaded directly through a volatile pointer.  The trailing
// OrderAccess::acquire() orders this load before any subsequent memory
// accesses, giving the read Java-volatile semantics.
// NOTE(review): reconstructed from a garbled diff extraction — the original
// text had line-number prefixes and blank lines breaking the '\' continuations.
#define GET_OOP_FIELD_VOLATILE(obj, offset, v) \
  oop p = JNIHandles::resolve(obj); \
  volatile oop v; \
  if (UseCompressedOops) { \
    volatile narrowOop n = *(volatile narrowOop*)index_oop_from_field_offset_long(p, offset); \
    v = oopDesc::decode_heap_oop(n); \
  } else { \
    v = *(volatile oop*)index_oop_from_field_offset_long(p, offset); \
  } \
  OrderAccess::acquire();
193 // Get/SetObject must be special-cased, since it works with handles. |
182 // Get/SetObject must be special-cased, since it works with handles. |
194 |
183 |
195 // The xxx140 variants for backward compatibility do not allow a full-width offset. |
184 // The xxx140 variants for backward compatibility do not allow a full-width offset. |
196 UNSAFE_ENTRY(jobject, Unsafe_GetObject140(JNIEnv *env, jobject unsafe, jobject obj, jint offset)) |
185 UNSAFE_ENTRY(jobject, Unsafe_GetObject140(JNIEnv *env, jobject unsafe, jobject obj, jint offset)) |
294 } |
283 } |
295 UNSAFE_END |
284 UNSAFE_END |
296 |
285 |
297 UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) |
286 UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) |
298 UnsafeWrapper("Unsafe_GetObjectVolatile"); |
287 UnsafeWrapper("Unsafe_GetObjectVolatile"); |
299 GET_OOP_FIELD_VOLATILE(obj, offset, v) |
288 oop p = JNIHandles::resolve(obj); |
|
289 void* addr = index_oop_from_field_offset_long(p, offset); |
|
290 volatile oop v; |
|
291 if (UseCompressedOops) { |
|
292 volatile narrowOop n = *(volatile narrowOop*) addr; |
|
293 v = oopDesc::decode_heap_oop(n); |
|
294 } else { |
|
295 v = *(volatile oop*) addr; |
|
296 } |
|
297 OrderAccess::acquire(); |
300 return JNIHandles::make_local(env, v); |
298 return JNIHandles::make_local(env, v); |
301 UNSAFE_END |
299 UNSAFE_END |
302 |
300 |
303 UNSAFE_ENTRY(void, Unsafe_SetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) |
301 UNSAFE_ENTRY(void, Unsafe_SetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) |
304 UnsafeWrapper("Unsafe_SetObjectVolatile"); |
302 UnsafeWrapper("Unsafe_SetObjectVolatile"); |
305 { |
|
306 // Catch VolatileCallSite.target stores (via |
|
307 // CallSite.setTargetVolatile) and check call site dependencies. |
|
308 oop p = JNIHandles::resolve(obj); |
|
309 if ((offset == java_lang_invoke_CallSite::target_offset_in_bytes()) && p->is_a(SystemDictionary::CallSite_klass())) { |
|
310 Handle call_site (THREAD, p); |
|
311 Handle method_handle(THREAD, JNIHandles::resolve(x_h)); |
|
312 assert(call_site ->is_a(SystemDictionary::CallSite_klass()), "must be"); |
|
313 assert(method_handle->is_a(SystemDictionary::MethodHandle_klass()), "must be"); |
|
314 { |
|
315 // Walk all nmethods depending on this call site. |
|
316 MutexLocker mu(Compile_lock, thread); |
|
317 Universe::flush_dependents_on(call_site(), method_handle()); |
|
318 } |
|
319 } |
|
320 } |
|
321 oop x = JNIHandles::resolve(x_h); |
303 oop x = JNIHandles::resolve(x_h); |
322 oop p = JNIHandles::resolve(obj); |
304 oop p = JNIHandles::resolve(obj); |
323 void* addr = index_oop_from_field_offset_long(p, offset); |
305 void* addr = index_oop_from_field_offset_long(p, offset); |
324 OrderAccess::release(); |
306 OrderAccess::release(); |
325 if (UseCompressedOops) { |
307 if (UseCompressedOops) { |