src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp
changeset 51996 84743156e780
parent 51756 4bd35a5ec694
child 52142 ca0c25e01c5b
comparing 51995:f7babf9d1592 with 51996:84743156e780
@@ -2462,15 +2462,12 @@
     __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
 
     // Save (object->mark() | 1) into BasicLock's displaced header
     __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
 
-    if (os::is_MP()) {
-      __ lock();
-    }
-
     // src -> dest iff dest == rax else rax <- dest
+    __ lock();
     __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
     __ jcc(Assembler::equal, lock_done);
 
     // Hmm should this move to the slow path code area???
 
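For readers following the emitted code rather than the MacroAssembler calls: the fast path above saves the (presumed unlocked) mark word, with its low bit set, into the BasicLock's displaced-header slot, then tries to CAS the BasicLock's address into the object's mark word; the change simply emits the lock prefix for that cmpxchg unconditionally instead of guarding it with os::is_MP(). Below is a minimal C++ sketch of the same locking logic, using hypothetical ObjectHeader/StackLock types and std::atomic in place of the emitted lock cmpxchg, not HotSpot's actual classes.

  #include <atomic>
  #include <cstdint>

  // Hypothetical stand-ins for the object header and the on-stack BasicLock slot.
  struct ObjectHeader { std::atomic<uintptr_t> mark; };
  struct StackLock    { uintptr_t displaced_mark; };

  // Sketch of the fast path emitted above:
  // 1) read the current (presumed unlocked) mark and set the "unlocked" bit,
  // 2) save it in the lock's displaced-header slot,
  // 3) CAS the lock's address into the mark word; success means the object was
  //    still unlocked, failure corresponds to falling through to the slow path.
  bool try_stack_lock(ObjectHeader* obj, StackLock* lock) {
    uintptr_t unlocked = obj->mark.load(std::memory_order_relaxed) | 1;  // mark | 1
    lock->displaced_mark = unlocked;                                     // displaced header
    uintptr_t expected = unlocked;
    return obj->mark.compare_exchange_strong(expected,
                                             reinterpret_cast<uintptr_t>(lock),
                                             std::memory_order_seq_cst);
  }

In the generated code the failed-CAS case is the jcc not being taken, after which the recursive-lock check and, eventually, the slow-path runtime call handle the contended case.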
@@ -2556,23 +2553,21 @@
   //     VM thread changes sync state to synchronizing and suspends threads for GC.
   //     Thread A is resumed to finish this native method, but doesn't block here since it
   //     didn't see any synchronization is progress, and escapes.
   __ movl(Address(r15_thread, JavaThread::thread_state_offset()), _thread_in_native_trans);
 
-  if(os::is_MP()) {
-    if (UseMembar) {
-      // Force this write out before the read below
-      __ membar(Assembler::Membar_mask_bits(
-           Assembler::LoadLoad | Assembler::LoadStore |
-           Assembler::StoreLoad | Assembler::StoreStore));
-    } else {
-      // Write serialization page so VM thread can do a pseudo remote membar.
-      // We use the current thread pointer to calculate a thread specific
-      // offset to write to within the page. This minimizes bus traffic
-      // due to cache line collision.
-      __ serialize_memory(r15_thread, rcx);
-    }
+  if (UseMembar) {
+    // Force this write out before the read below
+    __ membar(Assembler::Membar_mask_bits(
+              Assembler::LoadLoad | Assembler::LoadStore |
+              Assembler::StoreLoad | Assembler::StoreStore));
+  } else {
+    // Write serialization page so VM thread can do a pseudo remote membar.
+    // We use the current thread pointer to calculate a thread specific
+    // offset to write to within the page. This minimizes bus traffic
+    // due to cache line collision.
+    __ serialize_memory(r15_thread, rcx);
   }
 
   Label after_transition;
 
   // check for safepoint operation in progress and/or pending suspend requests
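The ordering requirement in this hunk is that the store of _thread_in_native_trans becomes visible before the subsequent load that checks for a safepoint or pending suspend request; with UseMembar this is enforced by an explicit full fence, otherwise by a write to the VM's serialization page so the VM thread can force the ordering remotely. The change drops the os::is_MP() wrapper, so one of the two is now always emitted. A minimal sketch of the store-fence-load pattern follows, with illustrative flag names and an illustrative state constant rather than HotSpot's actual declarations.

  #include <atomic>

  // Hypothetical flags standing in for the thread-state field and the safepoint request.
  std::atomic<int>  thread_state{0};
  std::atomic<bool> safepoint_requested{false};

  constexpr int THREAD_IN_NATIVE_TRANS = 4;  // illustrative value only

  // Sketch of the transition: the new thread state must be visible before the
  // safepoint flag is read, hence a fence with StoreLoad ordering between the
  // store and the load (the UseMembar branch above). The serialization-page
  // branch achieves the same effect indirectly via a VM-managed page write.
  void transition_from_native() {
    thread_state.store(THREAD_IN_NATIVE_TRANS, std::memory_order_relaxed);
    std::atomic_thread_fence(std::memory_order_seq_cst);  // full fence, incl. StoreLoad
    if (safepoint_requested.load(std::memory_order_relaxed)) {
      // would call into the runtime to block at the safepoint (slow path)
    }
  }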
@@ -2659,13 +2654,11 @@
     __ lea(rax, Address(rsp, lock_slot_offset * VMRegImpl::stack_slot_size));
     //  get old displaced header
     __ movptr(old_hdr, Address(rax, 0));
 
     // Atomic swap old header if oop still contains the stack lock
-    if (os::is_MP()) {
-      __ lock();
-    }
+    __ lock();
     __ cmpxchgptr(old_hdr, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
     __ jcc(Assembler::notEqual, slow_path_unlock);
 
     // slow path re-enters here
     __ bind(unlock_done);
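The unlock fast path mirrors the locking one: reload the displaced header saved in the stack lock slot and CAS it back into the object's mark word, which succeeds only if the mark still points at this stack lock; again the only change here is that the lock prefix is no longer conditional on os::is_MP(). A small sketch under the same hypothetical types as the locking example above:

  #include <atomic>
  #include <cstdint>

  // Same hypothetical stand-ins as in the locking sketch.
  struct ObjectHeader { std::atomic<uintptr_t> mark; };
  struct StackLock    { uintptr_t displaced_mark; };

  // Sketch of the unlock fast path: restore the saved (displaced) mark word,
  // but only if the object still references this stack lock; a failed CAS
  // corresponds to the jcc above branching to slow_path_unlock.
  bool try_stack_unlock(ObjectHeader* obj, StackLock* lock) {
    uintptr_t old_hdr  = lock->displaced_mark;               // old displaced header
    uintptr_t expected = reinterpret_cast<uintptr_t>(lock);  // mark should still be the lock address
    return obj->mark.compare_exchange_strong(expected, old_hdr,
                                             std::memory_order_seq_cst);
  }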