src/hotspot/cpu/x86/sharedRuntime_x86_32.cpp
changeset 51996 84743156e780
parent 49642 7bad9c9efdf3
child 52460 f1bb77833b59
equal deleted inserted replaced
51995:f7babf9d1592 51996:84743156e780
  2005     __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
  2005     __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
  2006 
  2006 
  2007     // Save (object->mark() | 1) into BasicLock's displaced header
  2007     // Save (object->mark() | 1) into BasicLock's displaced header
  2008     __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
  2008     __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
  2009 
  2009 
  2010     if (os::is_MP()) {
       
  2011       __ lock();
       
  2012     }
       
  2013 
       
  2014     // src -> dest iff dest == rax, else rax, <- dest
  2010     // src -> dest iff dest == rax, else rax, <- dest
  2015     // *obj_reg = lock_reg iff *obj_reg == rax, else rax, = *(obj_reg)
  2011     // *obj_reg = lock_reg iff *obj_reg == rax, else rax, = *(obj_reg)
       
  2012     __ lock();
  2016     __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
  2013     __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
  2017     __ jcc(Assembler::equal, lock_done);
  2014     __ jcc(Assembler::equal, lock_done);
  2018 
  2015 
  2019     // Test if the oopMark is an obvious stack pointer, i.e.,
  2016     // Test if the oopMark is an obvious stack pointer, i.e.,
  2020     //  1) (mark & 3) == 0, and
  2017     //  1) (mark & 3) == 0, and
  2089   //     VM thread changes sync state to synchronizing and suspends threads for GC.
  2086   //     VM thread changes sync state to synchronizing and suspends threads for GC.
  2090   //     Thread A is resumed to finish this native method, but doesn't block here since it
  2087   //     Thread A is resumed to finish this native method, but doesn't block here since it
  2091     //     didn't see any synchronization in progress, and escapes.
  2088     //     didn't see any synchronization in progress, and escapes.
  2092   __ movl(Address(thread, JavaThread::thread_state_offset()), _thread_in_native_trans);
  2089   __ movl(Address(thread, JavaThread::thread_state_offset()), _thread_in_native_trans);
  2093 
  2090 
  2094   if(os::is_MP()) {
  2091   if (UseMembar) {
  2095     if (UseMembar) {
  2092     // Force this write out before the read below
  2096       // Force this write out before the read below
  2093     __ membar(Assembler::Membar_mask_bits(
  2097       __ membar(Assembler::Membar_mask_bits(
  2094               Assembler::LoadLoad | Assembler::LoadStore |
  2098            Assembler::LoadLoad | Assembler::LoadStore |
  2095               Assembler::StoreLoad | Assembler::StoreStore));
  2099            Assembler::StoreLoad | Assembler::StoreStore));
  2096   } else {
  2100     } else {
  2097     // Write serialization page so VM thread can do a pseudo remote membar.
  2101       // Write serialization page so VM thread can do a pseudo remote membar.
  2098     // We use the current thread pointer to calculate a thread specific
  2102       // We use the current thread pointer to calculate a thread specific
  2099     // offset to write to within the page. This minimizes bus traffic
  2103       // offset to write to within the page. This minimizes bus traffic
  2100     // due to cache line collision.
  2104       // due to cache line collision.
  2101     __ serialize_memory(thread, rcx);
  2105       __ serialize_memory(thread, rcx);
       
  2106     }
       
  2107   }
  2102   }
  2108 
  2103 
  2109   if (AlwaysRestoreFPU) {
  2104   if (AlwaysRestoreFPU) {
  2110     // Make sure the control word is correct.
  2105     // Make sure the control word is correct.
  2111     __ fldcw(ExternalAddress(StubRoutines::addr_fpu_cntrl_wrd_std()));
  2106     __ fldcw(ExternalAddress(StubRoutines::addr_fpu_cntrl_wrd_std()));
  2197 
  2192 
  2198     // get address of the stack lock
  2193     // get address of the stack lock
  2199     __ lea(rax, Address(rbp, lock_slot_rbp_offset));
  2194     __ lea(rax, Address(rbp, lock_slot_rbp_offset));
  2200 
  2195 
  2201     // Atomic swap old header if oop still contains the stack lock
  2196     // Atomic swap old header if oop still contains the stack lock
  2202     if (os::is_MP()) {
       
  2203     __ lock();
       
  2204     }
       
  2205 
       
  2206     // src -> dest iff dest == rax, else rax, <- dest
  2197     // src -> dest iff dest == rax, else rax, <- dest
  2207     // *obj_reg = rbx, iff *obj_reg == rax, else rax, = *(obj_reg)
  2198     // *obj_reg = rbx, iff *obj_reg == rax, else rax, = *(obj_reg)
       
  2199     __ lock();
  2208     __ cmpxchgptr(rbx, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
  2200     __ cmpxchgptr(rbx, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
  2209     __ jcc(Assembler::notEqual, slow_path_unlock);
  2201     __ jcc(Assembler::notEqual, slow_path_unlock);
  2210 
  2202 
  2211     // slow path re-enters here
  2203     // slow path re-enters here
  2212     __ bind(unlock_done);
  2204     __ bind(unlock_done);