hotspot/src/cpu/x86/vm/templateTable_x86_64.cpp
changeset 6453 970dc585ab63
parent 6176 4d9030fe341f
child 6772 2563324665d5
diff -r cc624b341ab2 -r 970dc585ab63 hotspot/src/cpu/x86/vm/templateTable_x86_64.cpp
@@ -1581,55 +1581,75 @@
     // rdx: target offset
     // r13: target bcp
     // r14: locals pointer
     __ testl(rdx, rdx);             // check if forward or backward branch
     __ jcc(Assembler::positive, dispatch); // count only if backward branch
-
-    // increment counter
-    __ movl(rax, Address(rcx, be_offset));        // load backedge counter
-    __ incrementl(rax, InvocationCounter::count_increment); // increment
-                                                            // counter
-    __ movl(Address(rcx, be_offset), rax);        // store counter
-
-    __ movl(rax, Address(rcx, inv_offset));    // load invocation counter
-    __ andl(rax, InvocationCounter::count_mask_value); // and the status bits
-    __ addl(rax, Address(rcx, be_offset));        // add both counters
-
-    if (ProfileInterpreter) {
-      // Test to see if we should create a method data oop
-      __ cmp32(rax,
-               ExternalAddress((address) &InvocationCounter::InterpreterProfileLimit));
-      __ jcc(Assembler::less, dispatch);
-
-      // if no method data exists, go to profile method
-      __ test_method_data_pointer(rax, profile_method);
-
-      if (UseOnStackReplacement) {
-        // check for overflow against ebx which is the MDO taken count
-        __ cmp32(rbx,
-                 ExternalAddress((address) &InvocationCounter::InterpreterBackwardBranchLimit));
-        __ jcc(Assembler::below, dispatch);
-
-        // When ProfileInterpreter is on, the backedge_count comes
-        // from the methodDataOop, which value does not get reset on
-        // the call to frequency_counter_overflow().  To avoid
-        // excessive calls to the overflow routine while the method is
-        // being compiled, add a second test to make sure the overflow
-        // function is called only once every overflow_frequency.
-        const int overflow_frequency = 1024;
-        __ andl(rbx, overflow_frequency - 1);
-        __ jcc(Assembler::zero, backedge_counter_overflow);
-
-      }
-    } else {
-      if (UseOnStackReplacement) {
-        // check for overflow against eax, which is the sum of the
-        // counters
-        __ cmp32(rax,
-                 ExternalAddress((address) &InvocationCounter::InterpreterBackwardBranchLimit));
-        __ jcc(Assembler::aboveEqual, backedge_counter_overflow);
-
-      }
-    }
+    if (TieredCompilation) {
+      Label no_mdo;
+      int increment = InvocationCounter::count_increment;
+      int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
+      if (ProfileInterpreter) {
+        // Are we profiling?
+        __ movptr(rbx, Address(rcx, in_bytes(methodOopDesc::method_data_offset())));
+        __ testptr(rbx, rbx);
+        __ jccb(Assembler::zero, no_mdo);
+        // Increment the MDO backedge counter
+        const Address mdo_backedge_counter(rbx, in_bytes(methodDataOopDesc::backedge_counter_offset()) +
+                                           in_bytes(InvocationCounter::counter_offset()));
+        __ increment_mask_and_jump(mdo_backedge_counter, increment, mask,
+                                   rax, false, Assembler::zero, &backedge_counter_overflow);
+        __ jmp(dispatch);
+      }
+      __ bind(no_mdo);
+      // Increment backedge counter in methodOop
+      __ increment_mask_and_jump(Address(rcx, be_offset), increment, mask,
+                                 rax, false, Assembler::zero, &backedge_counter_overflow);
+    } else {
+      // increment counter
+      __ movl(rax, Address(rcx, be_offset));        // load backedge counter
+      __ incrementl(rax, InvocationCounter::count_increment); // increment counter
+      __ movl(Address(rcx, be_offset), rax);        // store counter
+
+      __ movl(rax, Address(rcx, inv_offset));    // load invocation counter
+      __ andl(rax, InvocationCounter::count_mask_value); // and the status bits
+      __ addl(rax, Address(rcx, be_offset));        // add both counters
+
+      if (ProfileInterpreter) {
+        // Test to see if we should create a method data oop
+        __ cmp32(rax,
+                 ExternalAddress((address) &InvocationCounter::InterpreterProfileLimit));
+        __ jcc(Assembler::less, dispatch);
+
+        // if no method data exists, go to profile method
+        __ test_method_data_pointer(rax, profile_method);
+
+        if (UseOnStackReplacement) {
+          // check for overflow against ebx which is the MDO taken count
+          __ cmp32(rbx,
+                   ExternalAddress((address) &InvocationCounter::InterpreterBackwardBranchLimit));
+          __ jcc(Assembler::below, dispatch);
+
+          // When ProfileInterpreter is on, the backedge_count comes
+          // from the methodDataOop, which value does not get reset on
+          // the call to frequency_counter_overflow().  To avoid
+          // excessive calls to the overflow routine while the method is
+          // being compiled, add a second test to make sure the overflow
+          // function is called only once every overflow_frequency.
+          const int overflow_frequency = 1024;
+          __ andl(rbx, overflow_frequency - 1);
+          __ jcc(Assembler::zero, backedge_counter_overflow);
+
+        }
+      } else {
+        if (UseOnStackReplacement) {
+          // check for overflow against eax, which is the sum of the
+          // counters
+          __ cmp32(rax,
+                   ExternalAddress((address) &InvocationCounter::InterpreterBackwardBranchLimit));
+          __ jcc(Assembler::aboveEqual, backedge_counter_overflow);
+
+        }
+      }
+    }
     __ bind(dispatch);
   }
 
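Review note: the tiered path in this hunk replaces the hand-rolled load/increment/store/compare sequence with increment_mask_and_jump(counter, increment, mask, ...), which bumps the counter and jumps to backedge_counter_overflow when the masked bits wrap to zero, i.e. roughly once every 2^Tier0BackedgeNotifyFreqLog taken backedges (assuming count_increment is 1 << count_shift, as the low count_shift bits of the counter word hold status rather than count). The legacy (non-tiered) branch keeps the old code, which uses the same power-of-two masking trick only for the MDO taken count (andl(rbx, overflow_frequency - 1)). The sketch below shows the arithmetic in plain C++; the values chosen for count_shift and Tier0BackedgeNotifyFreqLog are illustrative assumptions, not taken from this changeset.

#include <cstdint>
#include <cstdio>

// Illustrative stand-ins for the HotSpot constants referenced in the hunk above.
// The real values come from invocationCounter.hpp and the tiered-policy globals;
// the numbers here are assumptions chosen only to make the example run.
static const int count_shift                = 3;   // low bits of the counter word hold status, not count
static const int count_increment            = 1 << count_shift;
static const int Tier0BackedgeNotifyFreqLog = 10;  // notify once per 2^10 backedges (assumed)

// Logical effect of the emitted increment_mask_and_jump(counter_addr, increment, mask, ...):
// increment the counter in place and report "overflow" (the jump to
// backedge_counter_overflow) exactly when the masked bits are all zero.
static bool backedge_notify(uint32_t& counter) {
  const int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << count_shift;
  counter += count_increment;       // counter load + increment + store in the generated code
  return (counter & mask) == 0;     // andl + jcc(Assembler::zero, &backedge_counter_overflow)
}

int main() {
  uint32_t counter = 0;
  int notifications = 0;
  for (int i = 0; i < 3000; i++) {
    if (backedge_notify(counter)) notifications++;
  }
  // With the assumed constants this prints 2: the runtime is notified after
  // backedges 1024 and 2048, instead of on every single backedge.
  printf("notifications after 3000 backedges: %d\n", notifications);
  return 0;
}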
@@ -2910,11 +2930,12 @@
 
 
 void TemplateTable::invokevirtual_helper(Register index,
                                          Register recv,
                                          Register flags) {
-  // Uses temporary registers rax, rdx  assert_different_registers(index, recv, rax, rdx);
+  // Uses temporary registers rax, rdx
+  assert_different_registers(index, recv, rax, rdx);
 
   // Test for an invoke of a final method
   Label notFinal;
   __ movl(rax, flags);
   __ andl(rax, (1 << ConstantPoolCacheEntry::vfinalMethod));
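Review note: the second hunk is a small correctness fix. In the old text the assert_different_registers(index, recv, rax, rdx) call had been pulled onto the end of the comment line, so it was commented out and never checked; splitting it onto its own line restores the debug-build guard that the incoming registers do not alias the rax/rdx temporaries. A minimal sketch of what such a distinctness check amounts to (hypothetical helper, not HotSpot's actual overload set):

#include <cassert>
#include <initializer_list>

// Hypothetical stand-in: HotSpot's Register is an opaque descriptor; here a plain id.
typedef int Register;

// Debug-only guard: a code-generation helper that uses rax/rdx as scratch must not
// be handed those same registers as inputs, or the scratch writes would clobber them.
static void assert_different_registers(std::initializer_list<Register> regs) {
  for (const Register* i = regs.begin(); i != regs.end(); ++i) {
    for (const Register* j = i + 1; j != regs.end(); ++j) {
      assert(*i != *j && "registers passed to the helper must not alias");
    }
  }
}

int main() {
  const Register rax = 0, rcx = 1, rdx = 2, rbx = 3;
  assert_different_registers({rbx, rcx, rax, rdx});   // ok: all distinct
  // assert_different_registers({rax, rcx, rax, rdx}); // would fire in a debug build
  return 0;
}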