src/hotspot/cpu/s390/templateTable_s390.cpp

/*
 * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2016, 2017 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/interp_masm.hpp"
#include "interpreter/templateTable.hpp"
#include "memory/universe.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/synchronizer.hpp"

#ifdef PRODUCT
#define __ _masm->
#define BLOCK_COMMENT(str)
#define BIND(label)        __ bind(label);
#else
#define __ (PRODUCT_ONLY(false&&)Verbose ? (_masm->block_comment(FILE_AND_LINE),_masm):_masm)->
#define BLOCK_COMMENT(str) __ block_comment(str)
#define BIND(label)        __ bind(label); BLOCK_COMMENT(#label ":")
#endif

// The assumed minimum size of a BranchTableBlock.
// The actual size of each block heavily depends on the CPU capabilities and,
// of course, on the logic implemented in each block.
#ifdef ASSERT
  #define BTB_MINSIZE 256
#else
  #define BTB_MINSIZE  64
#endif

#ifdef ASSERT
// Macro to open a BranchTableBlock (a piece of code that is branched to by a calculated branch).
#define BTB_BEGIN(lbl, alignment, name)                                        \
  __ align_address(alignment);                                                 \
  __ bind(lbl);                                                                \
  { unsigned int b_off = __ offset();                                          \
    uintptr_t   b_addr = (uintptr_t)__ pc();                                   \
    __ z_larl(Z_R0, (int64_t)0);     /* Check current address alignment. */    \
    __ z_slgr(Z_R0, br_tab);         /* Current Address must be equal    */    \
    __ z_slgr(Z_R0, flags);          /* to calculated branch target.     */    \
    __ z_brc(Assembler::bcondLogZero, 3); /* skip trap if ok. */               \
    __ z_illtrap(0x55);                                                        \
    guarantee(b_addr%alignment == 0, "bad alignment at begin of block" name);

// Macro to close a BranchTableBlock (a piece of code that is branched to by a calculated branch).
#define BTB_END(lbl, alignment, name)                                          \
    uintptr_t   e_addr = (uintptr_t)__ pc();                                   \
    unsigned int e_off = __ offset();                                          \
    unsigned int len   = e_off-b_off;                                          \
    if (len > alignment) {                                                     \
      tty->print_cr("%4d of %4d @ " INTPTR_FORMAT ": Block len for %s",        \
                    len, alignment, e_addr-len, name);                         \
      guarantee(len <= alignment, "block too large");                          \
    }                                                                          \
    guarantee(len == e_addr-b_addr, "block len mismatch");                     \
  }
#else
// Macro to open a BranchTableBlock (a piece of code that is branched to by a calculated branch).
#define BTB_BEGIN(lbl, alignment, name)                                        \
  __ align_address(alignment);                                                 \
  __ bind(lbl);                                                                \
  { unsigned int b_off = __ offset();                                          \
    uintptr_t   b_addr = (uintptr_t)__ pc();                                   \
    guarantee(b_addr%alignment == 0, "bad alignment at begin of block" name);

// Macro to close a BranchTableBlock (a piece of code that is branched to by a calculated branch).
#define BTB_END(lbl, alignment, name)                                          \
    uintptr_t   e_addr = (uintptr_t)__ pc();                                   \
    unsigned int e_off = __ offset();                                          \
    unsigned int len   = e_off-b_off;                                          \
    if (len > alignment) {                                                     \
      tty->print_cr("%4d of %4d @ " INTPTR_FORMAT ": Block len for %s",        \
                    len, alignment, e_addr-len, name);                         \
      guarantee(len <= alignment, "block too large");                          \
    }                                                                          \
    guarantee(len == e_addr-b_addr, "block len mismatch");                     \
  }
#endif // ASSERT
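
// A minimal usage sketch (hypothetical label and name; the registers br_tab
// and flags referenced by the ASSERT variant must be defined in the scope
// where the macros expand): the code between the two macros must fit into
// 'alignment' bytes.
//
//   Label block;
//   BTB_BEGIN(block, BTB_MINSIZE, "example");
//   //   ... block body, at most BTB_MINSIZE bytes of code ...
//   BTB_END(block, BTB_MINSIZE, "example");
//
// In ASSERT builds, BTB_BEGIN additionally verifies at runtime that the
// calculated branch target (br_tab + flags) equals the block's start address.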
       
// Platform-dependent initialization.

void TemplateTable::pd_initialize() {
  // No specific initialization.
}

// Address computation: local variables

static inline Address iaddress(int n) {
  return Address(Z_locals, Interpreter::local_offset_in_bytes(n));
}

static inline Address laddress(int n) {
  return iaddress(n + 1);
}

static inline Address faddress(int n) {
  return iaddress(n);
}

static inline Address daddress(int n) {
  return laddress(n);
}

static inline Address aaddress(int n) {
  return iaddress(n);
}

// Pass NULL if no shift instruction should be emitted.
static inline Address iaddress(InterpreterMacroAssembler *masm, Register r) {
  if (masm) {
    masm->z_sllg(r, r, LogBytesPerWord);  // index2bytes
  }
  return Address(Z_locals, r, Interpreter::local_offset_in_bytes(0));
}

// Pass NULL if no shift instruction should be emitted.
static inline Address laddress(InterpreterMacroAssembler *masm, Register r) {
  if (masm) {
    masm->z_sllg(r, r, LogBytesPerWord);  // index2bytes
  }
  return Address(Z_locals, r, Interpreter::local_offset_in_bytes(1));
}

static inline Address faddress(InterpreterMacroAssembler *masm, Register r) {
  return iaddress(masm, r);
}

static inline Address daddress(InterpreterMacroAssembler *masm, Register r) {
  return laddress(masm, r);
}

static inline Address aaddress(InterpreterMacroAssembler *masm, Register r) {
  return iaddress(masm, r);
}
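
// Note on two-slot types: laddress(n) resolves to iaddress(n + 1) because
// longs and doubles occupy two interpreter slots and, with locals growing
// towards smaller addresses, the value is addressed via the higher-numbered
// slot. A hypothetical example:
//
//   Address a = laddress(2);  // == iaddress(3): the long in local slots 2/3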
       
// At top of the Java expression stack, which may be different from esp().
// It isn't for category 1 values.
static inline Address at_tos(int slot = 0) {
  return Address(Z_esp, Interpreter::expr_offset_in_bytes(slot));
}
       
// Condition conversion
static Assembler::branch_condition j_not(TemplateTable::Condition cc) {
  switch (cc) {
    case TemplateTable::equal :
      return Assembler::bcondNotEqual;
    case TemplateTable::not_equal :
      return Assembler::bcondEqual;
    case TemplateTable::less :
      return Assembler::bcondNotLow;
    case TemplateTable::less_equal :
      return Assembler::bcondHigh;
    case TemplateTable::greater :
      return Assembler::bcondNotHigh;
    case TemplateTable::greater_equal:
      return Assembler::bcondLow;
  }
  ShouldNotReachHere();
  return Assembler::bcondZero;
}
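
// Example: j_not(TemplateTable::equal) == Assembler::bcondNotEqual, i.e.
// j_not() returns the branch condition for the *negated* test, so templates
// can branch past the "condition holds" code when the condition fails.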
       
// Do an oop store like *(base + offset) = val
// offset can be a register or a constant.
static void do_oop_store(InterpreterMacroAssembler* _masm,
                         Register base,
                         RegisterOrConstant offset,
                         Register val,
                         bool val_is_null, // == false does not guarantee that val really is non-NULL.
                         Register tmp1,    // If tmp3 is volatile, either tmp1 or tmp2 must be
                         Register tmp2,    // non-volatile to hold a copy of pre_val across runtime calls.
                         Register tmp3,    // Ideally, this tmp register is non-volatile, as it is used to
                                           // hold pre_val (must survive runtime calls).
                         BarrierSet::Name barrier,
                         bool precise) {
  BLOCK_COMMENT("do_oop_store {");
  assert(val != noreg, "val must always be valid, even if it is zero");
  assert_different_registers(tmp1, tmp2, tmp3, val, base, offset.register_or_noreg());
  __ verify_oop(val);
  switch (barrier) {
#if INCLUDE_ALL_GCS
    case BarrierSet::G1SATBCTLogging:
      {
#ifdef ASSERT
        if (val_is_null) { // Check if the flag setting reflects reality.
          Label OK;
          __ z_ltgr(val, val);
          __ z_bre(OK);
          __ z_illtrap(0x11);
          __ bind(OK);
        }
#endif
        Register pre_val = tmp3;
        // Load and record the previous value.
        __ g1_write_barrier_pre(base, offset, pre_val, val,
                                tmp1, tmp2,
                                false);  // Needs to hold pre_val in non_volatile register?

        if (val_is_null) {
          __ store_heap_oop_null(val, offset, base);
        } else {
          Label Done;
          // val_is_null == false does not guarantee that val really is non-NULL.
          // Checking for this case dynamically has some cost, but also some benefit (in GC).
          // It's hard to say if cost or benefit is greater.
          { Label OK;
            __ z_ltgr(val, val);
            __ z_brne(OK);
            __ store_heap_oop_null(val, offset, base);
            __ z_bru(Done);
            __ bind(OK);
          }
          // The G1 barrier needs the uncompressed oop for the region cross check.
          // store_heap_oop compresses the oop in the argument register.
          Register val_work = val;
          if (UseCompressedOops) {
            val_work = tmp3;
            __ z_lgr(val_work, val);
          }
          __ store_heap_oop_not_null(val_work, offset, base);

          // We need precise card marks for oop array stores.
          // Otherwise, cardmarking the object which contains the oop is sufficient.
          if (precise && !(offset.is_constant() && offset.as_constant() == 0)) {
            __ add2reg_with_index(base,
                                  offset.constant_or_zero(),
                                  offset.register_or_noreg(),
                                  base);
          }
          __ g1_write_barrier_post(base /* store_adr */, val, tmp1, tmp2, tmp3);
          __ bind(Done);
        }
      }
      break;
#endif // INCLUDE_ALL_GCS
    case BarrierSet::CardTableForRS:
    case BarrierSet::CardTableExtension:
    {
      if (val_is_null) {
        __ store_heap_oop_null(val, offset, base);
      } else {
        __ store_heap_oop(val, offset, base);
        // Flatten the object address if needed.
        if (precise && ((offset.register_or_noreg() != noreg) || (offset.constant_or_zero() != 0))) {
          __ load_address(base, Address(base, offset.register_or_noreg(), offset.constant_or_zero()));
        }
        __ card_write_barrier_post(base, tmp1);
      }
    }
    break;
  case BarrierSet::ModRef:
    // fall through
  default:
    ShouldNotReachHere();
  }
  BLOCK_COMMENT("} do_oop_store");
}
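
// Usage sketch (cf. the calls in aastore() below): an oop array store with
// precise card marking and a possibly-NULL value looks like
//
//   do_oop_store(_masm, Rstore_addr, (intptr_t)0 /*offset*/, Rvalue,
//                false /*val_is_null*/, tmp3, tmp2, tmp1,
//                _bs->kind(), true /*precise*/);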
       
Address TemplateTable::at_bcp(int offset) {
  assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
  return Address(Z_bcp, offset);
}

void TemplateTable::patch_bytecode(Bytecodes::Code bc,
                                   Register        bc_reg,
                                   Register        temp_reg,
                                   bool            load_bc_into_bc_reg, // = true
                                   int             byte_no) {
  if (!RewriteBytecodes) { return; }

  NearLabel L_patch_done;
  BLOCK_COMMENT("patch_bytecode {");

  switch (bc) {
    case Bytecodes::_fast_aputfield:
    case Bytecodes::_fast_bputfield:
    case Bytecodes::_fast_zputfield:
    case Bytecodes::_fast_cputfield:
    case Bytecodes::_fast_dputfield:
    case Bytecodes::_fast_fputfield:
    case Bytecodes::_fast_iputfield:
    case Bytecodes::_fast_lputfield:
    case Bytecodes::_fast_sputfield:
      {
        // We skip bytecode quickening for putfield instructions when
        // the put_code written to the constant pool cache is zero.
        // This is required so that every execution of this instruction
        // calls out to InterpreterRuntime::resolve_get_put to do
        // additional, required work.
        assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
        assert(load_bc_into_bc_reg, "we use bc_reg as temp");
        __ get_cache_and_index_and_bytecode_at_bcp(Z_R1_scratch, bc_reg,
                                                   temp_reg, byte_no, 1);
        __ load_const_optimized(bc_reg, bc);
        __ compareU32_and_branch(temp_reg, (intptr_t)0,
                                 Assembler::bcondZero, L_patch_done);
      }
      break;
    default:
      assert(byte_no == -1, "sanity");
      // The pair bytecodes have already done the load.
      if (load_bc_into_bc_reg) {
        __ load_const_optimized(bc_reg, bc);
      }
      break;
  }

  if (JvmtiExport::can_post_breakpoint()) {

    Label   L_fast_patch;

    // If a breakpoint is present we can't rewrite the stream directly.
    __ z_cli(at_bcp(0), Bytecodes::_breakpoint);
    __ z_brne(L_fast_patch);
    __ get_method(temp_reg);
    // Let breakpoint table handling rewrite to quicker bytecode.
    __ call_VM_static(noreg,
                      CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at),
                      temp_reg, Z_R13, bc_reg);
    __ z_bru(L_patch_done);

    __ bind(L_fast_patch);
  }

#ifdef ASSERT
  NearLabel   L_okay;

  // We load into 64 bits, since this works on any CPU.
  __ z_llgc(temp_reg, at_bcp(0));
  __ compareU32_and_branch(temp_reg, Bytecodes::java_code(bc),
                           Assembler::bcondEqual, L_okay);
  __ compareU32_and_branch(temp_reg, bc_reg, Assembler::bcondEqual, L_okay);
  __ stop_static("patching the wrong bytecode");
  __ bind(L_okay);
#endif

  // Patch bytecode.
  __ z_stc(bc_reg, at_bcp(0));

  __ bind(L_patch_done);
  BLOCK_COMMENT("} patch_bytecode");
}
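
// Typical use (see iload_internal() and aload_0_internal() below): a slow
// template selects a fast replacement bytecode in a register and calls, e.g.,
//
//   patch_bytecode(Bytecodes::_iload, bc, Z_R1_scratch, false);
//
// with load_bc_into_bc_reg == false because bc was loaded beforehand.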
       
// Individual instructions

void TemplateTable::nop() {
  transition(vtos, vtos);
}

void TemplateTable::shouldnotreachhere() {
  transition(vtos, vtos);
  __ stop("shouldnotreachhere bytecode");
}

void TemplateTable::aconst_null() {
  transition(vtos, atos);
  __ clear_reg(Z_tos, true, false);
}

void TemplateTable::iconst(int value) {
  transition(vtos, itos);
  // Zero extension of the iconst makes zero extension at runtime obsolete.
  __ load_const_optimized(Z_tos, ((unsigned long)(unsigned int)value));
}

void TemplateTable::lconst(int value) {
  transition(vtos, ltos);
  __ load_const_optimized(Z_tos, value);
}

// No pc-relative load/store for floats.
void TemplateTable::fconst(int value) {
  transition(vtos, ftos);
  static float   one = 1.0f, two = 2.0f;

  switch (value) {
    case 0:
      __ z_lzer(Z_ftos);
      return;
    case 1:
      __ load_absolute_address(Z_R1_scratch, (address) &one);
      __ mem2freg_opt(Z_ftos, Address(Z_R1_scratch), false);
      return;
    case 2:
      __ load_absolute_address(Z_R1_scratch, (address) &two);
      __ mem2freg_opt(Z_ftos, Address(Z_R1_scratch), false);
      return;
    default:
      ShouldNotReachHere();
      return;
  }
}

void TemplateTable::dconst(int value) {
  transition(vtos, dtos);
  static double one = 1.0;

  switch (value) {
    case 0:
      __ z_lzdr(Z_ftos);
      return;
    case 1:
      __ load_absolute_address(Z_R1_scratch, (address) &one);
      __ mem2freg_opt(Z_ftos, Address(Z_R1_scratch));
      return;
    default:
      ShouldNotReachHere();
      return;
  }
}

void TemplateTable::bipush() {
  transition(vtos, itos);
  __ z_lb(Z_tos, at_bcp(1));
}

void TemplateTable::sipush() {
  transition(vtos, itos);
  __ get_2_byte_integer_at_bcp(Z_tos, 1, InterpreterMacroAssembler::Signed);
}


void TemplateTable::ldc(bool wide) {
  transition(vtos, vtos);
  Label call_ldc, notFloat, notClass, Done;
  const Register RcpIndex = Z_tmp_1;
  const Register Rtags = Z_ARG2;

  if (wide) {
    __ get_2_byte_integer_at_bcp(RcpIndex, 1, InterpreterMacroAssembler::Unsigned);
  } else {
    __ z_llgc(RcpIndex, at_bcp(1));
  }

  __ get_cpool_and_tags(Z_tmp_2, Rtags);

  const int      base_offset = ConstantPool::header_size() * wordSize;
  const int      tags_offset = Array<u1>::base_offset_in_bytes();
  const Register Raddr_type = Rtags;

  // Get address of type.
  __ add2reg_with_index(Raddr_type, tags_offset, RcpIndex, Rtags);

  __ z_cli(0, Raddr_type, JVM_CONSTANT_UnresolvedClass);
  __ z_bre(call_ldc);    // Unresolved class - get the resolved class.

  __ z_cli(0, Raddr_type, JVM_CONSTANT_UnresolvedClassInError);
  __ z_bre(call_ldc);    // Unresolved class in error state - call into runtime
                         // to throw the error from the first resolution attempt.

  __ z_cli(0, Raddr_type, JVM_CONSTANT_Class);
  __ z_brne(notClass);   // Resolved class - need to call the VM to get the java
                         // mirror of the class.

  // We deal with a class. Call the VM to do the appropriate work.
  __ bind(call_ldc);
  __ load_const_optimized(Z_ARG2, wide);
  call_VM(Z_RET, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), Z_ARG2);
  __ push_ptr(Z_RET);
  __ z_bru(Done);

  // Not a class.
  __ bind(notClass);
  Register RcpOffset = RcpIndex;
  __ z_sllg(RcpOffset, RcpIndex, LogBytesPerWord); // Convert index to offset.
  __ z_cli(0, Raddr_type, JVM_CONSTANT_Float);
  __ z_brne(notFloat);

  // ftos
  __ mem2freg_opt(Z_ftos, Address(Z_tmp_2, RcpOffset, base_offset), false);
  __ push_f();
  __ z_bru(Done);

  __ bind(notFloat);
#ifdef ASSERT
  {
    Label   L;

    __ z_cli(0, Raddr_type, JVM_CONSTANT_Integer);
    __ z_bre(L);
    // String and Object are rewritten to fast_aldc.
    __ stop("unexpected tag type in ldc");

    __ bind(L);
  }
#endif

  // itos
  __ mem2reg_opt(Z_tos, Address(Z_tmp_2, RcpOffset, base_offset), false);
  __ push_i(Z_tos);

  __ bind(Done);
}
       
// Fast path for caching oop constants.
// %%% We should use this to handle Class and String constants also.
// %%% It will simplify the ldc/primitive path considerably.
void TemplateTable::fast_aldc(bool wide) {
  transition(vtos, atos);

  const Register index = Z_tmp_2;
  int            index_size = wide ? sizeof(u2) : sizeof(u1);
  Label          L_resolved;

  // We are resolved if the resolved reference cache entry contains a
  // non-null object (CallSite, etc.).
  __ get_cache_index_at_bcp(index, 1, index_size);  // Load index.
  __ load_resolved_reference_at_index(Z_tos, index);
  __ z_ltgr(Z_tos, Z_tos);
  __ z_brne(L_resolved);

  // First time invocation - must resolve first.
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
  __ load_const_optimized(Z_ARG1, (int)bytecode());
  __ call_VM(Z_tos, entry, Z_ARG1);

  __ bind(L_resolved);
  __ verify_oop(Z_tos);
}

void TemplateTable::ldc2_w() {
  transition(vtos, vtos);
  Label Long, Done;

  // Z_tmp_1 = index of cp entry
  __ get_2_byte_integer_at_bcp(Z_tmp_1, 1, InterpreterMacroAssembler::Unsigned);

  __ get_cpool_and_tags(Z_tmp_2, Z_tos);

  const int base_offset = ConstantPool::header_size() * wordSize;
  const int tags_offset = Array<u1>::base_offset_in_bytes();

  // Get address of type.
  __ add2reg_with_index(Z_tos, tags_offset, Z_tos, Z_tmp_1);

  // Index needed in both branches, so calculate here.
  __ z_sllg(Z_tmp_1, Z_tmp_1, LogBytesPerWord);  // index2bytes

  // Check type.
  __ z_cli(0, Z_tos, JVM_CONSTANT_Double);
  __ z_brne(Long);

  // dtos
  __ mem2freg_opt(Z_ftos, Address(Z_tmp_2, Z_tmp_1, base_offset));
  __ push_d();
  __ z_bru(Done);

  __ bind(Long);
  // ltos
  __ mem2reg_opt(Z_tos, Address(Z_tmp_2, Z_tmp_1, base_offset));
  __ push_l();

  __ bind(Done);
}
       
void TemplateTable::locals_index(Register reg, int offset) {
  __ z_llgc(reg, at_bcp(offset));
  // Negate the index: locals live below Z_locals, i.e. they are addressed
  // with negative offsets (cf. iaddress() above, which only shifts).
  __ z_lcgr(reg);
}

void TemplateTable::iload() {
  iload_internal();
}

void TemplateTable::nofast_iload() {
  iload_internal(may_not_rewrite);
}

void TemplateTable::iload_internal(RewriteControl rc) {
  transition(vtos, itos);

  if (RewriteFrequentPairs && rc == may_rewrite) {
    NearLabel rewrite, done;
    const Register bc = Z_ARG4;

    assert(Z_R1_scratch != bc, "register damaged");

    // Get next byte.
    __ z_llgc(Z_R1_scratch, at_bcp(Bytecodes::length_for(Bytecodes::_iload)));

    // If _iload, wait to rewrite to iload2. We only want to rewrite the
    // last two iloads in a pair. Comparing against fast_iload means that
    // the next bytecode is neither an iload nor a caload, and therefore
    // an iload pair.
    __ compareU32_and_branch(Z_R1_scratch, Bytecodes::_iload,
                             Assembler::bcondEqual, done);

    __ load_const_optimized(bc, Bytecodes::_fast_iload2);
    __ compareU32_and_branch(Z_R1_scratch, Bytecodes::_fast_iload,
                             Assembler::bcondEqual, rewrite);

    // If _caload, rewrite to fast_icaload.
    __ load_const_optimized(bc, Bytecodes::_fast_icaload);
    __ compareU32_and_branch(Z_R1_scratch, Bytecodes::_caload,
                             Assembler::bcondEqual, rewrite);

    // Rewrite so iload doesn't check again.
    __ load_const_optimized(bc, Bytecodes::_fast_iload);

    // rewrite
    // bc: fast bytecode
    __ bind(rewrite);
    patch_bytecode(Bytecodes::_iload, bc, Z_R1_scratch, false);

    __ bind(done);
  }

  // Get the local value into tos.
  locals_index(Z_R1_scratch);
  __ mem2reg_opt(Z_tos, iaddress(_masm, Z_R1_scratch), false);
}
       
void TemplateTable::fast_iload2() {
  transition(vtos, itos);

  locals_index(Z_R1_scratch);
  __ mem2reg_opt(Z_tos, iaddress(_masm, Z_R1_scratch), false);
  __ push_i(Z_tos);
  locals_index(Z_R1_scratch, 3);
  __ mem2reg_opt(Z_tos, iaddress(_masm, Z_R1_scratch), false);
}

void TemplateTable::fast_iload() {
  transition(vtos, itos);

  locals_index(Z_R1_scratch);
  __ mem2reg_opt(Z_tos, iaddress(_masm, Z_R1_scratch), false);
}

void TemplateTable::lload() {
  transition(vtos, ltos);

  locals_index(Z_R1_scratch);
  __ mem2reg_opt(Z_tos, laddress(_masm, Z_R1_scratch));
}

void TemplateTable::fload() {
  transition(vtos, ftos);

  locals_index(Z_R1_scratch);
  __ mem2freg_opt(Z_ftos, faddress(_masm, Z_R1_scratch), false);
}

void TemplateTable::dload() {
  transition(vtos, dtos);

  locals_index(Z_R1_scratch);
  __ mem2freg_opt(Z_ftos, daddress(_masm, Z_R1_scratch));
}

void TemplateTable::aload() {
  transition(vtos, atos);

  locals_index(Z_R1_scratch);
  __ mem2reg_opt(Z_tos, aaddress(_masm, Z_R1_scratch));
}

void TemplateTable::locals_index_wide(Register reg) {
  __ get_2_byte_integer_at_bcp(reg, 2, InterpreterMacroAssembler::Unsigned);
  __ z_lcgr(reg);
}

void TemplateTable::wide_iload() {
  transition(vtos, itos);

  locals_index_wide(Z_tmp_1);
  __ mem2reg_opt(Z_tos, iaddress(_masm, Z_tmp_1), false);
}

void TemplateTable::wide_lload() {
  transition(vtos, ltos);

  locals_index_wide(Z_tmp_1);
  __ mem2reg_opt(Z_tos, laddress(_masm, Z_tmp_1));
}

void TemplateTable::wide_fload() {
  transition(vtos, ftos);

  locals_index_wide(Z_tmp_1);
  __ mem2freg_opt(Z_ftos, faddress(_masm, Z_tmp_1), false);
}

void TemplateTable::wide_dload() {
  transition(vtos, dtos);

  locals_index_wide(Z_tmp_1);
  __ mem2freg_opt(Z_ftos, daddress(_masm, Z_tmp_1));
}

void TemplateTable::wide_aload() {
  transition(vtos, atos);

  locals_index_wide(Z_tmp_1);
  __ mem2reg_opt(Z_tos, aaddress(_masm, Z_tmp_1));
}

void TemplateTable::index_check(Register array, Register index, unsigned int shift) {
  assert_different_registers(Z_R1_scratch, array, index);

  // Check array.
  __ null_check(array, Z_R0_scratch, arrayOopDesc::length_offset_in_bytes());

  // Sign extend index for use by indexed load.
  __ z_lgfr(index, index);

  // Check index.
  Label index_ok;
  __ z_cl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
  __ z_brl(index_ok);
  __ lgr_if_needed(Z_ARG3, index); // See generate_ArrayIndexOutOfBounds_handler().
  // Give back the array to create more detailed exceptions.
  __ lgr_if_needed(Z_ARG2, array); // See generate_ArrayIndexOutOfBounds_handler().
  __ load_absolute_address(Z_R1_scratch,
                           Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
  __ z_bcr(Assembler::bcondAlways, Z_R1_scratch);
  __ bind(index_ok);

  if (shift > 0)
    __ z_sllg(index, index, shift);
}
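
// All array bytecodes funnel through index_check(): it null-checks the
// array, compares the sign-extended index *unsigned* against the length
// field (which also catches negative indices), and scales the index by
// 'shift' for the subsequent indexed access, e.g.
//
//   index_check(Z_tmp_1, index, LogBytesPerInt);  // iaload/iastore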
       
void TemplateTable::iaload() {
  transition(itos, itos);

  __ pop_ptr(Z_tmp_1);  // array
  // Index is in Z_tos.
  Register index = Z_tos;
  index_check(Z_tmp_1, index, LogBytesPerInt); // Kills Z_ARG3.
  // Load the value.
  __ mem2reg_opt(Z_tos,
                 Address(Z_tmp_1, index, arrayOopDesc::base_offset_in_bytes(T_INT)),
                 false);
}

void TemplateTable::laload() {
  transition(itos, ltos);

  __ pop_ptr(Z_tmp_2);
  // Z_tos   : index
  // Z_tmp_2 : array
  Register index = Z_tos;
  index_check(Z_tmp_2, index, LogBytesPerLong);
  __ mem2reg_opt(Z_tos,
                 Address(Z_tmp_2, index, arrayOopDesc::base_offset_in_bytes(T_LONG)));
}

void TemplateTable::faload() {
  transition(itos, ftos);

  __ pop_ptr(Z_tmp_2);
  // Z_tos   : index
  // Z_tmp_2 : array
  Register index = Z_tos;
  index_check(Z_tmp_2, index, LogBytesPerInt);
  __ mem2freg_opt(Z_ftos,
                  Address(Z_tmp_2, index, arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
                  false);
}

void TemplateTable::daload() {
  transition(itos, dtos);

  __ pop_ptr(Z_tmp_2);
  // Z_tos   : index
  // Z_tmp_2 : array
  Register index = Z_tos;
  index_check(Z_tmp_2, index, LogBytesPerLong);
  __ mem2freg_opt(Z_ftos,
                  Address(Z_tmp_2, index, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}

void TemplateTable::aaload() {
  transition(itos, atos);

  unsigned const int shift = LogBytesPerHeapOop;
  __ pop_ptr(Z_tmp_1);  // array
  // Index is in Z_tos.
  Register index = Z_tos;
  index_check(Z_tmp_1, index, shift);
  // Now load array element.
  __ load_heap_oop(Z_tos,
                   Address(Z_tmp_1, index, arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
  __ verify_oop(Z_tos);
}

void TemplateTable::baload() {
  transition(itos, itos);

  __ pop_ptr(Z_tmp_1);
  // Z_tos   : index
  // Z_tmp_1 : array
  Register index = Z_tos;
  index_check(Z_tmp_1, index, 0);
  __ z_lb(Z_tos,
          Address(Z_tmp_1, index, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
}

void TemplateTable::caload() {
  transition(itos, itos);

  __ pop_ptr(Z_tmp_2);
  // Z_tos   : index
  // Z_tmp_2 : array
  Register index = Z_tos;
  index_check(Z_tmp_2, index, LogBytesPerShort);
  // Load into 64 bits, works on all CPUs.
  __ z_llgh(Z_tos,
            Address(Z_tmp_2, index, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}

// Iload followed by caload frequent pair.
void TemplateTable::fast_icaload() {
  transition(vtos, itos);

  // Load index out of locals.
  locals_index(Z_R1_scratch);
  __ mem2reg_opt(Z_ARG3, iaddress(_masm, Z_R1_scratch), false);
  // Z_ARG3  : index
  // Z_tmp_2 : array
  __ pop_ptr(Z_tmp_2);
  index_check(Z_tmp_2, Z_ARG3, LogBytesPerShort);
  // Load into 64 bits, works on all CPUs.
  __ z_llgh(Z_tos,
            Address(Z_tmp_2, Z_ARG3, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}

void TemplateTable::saload() {
  transition(itos, itos);

  __ pop_ptr(Z_tmp_2);
  // Z_tos   : index
  // Z_tmp_2 : array
  Register index = Z_tos;
  index_check(Z_tmp_2, index, LogBytesPerShort);
  __ z_lh(Z_tos,
          Address(Z_tmp_2, index, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
}
       
void TemplateTable::iload(int n) {
  transition(vtos, itos);
  __ z_ly(Z_tos, iaddress(n));
}

void TemplateTable::lload(int n) {
  transition(vtos, ltos);
  __ z_lg(Z_tos, laddress(n));
}

void TemplateTable::fload(int n) {
  transition(vtos, ftos);
  __ mem2freg_opt(Z_ftos, faddress(n), false);
}

void TemplateTable::dload(int n) {
  transition(vtos, dtos);
  __ mem2freg_opt(Z_ftos, daddress(n));
}

void TemplateTable::aload(int n) {
  transition(vtos, atos);
  __ mem2reg_opt(Z_tos, aaddress(n));
}

void TemplateTable::aload_0() {
  aload_0_internal();
}

void TemplateTable::nofast_aload_0() {
  aload_0_internal(may_not_rewrite);
}

void TemplateTable::aload_0_internal(RewriteControl rc) {
  transition(vtos, atos);

  // According to bytecode histograms, the pairs:
  //
  // _aload_0, _fast_igetfield
  // _aload_0, _fast_agetfield
  // _aload_0, _fast_fgetfield
  //
  // occur frequently. If RewriteFrequentPairs is set, the (slow)
  // _aload_0 bytecode checks if the next bytecode is either
  // _fast_igetfield, _fast_agetfield or _fast_fgetfield and then
  // rewrites the current bytecode into a pair bytecode; otherwise it
  // rewrites the current bytecode into _fast_aload_0 that doesn't do
  // the pair check anymore.
  //
  // Note: If the next bytecode is _getfield, the rewrite must be
  //       delayed, otherwise we may miss an opportunity for a pair.
  //
  // Also rewrite the frequent pairs
  //   aload_0, aload_1
  //   aload_0, iload_1
  // These bytecodes are implemented with only a small amount of code and
  // are the most profitable to rewrite.
  if (!(RewriteFrequentPairs && (rc == may_rewrite))) {
    aload(0);
    return;
  }

  NearLabel rewrite, done;
  const Register bc = Z_ARG4;

  assert(Z_R1_scratch != bc, "register damaged");
  // Get next byte.
  __ z_llgc(Z_R1_scratch, at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));

  // Do actual aload_0.
  aload(0);

  // If _getfield, wait with the rewrite.
  __ compareU32_and_branch(Z_R1_scratch, Bytecodes::_getfield,
                           Assembler::bcondEqual, done);

  // If _igetfield, rewrite to _fast_iaccess_0.
  assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0)
            == Bytecodes::_aload_0, "fix bytecode definition");

  __ load_const_optimized(bc, Bytecodes::_fast_iaccess_0);
  __ compareU32_and_branch(Z_R1_scratch, Bytecodes::_fast_igetfield,
                           Assembler::bcondEqual, rewrite);

  // If _agetfield, rewrite to _fast_aaccess_0.
  assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0)
            == Bytecodes::_aload_0, "fix bytecode definition");

  __ load_const_optimized(bc, Bytecodes::_fast_aaccess_0);
  __ compareU32_and_branch(Z_R1_scratch, Bytecodes::_fast_agetfield,
                           Assembler::bcondEqual, rewrite);

  // If _fgetfield, rewrite to _fast_faccess_0.
  assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0)
            == Bytecodes::_aload_0, "fix bytecode definition");

  __ load_const_optimized(bc, Bytecodes::_fast_faccess_0);
  __ compareU32_and_branch(Z_R1_scratch, Bytecodes::_fast_fgetfield,
                           Assembler::bcondEqual, rewrite);

  // Else rewrite to _fast_aload_0.
  assert(Bytecodes::java_code(Bytecodes::_fast_aload_0)
            == Bytecodes::_aload_0, "fix bytecode definition");
  __ load_const_optimized(bc, Bytecodes::_fast_aload_0);

  // rewrite
  // bc: fast bytecode
  __ bind(rewrite);

  patch_bytecode(Bytecodes::_aload_0, bc, Z_R1_scratch, false);
  // Reload local 0 because of the VM call inside patch_bytecode();
  // it may trigger GC and thus change the oop.
  aload(0);

  __ bind(done);
}
       
void TemplateTable::istore() {
  transition(itos, vtos);
  locals_index(Z_R1_scratch);
  __ reg2mem_opt(Z_tos, iaddress(_masm, Z_R1_scratch), false);
}

void TemplateTable::lstore() {
  transition(ltos, vtos);
  locals_index(Z_R1_scratch);
  __ reg2mem_opt(Z_tos, laddress(_masm, Z_R1_scratch));
}

void TemplateTable::fstore() {
  transition(ftos, vtos);
  locals_index(Z_R1_scratch);
  __ freg2mem_opt(Z_ftos, faddress(_masm, Z_R1_scratch));
}

void TemplateTable::dstore() {
  transition(dtos, vtos);
  locals_index(Z_R1_scratch);
  __ freg2mem_opt(Z_ftos, daddress(_masm, Z_R1_scratch));
}

void TemplateTable::astore() {
  transition(vtos, vtos);
  __ pop_ptr(Z_tos);
  locals_index(Z_R1_scratch);
  __ reg2mem_opt(Z_tos, aaddress(_masm, Z_R1_scratch));
}

void TemplateTable::wide_istore() {
  transition(vtos, vtos);
  __ pop_i(Z_tos);
  locals_index_wide(Z_tmp_1);
  __ reg2mem_opt(Z_tos, iaddress(_masm, Z_tmp_1), false);
}

void TemplateTable::wide_lstore() {
  transition(vtos, vtos);
  __ pop_l(Z_tos);
  locals_index_wide(Z_tmp_1);
  __ reg2mem_opt(Z_tos, laddress(_masm, Z_tmp_1));
}

void TemplateTable::wide_fstore() {
  transition(vtos, vtos);
  __ pop_f(Z_ftos);
  locals_index_wide(Z_tmp_1);
  __ freg2mem_opt(Z_ftos, faddress(_masm, Z_tmp_1), false);
}

void TemplateTable::wide_dstore() {
  transition(vtos, vtos);
  __ pop_d(Z_ftos);
  locals_index_wide(Z_tmp_1);
  __ freg2mem_opt(Z_ftos, daddress(_masm, Z_tmp_1));
}

void TemplateTable::wide_astore() {
  transition(vtos, vtos);
  __ pop_ptr(Z_tos);
  locals_index_wide(Z_tmp_1);
  __ reg2mem_opt(Z_tos, aaddress(_masm, Z_tmp_1));
}

void TemplateTable::iastore() {
  transition(itos, vtos);

  Register index = Z_ARG3; // index_check expects the index in Z_ARG3.
  // Value is in Z_tos ...
  __ pop_i(index);        // index
  __ pop_ptr(Z_tmp_1);    // array
  index_check(Z_tmp_1, index, LogBytesPerInt);
  // ... and then move the value.
  __ reg2mem_opt(Z_tos,
                 Address(Z_tmp_1, index, arrayOopDesc::base_offset_in_bytes(T_INT)),
                 false);
}

void TemplateTable::lastore() {
  transition(ltos, vtos);

  __ pop_i(Z_ARG3);
  __ pop_ptr(Z_tmp_2);
  // Z_tos   : value
  // Z_ARG3  : index
  // Z_tmp_2 : array
  index_check(Z_tmp_2, Z_ARG3, LogBytesPerLong); // Prefer index in Z_ARG3.
  __ reg2mem_opt(Z_tos,
                 Address(Z_tmp_2, Z_ARG3, arrayOopDesc::base_offset_in_bytes(T_LONG)));
}

void TemplateTable::fastore() {
  transition(ftos, vtos);

  __ pop_i(Z_ARG3);
  __ pop_ptr(Z_tmp_2);
  // Z_ftos  : value
  // Z_ARG3  : index
  // Z_tmp_2 : array
  index_check(Z_tmp_2, Z_ARG3, LogBytesPerInt); // Prefer index in Z_ARG3.
  __ freg2mem_opt(Z_ftos,
                  Address(Z_tmp_2, Z_ARG3, arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
                  false);
}

void TemplateTable::dastore() {
  transition(dtos, vtos);

  __ pop_i(Z_ARG3);
  __ pop_ptr(Z_tmp_2);
  // Z_ftos  : value
  // Z_ARG3  : index
  // Z_tmp_2 : array
  index_check(Z_tmp_2, Z_ARG3, LogBytesPerLong); // Prefer index in Z_ARG3.
  __ freg2mem_opt(Z_ftos,
                  Address(Z_tmp_2, Z_ARG3, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}

void TemplateTable::aastore() {
  NearLabel is_null, ok_is_subtype, done;
  transition(vtos, vtos);

  // stack: ..., array, index, value

  Register Rvalue = Z_tos;
  Register Rarray = Z_ARG2;
  Register Rindex = Z_ARG3; // Convention for index_check().

  __ load_ptr(0, Rvalue);
  __ z_l(Rindex, Address(Z_esp, Interpreter::expr_offset_in_bytes(1)));
  __ load_ptr(2, Rarray);

  unsigned const int shift = LogBytesPerHeapOop;
  index_check(Rarray, Rindex, shift); // side effect: Rindex = Rindex << shift
  Register Rstore_addr  = Rindex;
  // Address where the store goes to, i.e. &(Rarray[index]).
  __ load_address(Rstore_addr, Address(Rarray, Rindex, arrayOopDesc::base_offset_in_bytes(T_OBJECT)));

  // Do the array store check - check for a NULL value first.
  __ compareU64_and_branch(Rvalue, (intptr_t)0, Assembler::bcondEqual, is_null);

  Register Rsub_klass   = Z_ARG4;
  Register Rsuper_klass = Z_ARG5;
  __ load_klass(Rsub_klass, Rvalue);
  // Load superklass.
  __ load_klass(Rsuper_klass, Rarray);
  __ z_lg(Rsuper_klass, Address(Rsuper_klass, ObjArrayKlass::element_klass_offset()));

  // Generate a fast subtype check. Branch to ok_is_subtype if no failure.
  // Throw if failure.
  Register tmp1 = Z_tmp_1;
  Register tmp2 = Z_tmp_2;
  __ gen_subtype_check(Rsub_klass, Rsuper_klass, tmp1, tmp2, ok_is_subtype);

  // Fall through on failure.
  // Object is in Rvalue == Z_tos.
  assert(Rvalue == Z_tos, "that's the expected location");
  __ load_absolute_address(tmp1, Interpreter::_throw_ArrayStoreException_entry);
  __ z_br(tmp1);

  // Come here on success.
  __ bind(ok_is_subtype);

  // Now store using the appropriate barrier.
  Register tmp3 = Rsub_klass;
  do_oop_store(_masm, Rstore_addr, (intptr_t)0/*offset*/, Rvalue, false/*val==null*/,
               tmp3, tmp2, tmp1, _bs->kind(), true);
  __ z_bru(done);

  // Have a NULL in Rvalue.
  __ bind(is_null);
  __ profile_null_seen(tmp1);

  // Store a NULL.
  do_oop_store(_masm, Rstore_addr, (intptr_t)0/*offset*/, Rvalue, true/*val==null*/,
               tmp3, tmp2, tmp1, _bs->kind(), true);

  // Pop stack arguments.
  __ bind(done);
  __ add2reg(Z_esp, 3 * Interpreter::stackElementSize);
}
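
// Summary of the aastore sequence generated above: a NULL value takes the
// short path (profile_null_seen() plus a barriered NULL store); otherwise
// gen_subtype_check() against the array's element klass either falls
// through into the ArrayStoreException entry or reaches the barriered store.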
       

void TemplateTable::bastore() {
  transition(itos, vtos);

  __ pop_i(Z_ARG3);
  __ pop_ptr(Z_tmp_2);
  // Z_tos   : value
  // Z_ARG3  : index
  // Z_tmp_2 : array
  // No index shift necessary - pass 0.
  index_check(Z_tmp_2, Z_ARG3, 0); // Prefer index in Z_ARG3.
  __ z_stc(Z_tos,
           Address(Z_tmp_2, Z_ARG3, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
}

void TemplateTable::castore() {
  transition(itos, vtos);

  __ pop_i(Z_ARG3);
  __ pop_ptr(Z_tmp_2);
  // Z_tos   : value
  // Z_ARG3  : index
  // Z_tmp_2 : array
  Register index = Z_ARG3; // prefer index in Z_ARG3
  index_check(Z_tmp_2, index, LogBytesPerShort);
  __ z_sth(Z_tos,
           Address(Z_tmp_2, index, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}

void TemplateTable::sastore() {
  castore();
}

void TemplateTable::istore(int n) {
  transition(itos, vtos);
  __ reg2mem_opt(Z_tos, iaddress(n), false);
}

void TemplateTable::lstore(int n) {
  transition(ltos, vtos);
  __ reg2mem_opt(Z_tos, laddress(n));
}

void TemplateTable::fstore(int n) {
  transition(ftos, vtos);
  __ freg2mem_opt(Z_ftos, faddress(n), false);
}

void TemplateTable::dstore(int n) {
  transition(dtos, vtos);
  __ freg2mem_opt(Z_ftos, daddress(n));
}

void TemplateTable::astore(int n) {
  transition(vtos, vtos);
  __ pop_ptr(Z_tos);
  __ reg2mem_opt(Z_tos, aaddress(n));
}

void TemplateTable::pop() {
  transition(vtos, vtos);
  __ add2reg(Z_esp, Interpreter::stackElementSize);
}

void TemplateTable::pop2() {
  transition(vtos, vtos);
  __ add2reg(Z_esp, 2 * Interpreter::stackElementSize);
}

void TemplateTable::dup() {
  transition(vtos, vtos);
  __ load_ptr(0, Z_tos);
  __ push_ptr(Z_tos);
  // stack: ..., a, a
}

void TemplateTable::dup_x1() {
  transition(vtos, vtos);

  // stack: ..., a, b
  __ load_ptr(0, Z_tos);          // load b
  __ load_ptr(1, Z_R0_scratch);   // load a
  __ store_ptr(1, Z_tos);         // store b
  __ store_ptr(0, Z_R0_scratch);  // store a
  __ push_ptr(Z_tos);             // push b
  // stack: ..., b, a, b
}

void TemplateTable::dup_x2() {
  transition(vtos, vtos);

  // stack: ..., a, b, c
  __ load_ptr(0, Z_R0_scratch);   // load c
  __ load_ptr(2, Z_R1_scratch);   // load a
  __ store_ptr(2, Z_R0_scratch);  // store c in a
  __ push_ptr(Z_R0_scratch);      // push c
  // stack: ..., c, b, c, c
  __ load_ptr(2, Z_R0_scratch);   // load b
  __ store_ptr(2, Z_R1_scratch);  // store a in b
  // stack: ..., c, a, c, c
  __ store_ptr(1, Z_R0_scratch);  // store b in c
  // stack: ..., c, a, b, c
}

void TemplateTable::dup2() {
  transition(vtos, vtos);

  // stack: ..., a, b
  __ load_ptr(1, Z_R0_scratch);  // load a
  __ push_ptr(Z_R0_scratch);     // push a
  __ load_ptr(1, Z_R0_scratch);  // load b
  __ push_ptr(Z_R0_scratch);     // push b
  // stack: ..., a, b, a, b
}

void TemplateTable::dup2_x1() {
  transition(vtos, vtos);

  // stack: ..., a, b, c
  __ load_ptr(0, Z_R0_scratch);  // load c
  __ load_ptr(1, Z_R1_scratch);  // load b
  __ push_ptr(Z_R1_scratch);     // push b
  __ push_ptr(Z_R0_scratch);     // push c
  // stack: ..., a, b, c, b, c
  __ store_ptr(3, Z_R0_scratch); // store c in b
  // stack: ..., a, c, c, b, c
  __ load_ptr(4, Z_R0_scratch);  // load a
  __ store_ptr(2, Z_R0_scratch); // store a in 2nd c
  // stack: ..., a, c, a, b, c
  __ store_ptr(4, Z_R1_scratch); // store b in a
  // stack: ..., b, c, a, b, c
}

void TemplateTable::dup2_x2() {
  transition(vtos, vtos);
  1306   // stack: ..., a, b, c, d
       
  1307   __ load_ptr(0, Z_R0_scratch);   // load d
       
  1308   __ load_ptr(1, Z_R1_scratch);   // load c
       
  1309   __ push_ptr(Z_R1_scratch);      // push c
       
  1310   __ push_ptr(Z_R0_scratch);      // push d
       
  1311   // stack: ..., a, b, c, d, c, d
       
  1312   __ load_ptr(4, Z_R1_scratch);   // load b
       
  1313   __ store_ptr(2, Z_R1_scratch);  // store b in d
       
  1314   __ store_ptr(4, Z_R0_scratch);  // store d in b
       
  1315   // stack: ..., a, d, c, b, c, d
       
  1316   __ load_ptr(5, Z_R0_scratch);   // load a
       
  1317   __ load_ptr(3, Z_R1_scratch);   // load c
       
  1318   __ store_ptr(3, Z_R0_scratch);  // store a in c
       
  1319   __ store_ptr(5, Z_R1_scratch);  // store c in a
       
  1320   // stack: ..., c, d, a, b, c, d
       
  1321 }
       
  1322 
       
  1323 void TemplateTable::swap() {
       
  1324   transition(vtos, vtos);
       
  1325 
       
  1326   // stack: ..., a, b
       
  1327   __ load_ptr(1, Z_R0_scratch);  // load a
       
  1328   __ load_ptr(0, Z_R1_scratch);  // load b
       
  1329   __ store_ptr(0, Z_R0_scratch);  // store a in b
       
  1330   __ store_ptr(1, Z_R1_scratch);  // store b in a
       
  1331   // stack: ..., b, a
       
  1332 }
       
  1333 
       
  1334 void TemplateTable::iop2(Operation op) {
       
  1335   transition(itos, itos);
       
  1336   switch (op) {
       
  1337     case add  :                           __ z_ay(Z_tos,  __ stackTop()); __ pop_i(); break;
       
  1338     case sub  :                           __ z_sy(Z_tos,  __ stackTop()); __ pop_i(); __ z_lcr(Z_tos, Z_tos); break; // z_sy yields value2-value1; negate to get value1-value2.
       
  1339     case mul  :                           __ z_msy(Z_tos, __ stackTop()); __ pop_i(); break;
       
  1340     case _and :                           __ z_ny(Z_tos,  __ stackTop()); __ pop_i(); break;
       
  1341     case _or  :                           __ z_oy(Z_tos,  __ stackTop()); __ pop_i(); break;
       
  1342     case _xor :                           __ z_xy(Z_tos,  __ stackTop()); __ pop_i(); break;
       
  1343     case shl  : __ z_lr(Z_tmp_1, Z_tos);

  1344                 __ z_nill(Z_tmp_1, 31);  // Lowest 5 bits are shift amount.

  1345                 __ pop_i(Z_tos);  __ z_sll(Z_tos, 0, Z_tmp_1); break;

  1346     case shr  : __ z_lr(Z_tmp_1, Z_tos);

  1347                 __ z_nill(Z_tmp_1, 31);  // Lowest 5 bits are shift amount.

  1348                 __ pop_i(Z_tos);  __ z_sra(Z_tos, 0, Z_tmp_1); break;

  1349     case ushr : __ z_lr(Z_tmp_1, Z_tos);

  1350                 __ z_nill(Z_tmp_1, 31);  // Lowest 5 bits are shift amount.

  1351                 __ pop_i(Z_tos);  __ z_srl(Z_tos, 0, Z_tmp_1); break;
       
  1352     default   : ShouldNotReachHere(); break;
       
  1353   }
       
  1354   return;
       
  1355 }
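       // Note (per the JVM spec): ishl/ishr/iushr use only the low 5 bits of the shift
       // count, which is what the z_nill(Z_tmp_1, 31) masking above enforces.
       // E.g. in Java, (i << 33) == (i << 1).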
       
  1356 
       
  1357 void TemplateTable::lop2(Operation op) {
       
  1358   transition(ltos, ltos);
       
  1359 
       
  1360   switch (op) {
       
  1361     case add  :  __ z_ag(Z_tos,  __ stackTop()); __ pop_l(); break;
       
  1362     case sub  :  __ z_sg(Z_tos,  __ stackTop()); __ pop_l(); __ z_lcgr(Z_tos, Z_tos); break; // z_sg yields value2-value1; negate to get value1-value2.
       
  1363     case mul  :  __ z_msg(Z_tos, __ stackTop()); __ pop_l(); break;
       
  1364     case _and :  __ z_ng(Z_tos,  __ stackTop()); __ pop_l(); break;
       
  1365     case _or  :  __ z_og(Z_tos,  __ stackTop()); __ pop_l(); break;
       
  1366     case _xor :  __ z_xg(Z_tos,  __ stackTop()); __ pop_l(); break;
       
  1367     default   : ShouldNotReachHere(); break;
       
  1368   }
       
  1369   return;
       
  1370 }
       
  1371 
       
  1372 // Common part of idiv/irem.
       
  1373 static void idiv_helper(InterpreterMacroAssembler * _masm, address exception) {
       
  1374   NearLabel not_null;
       
  1375 
       
  1376   // Use register pair Z_tmp_1, Z_tmp_2 for DIVIDE SINGLE.
       
  1377   assert(Z_tmp_1->successor() == Z_tmp_2, "need even/odd register pair for idiv/irem");
       
  1378 
       
  1379   // Get dividend.
       
  1380   __ pop_i(Z_tmp_2);
       
  1381 
       
  1382   // If divisor == 0 throw exception.
       
  1383   __ compare32_and_branch(Z_tos, (intptr_t) 0,
       
  1384                           Assembler::bcondNotEqual, not_null);
       
  1385   __ load_absolute_address(Z_R1_scratch, exception);
       
  1386   __ z_br(Z_R1_scratch);
       
  1387 
       
  1388   __ bind(not_null);
       
  1389 
       
  1390   __ z_lgfr(Z_tmp_2, Z_tmp_2);   // Sign extend dividend.
       
  1391   __ z_dsgfr(Z_tmp_1, Z_tos);    // Do it.
       
  1392 }
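       // Note: z_dsgfr divides the 64-bit dividend in the odd register (Z_tmp_2) by the
       // 32-bit divisor (Z_tos). The remainder ends up in the even register (Z_tmp_1),
       // the quotient in the odd register (Z_tmp_2); idiv/irem below pick the half they need.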
       
  1393 
       
  1394 void TemplateTable::idiv() {
       
  1395   transition(itos, itos);
       
  1396 
       
  1397   idiv_helper(_masm, Interpreter::_throw_ArithmeticException_entry);
       
  1398   __ z_llgfr(Z_tos, Z_tmp_2);     // Quotient is in Z_tmp_2 (odd register of the pair).
       
  1399 }
       
  1400 
       
  1401 void TemplateTable::irem() {
       
  1402   transition(itos, itos);
       
  1403 
       
  1404   idiv_helper(_masm, Interpreter::_throw_ArithmeticException_entry);
       
  1405   __ z_llgfr(Z_tos, Z_tmp_1);     // Remainder is in Z_tmp_1 (even register of the pair).
       
  1406 }
       
  1407 
       
  1408 void TemplateTable::lmul() {
       
  1409   transition(ltos, ltos);
       
  1410 
       
  1411   // Multiply with memory operand.
       
  1412   __ z_msg(Z_tos, __ stackTop());
       
  1413   __ pop_l();  // Pop operand.
       
  1414 }
       
  1415 
       
  1416 // Common part of ldiv/lrem.
       
  1417 //
       
  1418 // Input:
       
  1419 //     Z_tos := the divisor (dividend still on stack)
       
  1420 //
       
  1421 // Updated registers:
       
  1422 //     Z_tmp_1 := pop_l() % Z_tos     ; if is_ldiv == false
       
  1423 //     Z_tmp_2 := pop_l() / Z_tos     ; if is_ldiv == true
       
  1424 //
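       // Worked example (sketch with assumed values): pop_l() == 7 and Z_tos == 2
       // leave the quotient 3 in Z_tmp_2 and the remainder 1 in Z_tmp_1.
       // The divisor == -1 shortcut below exists because DSGR would raise a
       // fixed-point-divide exception on min_jlong / -1, while Java defines that
       // result as min_jlong (remainder 0).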
       
  1425 static void ldiv_helper(InterpreterMacroAssembler * _masm, address exception, bool is_ldiv) {
       
  1426   NearLabel not_null, done;
       
  1427 
       
  1428   // Use register pair Z_tmp_1, Z_tmp_2 for DIVIDE SINGLE.
       
  1429   assert(Z_tmp_1->successor() == Z_tmp_2,
       
  1430          "need even/odd register pair for ldiv/lrem");
       
  1431 
       
  1432   // Get dividend.
       
  1433   __ pop_l(Z_tmp_2);
       
  1434 
       
  1435   // If divisor == 0 throw exception.
       
  1436   __ compare64_and_branch(Z_tos, (intptr_t)0, Assembler::bcondNotEqual, not_null);
       
  1437   __ load_absolute_address(Z_R1_scratch, exception);
       
  1438   __ z_br(Z_R1_scratch);
       
  1439 
       
  1440   __ bind(not_null);
       
  1441   // Special case for dividend == min_jlong (0x8000000000000000) and divisor == -1.
       
  1442   if (is_ldiv) {
       
  1443     // result := Z_tmp_2 := - dividend
       
  1444     __ z_lcgr(Z_tmp_2, Z_tmp_2);
       
  1445   } else {
       
  1446     // result (remainder) := Z_tmp_1 := 0
       
  1447     __ clear_reg(Z_tmp_1, true, false);  // Don't set CC.
       
  1448   }
       
  1449 
       
  1450   // if divisor == -1 goto done
       
  1451   __ compare64_and_branch(Z_tos, -1, Assembler::bcondEqual, done);
       
  1452   if (is_ldiv) {

  1453     __ z_lcgr(Z_tmp_2, Z_tmp_2); // Restore sign, because divisor != -1.

  1454   }
       
  1455   __ z_dsgr(Z_tmp_1, Z_tos);    // Do it.
       
  1456   __ bind(done);
       
  1457 }
       
  1458 
       
  1459 void TemplateTable::ldiv() {
       
  1460   transition(ltos, ltos);
       
  1461 
       
  1462   ldiv_helper(_masm, Interpreter::_throw_ArithmeticException_entry, true /*is_ldiv*/);
       
  1463   __ z_lgr(Z_tos, Z_tmp_2);     // Quotient is in Z_tmp_2.
       
  1464 }
       
  1465 
       
  1466 void TemplateTable::lrem() {
       
  1467   transition(ltos, ltos);
       
  1468 
       
  1469   ldiv_helper(_masm, Interpreter::_throw_ArithmeticException_entry, false /*is_ldiv*/);
       
  1470   __ z_lgr(Z_tos, Z_tmp_1);     // Remainder is in Z_tmp_1.
       
  1471 }
       
  1472 
       
  1473 void TemplateTable::lshl() {
       
  1474   transition(itos, ltos);
       
  1475 
       
  1476   // Z_tos: shift amount
       
  1477   __ pop_l(Z_tmp_1);              // Get long value to shift.
       
  1478   __ z_sllg(Z_tos, Z_tmp_1, 0, Z_tos);
       
  1479 }
       
  1480 
       
  1481 void TemplateTable::lshr() {
       
  1482   transition(itos, ltos);
       
  1483 
       
  1484   // Z_tos: shift amount
       
  1485   __ pop_l(Z_tmp_1);              // Get long value to shift.
       
  1486   __ z_srag(Z_tos, Z_tmp_1, 0, Z_tos);
       
  1487 }
       
  1488 
       
  1489 void TemplateTable::lushr() {
       
  1490   transition(itos, ltos);
       
  1491 
       
  1492   // Z_tos: shift amount
       
  1493   __ pop_l(Z_tmp_1);              // Get long value to shift.
       
  1494   __ z_srlg(Z_tos, Z_tmp_1, 0, Z_tos);
       
  1495 }
       
  1496 
       
  1497 void TemplateTable::fop2(Operation op) {
       
  1498   transition(ftos, ftos);
       
  1499 
       
  1500   switch (op) {
       
  1501     case add:
       
  1502       // Add memory operand.
       
  1503       __ z_aeb(Z_ftos, __ stackTop()); __ pop_f(); return;
       
  1504     case sub:
       
  1505       // Sub memory operand.
       
  1506       __ z_ler(Z_F1, Z_ftos);    // first operand
       
  1507       __ pop_f(Z_ftos);          // second operand from stack
       
  1508       __ z_sebr(Z_ftos, Z_F1);
       
  1509       return;
       
  1510     case mul:
       
  1511       // Multiply with memory operand.
       
  1512       __ z_meeb(Z_ftos, __ stackTop()); __ pop_f(); return;
       
  1513     case div:
       
  1514       __ z_ler(Z_F1, Z_ftos);    // first operand
       
  1515       __ pop_f(Z_ftos);          // second operand from stack
       
  1516       __ z_debr(Z_ftos, Z_F1);
       
  1517       return;
       
  1518     case rem:
       
  1519       // Do runtime call.
       
  1520       __ z_ler(Z_FARG2, Z_ftos);  // divisor
       
  1521       __ pop_f(Z_FARG1);          // dividend
       
  1522       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::frem));
       
  1523       // Result should be in the right place (Z_ftos == Z_FRET).
       
  1524       return;
       
  1525     default:
       
  1526       ShouldNotReachHere();
       
  1527       return;
       
  1528   }
       
  1529 }
       
  1530 
       
  1531 void TemplateTable::dop2(Operation op) {
       
  1532   transition(dtos, dtos);
       
  1533 
       
  1534   switch (op) {
       
  1535     case add:
       
  1536       // Add memory operand.
       
  1537       __ z_adb(Z_ftos, __ stackTop()); __ pop_d(); return;
       
  1538     case sub:
       
  1539       // Sub memory operand.
       
  1540       __ z_ldr(Z_F1, Z_ftos);    // first operand
       
  1541       __ pop_d(Z_ftos);          // second operand from stack
       
  1542       __ z_sdbr(Z_ftos, Z_F1);
       
  1543       return;
       
  1544     case mul:
       
  1545       // Multiply with memory operand.
       
  1546       __ z_mdb(Z_ftos, __ stackTop()); __ pop_d(); return;
       
  1547     case div:
       
  1548       __ z_ldr(Z_F1, Z_ftos);    // first operand
       
  1549       __ pop_d(Z_ftos);          // second operand from stack
       
  1550       __ z_ddbr(Z_ftos, Z_F1);
       
  1551       return;
       
  1552     case rem:
       
  1553       // Do runtime call.
       
  1554       __ z_ldr(Z_FARG2, Z_ftos);  // divisor
       
  1555       __ pop_d(Z_FARG1);          // dividend
       
  1556       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::drem));
       
  1557       // Result should be in the right place (Z_ftos == Z_FRET).
       
  1558       return;
       
  1559     default:
       
  1560       ShouldNotReachHere();
       
  1561       return;
       
  1562   }
       
  1563 }
       
  1564 
       
  1565 void TemplateTable::ineg() {
       
  1566   transition(itos, itos);
       
  1567   __ z_lcr(Z_tos);
       
  1568 }
       
  1569 
       
  1570 void TemplateTable::lneg() {
       
  1571   transition(ltos, ltos);
       
  1572   __ z_lcgr(Z_tos);
       
  1573 }
       
  1574 
       
  1575 void TemplateTable::fneg() {
       
  1576   transition(ftos, ftos);
       
  1577   __ z_lcebr(Z_ftos, Z_ftos);
       
  1578 }
       
  1579 
       
  1580 void TemplateTable::dneg() {
       
  1581   transition(dtos, dtos);
       
  1582   __ z_lcdbr(Z_ftos, Z_ftos);
       
  1583 }
       
  1584 
       
  1585 void TemplateTable::iinc() {
       
  1586   transition(vtos, vtos);
       
  1587 
       
  1588   Address local;
       
  1589   __ z_lb(Z_R0_scratch, at_bcp(2)); // Get constant.
       
  1590   locals_index(Z_R1_scratch);
       
  1591   local = iaddress(_masm, Z_R1_scratch);
       
  1592   __ z_a(Z_R0_scratch, local);
       
  1593   __ reg2mem_opt(Z_R0_scratch, local, false);
       
  1594 }
       
  1595 
       
  1596 void TemplateTable::wide_iinc() {
       
  1597   transition(vtos, vtos);
       
  1598 
       
  1599   // Z_tmp_1 := increment
       
  1600   __ get_2_byte_integer_at_bcp(Z_tmp_1, 4, InterpreterMacroAssembler::Signed);
       
  1601   // Z_tmp_2 := shifted index of local to increment
       
  1602   locals_index_wide(Z_tmp_2);
       
  1603   // Load, increment, and store.
       
  1604   __ access_local_int(Z_tmp_2, Z_tos);
       
  1605   __ z_agr(Z_tos,  Z_tmp_1);
       
  1606   // Shifted index is still in Z_tmp_2.
       
  1607   __ reg2mem_opt(Z_tos, Address(Z_locals, Z_tmp_2), false);
       
  1608 }
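       // Encoding reminder: wide(0xc4) iinc(0x84) indexbyte1 indexbyte2 constbyte1 constbyte2.
       // The signed 16-bit increment therefore sits at bcp offset 4, which is what
       // get_2_byte_integer_at_bcp(Z_tmp_1, 4, Signed) reads above.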
       
  1609 
       
  1610 
       
  1611 void TemplateTable::convert() {
       
  1612   // Checking
       
  1613 #ifdef ASSERT
       
  1614   TosState   tos_in  = ilgl;
       
  1615   TosState   tos_out = ilgl;
       
  1616 
       
  1617   switch (bytecode()) {
       
  1618     case Bytecodes::_i2l:
       
  1619     case Bytecodes::_i2f:
       
  1620     case Bytecodes::_i2d:
       
  1621     case Bytecodes::_i2b:
       
  1622     case Bytecodes::_i2c:
       
  1623     case Bytecodes::_i2s:
       
  1624       tos_in = itos;
       
  1625       break;
       
  1626     case Bytecodes::_l2i:
       
  1627     case Bytecodes::_l2f:
       
  1628     case Bytecodes::_l2d:
       
  1629       tos_in = ltos;
       
  1630       break;
       
  1631     case Bytecodes::_f2i:
       
  1632     case Bytecodes::_f2l:
       
  1633     case Bytecodes::_f2d:
       
  1634       tos_in = ftos;
       
  1635       break;
       
  1636     case Bytecodes::_d2i:
       
  1637     case Bytecodes::_d2l:
       
  1638     case Bytecodes::_d2f:
       
  1639       tos_in = dtos;
       
  1640       break;
       
  1641     default :
       
  1642       ShouldNotReachHere();
       
  1643   }
       
  1644   switch (bytecode()) {
       
  1645     case Bytecodes::_l2i:
       
  1646     case Bytecodes::_f2i:
       
  1647     case Bytecodes::_d2i:
       
  1648     case Bytecodes::_i2b:
       
  1649     case Bytecodes::_i2c:
       
  1650     case Bytecodes::_i2s:
       
  1651       tos_out = itos;
       
  1652       break;
       
  1653     case Bytecodes::_i2l:
       
  1654     case Bytecodes::_f2l:
       
  1655     case Bytecodes::_d2l:
       
  1656       tos_out = ltos;
       
  1657       break;
       
  1658     case Bytecodes::_i2f:
       
  1659     case Bytecodes::_l2f:
       
  1660     case Bytecodes::_d2f:
       
  1661       tos_out = ftos;
       
  1662       break;
       
  1663     case Bytecodes::_i2d:
       
  1664     case Bytecodes::_l2d:
       
  1665     case Bytecodes::_f2d:
       
  1666       tos_out = dtos;
       
  1667       break;
       
  1668     default :
       
  1669       ShouldNotReachHere();
       
  1670   }
       
  1671 
       
  1672   transition(tos_in, tos_out);
       
  1673 #endif // ASSERT
       
  1674 
       
  1675   // Conversion
       
  1676   Label done;
       
  1677   switch (bytecode()) {
       
  1678     case Bytecodes::_i2l:
       
  1679       __ z_lgfr(Z_tos, Z_tos);
       
  1680       return;
       
  1681     case Bytecodes::_i2f:
       
  1682       __ z_cefbr(Z_ftos, Z_tos);
       
  1683       return;
       
  1684     case Bytecodes::_i2d:
       
  1685       __ z_cdfbr(Z_ftos, Z_tos);
       
  1686       return;
       
  1687     case Bytecodes::_i2b:
       
  1688       // Sign extend least significant byte.
       
  1689       __ move_reg_if_needed(Z_tos, T_BYTE, Z_tos, T_INT);
       
  1690       return;
       
  1691     case Bytecodes::_i2c:
       
  1692       // Zero extend 2 least significant bytes.
       
  1693       __ move_reg_if_needed(Z_tos, T_CHAR, Z_tos, T_INT);
       
  1694       return;
       
  1695     case Bytecodes::_i2s:
       
  1696       // Sign extend 2 least significant bytes.
       
  1697       __ move_reg_if_needed(Z_tos, T_SHORT, Z_tos, T_INT);
       
  1698       return;
       
  1699     case Bytecodes::_l2i:
       
  1700       // Sign-extend not needed here, upper 4 bytes of int value in register are ignored.
       
  1701       return;
       
  1702     case Bytecodes::_l2f:
       
  1703       __ z_cegbr(Z_ftos, Z_tos);
       
  1704       return;
       
  1705     case Bytecodes::_l2d:
       
  1706       __ z_cdgbr(Z_ftos, Z_tos);
       
  1707       return;
       
  1708     case Bytecodes::_f2i:
       
  1709     case Bytecodes::_f2l:
       
  1710       __ clear_reg(Z_tos, true, false);  // Don't set CC.
       
  1711       __ z_cebr(Z_ftos, Z_ftos);
       
  1712       __ z_brno(done); // NaN -> 0
       
  1713       if (bytecode() == Bytecodes::_f2i)
       
  1714         __ z_cfebr(Z_tos, Z_ftos, Assembler::to_zero);
       
  1715       else // bytecode() == Bytecodes::_f2l
       
  1716         __ z_cgebr(Z_tos, Z_ftos, Assembler::to_zero);
       
  1717       break;
       
  1718     case Bytecodes::_f2d:
       
  1719       __ move_freg_if_needed(Z_ftos, T_DOUBLE, Z_ftos, T_FLOAT);
       
  1720       return;
       
  1721     case Bytecodes::_d2i:
       
  1722     case Bytecodes::_d2l:
       
  1723       __ clear_reg(Z_tos, true, false);  // Don't set CC.
       
  1724       __ z_cdbr(Z_ftos, Z_ftos);
       
  1725       __ z_brno(done); // NaN -> 0
       
  1726       if (bytecode() == Bytecodes::_d2i)
       
  1727         __ z_cfdbr(Z_tos, Z_ftos, Assembler::to_zero);
       
  1728       else // Bytecodes::_d2l
       
  1729         __ z_cgdbr(Z_tos, Z_ftos, Assembler::to_zero);
       
  1730       break;
       
  1731     case Bytecodes::_d2f:
       
  1732       __ move_freg_if_needed(Z_ftos, T_FLOAT, Z_ftos, T_DOUBLE);
       
  1733       return;
       
  1734     default:
       
  1735       ShouldNotReachHere();
       
  1736   }
       
  1737   __ bind(done);
       
  1738 }
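       // Note (per the JVM spec): f2i/f2l/d2i/d2l convert NaN to 0. This is handled above
       // by comparing the value with itself: for NaN the compare is unordered and the
       // branch reaches done with Z_tos already cleared. Rounding is toward zero,
       // e.g. (int)-3.9f == -3.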
       
  1739 
       
  1740 void TemplateTable::lcmp() {
       
  1741   transition(ltos, itos);
       
  1742 
       
  1743   Label   done;
       
  1744   Register val1 = Z_R0_scratch;
       
  1745   Register val2 = Z_R1_scratch;
       
  1746 
       
  1747   if (VM_Version::has_LoadStoreConditional()) {
       
  1748     __ pop_l(val1);           // pop value 1.
       
  1749     __ z_lghi(val2,  -1);     // lt value
       
  1750     __ z_cgr(val1, Z_tos);    // Compare with Z_tos (value 2). Protect CC under all circumstances.
       
  1751     __ z_lghi(val1,   1);     // gt value
       
  1752     __ z_lghi(Z_tos,  0);     // eq value
       
  1753 
       
  1754     __ z_locgr(Z_tos, val1, Assembler::bcondHigh);
       
  1755     __ z_locgr(Z_tos, val2, Assembler::bcondLow);
       
  1756   } else {
       
  1757     __ pop_l(val1);           // Pop value 1.
       
  1758     __ z_cgr(val1, Z_tos);    // Compare with Z_tos (value 2). Protect CC under all circumstances.
       
  1759 
       
  1760     __ z_lghi(Z_tos,  0);     // eq value
       
  1761     __ z_bre(done);
       
  1762 
       
  1763     __ z_lghi(Z_tos,  1);     // gt value
       
  1764     __ z_brh(done);
       
  1765 
       
  1766     __ z_lghi(Z_tos, -1);     // lt value
       
  1767   }
       
  1768 
       
  1769   __ bind(done);
       
  1770 }
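       // Note: lcmp pushes -1, 0, or 1 for value1 <, ==, > value2 (per the JVM spec).
       // The LOCGR variant is branchless: both conditional loads evaluate the CC set by
       // z_cgr, which the intervening z_lghi instructions do not modify.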
       
  1771 
       
  1772 
       
  1773 void TemplateTable::float_cmp(bool is_float, int unordered_result) {
       
  1774   Label done;
       
  1775 
       
  1776   if (is_float) {
       
  1777     __ pop_f(Z_FARG2);
       
  1778     __ z_cebr(Z_FARG2, Z_ftos);
       
  1779   } else {
       
  1780     __ pop_d(Z_FARG2);
       
  1781     __ z_cdbr(Z_FARG2, Z_ftos);
       
  1782   }
       
  1783 
       
  1784   if (VM_Version::has_LoadStoreConditional()) {
       
  1785     Register one       = Z_R0_scratch;
       
  1786     Register minus_one = Z_R1_scratch;
       
  1787     __ z_lghi(minus_one,  -1);
       
  1788     __ z_lghi(one,  1);
       
  1789     __ z_lghi(Z_tos, 0);
       
  1790     __ z_locgr(Z_tos, one,       unordered_result == 1 ? Assembler::bcondHighOrNotOrdered : Assembler::bcondHigh);
       
  1791     __ z_locgr(Z_tos, minus_one, unordered_result == 1 ? Assembler::bcondLow              : Assembler::bcondLowOrNotOrdered);
       
  1792   } else {
       
  1793     // Z_FARG2 == Z_ftos
       
  1794     __ clear_reg(Z_tos, false, false);
       
  1795     __ z_bre(done);
       
  1796 
       
  1797     // Z_FARG2 > Z_ftos, or unordered
       
  1798     __ z_lhi(Z_tos, 1);
       
  1799     __ z_brc(unordered_result == 1 ? Assembler::bcondHighOrNotOrdered : Assembler::bcondHigh, done);
       
  1800 
       
  1801     // Z_FARG2 < Z_ftos, or unordered
       
  1802     __ z_lhi(Z_tos, -1);
       
  1803 
       
  1804     __ bind(done);
       
  1805   }
       
  1806 }
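       // Note: unordered_result distinguishes fcmpg/dcmpg (unordered_result == 1, push 1
       // if either operand is NaN) from fcmpl/dcmpl (push -1). E.g. comparing 1.0f with
       // Float.NaN yields 1 under fcmpg and -1 under fcmpl.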
       
  1807 
       
  1808 void TemplateTable::branch(bool is_jsr, bool is_wide) {
       
  1809   const Register   bumped_count = Z_tmp_1;
       
  1810   const Register   method       = Z_tmp_2;
       
  1811   const Register   m_counters   = Z_R1_scratch;
       
  1812   const Register   mdo          = Z_tos;
       
  1813 
       
  1814   BLOCK_COMMENT("TemplateTable::branch {");
       
  1815   __ get_method(method);
       
  1816   __ profile_taken_branch(mdo, bumped_count);
       
  1817 
       
  1818   const ByteSize ctr_offset = InvocationCounter::counter_offset();
       
  1819   const ByteSize be_offset  = MethodCounters::backedge_counter_offset()   + ctr_offset;
       
  1820   const ByteSize inv_offset = MethodCounters::invocation_counter_offset() + ctr_offset;
       
  1821 
       
  1822   // Get (wide) offset to disp.
       
  1823   const Register disp = Z_ARG5;
       
  1824   if (is_wide) {
       
  1825     __ get_4_byte_integer_at_bcp(disp, 1);
       
  1826   } else {
       
  1827     __ get_2_byte_integer_at_bcp(disp, 1, InterpreterMacroAssembler::Signed);
       
  1828   }
       
  1829 
       
  1830   // Handle all the JSR stuff here, then exit.
       
  1831   // It's much shorter and cleaner than intermingling with the
       
  1832   // non-JSR normal-branch stuff occurring below.
       
  1833   if (is_jsr) {
       
  1834     // Compute return address as bci in Z_tos.
       
  1835     __ z_lgr(Z_R1_scratch, Z_bcp);
       
  1836     __ z_sg(Z_R1_scratch, Address(method, Method::const_offset()));
       
  1837     __ add2reg(Z_tos, (is_wide ? 5 : 3) - in_bytes(ConstMethod::codes_offset()), Z_R1_scratch);
       
  1838 
       
  1839     // Bump bcp to target of JSR.
       
  1840     __ z_agr(Z_bcp, disp);
       
  1841     // Push return address for "ret" on stack.
       
  1842     __ push_ptr(Z_tos);
       
  1843     // And away we go!
       
  1844     __ dispatch_next(vtos);
       
  1845     return;
       
  1846   }
       
  1847 
       
  1848   // Normal (non-jsr) branch handling.
       
  1849 
       
  1850   // Bump bytecode pointer by displacement (take the branch).
       
  1851   __ z_agr(Z_bcp, disp);
       
  1852 
       
  1853   assert(UseLoopCounter || !UseOnStackReplacement,
       
  1854          "on-stack-replacement requires loop counters");
       
  1855 
       
  1856   NearLabel backedge_counter_overflow;
       
  1857   NearLabel profile_method;
       
  1858   NearLabel dispatch;
       
  1859   int       increment = InvocationCounter::count_increment;
       
  1860 
       
  1861   if (UseLoopCounter) {
       
  1862     // Increment backedge counter for backward branches.
       
  1863     // disp: target offset
       
  1864     // Z_bcp: target bcp
       
  1865     // Z_locals: locals pointer
       
  1866     //
       
  1867     // Count only if backward branch.
       
  1868     __ compare32_and_branch(disp, (intptr_t)0, Assembler::bcondHigh, dispatch);
       
  1869 
       
  1870     if (TieredCompilation) {
       
  1871       Label noCounters;
       
  1872 
       
  1873       if (ProfileInterpreter) {
       
  1874         NearLabel   no_mdo;
       
  1875 
       
  1876         // Are we profiling?
       
  1877         __ load_and_test_long(mdo, Address(method, Method::method_data_offset()));
       
  1878         __ branch_optimized(Assembler::bcondZero, no_mdo);
       
  1879 
       
  1880         // Increment the MDO backedge counter.
       
  1881         const Address mdo_backedge_counter(mdo, MethodData::backedge_counter_offset() + InvocationCounter::counter_offset());
       
  1882 
       
  1883         const Address mask(mdo, MethodData::backedge_mask_offset());
       
  1884         __ increment_mask_and_jump(mdo_backedge_counter, increment, mask,
       
  1885                                    Z_ARG2, false, Assembler::bcondZero,
       
  1886                                    UseOnStackReplacement ? &backedge_counter_overflow : NULL);
       
  1887         __ z_bru(dispatch);
       
  1888         __ bind(no_mdo);
       
  1889       }
       
  1890 
       
  1891       // Increment backedge counter in MethodCounters*.
       
  1892       __ get_method_counters(method, m_counters, noCounters);
       
  1893       const Address mask(m_counters, MethodCounters::backedge_mask_offset());
       
  1894       __ increment_mask_and_jump(Address(m_counters, be_offset),
       
  1895                                  increment, mask,
       
  1896                                  Z_ARG2, false, Assembler::bcondZero,
       
  1897                                  UseOnStackReplacement ? &backedge_counter_overflow : NULL);
       
  1898       __ bind(noCounters);
       
  1899     } else {
       
  1900       Register counter = Z_tos;
       
  1901       Label    noCounters;
       
  1902       // Get address of MethodCounters object.
       
  1903       __ get_method_counters(method, m_counters, noCounters);
       
  1904       // Increment backedge counter.
       
  1905       __ increment_backedge_counter(m_counters, counter);
       
  1906 
       
  1907       if (ProfileInterpreter) {
       
  1908         // Test to see if we should create a method data obj.
       
  1909         __ z_cl(counter, Address(m_counters, MethodCounters::interpreter_profile_limit_offset()));
       
  1910         __ z_brl(dispatch);
       
  1911 
       
  1912         // If no method data exists, go to profile method.
       
  1913         __ test_method_data_pointer(Z_ARG4/*result unused*/, profile_method);
       
  1914 
       
  1915         if (UseOnStackReplacement) {
       
  1916           // Check for overflow against 'bumped_count' which is the MDO taken count.
       
  1917           __ z_cl(bumped_count, Address(m_counters, MethodCounters::interpreter_backward_branch_limit_offset()));
       
  1918           __ z_brl(dispatch);
       
  1919 
       
  1920           // When ProfileInterpreter is on, the backedge_count comes
       
  1921           // from the methodDataOop, whose value does not get reset on
       
  1922           // the call to frequency_counter_overflow(). To avoid
       
  1923           // excessive calls to the overflow routine while the method is
       
  1924           // being compiled, add a second test to make sure the overflow
       
  1925           // function is called only once per overflow_frequency taken backedges.
       
  1926           const int overflow_frequency = 1024;
       
  1927           __ and_imm(bumped_count, overflow_frequency - 1);
       
  1928           __ z_brz(backedge_counter_overflow);
       
  1929 
       
  1930         }
       
  1931       } else {
       
  1932         if (UseOnStackReplacement) {
       
  1933           // Check for overflow against 'counter', which is the sum of the
       
  1934           // counters.
       
  1935           __ z_cl(counter, Address(m_counters, MethodCounters::interpreter_backward_branch_limit_offset()));
       
  1936           __ z_brh(backedge_counter_overflow);
       
  1937         }
       
  1938       }
       
  1939       __ bind(noCounters);
       
  1940     }
       
  1941 
       
  1942     __ bind(dispatch);
       
  1943   }
       
  1944 
       
  1945   // Pre-load the next target bytecode into Z_bytecode.
       
  1946   __ z_llgc(Z_bytecode, Address(Z_bcp, (intptr_t) 0));
       
  1947 
       
  1948   // Continue with the bytecode @ target.
       
  1949   // Z_tos: Return bci for jsr's, unused otherwise.
       
  1950   // Z_bytecode: target bytecode
       
  1951   // Z_bcp: target bcp
       
  1952   __ dispatch_only(vtos);
       
  1953 
       
  1954   // Out-of-line code runtime calls.
       
  1955   if (UseLoopCounter) {
       
  1956     if (ProfileInterpreter) {
       
  1957       // Out-of-line code to allocate method data oop.
       
  1958       __ bind(profile_method);
       
  1959 
       
  1960       __ call_VM(noreg,
       
  1961                  CAST_FROM_FN_PTR(address, InterpreterRuntime::profile_method));
       
  1962       __ z_llgc(Z_bytecode, Address(Z_bcp, (intptr_t) 0));  // Restore target bytecode.
       
  1963       __ set_method_data_pointer_for_bcp();
       
  1964       __ z_bru(dispatch);
       
  1965     }
       
  1966 
       
  1967     if (UseOnStackReplacement) {
       
  1968 
       
  1969       // invocation counter overflow
       
  1970       __ bind(backedge_counter_overflow);
       
  1971 
       
  1972       __ z_lcgr(Z_ARG2, disp); // Z_ARG2 := -disp
       
  1973       __ z_agr(Z_ARG2, Z_bcp); // Z_ARG2 := branch target bcp - disp == branch bcp
       
  1974       __ call_VM(noreg,
       
  1975                  CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow),
       
  1976                  Z_ARG2);
       
  1977 
       
  1978       // Z_RET: osr nmethod (osr ok) or NULL (osr not possible).
       
  1979       __ compare64_and_branch(Z_RET, (intptr_t) 0, Assembler::bcondEqual, dispatch);
       
  1980 
       
  1981       // Nmethod may have been invalidated (VM may block upon call_VM return).
       
  1982       __ z_cliy(nmethod::state_offset(), Z_RET, nmethod::in_use);
       
  1983       __ z_brne(dispatch);
       
  1984 
       
  1985       // Migrate the interpreter frame off the stack.
       
  1986 
       
  1987       __ z_lgr(Z_tmp_1, Z_RET); // Save the nmethod.
       
  1988 
       
  1989       call_VM(noreg,
       
  1990               CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin));
       
  1991 
       
  1992       // Z_RET is OSR buffer, move it to expected parameter location.
       
  1993       __ lgr_if_needed(Z_ARG1, Z_RET);
       
  1994 
       
  1995       // Pop the interpreter frame ...
       
  1996       __ pop_interpreter_frame(Z_R14, Z_ARG2/*tmp1*/, Z_ARG3/*tmp2*/);
       
  1997 
       
  1998       // ... and begin the OSR nmethod.
       
  1999       __ z_lg(Z_R1_scratch, Address(Z_tmp_1, nmethod::osr_entry_point_offset()));
       
  2000       __ z_br(Z_R1_scratch);
       
  2001     }
       
  2002   }
       
  2003   BLOCK_COMMENT("} TemplateTable::branch");
       
  2004 }
       
  2005 
       
  2006 void TemplateTable::if_0cmp(Condition cc) {
       
  2007   transition(itos, vtos);
       
  2008 
       
  2009   // Assume branch is more often taken than not (loops use backward branches).
       
  2010   NearLabel not_taken;
       
  2011   __ compare32_and_branch(Z_tos, (intptr_t) 0, j_not(cc), not_taken);
       
  2012   branch(false, false);
       
  2013   __ bind(not_taken);
       
  2014   __ profile_not_taken_branch(Z_tos);
       
  2015 }
       
  2016 
       
  2017 void TemplateTable::if_icmp(Condition cc) {
       
  2018   transition(itos, vtos);
       
  2019 
       
  2020   // Assume branch is more often taken than not (loops use backward branches).
       
  2021   NearLabel not_taken;
       
  2022   __ pop_i(Z_R0_scratch);
       
  2023   __ compare32_and_branch(Z_R0_scratch, Z_tos, j_not(cc), not_taken);
       
  2024   branch(false, false);
       
  2025   __ bind(not_taken);
       
  2026   __ profile_not_taken_branch(Z_tos);
       
  2027 }
       
  2028 
       
  2029 void TemplateTable::if_nullcmp(Condition cc) {
       
  2030   transition(atos, vtos);
       
  2031 
       
  2032   // Assume branch is more often taken than not (loops use backward branches).
       
  2033   NearLabel not_taken;
       
  2034   __ compare64_and_branch(Z_tos, (intptr_t) 0, j_not(cc), not_taken);
       
  2035   branch(false, false);
       
  2036   __ bind(not_taken);
       
  2037   __ profile_not_taken_branch(Z_tos);
       
  2038 }
       
  2039 
       
  2040 void TemplateTable::if_acmp(Condition cc) {
       
  2041   transition(atos, vtos);
       
  2042   // Assume branch is more often taken than not (loops use backward branches).
       
  2043   NearLabel not_taken;
       
  2044   __ pop_ptr(Z_ARG2);
       
  2045   __ verify_oop(Z_ARG2);
       
  2046   __ verify_oop(Z_tos);
       
  2047   __ compareU64_and_branch(Z_tos, Z_ARG2, j_not(cc), not_taken);
       
  2048   branch(false, false);
       
  2049   __ bind(not_taken);
       
  2050   __ profile_not_taken_branch(Z_ARG3);
       
  2051 }
       
  2052 
       
  2053 void TemplateTable::ret() {
       
  2054   transition(vtos, vtos);
       
  2055 
       
  2056   locals_index(Z_tmp_1);
       
  2057   // Get return bci, compute return bcp. Must load 64 bits.
       
  2058   __ mem2reg_opt(Z_tmp_1, iaddress(_masm, Z_tmp_1));
       
  2059   __ profile_ret(Z_tmp_1, Z_tmp_2);
       
  2060   __ get_method(Z_tos);
       
  2061   __ mem2reg_opt(Z_R1_scratch, Address(Z_tos, Method::const_offset()));
       
  2062   __ load_address(Z_bcp, Address(Z_R1_scratch, Z_tmp_1, ConstMethod::codes_offset()));
       
  2063   __ dispatch_next(vtos);
       
  2064 }
       
  2065 
       
  2066 void TemplateTable::wide_ret() {
       
  2067   transition(vtos, vtos);
       
  2068 
       
  2069   locals_index_wide(Z_tmp_1);
       
  2070   // Get return bci, compute return bcp.
       
  2071   __ mem2reg_opt(Z_tmp_1, aaddress(_masm, Z_tmp_1));
       
  2072   __ profile_ret(Z_tmp_1, Z_tmp_2);
       
  2073   __ get_method(Z_tos);
       
  2074   __ mem2reg_opt(Z_R1_scratch, Address(Z_tos, Method::const_offset()));
       
  2075   __ load_address(Z_bcp, Address(Z_R1_scratch, Z_tmp_1, ConstMethod::codes_offset()));
       
  2076   __ dispatch_next(vtos);
       
  2077 }
       
  2078 
       
  2079 void TemplateTable::tableswitch () {
       
  2080   transition(itos, vtos);
       
  2081 
       
  2082   NearLabel default_case, continue_execution;
       
  2083   Register  bcp = Z_ARG5;
       
  2084   // Align bcp.
       
  2085   __ load_address(bcp, at_bcp(BytesPerInt));
       
  2086   __ z_nill(bcp, (-BytesPerInt) & 0xffff);
       
  2087 
       
  2088   // Load lo & hi.
       
  2089   Register low  = Z_tmp_1;
       
  2090   Register high = Z_tmp_2;
       
  2091 
       
  2092   // Load low into 64 bits, since used for address calculation.
       
  2093   __ mem2reg_signed_opt(low, Address(bcp, BytesPerInt));
       
  2094   __ mem2reg_opt(high, Address(bcp, 2 * BytesPerInt), false);
       
  2095   // Sign extend "label" value for address calculation.
       
  2096   __ z_lgfr(Z_tos, Z_tos);
       
  2097 
       
  2098   // Check against lo & hi.
       
  2099   __ compare32_and_branch(Z_tos, low, Assembler::bcondLow, default_case);
       
  2100   __ compare32_and_branch(Z_tos, high, Assembler::bcondHigh, default_case);
       
  2101 
       
  2102   // Lookup dispatch offset.
       
  2103   __ z_sgr(Z_tos, low);
       
  2104   Register jump_table_offset = Z_ARG3;
       
  2105   // Index2offset; index in Z_tos is killed by profile_switch_case.
       
  2106   __ z_sllg(jump_table_offset, Z_tos, LogBytesPerInt);
       
  2107   __ profile_switch_case(Z_tos, Z_ARG4 /*tmp for mdp*/, low/*tmp*/, Z_bytecode/*tmp*/);
       
  2108 
       
  2109   Register index = Z_tmp_2;
       
  2110 
       
  2111   // Load index sign extended for addressing.
       
  2112   __ mem2reg_signed_opt(index, Address(bcp, jump_table_offset, 3 * BytesPerInt));
       
  2113 
       
  2114   // Continue execution.
       
  2115   __ bind(continue_execution);
       
  2116 
       
  2117   // Load next bytecode.
       
  2118   __ z_llgc(Z_bytecode, Address(Z_bcp, index));
       
  2119   __ z_agr(Z_bcp, index); // Advance bcp.
       
  2120   __ dispatch_only(vtos);
       
  2121 
       
  2122   // Handle default.
       
  2123   __ bind(default_case);
       
  2124 
       
  2125   __ profile_switch_default(Z_tos);
       
  2126   __ mem2reg_signed_opt(index, Address(bcp));
       
  2127   __ z_bru(continue_execution);
       
  2128 }
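       // Operand layout after the alignment above (all slots BytesPerInt, big-endian):
       // [bcp+0] default offset, [bcp+4] low, [bcp+8] high, then the jump table starting
       // at [bcp+12], indexed by (key - low).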
       
  2129 
       
  2130 void TemplateTable::lookupswitch () {
       
  2131   transition(itos, itos);
       
  2132   __ stop("lookupswitch bytecode should have been rewritten");
       
  2133 }
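       // Note: the rewriter replaces lookupswitch with fast_linearswitch or
       // fast_binaryswitch (depending on the number of match pairs), so this template
       // is never dispatched at runtime.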
       
  2134 
       
  2135 void TemplateTable::fast_linearswitch () {
       
  2136   transition(itos, vtos);
       
  2137 
       
  2138   Label    loop_entry, loop, found, continue_execution;
       
  2139   Register bcp = Z_ARG5;
       
  2140 
       
  2141   // Align bcp.
       
  2142   __ load_address(bcp, at_bcp(BytesPerInt));
       
  2143   __ z_nill(bcp, (-BytesPerInt) & 0xffff);
       
  2144 
       
  2145   // Start search with last case.
       
  2146   Register current_case_offset = Z_tmp_1;
       
  2147 
       
  2148   __ mem2reg_signed_opt(current_case_offset, Address(bcp, BytesPerInt));
       
  2149   __ z_sllg(current_case_offset, current_case_offset, LogBytesPerWord);   // index2bytes
       
  2150   __ z_bru(loop_entry);
       
  2151 
       
  2152   // table search
       
  2153   __ bind(loop);
       
  2154 
       
  2155   __ z_c(Z_tos, Address(bcp, current_case_offset, 2 * BytesPerInt));
       
  2156   __ z_bre(found);
       
  2157 
       
  2158   __ bind(loop_entry);
       
  2159   __ z_aghi(current_case_offset, -2 * BytesPerInt);  // Decrement.
       
  2160   __ z_brnl(loop);
       
  2161 
       
  2162   // default case
       
  2163   Register   offset = Z_tmp_2;
       
  2164 
       
  2165   __ profile_switch_default(Z_tos);
       
  2166   // Load offset sign extended for addressing.
       
  2167   __ mem2reg_signed_opt(offset, Address(bcp));
       
  2168   __ z_bru(continue_execution);
       
  2169 
       
  2170   // Entry found -> get offset.
       
  2171   __ bind(found);
       
  2172   __ mem2reg_signed_opt(offset, Address(bcp, current_case_offset, 3 * BytesPerInt));
       
  2173   // Profile that this case was taken.
       
  2174   Register current_case_idx = Z_ARG4;
       
  2175   __ z_srlg(current_case_idx, current_case_offset, LogBytesPerWord); // bytes2index
       
  2176   __ profile_switch_case(current_case_idx, Z_tos, bcp, Z_bytecode);
       
  2177 
       
  2178   // Continue execution.
       
  2179   __ bind(continue_execution);
       
  2180 
       
  2181   // Load next bytecode.
       
  2182   __ z_llgc(Z_bytecode, Address(Z_bcp, offset, 0));
       
  2183   __ z_agr(Z_bcp, offset); // Advance bcp.
       
  2184   __ dispatch_only(vtos);
       
  2185 }
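       // Operand layout of the rewritten lookupswitch: [bcp+0] default offset,
       // [bcp+4] npairs, followed by npairs (match, offset) pairs. That pair size is
       // the 2 * BytesPerInt stride of the backward search above.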
       
  2186 
       
  2187 
       
  2188 void TemplateTable::fast_binaryswitch() {
       
  2189 
       
  2190   transition(itos, vtos);
       
  2191 
       
  2192   // Implementation using the following core algorithm:
       
  2193   //
       
  2194   // int binary_search(int key, LookupswitchPair* array, int n) {
       
  2195   //   // Binary search according to "Methodik des Programmierens" by
       
  2196   //   // Edsger W. Dijkstra and W.H.J. Feijen, Addison Wesley Germany 1985.
       
  2197   //   int i = 0;
       
  2198   //   int j = n;
       
  2199   //   while (i+1 < j) {
       
  2200   //     // invariant P: 0 <= i < j <= n and (a[i] <= key < a[j] or Q)
       
  2201   //     // with      Q: for all i: 0 <= i < n: key < a[i]
       
  2202   //     // where a stands for the array and assuming that the (inexisting)
       
  2203   //     // element a[n] is infinitely big.
       
  2204   //     int h = (i + j) >> 1;
       
  2205   //     // i < h < j
       
  2206   //     if (key < array[h].fast_match()) {
       
  2207   //       j = h;
       
  2208   //     } else {
       
  2209   //       i = h;
       
  2210   //     }
       
  2211   //   }
       
  2212   //   // R: a[i] <= key < a[i+1] or Q
       
  2213   //   // (i.e., if key is within array, i is the correct index)
       
  2214   //   return i;
       
  2215   // }
       
  2216 
       
  2217   // Register allocation
       
  2218   // Note: Since we use the indices in address operands, we do all the
       
  2219   // computation in 64 bits.
       
  2220   const Register key   = Z_tos; // Already set (tosca).
       
  2221   const Register array = Z_tmp_1;
       
  2222   const Register i     = Z_tmp_2;
       
  2223   const Register j     = Z_ARG5;
       
  2224   const Register h     = Z_ARG4;
       
  2225   const Register temp  = Z_R1_scratch;
       
  2226 
       
  2227   // Find array start.
       
  2228   __ load_address(array, at_bcp(3 * BytesPerInt));
       
  2229   __ z_nill(array, (-BytesPerInt) & 0xffff);   // align
       
  2230 
       
  2231   // Initialize i & j.
       
  2232   __ clear_reg(i, true, false);  // i = 0;  Don't set CC.
       
  2233   __ mem2reg_signed_opt(j, Address(array, -BytesPerInt)); // j = length(array);
       
  2234 
       
  2235   // And start.
       
  2236   Label entry;
       
  2237   __ z_bru(entry);
       
  2238 
       
  2239   // binary search loop
       
  2240   {
       
  2241     NearLabel   loop;
       
  2242 
       
  2243     __ bind(loop);
       
  2244 
       
  2245     // int h = (i + j) >> 1;
       
  2246     __ add2reg_with_index(h, 0, i, j); // h = i + j;
       
  2247     __ z_srag(h, h, 1);                // h = (i + j) >> 1;
       
  2248 
       
  2249     // if (key < array[h].fast_match()) {
       
  2250     //   j = h;
       
  2251     // } else {
       
  2252     //   i = h;
       
  2253     // }
       
  2254 
       
  2255     // Convert array[h].match to native byte-ordering before compare.
       
  2256     __ z_sllg(temp, h, LogBytesPerWord);   // index2bytes
       
  2257     __ mem2reg_opt(temp, Address(array, temp), false);
       
  2258 
       
  2259     NearLabel  else_;
       
  2260 
       
  2261     __ compare32_and_branch(key, temp, Assembler::bcondNotLow, else_);
       
  2262     // j = h if (key <  array[h].fast_match())
       
  2263     __ z_lgr(j, h);
       
  2264     __ z_bru(entry); // continue
       
  2265 
       
  2266     __ bind(else_);
       
  2267 
       
  2268     // i = h if (key >= array[h].fast_match())
       
  2269     __ z_lgr(i, h);  // and fallthrough
       
  2270 
       
  2271     // while (i+1 < j)
       
  2272     __ bind(entry);
       
  2273 
       
  2274     // if (i + 1 < j) continue search
       
  2275     __ add2reg(h, 1, i);
       
  2276     __ compare64_and_branch(h, j, Assembler::bcondLow, loop);
       
  2277   }
       
  2278 
       
  2279   // End of binary search, result index is i (must check again!).
       
  2280   NearLabel default_case;
       
  2281 
       
  2282   // h is no longer needed, so use it to hold the byte offset.
       
  2283   __ z_sllg(h, i, LogBytesPerWord);   // index2bytes
       
  2284   __ mem2reg_opt(temp, Address(array, h), false);
       
  2285   __ compare32_and_branch(key, temp, Assembler::bcondNotEqual, default_case);
       
  2286 
       
  2287   // entry found -> j = offset
       
  2288   __ mem2reg_signed_opt(j, Address(array, h, BytesPerInt));
       
  2289   __ profile_switch_case(i, key, array, Z_bytecode);
       
  2290   // Load next bytecode.
       
  2291   __ z_llgc(Z_bytecode, Address(Z_bcp, j));
       
  2292   __ z_agr(Z_bcp, j);       // Advance bcp.
       
  2293   __ dispatch_only(vtos);
       
  2294 
       
  2295   // default case -> j = default offset
       
  2296   __ bind(default_case);
       
  2297 
       
  2298   __ profile_switch_default(i);
       
  2299   __ mem2reg_signed_opt(j, Address(array, -2 * BytesPerInt));
       
  2300   // Load next bytecode.
       
  2301   __ z_llgc(Z_bytecode, Address(Z_bcp, j));
       
  2302   __ z_agr(Z_bcp, j);       // Advance bcp.
       
  2303   __ dispatch_only(vtos);
       
  2304 }
       
  2305 
       
  2306 void TemplateTable::_return(TosState state) {
       
  2307   transition(state, state);
       
  2308   assert(_desc->calls_vm(),
       
  2309          "inconsistent calls_vm information"); // call in remove_activation
       
  2310 
       
  2311   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
       
  2312     Register Rthis  = Z_ARG2;
       
  2313     Register Rklass = Z_ARG5;
       
  2314     Label skip_register_finalizer;
       
  2315     assert(state == vtos, "only valid state");
       
  2316     __ z_lg(Rthis, aaddress(0));
       
  2317     __ load_klass(Rklass, Rthis);
       
  2318     __ testbit(Address(Rklass, Klass::access_flags_offset()), exact_log2(JVM_ACC_HAS_FINALIZER));
       
  2319     __ z_bfalse(skip_register_finalizer);
       
  2320     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), Rthis);
       
  2321     __ bind(skip_register_finalizer);
       
  2322   }
       
  2323 
       
  2324   __ remove_activation(state, Z_R14);
       
  2325   __ z_br(Z_R14);
       
  2326 }
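       // Note: _return_register_finalizer is the rewritten _return of
       // java.lang.Object.<init>; the code above registers the receiver for
       // finalization if its klass has JVM_ACC_HAS_FINALIZER set.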
       
  2327 
       
  2328 // ----------------------------------------------------------------------------
       
  2329 // NOTE: Cpe_offset is already computed as byte offset, so we must not
       
  2330 // shift it afterwards!
       
  2331 void TemplateTable::resolve_cache_and_index(int byte_no,
       
  2332                                             Register Rcache,
       
  2333                                             Register cpe_offset,
       
  2334                                             size_t index_size) {
       
  2335   BLOCK_COMMENT("resolve_cache_and_index {");
       
  2336   NearLabel      resolved;
       
  2337   const Register bytecode_in_cpcache = Z_R1_scratch;
       
  2338   const int      total_f1_offset = in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f1_offset());
       
  2339   assert_different_registers(Rcache, cpe_offset, bytecode_in_cpcache);
       
  2340 
       
  2341   Bytecodes::Code code = bytecode();
       
  2342   switch (code) {
       
  2343     case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
       
  2344     case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
       
  2345   }
       
  2346 
       
  2347   {
       
  2348     assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
       
  2349     __ get_cache_and_index_and_bytecode_at_bcp(Rcache, cpe_offset, bytecode_in_cpcache, byte_no, 1, index_size);
       
  2350     // Have we resolved this bytecode?
       
  2351     __ compare32_and_branch(bytecode_in_cpcache, (int)code, Assembler::bcondEqual, resolved);
       
  2352   }
       
  2353 
       
  2354   // Resolve first time through.
       
  2355   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
       
  2356   __ load_const_optimized(Z_ARG2, (int) code);
       
  2357   __ call_VM(noreg, entry, Z_ARG2);
       
  2358 
       
  2359   // Update registers with resolved info.
       
  2360   __ get_cache_and_index_at_bcp(Rcache, cpe_offset, 1, index_size);
       
  2361   __ bind(resolved);
       
  2362   BLOCK_COMMENT("} resolve_cache_and_index");
       
  2363 }
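       // Fast path recap: once resolve_from_cache has run, the cp cache entry records
       // the resolving bytecode, so the compare32_and_branch above skips the VM call on
       // every later execution of this bytecode.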
       
  2364 
       
  2365 // The Rcache and index registers must be set before call.
       
  2366 // Index is already a byte offset, don't shift!
       
  2367 void TemplateTable::load_field_cp_cache_entry(Register obj,
       
  2368                                               Register cache,
       
  2369                                               Register index,
       
  2370                                               Register off,
       
  2371                                               Register flags,
       
  2372                                               bool is_static = false) {
       
  2373   assert_different_registers(cache, index, flags, off);
       
  2374   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
       
  2375 
       
  2376   // Field offset
       
  2377   __ mem2reg_opt(off, Address(cache, index, cp_base_offset + ConstantPoolCacheEntry::f2_offset()));
       
  2378   // Flags. Must load 64 bits.
       
  2379   __ mem2reg_opt(flags, Address(cache, index, cp_base_offset + ConstantPoolCacheEntry::flags_offset()));
       
  2380 
       
  2381   // klass overwrite register
       
  2382   if (is_static) {
       
  2383     __ mem2reg_opt(obj, Address(cache, index, cp_base_offset + ConstantPoolCacheEntry::f1_offset()));
       
  2384     __ mem2reg_opt(obj, Address(obj, Klass::java_mirror_offset()));
       
  2385   }
       
  2386 }
       
  2387 
       
  2388 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
       
  2389                                                Register method,
       
  2390                                                Register itable_index,
       
  2391                                                Register flags,
       
  2392                                                bool is_invokevirtual,
       
  2393                                                bool is_invokevfinal, // unused
       
  2394                                                bool is_invokedynamic) {
       
  2395   BLOCK_COMMENT("load_invoke_cp_cache_entry {");
       
  2396   // Setup registers.
       
  2397   const Register cache     = Z_ARG1;
       
  2398   const Register cpe_offset= flags;
       
  2399   const ByteSize base_off  = ConstantPoolCache::base_offset();
       
  2400   const ByteSize f1_off    = ConstantPoolCacheEntry::f1_offset();
       
  2401   const ByteSize f2_off    = ConstantPoolCacheEntry::f2_offset();
       
  2402   const ByteSize flags_off = ConstantPoolCacheEntry::flags_offset();
       
  2403   const int method_offset  = in_bytes(base_off + ((byte_no == f2_byte) ? f2_off : f1_off));
       
  2404   const int flags_offset   = in_bytes(base_off + flags_off);
       
  2405   // Access constant pool cache fields.
       
  2406   const int index_offset   = in_bytes(base_off + f2_off);
       
  2407 
       
  2408   assert_different_registers(method, itable_index, flags, cache);
       
  2409   assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
       
  2410 
       
  2411   if (is_invokevfinal) {
       
  2412     // Already resolved.
       
  2413      assert(itable_index == noreg, "register not used");
       
  2414      __ get_cache_and_index_at_bcp(cache, cpe_offset, 1);
       
  2415   } else {
       
  2416     // Need to resolve.
       
  2417     resolve_cache_and_index(byte_no, cache, cpe_offset, is_invokedynamic ? sizeof(u4) : sizeof(u2));
       
  2418   }
       
  2419   __ z_lg(method, Address(cache, cpe_offset, method_offset));
       
  2420 
       
  2421   if (itable_index != noreg) {
       
  2422     __ z_lg(itable_index, Address(cache, cpe_offset, index_offset));
       
  2423   }
       
  2424 
       
  2425   // Only load the lower 4 bytes and fill high bytes of flags with zeros.
       
  2426   // Callers depend on this zero-extension!!!
       
  2427   // Attention: overwrites cpe_offset == flags
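          // On big-endian s390, the low 4 bytes of the 8-byte flags field are located
          // at offset +4 (BytesPerLong - BytesPerInt); z_llgf loads them zero-extended.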
       
  2428   __ z_llgf(flags, Address(cache, cpe_offset, flags_offset + (BytesPerLong-BytesPerInt)));
       
  2429 
       
  2430   BLOCK_COMMENT("} load_invoke_cp_cache_entry");
       
  2431 }
       
  2432 
       
  2433 // The registers cache and index expected to be set before call.
       
  2434 // Correct values of the cache and index registers are preserved.
       
  2435 void TemplateTable::jvmti_post_field_access(Register cache, Register index,
       
  2436                                             bool is_static, bool has_tos) {
       
  2437 
       
  2438   // Do the JVMTI work here to avoid disturbing the register state below.
       
   2439   // We use the Z_ARG registers here because they are also the registers
        
   2440   // used in the call into the VM below.
       
  2441   if (!JvmtiExport::can_post_field_access()) {
       
  2442     return;
       
  2443   }
       
  2444 
       
  2445   // Check to see if a field access watch has been set before we
       
  2446   // take the time to call into the VM.
       
  2447   Label exit;
       
  2448   assert_different_registers(cache, index, Z_tos);
       
  2449   __ load_absolute_address(Z_tos, (address)JvmtiExport::get_field_access_count_addr());
       
  2450   __ load_and_test_int(Z_R0, Address(Z_tos));
       
  2451   __ z_brz(exit);
       
  2452 
       
  2453   // Index is returned as byte offset, do not shift!
       
  2454   __ get_cache_and_index_at_bcp(Z_ARG3, Z_R1_scratch, 1);
       
  2455 
       
  2456   // cache entry pointer
       
  2457   __ add2reg_with_index(Z_ARG3,
       
  2458                         in_bytes(ConstantPoolCache::base_offset()),
       
  2459                         Z_ARG3, Z_R1_scratch);
       
  2460 
       
  2461   if (is_static) {
       
  2462     __ clear_reg(Z_ARG2, true, false); // NULL object reference. Don't set CC.
       
  2463   } else {
       
  2464     __ mem2reg_opt(Z_ARG2, at_tos());  // Get object pointer without popping it.
       
  2465     __ verify_oop(Z_ARG2);
       
  2466   }
       
  2467   // Z_ARG2: object pointer or NULL
       
  2468   // Z_ARG3: cache entry pointer
       
  2469   __ call_VM(noreg,
       
  2470              CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
       
  2471              Z_ARG2, Z_ARG3);
       
  2472   __ get_cache_and_index_at_bcp(cache, index, 1);
       
  2473 
       
  2474   __ bind(exit);
       
  2475 }
       
  2476 
       
  2477 void TemplateTable::pop_and_check_object(Register r) {
       
  2478   __ pop_ptr(r);
       
  2479   __ null_check(r);  // for field access must check obj.
       
  2480   __ verify_oop(r);
       
  2481 }
       
  2482 
       
  2483 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
       
  2484   transition(vtos, vtos);
       
  2485 
       
  2486   const Register cache = Z_tmp_1;
       
  2487   const Register index = Z_tmp_2;
       
  2488   const Register obj   = Z_tmp_1;
       
  2489   const Register off   = Z_ARG2;
       
  2490   const Register flags = Z_ARG1;
       
  2491   const Register bc    = Z_tmp_1;  // Uses same reg as obj, so don't mix them.
       
  2492 
       
  2493   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
       
  2494   jvmti_post_field_access(cache, index, is_static, false);
       
  2495   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
       
  2496 
       
  2497   if (!is_static) {
       
  2498     // Obj is on the stack.
       
  2499     pop_and_check_object(obj);
       
  2500   }
       
  2501 
       
   2502   // Displacement is 0, so any load instruction will be fine on any CPU.
       
  2503   const Address field(obj, off);
       
  2504 
       
  2505   Label    is_Byte, is_Bool, is_Int, is_Short, is_Char,
       
  2506            is_Long, is_Float, is_Object, is_Double;
       
  2507   Label    is_badState8, is_badState9, is_badStateA, is_badStateB,
       
  2508            is_badStateC, is_badStateD, is_badStateE, is_badStateF,
       
  2509            is_badState;
       
  2510   Label    branchTable, atosHandler,  Done;
       
  2511   Register br_tab       = Z_R1_scratch;
       
  2512   bool     do_rewrite   = !is_static && (rc == may_rewrite);
       
  2513   bool     dont_rewrite = (is_static || (rc == may_not_rewrite));
       
  2514 
       
  2515   assert(do_rewrite == !dont_rewrite, "Oops, code is not fit for that");
       
  2516   assert(btos == 0, "change code, btos != 0");
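          // The branch table is indexed by TosState: btos = 0, ztos, ctos, stos, itos,
          // ltos, ftos, dtos, atos = 8 (see globalDefinitions.hpp). Any tos_state that
          // cannot occur for a field falls into the trap entries further down.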
       
  2517 
       
   2518   // Calculate branch table size. Generated code size depends on ASSERT
        
   2519   // (via BTB_MINSIZE) and on bytecode rewriting: entries must also hold
        
   2520   // the patch_bytecode() code when rewriting is enabled.
        
   2521   // The same formula applies to ASSERT and product builds; they differ
        
   2522   // only in the value of BTB_MINSIZE.
        
   2523   const unsigned int bsize = dont_rewrite ? BTB_MINSIZE*1 : BTB_MINSIZE*4;
       
  2524 
       
  2525   // Calculate address of branch table entry and branch there.
       
  2526   {
       
  2527     const int bit_shift = exact_log2(bsize); // Size of each branch table entry.
       
  2528     const int r_bitpos  = 63 - bit_shift;
       
  2529     const int l_bitpos  = r_bitpos - ConstantPoolCacheEntry::tos_state_bits + 1;
       
  2530     const int n_rotate  = (bit_shift-ConstantPoolCacheEntry::tos_state_shift);
       
  2531     __ z_larl(br_tab, branchTable);
       
  2532     __ rotate_then_insert(flags, flags, l_bitpos, r_bitpos, n_rotate, true);
       
  2533   }
       
  2534   __ z_bc(Assembler::bcondAlways, 0, flags, br_tab);
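          // Example (product build with rewriting): bsize = 256, so bit_shift = 8 and
          // rotate_then_insert leaves flags = tos_state * 256. An int field (itos = 4)
          // therefore dispatches to branchTable + 1024.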
       
  2535 
       
  2536   __ align_address(bsize);
       
  2537   BIND(branchTable);
       
  2538 
       
  2539   // btos
       
  2540   BTB_BEGIN(is_Byte, bsize, "getfield_or_static:is_Byte");
       
  2541   __ z_lb(Z_tos, field);
       
  2542   __ push(btos);
       
  2543   // Rewrite bytecode to be faster.
       
  2544   if (do_rewrite) {
       
  2545     patch_bytecode(Bytecodes::_fast_bgetfield, bc, Z_ARG5);
       
  2546   }
       
  2547   __ z_bru(Done);
       
  2548   BTB_END(is_Byte, bsize, "getfield_or_static:is_Byte");
       
  2549 
       
  2550   // ztos
       
  2551   BTB_BEGIN(is_Bool, bsize, "getfield_or_static:is_Bool");
       
  2552   __ z_lb(Z_tos, field);
       
  2553   __ push(ztos);
       
  2554   // Rewrite bytecode to be faster.
       
  2555   if (do_rewrite) {
       
  2556     // Use btos rewriting, no truncating to t/f bit is needed for getfield.
       
  2557     patch_bytecode(Bytecodes::_fast_bgetfield, bc, Z_ARG5);
       
  2558   }
       
  2559   __ z_bru(Done);
       
  2560   BTB_END(is_Bool, bsize, "getfield_or_static:is_Bool");
       
  2561 
       
  2562   // ctos
       
  2563   BTB_BEGIN(is_Char, bsize, "getfield_or_static:is_Char");
       
  2564   // Load into 64 bits, works on all CPUs.
       
  2565   __ z_llgh(Z_tos, field);
       
  2566   __ push(ctos);
       
  2567   // Rewrite bytecode to be faster.
       
  2568   if (do_rewrite) {
       
  2569     patch_bytecode(Bytecodes::_fast_cgetfield, bc, Z_ARG5);
       
  2570   }
       
  2571   __ z_bru(Done);
       
  2572   BTB_END(is_Char, bsize, "getfield_or_static:is_Char");
       
  2573 
       
  2574   // stos
       
  2575   BTB_BEGIN(is_Short, bsize, "getfield_or_static:is_Short");
       
  2576   __ z_lh(Z_tos, field);
       
  2577   __ push(stos);
       
  2578   // Rewrite bytecode to be faster.
       
  2579   if (do_rewrite) {
       
  2580     patch_bytecode(Bytecodes::_fast_sgetfield, bc, Z_ARG5);
       
  2581   }
       
  2582   __ z_bru(Done);
       
  2583   BTB_END(is_Short, bsize, "getfield_or_static:is_Short");
       
  2584 
       
  2585   // itos
       
  2586   BTB_BEGIN(is_Int, bsize, "getfield_or_static:is_Int");
       
  2587   __ mem2reg_opt(Z_tos, field, false);
       
  2588   __ push(itos);
       
  2589   // Rewrite bytecode to be faster.
       
  2590   if (do_rewrite) {
       
  2591     patch_bytecode(Bytecodes::_fast_igetfield, bc, Z_ARG5);
       
  2592   }
       
  2593   __ z_bru(Done);
       
  2594   BTB_END(is_Int, bsize, "getfield_or_static:is_Int");
       
  2595 
       
  2596   // ltos
       
  2597   BTB_BEGIN(is_Long, bsize, "getfield_or_static:is_Long");
       
  2598   __ mem2reg_opt(Z_tos, field);
       
  2599   __ push(ltos);
       
  2600   // Rewrite bytecode to be faster.
       
  2601   if (do_rewrite) {
       
  2602     patch_bytecode(Bytecodes::_fast_lgetfield, bc, Z_ARG5);
       
  2603   }
       
  2604   __ z_bru(Done);
       
  2605   BTB_END(is_Long, bsize, "getfield_or_static:is_Long");
       
  2606 
       
  2607   // ftos
       
  2608   BTB_BEGIN(is_Float, bsize, "getfield_or_static:is_Float");
       
  2609   __ mem2freg_opt(Z_ftos, field, false);
       
  2610   __ push(ftos);
       
  2611   // Rewrite bytecode to be faster.
       
  2612   if (do_rewrite) {
       
  2613     patch_bytecode(Bytecodes::_fast_fgetfield, bc, Z_ARG5);
       
  2614   }
       
  2615   __ z_bru(Done);
       
  2616   BTB_END(is_Float, bsize, "getfield_or_static:is_Float");
       
  2617 
       
  2618   // dtos
       
  2619   BTB_BEGIN(is_Double, bsize, "getfield_or_static:is_Double");
       
  2620   __ mem2freg_opt(Z_ftos, field);
       
  2621   __ push(dtos);
       
  2622   // Rewrite bytecode to be faster.
       
  2623   if (do_rewrite) {
       
  2624     patch_bytecode(Bytecodes::_fast_dgetfield, bc, Z_ARG5);
       
  2625   }
       
  2626   __ z_bru(Done);
       
  2627   BTB_END(is_Double, bsize, "getfield_or_static:is_Double");
       
  2628 
       
  2629   // atos
       
  2630   BTB_BEGIN(is_Object, bsize, "getfield_or_static:is_Object");
       
  2631   __ z_bru(atosHandler);
       
  2632   BTB_END(is_Object, bsize, "getfield_or_static:is_Object");
       
  2633 
       
  2634   // Bad state detection comes at no extra runtime cost.
       
  2635   BTB_BEGIN(is_badState8, bsize, "getfield_or_static:is_badState8");
       
  2636   __ z_illtrap();
       
  2637   __ z_bru(is_badState);
       
  2638   BTB_END( is_badState8, bsize, "getfield_or_static:is_badState8");
       
  2639   BTB_BEGIN(is_badState9, bsize, "getfield_or_static:is_badState9");
       
  2640   __ z_illtrap();
       
  2641   __ z_bru(is_badState);
       
  2642   BTB_END( is_badState9, bsize, "getfield_or_static:is_badState9");
       
  2643   BTB_BEGIN(is_badStateA, bsize, "getfield_or_static:is_badStateA");
       
  2644   __ z_illtrap();
       
  2645   __ z_bru(is_badState);
       
  2646   BTB_END( is_badStateA, bsize, "getfield_or_static:is_badStateA");
       
  2647   BTB_BEGIN(is_badStateB, bsize, "getfield_or_static:is_badStateB");
       
  2648   __ z_illtrap();
       
  2649   __ z_bru(is_badState);
       
  2650   BTB_END( is_badStateB, bsize, "getfield_or_static:is_badStateB");
       
  2651   BTB_BEGIN(is_badStateC, bsize, "getfield_or_static:is_badStateC");
       
  2652   __ z_illtrap();
       
  2653   __ z_bru(is_badState);
       
  2654   BTB_END( is_badStateC, bsize, "getfield_or_static:is_badStateC");
       
  2655   BTB_BEGIN(is_badStateD, bsize, "getfield_or_static:is_badStateD");
       
  2656   __ z_illtrap();
       
  2657   __ z_bru(is_badState);
       
  2658   BTB_END( is_badStateD, bsize, "getfield_or_static:is_badStateD");
       
  2659   BTB_BEGIN(is_badStateE, bsize, "getfield_or_static:is_badStateE");
       
  2660   __ z_illtrap();
       
  2661   __ z_bru(is_badState);
       
  2662   BTB_END( is_badStateE, bsize, "getfield_or_static:is_badStateE");
       
  2663   BTB_BEGIN(is_badStateF, bsize, "getfield_or_static:is_badStateF");
       
  2664   __ z_illtrap();
       
  2665   __ z_bru(is_badState);
       
  2666   BTB_END( is_badStateF, bsize, "getfield_or_static:is_badStateF");
       
  2667 
       
  2668   __ align_address(64);
       
  2669   BIND(is_badState);  // Do this outside branch table. Needs a lot of space.
       
  2670   {
       
  2671     unsigned int b_off = __ offset();
       
  2672     if (is_static) {
       
  2673       __ stop_static("Bad state in getstatic");
       
  2674     } else {
       
  2675       __ stop_static("Bad state in getfield");
       
  2676     }
       
  2677     unsigned int e_off = __ offset();
       
  2678   }
       
  2679 
       
  2680   __ align_address(64);
       
  2681   BIND(atosHandler);  // Oops are really complicated to handle.
       
  2682                       // There is a lot of code generated.
       
  2683                       // Therefore: generate the handler outside of branch table.
       
  2684                       // There is no performance penalty. The additional branch
       
  2685                       // to here is compensated for by the fallthru to "Done".
       
  2686   {
       
  2687     unsigned int b_off = __ offset();
       
  2688     __ load_heap_oop(Z_tos, field);
       
  2689     __ verify_oop(Z_tos);
       
  2690     __ push(atos);
       
  2691     if (do_rewrite) {
       
  2692       patch_bytecode(Bytecodes::_fast_agetfield, bc, Z_ARG5);
       
  2693     }
       
  2694     unsigned int e_off = __ offset();
       
  2695   }
       
  2696 
       
  2697   BIND(Done);
       
  2698 }
       
  2699 
       
  2700 void TemplateTable::getfield(int byte_no) {
       
  2701   BLOCK_COMMENT("getfield  {");
       
  2702   getfield_or_static(byte_no, false);
       
  2703   BLOCK_COMMENT("} getfield");
       
  2704 }
       
  2705 
       
  2706 void TemplateTable::nofast_getfield(int byte_no) {
       
  2707   getfield_or_static(byte_no, false, may_not_rewrite);
       
  2708 }
       
  2709 
       
  2710 void TemplateTable::getstatic(int byte_no) {
       
  2711   BLOCK_COMMENT("getstatic {");
       
  2712   getfield_or_static(byte_no, true);
       
  2713   BLOCK_COMMENT("} getstatic");
       
  2714 }
       
  2715 
       
  2716 // The registers cache and index expected to be set before call.  The
       
  2717 // function may destroy various registers, just not the cache and
       
  2718 // index registers.
       
  2719 void TemplateTable::jvmti_post_field_mod(Register cache,
       
  2720                                          Register index, bool is_static) {
       
  2721   transition(vtos, vtos);
       
  2722 
       
  2723   if (!JvmtiExport::can_post_field_modification()) {
       
  2724     return;
       
  2725   }
       
  2726 
       
  2727   BLOCK_COMMENT("jvmti_post_field_mod {");
       
  2728 
       
  2729   // Check to see if a field modification watch has been set before
       
  2730   // we take the time to call into the VM.
       
  2731   Label    L1;
       
  2732   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
       
  2733   assert_different_registers(cache, index, Z_tos);
       
  2734 
       
  2735   __ load_absolute_address(Z_tos, (address)JvmtiExport::get_field_modification_count_addr());
       
  2736   __ load_and_test_int(Z_R0, Address(Z_tos));
       
  2737   __ z_brz(L1);
       
  2738 
       
  2739   // Index is returned as byte offset, do not shift!
       
  2740   __ get_cache_and_index_at_bcp(Z_ARG3, Z_R1_scratch, 1);
       
  2741 
       
  2742   if (is_static) {
       
  2743     // Life is simple. Null out the object pointer.
       
  2744     __ clear_reg(Z_ARG2, true, false);   // Don't set CC.
       
  2745   } else {
       
  2746     // Life is harder. The stack holds the value on top, followed by
       
  2747     // the object. We don't know the size of the value, though. It
       
  2748     // could be one or two words depending on its type. As a result,
       
  2749     // we must find the type to determine where the object is.
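          // Example: before `putfield long`, the stack holds [..., obj, long value]
          // with the value occupying two slots, so the object is found at at_tos(2);
          // a one-slot int value leaves the object at at_tos(1).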
       
  2750     __ mem2reg_opt(Z_ARG4,
       
  2751                    Address(Z_ARG3, Z_R1_scratch,
       
  2752                            in_bytes(cp_base_offset + ConstantPoolCacheEntry::flags_offset()) +
       
  2753                            (BytesPerLong - BytesPerInt)),
       
  2754                    false);
       
  2755     __ z_srl(Z_ARG4, ConstantPoolCacheEntry::tos_state_shift);
       
  2756     // Make sure we don't need to mask Z_ARG4 for tos_state after the above shift.
       
  2757     ConstantPoolCacheEntry::verify_tos_state_shift();
       
  2758     __ mem2reg_opt(Z_ARG2, at_tos(1));  // Initially assume a one word jvalue.
       
  2759 
       
  2760     NearLabel   load_dtos, cont;
       
  2761 
       
  2762     __ compareU32_and_branch(Z_ARG4, (intptr_t) ltos,
       
  2763                               Assembler::bcondNotEqual, load_dtos);
       
  2764     __ mem2reg_opt(Z_ARG2, at_tos(2)); // ltos (two word jvalue)
       
  2765     __ z_bru(cont);
       
  2766 
       
  2767     __ bind(load_dtos);
       
  2768     __ compareU32_and_branch(Z_ARG4, (intptr_t)dtos, Assembler::bcondNotEqual, cont);
       
  2769     __ mem2reg_opt(Z_ARG2, at_tos(2)); // dtos (two word jvalue)
       
  2770 
       
  2771     __ bind(cont);
       
  2772   }
       
  2773   // cache entry pointer
       
  2774 
       
  2775   __ add2reg_with_index(Z_ARG3, in_bytes(cp_base_offset), Z_ARG3, Z_R1_scratch);
       
  2776 
       
  2777   // object(tos)
       
  2778   __ load_address(Z_ARG4, Address(Z_esp, Interpreter::stackElementSize));
       
  2779   // Z_ARG2: object pointer set up above (NULL if static)
       
  2780   // Z_ARG3: cache entry pointer
       
  2781   // Z_ARG4: jvalue object on the stack
       
  2782   __ call_VM(noreg,
       
  2783              CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
       
  2784              Z_ARG2, Z_ARG3, Z_ARG4);
       
  2785   __ get_cache_and_index_at_bcp(cache, index, 1);
       
  2786 
       
  2787   __ bind(L1);
       
  2788   BLOCK_COMMENT("} jvmti_post_field_mod");
       
  2789 }
       
  2790 
       
  2791 
       
  2792 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
       
  2793   transition(vtos, vtos);
       
  2794 
       
  2795   const Register cache         = Z_tmp_1;
       
  2796   const Register index         = Z_ARG5;
       
  2797   const Register obj           = Z_tmp_1;
       
  2798   const Register off           = Z_tmp_2;
       
  2799   const Register flags         = Z_R1_scratch;
       
  2800   const Register br_tab        = Z_ARG5;
       
  2801   const Register bc            = Z_tmp_1;
       
  2802   const Register oopStore_tmp1 = Z_R1_scratch;
       
  2803   const Register oopStore_tmp2 = Z_ARG5;
       
  2804   const Register oopStore_tmp3 = Z_R0_scratch;
       
  2805 
       
  2806   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
       
  2807   jvmti_post_field_mod(cache, index, is_static);
       
   2808   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
          // Keep a copy of the flags for the volatile check at Done. The flags
          // register itself (Z_R1_scratch) is clobbered by the branch table dispatch.
          __ z_lgr(Z_ARG4, flags);
       
   2809   // begin of life for:
        
   2810   //   obj, off, Z_ARG4   long life range (Z_ARG4 until the volatile check at Done)
        
   2811   //   flags              short life range, up to branch into branch table
        
   2812   // end of life for:
        
   2813   //   cache, index
       
  2814 
       
  2815   const Address field(obj, off);
       
  2816   Label is_Byte, is_Bool, is_Int, is_Short, is_Char,
       
  2817         is_Long, is_Float, is_Object, is_Double;
       
  2818   Label is_badState8, is_badState9, is_badStateA, is_badStateB,
       
  2819         is_badStateC, is_badStateD, is_badStateE, is_badStateF,
       
  2820         is_badState;
       
  2821   Label branchTable, atosHandler, Done;
       
  2822   bool  do_rewrite   = !is_static && (rc == may_rewrite);
       
  2823   bool  dont_rewrite = (is_static || (rc == may_not_rewrite));
       
  2824 
       
  2825   assert(do_rewrite == !dont_rewrite, "Oops, code is not fit for that");
       
  2826 
       
  2827   assert(btos == 0, "change code, btos != 0");
       
  2828 
       
  2829 #ifdef ASSERT
       
  2830   const unsigned int bsize = is_static ? BTB_MINSIZE*1 : BTB_MINSIZE*4;
       
  2831 #else
       
  2832   const unsigned int bsize = is_static ? BTB_MINSIZE*1 : BTB_MINSIZE*8;
       
  2833 #endif
       
  2834 
       
  2835   // Calculate address of branch table entry and branch there.
       
  2836   {
       
  2837     const int bit_shift = exact_log2(bsize); // Size of each branch table entry.
       
  2838     const int r_bitpos  = 63 - bit_shift;
       
  2839     const int l_bitpos  = r_bitpos - ConstantPoolCacheEntry::tos_state_bits + 1;
       
  2840     const int n_rotate  = (bit_shift-ConstantPoolCacheEntry::tos_state_shift);
       
  2841     __ z_larl(br_tab, branchTable);
       
  2842     __ rotate_then_insert(flags, flags, l_bitpos, r_bitpos, n_rotate, true);
       
  2843     __ z_bc(Assembler::bcondAlways, 0, flags, br_tab);
       
  2844   }
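          // flags now holds tos_state * bsize: the same calculated-branch dispatch
          // scheme as in getfield_or_static above.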
       
  2845   // end of life for:
       
  2846   //   flags, br_tab
       
  2847 
       
  2848   __ align_address(bsize);
       
  2849   BIND(branchTable);
       
  2850 
       
  2851   // btos
       
  2852   BTB_BEGIN(is_Byte, bsize, "putfield_or_static:is_Byte");
       
  2853   __ pop(btos);
       
  2854   if (!is_static) {
       
  2855     pop_and_check_object(obj);
       
  2856   }
       
  2857   __ z_stc(Z_tos, field);
       
  2858   if (do_rewrite) {
       
  2859     patch_bytecode(Bytecodes::_fast_bputfield, bc, Z_ARG5, true, byte_no);
       
  2860   }
       
  2861   __ z_bru(Done);
       
  2862   BTB_END( is_Byte, bsize, "putfield_or_static:is_Byte");
       
  2863 
       
  2864   // ztos
       
  2865   BTB_BEGIN(is_Bool, bsize, "putfield_or_static:is_Bool");
       
  2866   __ pop(ztos);
       
   2867   if (!is_static) {
        
   2868     pop_and_check_object(obj);
        
   2869   }
        
   2870   __ z_nilf(Z_tos, 0x1);
        
   2871   __ z_stc(Z_tos, field);
        
   2872   if (do_rewrite) {
        
   2873     patch_bytecode(Bytecodes::_fast_zputfield, bc, Z_ARG5, true, byte_no);
        
   2874   }
       
  2875   __ z_bru(Done);
       
  2876   BTB_END(is_Bool, bsize, "putfield_or_static:is_Bool");
       
  2877 
       
  2878   // ctos
       
  2879   BTB_BEGIN(is_Char, bsize, "putfield_or_static:is_Char");
       
  2880   __ pop(ctos);
       
  2881   if (!is_static) {
       
  2882     pop_and_check_object(obj);
       
  2883   }
       
  2884   __ z_sth(Z_tos, field);
       
  2885   if (do_rewrite) {
       
  2886     patch_bytecode(Bytecodes::_fast_cputfield, bc, Z_ARG5, true, byte_no);
       
  2887   }
       
  2888   __ z_bru(Done);
       
  2889   BTB_END( is_Char, bsize, "putfield_or_static:is_Char");
       
  2890 
       
  2891   // stos
       
  2892   BTB_BEGIN(is_Short, bsize, "putfield_or_static:is_Short");
       
  2893   __ pop(stos);
       
  2894   if (!is_static) {
       
  2895     pop_and_check_object(obj);
       
  2896   }
       
  2897   __ z_sth(Z_tos, field);
       
  2898   if (do_rewrite) {
       
  2899     patch_bytecode(Bytecodes::_fast_sputfield, bc, Z_ARG5, true, byte_no);
       
  2900   }
       
  2901   __ z_bru(Done);
       
  2902   BTB_END( is_Short, bsize, "putfield_or_static:is_Short");
       
  2903 
       
  2904   // itos
       
  2905   BTB_BEGIN(is_Int, bsize, "putfield_or_static:is_Int");
       
  2906   __ pop(itos);
       
  2907   if (!is_static) {
       
  2908     pop_and_check_object(obj);
       
  2909   }
       
  2910   __ reg2mem_opt(Z_tos, field, false);
       
  2911   if (do_rewrite) {
       
  2912     patch_bytecode(Bytecodes::_fast_iputfield, bc, Z_ARG5, true, byte_no);
       
  2913   }
       
  2914   __ z_bru(Done);
       
  2915   BTB_END( is_Int, bsize, "putfield_or_static:is_Int");
       
  2916 
       
  2917   // ltos
       
  2918   BTB_BEGIN(is_Long, bsize, "putfield_or_static:is_Long");
       
  2919   __ pop(ltos);
       
  2920   if (!is_static) {
       
  2921     pop_and_check_object(obj);
       
  2922   }
       
  2923   __ reg2mem_opt(Z_tos, field);
       
  2924   if (do_rewrite) {
       
  2925     patch_bytecode(Bytecodes::_fast_lputfield, bc, Z_ARG5, true, byte_no);
       
  2926   }
       
  2927   __ z_bru(Done);
       
  2928   BTB_END( is_Long, bsize, "putfield_or_static:is_Long");
       
  2929 
       
  2930   // ftos
       
  2931   BTB_BEGIN(is_Float, bsize, "putfield_or_static:is_Float");
       
  2932   __ pop(ftos);
       
  2933   if (!is_static) {
       
  2934     pop_and_check_object(obj);
       
  2935   }
       
  2936   __ freg2mem_opt(Z_ftos, field, false);
       
  2937   if (do_rewrite) {
       
  2938     patch_bytecode(Bytecodes::_fast_fputfield, bc, Z_ARG5, true, byte_no);
       
  2939   }
       
  2940   __ z_bru(Done);
       
  2941   BTB_END( is_Float, bsize, "putfield_or_static:is_Float");
       
  2942 
       
  2943   // dtos
       
  2944   BTB_BEGIN(is_Double, bsize, "putfield_or_static:is_Double");
       
  2945   __ pop(dtos);
       
  2946   if (!is_static) {
       
  2947     pop_and_check_object(obj);
       
  2948   }
       
  2949   __ freg2mem_opt(Z_ftos, field);
       
  2950   if (do_rewrite) {
       
  2951     patch_bytecode(Bytecodes::_fast_dputfield, bc, Z_ARG5, true, byte_no);
       
  2952   }
       
  2953   __ z_bru(Done);
       
  2954   BTB_END( is_Double, bsize, "putfield_or_static:is_Double");
       
  2955 
       
  2956   // atos
       
  2957   BTB_BEGIN(is_Object, bsize, "putfield_or_static:is_Object");
       
  2958   __ z_bru(atosHandler);
       
  2959   BTB_END( is_Object, bsize, "putfield_or_static:is_Object");
       
  2960 
       
  2961   // Bad state detection comes at no extra runtime cost.
       
  2962   BTB_BEGIN(is_badState8, bsize, "putfield_or_static:is_badState8");
       
  2963   __ z_illtrap();
       
  2964   __ z_bru(is_badState);
       
  2965   BTB_END( is_badState8, bsize, "putfield_or_static:is_badState8");
       
  2966   BTB_BEGIN(is_badState9, bsize, "putfield_or_static:is_badState9");
       
  2967   __ z_illtrap();
       
  2968   __ z_bru(is_badState);
       
  2969   BTB_END( is_badState9, bsize, "putfield_or_static:is_badState9");
       
  2970   BTB_BEGIN(is_badStateA, bsize, "putfield_or_static:is_badStateA");
       
  2971   __ z_illtrap();
       
  2972   __ z_bru(is_badState);
       
  2973   BTB_END( is_badStateA, bsize, "putfield_or_static:is_badStateA");
       
  2974   BTB_BEGIN(is_badStateB, bsize, "putfield_or_static:is_badStateB");
       
  2975   __ z_illtrap();
       
  2976   __ z_bru(is_badState);
       
  2977   BTB_END( is_badStateB, bsize, "putfield_or_static:is_badStateB");
       
  2978   BTB_BEGIN(is_badStateC, bsize, "putfield_or_static:is_badStateC");
       
  2979   __ z_illtrap();
       
  2980   __ z_bru(is_badState);
       
  2981   BTB_END( is_badStateC, bsize, "putfield_or_static:is_badStateC");
       
  2982   BTB_BEGIN(is_badStateD, bsize, "putfield_or_static:is_badStateD");
       
  2983   __ z_illtrap();
       
  2984   __ z_bru(is_badState);
       
  2985   BTB_END( is_badStateD, bsize, "putfield_or_static:is_badStateD");
       
  2986   BTB_BEGIN(is_badStateE, bsize, "putfield_or_static:is_badStateE");
       
  2987   __ z_illtrap();
       
  2988   __ z_bru(is_badState);
       
  2989   BTB_END( is_badStateE, bsize, "putfield_or_static:is_badStateE");
       
  2990   BTB_BEGIN(is_badStateF, bsize, "putfield_or_static:is_badStateF");
       
  2991   __ z_illtrap();
       
  2992   __ z_bru(is_badState);
       
  2993   BTB_END( is_badStateF, bsize, "putfield_or_static:is_badStateF");
       
  2994 
       
  2995   __ align_address(64);
       
  2996   BIND(is_badState);  // Do this outside branch table. Needs a lot of space.
       
  2997   {
       
  2998     unsigned int b_off = __ offset();
       
   2999     if (is_static) { __ stop_static("Bad state in putstatic"); }
        
   3000     else           { __ stop_static("Bad state in putfield");  }
       
  3001     unsigned int e_off = __ offset();
       
  3002   }
       
  3003 
       
  3004   __ align_address(64);
       
  3005   BIND(atosHandler);  // Oops are really complicated to handle.
       
  3006                       // There is a lot of code generated.
       
  3007                       // Therefore: generate the handler outside of branch table.
       
  3008                       // There is no performance penalty. The additional branch
       
  3009                       // to here is compensated for by the fallthru to "Done".
       
  3010   {
       
  3011     unsigned int b_off = __ offset();
       
  3012     __ pop(atos);
       
  3013     if (!is_static) {
       
  3014       pop_and_check_object(obj);
       
  3015     }
       
  3016     // Store into the field
       
  3017     do_oop_store(_masm, obj, off, Z_tos, false,
       
  3018                  oopStore_tmp1, oopStore_tmp2, oopStore_tmp3, _bs->kind(), false);
       
  3019     if (do_rewrite) {
       
  3020       patch_bytecode(Bytecodes::_fast_aputfield, bc, Z_ARG5, true, byte_no);
       
  3021     }
       
  3022     // __ z_bru(Done); // fallthru
       
  3023     unsigned int e_off = __ offset();
       
  3024   }
       
  3025 
       
  3026   BIND(Done);
       
  3027 
       
  3028   // Check for volatile store.
       
  3029   Label notVolatile;
       
  3030 
       
  3031   __ testbit(Z_ARG4, ConstantPoolCacheEntry::is_volatile_shift);
       
  3032   __ z_brz(notVolatile);
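          // A volatile field store must be followed by a full memory fence
          // (StoreLoad barrier) to satisfy the Java memory model.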
       
  3033   __ z_fence();
       
  3034 
       
  3035   BIND(notVolatile);
       
  3036 }
       
  3037 
       
  3038 void TemplateTable::putfield(int byte_no) {
       
  3039   BLOCK_COMMENT("putfield  {");
       
  3040   putfield_or_static(byte_no, false);
       
  3041   BLOCK_COMMENT("} putfield");
       
  3042 }
       
  3043 
       
  3044 void TemplateTable::nofast_putfield(int byte_no) {
       
  3045   putfield_or_static(byte_no, false, may_not_rewrite);
       
  3046 }
       
  3047 
       
  3048 void TemplateTable::putstatic(int byte_no) {
       
  3049   BLOCK_COMMENT("putstatic {");
       
  3050   putfield_or_static(byte_no, true);
       
  3051   BLOCK_COMMENT("} putstatic");
       
  3052 }
       
  3053 
       
   3054 // Push the tos value back to the stack, so that GC will find any oops
        
   3055 // there and can update them.
       
  3056 void TemplateTable::jvmti_post_fast_field_mod() {
       
  3057 
       
  3058   if (!JvmtiExport::can_post_field_modification()) {
       
  3059     return;
       
  3060   }
       
  3061 
       
  3062   // Check to see if a field modification watch has been set before
       
  3063   // we take the time to call into the VM.
       
  3064   Label   exit;
       
  3065 
       
  3066   BLOCK_COMMENT("jvmti_post_fast_field_mod {");
       
  3067 
       
  3068   __ load_absolute_address(Z_R1_scratch,
       
  3069                            (address) JvmtiExport::get_field_modification_count_addr());
       
  3070   __ load_and_test_int(Z_R0_scratch, Address(Z_R1_scratch));
       
  3071   __ z_brz(exit);
       
  3072 
       
  3073   Register obj = Z_tmp_1;
       
  3074 
       
  3075   __ pop_ptr(obj);                  // Copy the object pointer from tos.
       
  3076   __ verify_oop(obj);
       
  3077   __ push_ptr(obj);                 // Put the object pointer back on tos.
       
  3078 
       
  3079   // Save tos values before call_VM() clobbers them. Since we have
       
  3080   // to do it for every data type, we use the saved values as the
       
  3081   // jvalue object.
       
  3082   switch (bytecode()) {          // Load values into the jvalue object.
       
  3083     case Bytecodes::_fast_aputfield:
       
  3084       __ push_ptr(Z_tos);
       
  3085       break;
       
  3086     case Bytecodes::_fast_bputfield:
       
  3087     case Bytecodes::_fast_zputfield:
       
  3088     case Bytecodes::_fast_sputfield:
       
  3089     case Bytecodes::_fast_cputfield:
       
  3090     case Bytecodes::_fast_iputfield:
       
  3091       __ push_i(Z_tos);
       
  3092       break;
       
  3093     case Bytecodes::_fast_dputfield:
       
  3094       __ push_d();
       
  3095       break;
       
  3096     case Bytecodes::_fast_fputfield:
       
  3097       __ push_f();
       
  3098       break;
       
  3099     case Bytecodes::_fast_lputfield:
       
  3100       __ push_l(Z_tos);
       
  3101       break;
       
  3102 
       
  3103     default:
       
  3104       ShouldNotReachHere();
       
  3105   }
       
  3106 
       
  3107   // jvalue on the stack
       
  3108   __ load_address(Z_ARG4, Address(Z_esp, Interpreter::stackElementSize));
       
  3109   // Access constant pool cache entry.
       
  3110   __ get_cache_entry_pointer_at_bcp(Z_ARG3, Z_tos, 1);
       
  3111   __ verify_oop(obj);
       
  3112 
       
  3113   // obj   : object pointer copied above
       
  3114   // Z_ARG3: cache entry pointer
       
  3115   // Z_ARG4: jvalue object on the stack
       
  3116   __ call_VM(noreg,
       
  3117              CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
       
  3118              obj, Z_ARG3, Z_ARG4);
       
  3119 
       
  3120   switch (bytecode()) {             // Restore tos values.
       
  3121     case Bytecodes::_fast_aputfield:
       
  3122       __ pop_ptr(Z_tos);
       
  3123       break;
       
  3124     case Bytecodes::_fast_bputfield:
       
  3125     case Bytecodes::_fast_zputfield:
       
  3126     case Bytecodes::_fast_sputfield:
       
  3127     case Bytecodes::_fast_cputfield:
       
  3128     case Bytecodes::_fast_iputfield:
       
  3129       __ pop_i(Z_tos);
       
  3130       break;
       
  3131     case Bytecodes::_fast_dputfield:
       
  3132       __ pop_d(Z_ftos);
       
  3133       break;
       
  3134     case Bytecodes::_fast_fputfield:
       
  3135       __ pop_f(Z_ftos);
       
  3136       break;
       
  3137     case Bytecodes::_fast_lputfield:
       
  3138       __ pop_l(Z_tos);
       
  3139       break;
       
  3140   }
       
  3141 
       
  3142   __ bind(exit);
       
  3143   BLOCK_COMMENT("} jvmti_post_fast_field_mod");
       
  3144 }
       
  3145 
       
  3146 void TemplateTable::fast_storefield(TosState state) {
       
  3147   transition(state, vtos);
       
  3148 
       
  3149   ByteSize base = ConstantPoolCache::base_offset();
       
  3150   jvmti_post_fast_field_mod();
       
  3151 
       
  3152   // Access constant pool cache.
       
  3153   Register cache = Z_tmp_1;
       
  3154   Register index = Z_tmp_2;
       
  3155   Register flags = Z_ARG5;
       
  3156 
       
  3157   // Index comes in bytes, don't shift afterwards!
       
  3158   __ get_cache_and_index_at_bcp(cache, index, 1);
       
  3159 
       
   3160   // Load the flags for the volatile check after the store. The flags register
          // must be non-volatile: do_oop_store below could perform a leaf RT call.
        
   3161   assert(!flags->is_volatile(), "do_oop_store could perform leaf RT call");
        
   3162   __ z_lg(flags, Address(cache, index, base + ConstantPoolCacheEntry::flags_offset()));
       
  3163 
       
  3164   // Replace index with field offset from cache entry.
       
  3165   Register field_offset = index;
       
  3166   __ z_lg(field_offset, Address(cache, index, base + ConstantPoolCacheEntry::f2_offset()));
       
  3167 
       
  3168   // Get object from stack.
       
  3169   Register   obj = cache;
       
  3170 
       
  3171   pop_and_check_object(obj);
       
  3172 
       
  3173   // field address
       
  3174   const Address   field(obj, field_offset);
       
  3175 
       
  3176   // access field
       
  3177   switch (bytecode()) {
       
  3178     case Bytecodes::_fast_aputfield:
       
  3179       do_oop_store(_masm, obj, field_offset, Z_tos, false,
       
  3180                    Z_ARG2, Z_ARG3, Z_ARG4, _bs->kind(), false);
       
  3181       break;
       
  3182     case Bytecodes::_fast_lputfield:
       
  3183       __ reg2mem_opt(Z_tos, field);
       
  3184       break;
       
  3185     case Bytecodes::_fast_iputfield:
       
  3186       __ reg2mem_opt(Z_tos, field, false);
       
  3187       break;
       
  3188     case Bytecodes::_fast_zputfield:
       
  3189       __ z_nilf(Z_tos, 0x1);
       
  3190       // fall through to bputfield
       
  3191     case Bytecodes::_fast_bputfield:
       
  3192       __ z_stc(Z_tos, field);
       
  3193       break;
       
  3194     case Bytecodes::_fast_sputfield:
       
  3195       // fall through
       
  3196     case Bytecodes::_fast_cputfield:
       
  3197       __ z_sth(Z_tos, field);
       
  3198       break;
       
  3199     case Bytecodes::_fast_fputfield:
       
  3200       __ freg2mem_opt(Z_ftos, field, false);
       
  3201       break;
       
  3202     case Bytecodes::_fast_dputfield:
       
  3203       __ freg2mem_opt(Z_ftos, field);
       
  3204       break;
       
  3205     default:
       
  3206       ShouldNotReachHere();
       
  3207   }
       
  3208 
       
  3209   //  Check for volatile store.
       
  3210   Label notVolatile;
       
  3211 
       
  3212   __ testbit(flags, ConstantPoolCacheEntry::is_volatile_shift);
       
  3213   __ z_brz(notVolatile);
       
  3214   __ z_fence();
       
  3215 
       
  3216   __ bind(notVolatile);
       
  3217 }
       
  3218 
       
  3219 void TemplateTable::fast_accessfield(TosState state) {
       
  3220   transition(atos, state);
       
  3221 
       
  3222   Register obj = Z_tos;
       
  3223 
       
  3224   // Do the JVMTI work here to avoid disturbing the register state below
       
  3225   if (JvmtiExport::can_post_field_access()) {
       
  3226     // Check to see if a field access watch has been set before we
       
  3227     // take the time to call into the VM.
       
  3228     Label cont;
       
  3229 
       
  3230     __ load_absolute_address(Z_R1_scratch,
       
  3231                              (address)JvmtiExport::get_field_access_count_addr());
       
  3232     __ load_and_test_int(Z_R0_scratch, Address(Z_R1_scratch));
       
  3233     __ z_brz(cont);
       
  3234 
       
  3235     // Access constant pool cache entry.
       
  3236 
       
  3237     __ get_cache_entry_pointer_at_bcp(Z_ARG3, Z_tmp_1, 1);
       
  3238     __ verify_oop(obj);
       
  3239     __ push_ptr(obj);  // Save object pointer before call_VM() clobbers it.
       
  3240     __ z_lgr(Z_ARG2, obj);
       
  3241 
       
  3242     // Z_ARG2: object pointer copied above
       
  3243     // Z_ARG3: cache entry pointer
       
  3244     __ call_VM(noreg,
       
  3245                CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
       
  3246                Z_ARG2, Z_ARG3);
       
  3247     __ pop_ptr(obj); // Restore object pointer.
       
  3248 
       
  3249     __ bind(cont);
       
  3250   }
       
  3251 
       
  3252   // Access constant pool cache.
       
  3253   Register   cache = Z_tmp_1;
       
  3254   Register   index = Z_tmp_2;
       
  3255 
       
  3256   // Index comes in bytes, don't shift afterwards!
       
  3257   __ get_cache_and_index_at_bcp(cache, index, 1);
       
  3258   // Replace index with field offset from cache entry.
       
  3259   __ mem2reg_opt(index,
       
  3260                  Address(cache, index,
       
  3261                          ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
       
  3262 
       
  3263   __ verify_oop(obj);
       
  3264   __ null_check(obj);
       
  3265 
       
  3266   Address field(obj, index);
       
  3267 
       
  3268   // access field
       
  3269   switch (bytecode()) {
       
  3270     case Bytecodes::_fast_agetfield:
       
  3271       __ load_heap_oop(Z_tos, field);
       
  3272       __ verify_oop(Z_tos);
       
  3273       return;
       
  3274     case Bytecodes::_fast_lgetfield:
       
  3275       __ mem2reg_opt(Z_tos, field);
       
  3276       return;
       
  3277     case Bytecodes::_fast_igetfield:
       
  3278       __ mem2reg_opt(Z_tos, field, false);
       
  3279       return;
       
  3280     case Bytecodes::_fast_bgetfield:
       
  3281       __ z_lb(Z_tos, field);
       
  3282       return;
       
  3283     case Bytecodes::_fast_sgetfield:
       
  3284       __ z_lh(Z_tos, field);
       
  3285       return;
       
  3286     case Bytecodes::_fast_cgetfield:
       
  3287       __ z_llgh(Z_tos, field);   // Load into 64 bits, works on all CPUs.
       
  3288       return;
       
  3289     case Bytecodes::_fast_fgetfield:
       
  3290       __ mem2freg_opt(Z_ftos, field, false);
       
  3291       return;
       
  3292     case Bytecodes::_fast_dgetfield:
       
  3293       __ mem2freg_opt(Z_ftos, field);
       
  3294       return;
       
  3295     default:
       
  3296       ShouldNotReachHere();
       
  3297   }
       
  3298 }
       
  3299 
       
  3300 void TemplateTable::fast_xaccess(TosState state) {
       
  3301   transition(vtos, state);
       
  3302 
       
  3303   Register receiver = Z_tos;
       
  3304   // Get receiver.
       
  3305   __ mem2reg_opt(Z_tos, aaddress(0));
       
  3306 
       
  3307   // Access constant pool cache.
       
  3308   Register cache = Z_tmp_1;
       
  3309   Register index = Z_tmp_2;
       
  3310 
       
  3311   // Index comes in bytes, don't shift afterwards!
       
  3312   __ get_cache_and_index_at_bcp(cache, index, 2);
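          // The fast_xaccess bytecodes replace an aload_0/getfield pair at the
          // aload_0 position, so the getfield's cache index is found at bcp + 2.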
       
  3313   // Replace index with field offset from cache entry.
       
  3314   __ mem2reg_opt(index,
       
  3315                  Address(cache, index,
       
  3316                          ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
       
  3317 
       
  3318   // Make sure exception is reported in correct bcp range (getfield is
       
  3319   // next instruction).
       
  3320   __ add2reg(Z_bcp, 1);
       
  3321   __ null_check(receiver);
       
  3322   switch (state) {
       
  3323     case itos:
       
  3324       __ mem2reg_opt(Z_tos, Address(receiver, index), false);
       
  3325       break;
       
  3326     case atos:
       
  3327       __ load_heap_oop(Z_tos, Address(receiver, index));
       
  3328       __ verify_oop(Z_tos);
       
  3329       break;
       
  3330     case ftos:
       
  3331       __ mem2freg_opt(Z_ftos, Address(receiver, index));
       
  3332       break;
       
  3333     default:
       
  3334       ShouldNotReachHere();
       
  3335   }
       
  3336 
       
  3337   // Reset bcp to original position.
       
  3338   __ add2reg(Z_bcp, -1);
       
  3339 }
       
  3340 
       
  3341 //-----------------------------------------------------------------------------
       
  3342 // Calls
       
  3343 
       
  3344 void TemplateTable::prepare_invoke(int byte_no,
       
  3345                                    Register method,  // linked method (or i-klass)
       
  3346                                    Register index,   // itable index, MethodType, etc.
       
  3347                                    Register recv,    // If caller wants to see it.
       
  3348                                    Register flags) { // If caller wants to test it.
       
  3349   // Determine flags.
       
  3350   const Bytecodes::Code code = bytecode();
       
  3351   const bool is_invokeinterface  = code == Bytecodes::_invokeinterface;
       
  3352   const bool is_invokedynamic    = code == Bytecodes::_invokedynamic;
       
  3353   const bool is_invokehandle     = code == Bytecodes::_invokehandle;
       
  3354   const bool is_invokevirtual    = code == Bytecodes::_invokevirtual;
       
  3355   const bool is_invokespecial    = code == Bytecodes::_invokespecial;
       
  3356   const bool load_receiver       = (recv != noreg);
       
  3357   assert(load_receiver == (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic), "");
       
  3358 
       
  3359   // Setup registers & access constant pool cache.
       
  3360   if (recv  == noreg) { recv  = Z_ARG1; }
       
  3361   if (flags == noreg) { flags = Z_ARG2; }
       
  3362   assert_different_registers(method, Z_R14, index, recv, flags);
       
  3363 
       
  3364   BLOCK_COMMENT("prepare_invoke {");
       
  3365 
       
  3366   load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual, false, is_invokedynamic);
       
  3367 
       
  3368   // Maybe push appendix to arguments.
       
  3369   if (is_invokedynamic || is_invokehandle) {
       
  3370     Label L_no_push;
       
  3371     Register resolved_reference = Z_R1_scratch;
       
  3372     __ testbit(flags, ConstantPoolCacheEntry::has_appendix_shift);
       
  3373     __ z_bfalse(L_no_push);
       
  3374     // Push the appendix as a trailing parameter.
       
  3375     // This must be done before we get the receiver,
       
  3376     // since the parameter_size includes it.
       
  3377     __ load_resolved_reference_at_index(resolved_reference, index);
       
  3378     __ verify_oop(resolved_reference);
       
  3379     __ push_ptr(resolved_reference);  // Push appendix (MethodType, CallSite, etc.).
       
  3380     __ bind(L_no_push);
       
  3381   }
       
  3382 
       
  3383   // Load receiver if needed (after appendix is pushed so parameter size is correct).
       
  3384   if (load_receiver) {
       
  3385     assert(!is_invokedynamic, "");
       
  3386     // recv := int2long(flags & ConstantPoolCacheEntry::parameter_size_mask) << 3
       
  3387     // Flags is zero-extended int2long when loaded during load_invoke_cp_cache_entry().
       
  3388     // Only the least significant byte (psize) of flags is used.
       
  3389     {
       
  3390       const unsigned int logSES = Interpreter::logStackElementSize;
       
  3391       const int bit_shift = logSES;
       
  3392       const int r_bitpos  = 63 - bit_shift;
       
  3393       const int l_bitpos  = r_bitpos - ConstantPoolCacheEntry::parameter_size_bits + 1;
       
  3394       const int n_rotate  = bit_shift;
       
  3395       assert(ConstantPoolCacheEntry::parameter_size_mask == 255, "adapt bitpositions");
       
  3396       __ rotate_then_insert(recv, flags, l_bitpos, r_bitpos, n_rotate, true);
       
  3397     }
       
  3398     // Recv now contains #arguments * StackElementSize.
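            // Example: an invokevirtual with signature (I)V has parameter_size = 2
            // (receiver + one int slot), so recv = 2 << logSES = 16 and the receiver
            // is loaded from Z_esp + 16 below.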
       
  3399 
       
  3400     Address recv_addr(Z_esp, recv);
       
  3401     __ z_lg(recv, recv_addr);
       
  3402     __ verify_oop(recv);
       
  3403   }
       
  3404 
       
  3405   // Compute return type.
       
  3406   // ret_type is used by callers (invokespecial, invokestatic) at least.
       
  3407   Register ret_type = Z_R1_scratch;
       
  3408   assert_different_registers(ret_type, method);
       
  3409 
       
  3410   const address table_addr = (address)Interpreter::invoke_return_entry_table_for(code);
       
  3411   __ load_absolute_address(Z_R14, table_addr);
       
  3412 
       
  3413   {
       
  3414     const int bit_shift = LogBytesPerWord;           // Size of each table entry.
       
  3415     const int r_bitpos  = 63 - bit_shift;
       
  3416     const int l_bitpos  = r_bitpos - ConstantPoolCacheEntry::tos_state_bits + 1;
       
  3417     const int n_rotate  = bit_shift-ConstantPoolCacheEntry::tos_state_shift;
       
  3418     __ rotate_then_insert(ret_type, flags, l_bitpos, r_bitpos, n_rotate, true);
       
  3419     // Make sure we don't need to mask flags for tos_state after the above shift.
       
  3420     ConstantPoolCacheEntry::verify_tos_state_shift();
       
  3421   }
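          // ret_type now holds tos_state * wordSize and selects the matching return
          // entry; e.g. an ltos return value (tos_state 5) selects offset 40.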
       
  3422 
       
   3423   __ z_lg(Z_R14, Address(Z_R14, ret_type)); // Load return address.
       
  3424   BLOCK_COMMENT("} prepare_invoke");
       
  3425 }
       
  3426 
       
  3427 
       
  3428 void TemplateTable::invokevirtual_helper(Register index,
       
  3429                                          Register recv,
       
  3430                                          Register flags) {
       
  3431   // Uses temporary registers Z_tmp_2, Z_ARG4.
       
  3432   assert_different_registers(index, recv, Z_tmp_2, Z_ARG4);
       
  3433 
       
  3434   // Test for an invoke of a final method.
       
  3435   Label notFinal;
       
  3436 
       
  3437   BLOCK_COMMENT("invokevirtual_helper {");
       
  3438 
       
  3439   __ testbit(flags, ConstantPoolCacheEntry::is_vfinal_shift);
       
  3440   __ z_brz(notFinal);
       
  3441 
       
  3442   const Register method = index;  // Method must be Z_ARG3.
       
  3443   assert(method == Z_ARG3, "method must be second argument for interpreter calling convention");
       
  3444 
       
  3445   // Do the call - the index is actually the method to call.
       
  3446   // That is, f2 is a vtable index if !is_vfinal, else f2 is a method.
       
  3447 
       
  3448   // It's final, need a null check here!
       
  3449   __ null_check(recv);
       
  3450 
       
  3451   // Profile this call.
       
  3452   __ profile_final_call(Z_tmp_2);
       
  3453   __ profile_arguments_type(Z_tmp_2, method, Z_ARG5, true); // Argument type profiling.
       
  3454   __ jump_from_interpreted(method, Z_tmp_2);
       
  3455 
       
  3456   __ bind(notFinal);
       
  3457 
       
  3458   // Get receiver klass.
       
  3459   __ null_check(recv, Z_R0_scratch, oopDesc::klass_offset_in_bytes());
       
  3460   __ load_klass(Z_tmp_2, recv);
       
  3461 
       
  3462   // Profile this call.
       
  3463   __ profile_virtual_call(Z_tmp_2, Z_ARG4, Z_ARG5);
       
  3464 
       
  3465   // Get target method & entry point.
       
  3466   __ z_sllg(index, index, exact_log2(vtableEntry::size_in_bytes()));
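          // index now holds vtable_index * vtableEntry::size_in_bytes(); the target
          // Method* is loaded from the receiver klass at
          // vtable_start_offset + index + method_offset_in_bytes.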
       
  3467   __ mem2reg_opt(method,
       
  3468                  Address(Z_tmp_2, index,
       
  3469                          Klass::vtable_start_offset() + in_ByteSize(vtableEntry::method_offset_in_bytes())));
       
  3470   __ profile_arguments_type(Z_ARG4, method, Z_ARG5, true);
       
  3471   __ jump_from_interpreted(method, Z_ARG4);
       
  3472   BLOCK_COMMENT("} invokevirtual_helper");
       
  3473 }
       
  3474 
       
  3475 void TemplateTable::invokevirtual(int byte_no) {
       
  3476   transition(vtos, vtos);
       
  3477 
       
  3478   assert(byte_no == f2_byte, "use this argument");
       
  3479   prepare_invoke(byte_no,
       
  3480                  Z_ARG3,  // method or vtable index
       
  3481                  noreg,   // unused itable index
       
  3482                  Z_ARG1,  // recv
       
  3483                  Z_ARG2); // flags
       
  3484 
       
  3485   // Z_ARG3 : index
       
  3486   // Z_ARG1 : receiver
       
  3487   // Z_ARG2 : flags
       
  3488   invokevirtual_helper(Z_ARG3, Z_ARG1, Z_ARG2);
       
  3489 }
       
  3490 
       
  3491 void TemplateTable::invokespecial(int byte_no) {
       
  3492   transition(vtos, vtos);
       
  3493 
       
  3494   assert(byte_no == f1_byte, "use this argument");
       
  3495   Register Rmethod = Z_tmp_2;
       
  3496   prepare_invoke(byte_no, Rmethod, noreg, // Get f1 method.
       
  3497                  Z_ARG3);   // Get receiver also for null check.
       
  3498   __ verify_oop(Z_ARG3);
       
  3499   __ null_check(Z_ARG3);
       
  3500   // Do the call.
       
  3501   __ profile_call(Z_ARG2);
       
  3502   __ profile_arguments_type(Z_ARG2, Rmethod, Z_ARG5, false);
       
  3503   __ jump_from_interpreted(Rmethod, Z_R1_scratch);
       
  3504 }
       
  3505 
       
  3506 void TemplateTable::invokestatic(int byte_no) {
       
  3507   transition(vtos, vtos);
       
  3508 
       
  3509   assert(byte_no == f1_byte, "use this argument");
       
  3510   Register Rmethod = Z_tmp_2;
       
  3511   prepare_invoke(byte_no, Rmethod);   // Get f1 method.
       
  3512   // Do the call.
       
  3513   __ profile_call(Z_ARG2);
       
  3514   __ profile_arguments_type(Z_ARG2, Rmethod, Z_ARG5, false);
       
  3515   __ jump_from_interpreted(Rmethod, Z_R1_scratch);
       
  3516 }
       
  3517 
       
   3518 // Outdated feature: rewriting to _fast_invokevfinal is not used on this platform.
       
  3519 void TemplateTable::fast_invokevfinal(int byte_no) {
       
  3520   transition(vtos, vtos);
       
  3521   assert(byte_no == f2_byte, "use this argument");
       
  3522   __ stop("fast_invokevfinal not used on linuxs390x");
       
  3523 }
       
  3524 
       
  3525 void TemplateTable::invokeinterface(int byte_no) {
       
  3526   transition(vtos, vtos);
       
  3527 
       
  3528   assert(byte_no == f1_byte, "use this argument");
       
  3529   Register interface = Z_tos;
       
  3530   Register index = Z_ARG3;
       
  3531   Register receiver = Z_tmp_1;
       
  3532   Register flags = Z_ARG5;
       
  3533 
       
  3534   BLOCK_COMMENT("invokeinterface {");
       
  3535 
       
  3536   // Destroys Z_ARG1 and Z_ARG2, thus use Z_ARG4 and copy afterwards.
       
  3537   prepare_invoke(byte_no, Z_ARG4, index,  // Get f1 klassOop, f2 itable index.
       
  3538                  receiver, flags);
       
  3539 
       
  3540   // Z_R14 (== Z_bytecode) : return entry
       
  3541 
       
  3542   __ z_lgr(interface, Z_ARG4);
       
  3543 
       
  3544   // Special case of invokeinterface called for virtual method of
       
   3545   // java.lang.Object. See cpCache.cpp for details.
       
  3546   // This code isn't produced by javac, but could be produced by
       
  3547   // another compliant java compiler.
       
  3548   Label notMethod;
       
  3549   __ testbit(flags, ConstantPoolCacheEntry::is_forced_virtual_shift);
       
  3550   __ z_brz(notMethod);
       
  3551   invokevirtual_helper(index, receiver, flags);
       
  3552   __ bind(notMethod);
       
  3553 
       
  3554   // Get receiver klass into klass - also a null check.
       
  3555   Register klass = flags;
       
  3556 
       
  3557   __ restore_locals();
       
  3558   __ load_klass(klass, receiver);
       
  3559 
       
  3560   // Profile this call.
       
  3561   __ profile_virtual_call(klass, Z_ARG2/*mdp*/, Z_ARG4/*scratch*/);
       
  3562 
       
  3563   NearLabel  no_such_interface, no_such_method;
       
  3564   Register   method = Z_tmp_2;
       
  3565 
       
   3566   // Save the itable index in Z_ARG4; it is needed in case
        
   3567   // InterpreterRuntime::throw_AbstractMethodError must be called below.
        
   3568   __ z_lgr(Z_ARG4, index);
        
   3569   // Also copy klass, because lookup_interface_method could modify
        
   3570   // its klass input.
       
  3571   __ z_lgr(Z_ARG2, klass);
       
  3572   __ lookup_interface_method(// inputs: rec. class, interface, itable index
       
  3573                               klass, interface, index,
       
  3574                               // outputs: method, scan temp. reg
       
  3575                               method, Z_tmp_2, Z_R1_scratch,
       
  3576                               no_such_interface);
       
  3577 
       
  3578   // Check for abstract method error.
       
  3579   // Note: This should be done more efficiently via a throw_abstract_method_error
       
  3580   // interpreter entry point and a conditional jump to it in case of a null
       
  3581   // method.
       
  3582   __ compareU64_and_branch(method, (intptr_t) 0,
       
  3583                             Assembler::bcondZero, no_such_method);
       
  3584 
       
  3585   __ profile_arguments_type(Z_ARG3, method, Z_ARG5, true);
       
  3586 
       
  3587   // Do the call.
       
  3588   __ jump_from_interpreted(method, Z_ARG5);
       
  3589   __ should_not_reach_here();
       
  3590 
       
  3591   // exception handling code follows...
       
  3592   // Note: Must restore interpreter registers to canonical
       
  3593   // state for exception handling to work correctly!
       
  3594 
       
  3595   __ bind(no_such_method);
       
  3596 
       
  3597   // Throw exception.
       
  3598   __ restore_bcp();      // Bcp must be correct for exception handler   (was destroyed).
       
  3599   __ restore_locals();   // Make sure locals pointer is correct as well (was destroyed).
       
  3600   // TK 2010-08-24: Call throw_AbstractMethodErrorByTemplateTable now with the
       
  3601   //                relevant information for generating a better error message
       
  3602   __ call_VM(noreg,
       
  3603               CAST_FROM_FN_PTR(address,
       
  3604                                InterpreterRuntime::throw_AbstractMethodError),
       
  3605               Z_ARG2, interface, Z_ARG4);
       
  3606   // The call_VM checks for exception, so we should never return here.
       
  3607   __ should_not_reach_here();
       
  3608 
       
  3609   __ bind(no_such_interface);
       
  3610 
       
  3611   // Throw exception.
       
  3612   __ restore_bcp();      // Bcp must be correct for exception handler   (was destroyed).
       
  3613   __ restore_locals();   // Make sure locals pointer is correct as well (was destroyed).
       
  3614   // TK 2010-08-24: Call throw_IncompatibleClassChangeErrorByTemplateTable now with the
       
  3615   //                relevant information for generating a better error message
       
  3616   __ call_VM(noreg,
       
  3617              CAST_FROM_FN_PTR(address,
       
  3618                               InterpreterRuntime::throw_IncompatibleClassChangeError),
       
  3619              Z_ARG2, interface);
       
  3620   // The call_VM checks for exception, so we should never return here.
       
  3621   __ should_not_reach_here();
       
  3622 
       
  3623   BLOCK_COMMENT("} invokeinterface");
       
  3624   return;
       
  3625 }
       

void TemplateTable::invokehandle(int byte_no) {
  transition(vtos, vtos);

  const Register method = Z_tmp_2;
  const Register recv   = Z_ARG5;
  const Register mtype  = Z_tmp_1;
  prepare_invoke(byte_no,
                 method, mtype,   // Get f2 method, f1 MethodType.
                 recv);
  __ verify_method_ptr(method);
  __ verify_oop(recv);
  __ null_check(recv);

  // Note: Mtype is already pushed (if necessary) by prepare_invoke.
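  // Illustrative example: invokehandle is not emitted by javac directly;
  // the JVM rewrites invokevirtual of the signature-polymorphic methods
  // MethodHandle.invoke/invokeExact into it. For instance,
  //   MethodHandle mh = ...;
  //   int r = (int) mh.invokeExact("s");
  // is executed through this entry, with recv holding mh.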
       
  // FIXME: profile the LambdaForm also.
  __ profile_final_call(Z_ARG2);
  __ profile_arguments_type(Z_ARG3, method, Z_ARG5, true);

  __ jump_from_interpreted(method, Z_ARG3);
}

void TemplateTable::invokedynamic(int byte_no) {
  transition(vtos, vtos);

  const Register Rmethod   = Z_tmp_2;
  const Register Rcallsite = Z_tmp_1;

  prepare_invoke(byte_no, Rmethod, Rcallsite);

  // Rmethod: MH.linkToCallSite method (from f2)
  // Rcallsite: CallSite object (from f1)

  // Note: Callsite is already pushed by prepare_invoke.
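  // Illustrative example: javac compiles a lambda expression such as
  //   Runnable r = () -> System.out.println("hi");
  // to an invokedynamic instruction whose bootstrap method is
  // LambdaMetafactory.metafactory. Once the call site is linked, this
  // template jumps through the linkToCallSite adapter method.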
       
  // TODO: should make a type profile for any invokedynamic that takes a ref argument.
  // Profile this call.
  __ profile_call(Z_ARG2);
  __ profile_arguments_type(Z_ARG2, Rmethod, Z_ARG5, false);
  __ jump_from_interpreted(Rmethod, Z_ARG2);
}

//-----------------------------------------------------------------------------
// Allocation

// Original comment on "allow_shared_alloc":
// Always go the slow path.
//  + Eliminated optimization within the template-based interpreter:
//    If an allocation is done within the interpreter without using
//    tlabs, the interpreter tries to do the allocation directly
//    on the heap.
//  + That means the profiling hooks are not considered and allocations
//    get lost for the profiling framework.
//  + However, we do not think that this optimization is really needed,
//    so we now always take the slow path through the VM in this case;
//    SPECjbb2005 shows no measurable performance degradation.
void TemplateTable::_new() {
  transition(vtos, atos);
  address prev_instr_address = NULL;
  Register tags  = Z_tmp_1;
  Register RallocatedObject = Z_tos;
  Register cpool = Z_ARG2;
  Register tmp = Z_ARG3; // RobjectFields==tmp and Rsize==offset must be a register pair.
  Register offset = Z_ARG4;
  Label slow_case;
  Label done;
  Label initialize_header;
  Label initialize_object; // Including clearing the fields.
  Label allocate_shared;

  BLOCK_COMMENT("TemplateTable::_new {");
  __ get_2_byte_integer_at_bcp(offset/*dest*/, 1, InterpreterMacroAssembler::Unsigned);
  __ get_cpool_and_tags(cpool, tags);
  // Make sure the class we're about to instantiate has been resolved.
  // This is done before loading the InstanceKlass to be consistent with the
  // order in which the constant pool is updated (see ConstantPool::klass_at_put).
  const int tags_offset = Array<u1>::base_offset_in_bytes();
  __ load_address(tmp, Address(tags, offset, tags_offset));
  __ z_cli(0, tmp, JVM_CONSTANT_Class);
  __ z_brne(slow_case);

  __ z_sllg(offset, offset, LogBytesPerWord); // Convert index to offset.
  // Get InstanceKlass.
  Register iklass = cpool;
  __ load_resolved_klass_at_offset(cpool, offset, iklass);

  // Make sure klass is fully initialized. (The finalizer / slow-path case is
  // caught below via the layout helper.)
  const int state_offset = in_bytes(InstanceKlass::init_state_offset());
  if (Immediate::is_uimm12(state_offset)) {
    __ z_cli(state_offset, iklass, InstanceKlass::fully_initialized);
  } else {
    __ z_cliy(state_offset, iklass, InstanceKlass::fully_initialized);
  }
  __ z_brne(slow_case);

  // Get instance_size in InstanceKlass (scaled to a count of bytes).
  Register Rsize = offset;
  const int mask = 1 << Klass::_lh_instance_slow_path_bit;
  __ z_llgf(Rsize, Address(iklass, Klass::layout_helper_offset()));
  __ z_tmll(Rsize, mask);
  __ z_btrue(slow_case);

  // Allocate the instance:
  // 1) Try to allocate in the TLAB.
  // 2) If that fails and the object is large, allocate in the shared Eden.
  // 3) If the above fails (or is not applicable), go to a slow case
  //    (creates a new TLAB, etc.).
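  // Sketch of the fast path generated below (pseudo code; on this platform
  // only step 1 is compiled in, because allow_shared_alloc is false):
  //
  //   new_top = tlab_top + instance_size;
  //   if (new_top > tlab_end) goto slow_case; // VM call; may refill the TLAB.
  //   tlab_top = new_top;                     // Bump-pointer allocation.
  //   if (!ZeroTLAB) { clear the fields; }
  //   initialize the header (mark word, klass);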
       
  // Always go the slow path. See comment above this template.
  const bool allow_shared_alloc = false;

  if (UseTLAB) {
    Register RoldTopValue = RallocatedObject;
    Register RnewTopValue = tmp;
    __ z_lg(RoldTopValue, Address(Z_thread, JavaThread::tlab_top_offset()));
    __ load_address(RnewTopValue, Address(RoldTopValue, Rsize));
    __ z_cg(RnewTopValue, Address(Z_thread, JavaThread::tlab_end_offset()));
    __ z_brh(allow_shared_alloc ? allocate_shared : slow_case);
    __ z_stg(RnewTopValue, Address(Z_thread, JavaThread::tlab_top_offset()));
    if (ZeroTLAB) {
      // The fields have already been cleared.
      __ z_bru(initialize_header);
    } else {
      // Initialize both the header and fields.
      if (allow_shared_alloc) {
        __ z_bru(initialize_object);
      } else {
        // Fall through to initialize_object, but assert that it is on the
        // fall-through path.
        prev_instr_address = __ pc();
      }
    }
  }

  if (allow_shared_alloc) {
    // Allocation in the shared Eden is not implemented, because the sapjvm
    // allocation trace does not allow it.
    Unimplemented();
  }

  if (UseTLAB) {
    Register RobjectFields = tmp;
    Register Rzero = Z_R1_scratch;

    assert(ZeroTLAB || prev_instr_address == __ pc(),
           "must not omit jump to initialize_object above, as it is not on the fall through path");
    __ clear_reg(Rzero, true /*whole reg*/, false); // Load 0L into Rzero. Don't set CC.

    // The object fields are initialized before the header. If the object size
    // is zero, go directly to the header initialization.
    __ bind(initialize_object);
    __ z_aghi(Rsize, (int)-sizeof(oopDesc)); // Subtract header size, set CC.
    __ z_bre(initialize_header);             // Jump if size of fields is zero.

    // Initialize object fields.
    // See the z/Architecture documentation for the MVCLE instruction.
    assert(RobjectFields->encoding() % 2 == 0, "RobjectFields must be an even register");
    assert(Rsize->encoding() == (RobjectFields->encoding()+1),
           "RobjectFields and Rsize must be a register pair");
    assert(Rzero->encoding() % 2 == 1, "Rzero must be an odd register");

    // Rzero (loaded with 0 above) serves as the source length, so MVCLE
    // copies nothing and fills the object with the padding value 0.
    __ add2reg(RobjectFields, sizeof(oopDesc), RallocatedObject);
    __ move_long_ext(RobjectFields, as_Register(Rzero->encoding() - 1), 0);
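    // MVCLE background (a summary; the Principles of Operation is the
    // authoritative reference): the instruction takes two even/odd register
    // pairs, each holding (address, length), for destination and source.
    // Here the destination pair is (RobjectFields, Rsize) and the source
    // length in Rzero is 0, so the destination range is filled entirely
    // with the padding byte 0x00, i.e. the object fields are zeroed.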
       

    // Initialize object header only.
    __ bind(initialize_header);
    if (UseBiasedLocking) {
      Register prototype = RobjectFields;
      __ z_lg(prototype, Address(iklass, Klass::prototype_header_offset()));
      __ z_stg(prototype, Address(RallocatedObject, oopDesc::mark_offset_in_bytes()));
    } else {
      __ store_const(Address(RallocatedObject, oopDesc::mark_offset_in_bytes()),
                     (long)markOopDesc::prototype());
    }

    __ store_klass_gap(Rzero, RallocatedObject);  // Zero klass gap for compressed oops.
    __ store_klass(iklass, RallocatedObject);     // Store klass last.

    {
      SkipIfEqual skip(_masm, &DTraceAllocProbes, false, Z_ARG5 /*scratch*/);
      // Trigger dtrace event for fastpath.
      __ push(atos); // Save the return value.
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), RallocatedObject);
      __ pop(atos); // Restore the return value.
    }
    __ z_bru(done);
  }

  // Slow case.
  __ bind(slow_case);
  __ get_constant_pool(Z_ARG2);
  __ get_2_byte_integer_at_bcp(Z_ARG3/*dest*/, 1, InterpreterMacroAssembler::Unsigned);
  call_VM(Z_tos, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), Z_ARG2, Z_ARG3);
  __ verify_oop(Z_tos);

  // Continue.
  __ bind(done);

  BLOCK_COMMENT("} TemplateTable::_new");
}

void TemplateTable::newarray() {
  transition(itos, atos);

  // Call runtime.
  __ z_llgc(Z_ARG2, at_bcp(1));   // type
  __ z_lgfr(Z_ARG3, Z_tos);       // size
  call_VM(Z_RET,
          CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          Z_ARG2, Z_ARG3);
}

void TemplateTable::anewarray() {
  transition(itos, atos);
  __ get_2_byte_integer_at_bcp(Z_ARG3, 1, InterpreterMacroAssembler::Unsigned);
  __ get_constant_pool(Z_ARG2);
  __ z_lgfr(Z_ARG4, Z_tos);
  call_VM(Z_tos, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          Z_ARG2, Z_ARG3, Z_ARG4);
}

void TemplateTable::arraylength() {
  transition(atos, itos);

  int offset = arrayOopDesc::length_offset_in_bytes();

  __ null_check(Z_tos, Z_R0_scratch, offset);
  __ mem2reg_opt(Z_tos, Address(Z_tos, offset), false);
}

void TemplateTable::checkcast() {
  transition(atos, atos);

  NearLabel done, is_null, ok_is_subtype, quicked, resolved;

  BLOCK_COMMENT("checkcast {");
  // If object is NULL, we are almost done.
  __ compareU64_and_branch(Z_tos, (intptr_t) 0, Assembler::bcondZero, is_null);

  // Get cpool & tags, and the constant pool index.
  Register cpool = Z_tmp_1;
  Register tags = Z_tmp_2;
  Register index = Z_ARG5;

  __ get_cpool_and_tags(cpool, tags);
  __ get_2_byte_integer_at_bcp(index, 1, InterpreterMacroAssembler::Unsigned);
  // See if the bytecode has already been quickened.
  // Note: For CLI, we would have to add the index to the tags pointer first,
  // thus load and compare in a "classic" manner.
  __ z_llgc(Z_R0_scratch,
            Address(tags, index, Array<u1>::base_offset_in_bytes()));
  __ compareU64_and_branch(Z_R0_scratch, JVM_CONSTANT_Class,
                           Assembler::bcondEqual, quicked);
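  // Illustrative example: the first execution of a cast such as
  //   String s = (String) obj;   // checkcast java/lang/String
  // typically finds a tag != JVM_CONSTANT_Class, so the class is resolved
  // via the VM call below. Subsequent executions take the quickened path
  // and load the resolved klass directly from the constant pool.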
       
  __ push(atos); // Save receiver for result, and for GC.
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
  __ get_vm_result_2(Z_tos);

  Register receiver = Z_ARG4;
  Register klass    = Z_tos;
  Register subklass = Z_ARG5;

  __ pop_ptr(receiver); // Restore receiver.
  __ z_bru(resolved);

  // Get superklass in klass and subklass in subklass.
  __ bind(quicked);

  __ z_lgr(Z_ARG4, Z_tos);  // Save receiver.
  __ z_sllg(index, index, LogBytesPerWord);  // index2bytes for addressing
  __ load_resolved_klass_at_offset(cpool, index, klass);

  __ bind(resolved);

  __ load_klass(subklass, receiver);

  // Generate subtype check. Object in receiver.
  // Superklass in klass. Subklass in subklass.
  __ gen_subtype_check(subklass, klass, Z_ARG3, Z_tmp_1, ok_is_subtype);

  // Come here on failure.
  __ push_ptr(receiver);
  // Object is at TOS, target klass oop expected in Z_tos by convention.
  __ z_brul((address) Interpreter::_throw_ClassCastException_entry);

  // Come here on success.
  __ bind(ok_is_subtype);

  __ z_lgr(Z_tos, receiver); // Restore object.

  // Collect counts on whether this test sees NULLs a lot or not.
  if (ProfileInterpreter) {
    __ z_bru(done);
    __ bind(is_null);
    __ profile_null_seen(Z_tmp_1);
  } else {
    __ bind(is_null);   // Same as 'done'.
  }

  __ bind(done);
  BLOCK_COMMENT("} checkcast");
}

void TemplateTable::instanceof() {
  transition(atos, itos);

  NearLabel done, is_null, ok_is_subtype, quicked, resolved;

  BLOCK_COMMENT("instanceof {");
  // If object is NULL, we are almost done.
  __ compareU64_and_branch(Z_tos, (intptr_t) 0, Assembler::bcondZero, is_null);

  // Get cpool & tags, and the constant pool index.
  Register cpool = Z_tmp_1;
  Register tags = Z_tmp_2;
  Register index = Z_ARG5;

  __ get_cpool_and_tags(cpool, tags);
  __ get_2_byte_integer_at_bcp(index, 1, InterpreterMacroAssembler::Unsigned);
  // See if the bytecode has already been quickened.
  // Note: For CLI, we would have to add the index to the tags pointer first,
  // thus load and compare in a "classic" manner.
  __ z_llgc(Z_R0_scratch,
            Address(tags, index, Array<u1>::base_offset_in_bytes()));
  __ compareU64_and_branch(Z_R0_scratch, JVM_CONSTANT_Class, Assembler::bcondEqual, quicked);

  __ push(atos); // Save receiver for result, and for GC.
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
  __ get_vm_result_2(Z_tos);

  Register receiver = Z_tmp_2;
  Register klass    = Z_tos;
  Register subklass = Z_tmp_2; // Deliberately aliases receiver; load_klass below consumes it.

  __ pop_ptr(receiver); // Restore receiver.
  __ verify_oop(receiver);
  __ load_klass(subklass, subklass);
  __ z_bru(resolved);

  // Get superklass in klass and subklass in subklass.
  __ bind(quicked);

  __ load_klass(subklass, Z_tos);
  __ z_sllg(index, index, LogBytesPerWord);  // index2bytes for addressing
  __ load_resolved_klass_at_offset(cpool, index, klass);

  __ bind(resolved);

  // Generate subtype check.
  // Superklass in klass. Subklass in subklass.
  __ gen_subtype_check(subklass, klass, Z_ARG4, Z_ARG5, ok_is_subtype);

  // Come here on failure.
  __ clear_reg(Z_tos, true, false);
  __ z_bru(done);

  // Come here on success.
  __ bind(ok_is_subtype);
  __ load_const_optimized(Z_tos, 1);

  // Collect counts on whether this test sees NULLs a lot or not.
  if (ProfileInterpreter) {
    __ z_bru(done);
    __ bind(is_null);
    __ profile_null_seen(Z_tmp_1);
  } else {
    __ bind(is_null);   // Same as 'done'.
  }

  __ bind(done);
  // tos = 0: obj == NULL or  obj is not an instance of the specified klass
  // tos = 1: obj != NULL and obj is     an instance of the specified klass
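  // Illustrative example:
  //   boolean b = (obj instanceof String);
  // leaves 1 in Z_tos when obj is a non-null String, and 0 otherwise.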
       
  BLOCK_COMMENT("} instanceof");
}

//-----------------------------------------------------------------------------
// Breakpoints
void TemplateTable::_breakpoint() {

  // Note: We get here even if we are single stepping.
  // Jbug insists on setting breakpoints at every bytecode
  // even if we are in single step mode.

  transition(vtos, vtos);

  // Get the unpatched bytecode.
  __ get_method(Z_ARG2);
  __ call_VM(noreg,
             CAST_FROM_FN_PTR(address, InterpreterRuntime::get_original_bytecode_at),
             Z_ARG2, Z_bcp);
  // Save the result to a register that is preserved over C-function calls.
  __ z_lgr(Z_tmp_1, Z_RET);

  // Post the breakpoint event.
  __ get_method(Z_ARG2);
  __ call_VM(noreg,
             CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint),
             Z_ARG2, Z_bcp);

  // Must restore the bytecode, because call_VM destroys Z_bytecode.
  __ z_lgr(Z_bytecode, Z_tmp_1);

  // Complete the execution of the original bytecode.
  __ dispatch_only_normal(vtos);
}


// Exceptions

void TemplateTable::athrow() {
  transition(atos, vtos);
  __ null_check(Z_tos);
  __ load_absolute_address(Z_ARG2, Interpreter::throw_exception_entry());
  __ z_br(Z_ARG2);
}

// Synchronization
//
// Note: monitorenter & exit are symmetric routines, which is reflected
//       in the assembly code structure as well.
//
// Stack layout:
//
//               callers_sp        <- Z_SP (callers_sp == Z_fp (own fp))
//               return_pc
//               [rest of ABI_160]
//              /slot o:   free
//             / ...       free
//       oper. | slot n+1: free    <- Z_esp points to first free slot
//       stack | slot n:   val                      caches IJAVA_STATE.esp
//             | ...
//              \slot 0:   val
//              /slot m            <- IJAVA_STATE.monitors = monitor block top
//             | ...
//     monitors| slot 2
//             | slot 1
//              \slot 0
//              /slot l            <- monitor block bot
// ijava_state | ...
//             | slot 2
//              \slot 0
//                                 <- Z_fp
void TemplateTable::monitorenter() {
  transition(atos, vtos);

  BLOCK_COMMENT("monitorenter {");

  // Check for NULL object.
  __ null_check(Z_tos);
  const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
  NearLabel allocated;
  // Initialize entry pointer.
  const Register Rfree_slot = Z_tmp_1;
  __ clear_reg(Rfree_slot, true, false); // Points to free slot or NULL. Don't set CC.

  // Find a free slot in the monitor block from top to bot (result in Rfree_slot).
  {
    const Register Rcurr_monitor = Z_ARG2;
    const Register Rbot = Z_ARG3; // Points to word under bottom of monitor block.
    const Register Rlocked_obj = Z_ARG4;
    NearLabel loop, exit, not_free;
    // Starting with top-most entry.
    __ get_monitors(Rcurr_monitor); // Rcurr_monitor = IJAVA_STATE.monitors
    __ add2reg(Rbot, -frame::z_ijava_state_size, Z_fp);

#ifdef ASSERT
    address reentry = NULL;
    { NearLabel ok;
      __ compareU64_and_branch(Rcurr_monitor, Rbot, Assembler::bcondNotHigh, ok);
      reentry = __ stop_chain_static(reentry, "IJAVA_STATE.monitors points below monitor block bottom");
      __ bind(ok);
    }
    { NearLabel ok;
      __ compareU64_and_branch(Rcurr_monitor, Z_esp, Assembler::bcondHigh, ok);
      reentry = __ stop_chain_static(reentry, "IJAVA_STATE.monitors above Z_esp");
      __ bind(ok);
    }
#endif

    // Check if bottom reached, i.e. if there is at least one monitor.
    __ compareU64_and_branch(Rcurr_monitor, Rbot, Assembler::bcondEqual, exit);

    __ bind(loop);
    // Check if current entry is used.
    __ load_and_test_long(Rlocked_obj, Address(Rcurr_monitor, BasicObjectLock::obj_offset_in_bytes()));
    __ z_brne(not_free);
    // If not used then remember entry in Rfree_slot.
    __ z_lgr(Rfree_slot, Rcurr_monitor);
    __ bind(not_free);
    // Exit if current entry is for the same object; this guarantees that the
    // new monitor used for a recursive lock is above the older one.
    __ compareU64_and_branch(Rlocked_obj, Z_tos, Assembler::bcondEqual, exit);
    // Otherwise advance to the next entry.
    __ add2reg(Rcurr_monitor, entry_size);
    // Loop again if the bottom has not been reached yet.
    __ compareU64_and_branch(Rcurr_monitor, Rbot, Assembler::bcondNotEqual, loop);
    __ bind(exit);
  }
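  // Illustrative example: recursive locking creates one slot per nesting
  // level. For
  //   synchronized (o) {
  //     synchronized (o) { ... }   // Second monitorenter on the same o.
  //   }
  // the inner monitorenter gets a slot above the outer one, so the matching
  // monitorexit always finds the innermost slot for o first.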
       
  // Rfree_slot != NULL -> found one.
  __ compareU64_and_branch(Rfree_slot, (intptr_t)0L, Assembler::bcondNotEqual, allocated);

  // Allocate one if there's no free slot.
  __ add_monitor_to_stack(false, Z_ARG3, Z_ARG4, Z_ARG5);
  __ get_monitors(Rfree_slot);

  // Rfree_slot: points to monitor entry.
  __ bind(allocated);

  // Increment bcp to point to the next bytecode, so exception
  // handling for async. exceptions works correctly.
  // The object has already been popped from the stack, so the
  // expression stack looks correct.
  __ add2reg(Z_bcp, 1, Z_bcp);

  // Store object.
  __ z_stg(Z_tos, BasicObjectLock::obj_offset_in_bytes(), Rfree_slot);
  __ lock_object(Rfree_slot, Z_tos);

  // Check to make sure this monitor doesn't cause stack overflow after locking.
  __ save_bcp();  // In case of exception.
  __ generate_stack_overflow_check(0);

  // The bcp has already been incremented. Just need to dispatch to
  // the next instruction.
  __ dispatch_next(vtos);

  BLOCK_COMMENT("} monitorenter");
}


void TemplateTable::monitorexit() {
  transition(atos, vtos);

  BLOCK_COMMENT("monitorexit {");

  // Check for NULL object.
  __ null_check(Z_tos);

  NearLabel found, not_found;
  const Register Rcurr_monitor = Z_ARG2;

  // Find matching slot.
  {
    const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
    NearLabel entry, loop;

    const Register Rbot = Z_ARG3; // Points to word under bottom of monitor block.
    const Register Rlocked_obj = Z_ARG4;
    // Starting with top-most entry.
    __ get_monitors(Rcurr_monitor); // Rcurr_monitor = IJAVA_STATE.monitors
    __ add2reg(Rbot, -frame::z_ijava_state_size, Z_fp);

#ifdef ASSERT
    address reentry = NULL;
    { NearLabel ok;
      __ compareU64_and_branch(Rcurr_monitor, Rbot, Assembler::bcondNotHigh, ok);
      reentry = __ stop_chain_static(reentry, "IJAVA_STATE.monitors points below monitor block bottom");
      __ bind(ok);
    }
    { NearLabel ok;
      __ compareU64_and_branch(Rcurr_monitor, Z_esp, Assembler::bcondHigh, ok);
      reentry = __ stop_chain_static(reentry, "IJAVA_STATE.monitors above Z_esp");
      __ bind(ok);
    }
#endif

    // Check if bottom reached, i.e. if there is at least one monitor.
    __ compareU64_and_branch(Rcurr_monitor, Rbot, Assembler::bcondEqual, not_found);

    __ bind(loop);
    // Check if current entry is for the same object.
    __ z_lg(Rlocked_obj, Address(Rcurr_monitor, BasicObjectLock::obj_offset_in_bytes()));
    // If same object then stop searching.
    __ compareU64_and_branch(Rlocked_obj, Z_tos, Assembler::bcondEqual, found);
    // Otherwise advance to the next entry.
    __ add2reg(Rcurr_monitor, entry_size);
    // Loop again if the bottom has not been reached yet.
    __ compareU64_and_branch(Rcurr_monitor, Rbot, Assembler::bcondNotEqual, loop);
  }

  __ bind(not_found);
  // Error handling. Unlocking was not block-structured.
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                   InterpreterRuntime::throw_illegal_monitor_state_exception));
  __ should_not_reach_here();

  __ bind(found);
  __ push_ptr(Z_tos); // Make sure object is on stack (contract with oopMaps).
  __ unlock_object(Rcurr_monitor, Z_tos);
  __ pop_ptr(Z_tos); // Discard object.
  BLOCK_COMMENT("} monitorexit");
}

// Wide instructions
void TemplateTable::wide() {
  transition(vtos, vtos);

  __ z_llgc(Z_R1_scratch, at_bcp(1));
  __ z_sllg(Z_R1_scratch, Z_R1_scratch, LogBytesPerWord);
  __ load_absolute_address(Z_tmp_1, (address) Interpreter::_wentry_point);
  __ mem2reg_opt(Z_tmp_1, Address(Z_tmp_1, Z_R1_scratch));
  __ z_br(Z_tmp_1);
  // Note: the bcp increment step is part of the individual wide
  // bytecode implementations.
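  // Illustrative example: the wide prefix widens the operand of the
  // following bytecode, e.g.
  //   wide iload 0x1234   // 16-bit local variable index instead of 8-bit.
  // The byte at bcp+1 selects the widened bytecode, and the dispatch above
  // jumps to its dedicated entry in Interpreter::_wentry_point.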
       
}

// Multi arrays
void TemplateTable::multianewarray() {
  transition(vtos, atos);

  __ z_llgc(Z_tmp_1, at_bcp(3)); // Get number of dimensions.
  // Slot count to byte offset.
  __ z_sllg(Z_tmp_1, Z_tmp_1, Interpreter::logStackElementSize);
  // Z_esp points past the last dimension, so set Z_ARG2 to the first_dim address.
  __ load_address(Z_ARG2, Address(Z_esp, Z_tmp_1));
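  // Illustrative example: for
  //   int[][][] a = new int[2][3][4];   // multianewarray with 3 dimensions
  // the dimensions 2, 3, 4 are on the expression stack with the last one
  // on top; Z_ARG2 now points to the first dimension.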
       
  call_VM(Z_RET,
          CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray),
          Z_ARG2);
  // Pop dimensions from expression stack.
  __ z_agr(Z_esp, Z_tmp_1);
}