hotspot/src/cpu/aarch64/vm/aarch64.ad
changeset 38286 0ddb6f84e138
parent 38241 32eab2eb41fd
child 38713 4a16e9ea88a0
comparing 38285:20b85a0ba796 with 38286:0ddb6f84e138
   994 // definitions necessary in the rest of the architecture description
   994 // definitions necessary in the rest of the architecture description
   995 
   995 
   996 source_hpp %{
   996 source_hpp %{
   997 
   997 
   998 #include "gc/shared/cardTableModRefBS.hpp"
   998 #include "gc/shared/cardTableModRefBS.hpp"
       
   999 #include "opto/addnode.hpp"
   999 
  1000 
  1000 class CallStubImpl {
  1001 class CallStubImpl {
  1001 
  1002 
  1002   //--------------------------------------------------------------
  1003   //--------------------------------------------------------------
  1003   //---<  Used for optimization in Compile::shorten_branches  >---
  1004   //---<  Used for optimization in Compile::shorten_branches  >---
  1059   // predicate controlling translation of CompareAndSwapX
  1060   // predicate controlling translation of CompareAndSwapX
  1060   bool needs_acquiring_load_exclusive(const Node *load);
  1061   bool needs_acquiring_load_exclusive(const Node *load);
  1061 
  1062 
  1062   // predicate controlling translation of StoreCM
  1063   // predicate controlling translation of StoreCM
  1063   bool unnecessary_storestore(const Node *storecm);
  1064   bool unnecessary_storestore(const Node *storecm);
       
  1065 
       
  1066   // predicate controlling addressing modes
       
  1067   bool size_fits_all_mem_uses(AddPNode* addp, int shift);
  1064 %}
  1068 %}
  1065 
  1069 
  1066 source %{
  1070 source %{
  1067 
  1071 
   3068   // Optimization of volatile gets and puts
   3072   // Optimization of volatile gets and puts
  3447 }
  3451 }
  3448 
  3452 
  3449 // Does the CPU require late expand (see block.cpp for description of late expand)?
  3453 // Does the CPU require late expand (see block.cpp for description of late expand)?
  3450 const bool Matcher::require_postalloc_expand = false;
  3454 const bool Matcher::require_postalloc_expand = false;
  3451 
  3455 
  3452 // Should the Matcher clone shifts on addressing modes, expecting them
       
  3453 // to be subsumed into complex addressing expressions or compute them
       
  3454 // into registers?  True for Intel but false for most RISCs
       
  3455 const bool Matcher::clone_shift_expressions = false;
       
  3456 
       
  3457 // Do we need to mask the count passed to shift instructions or does
  3456 // Do we need to mask the count passed to shift instructions or does
  3458 // the cpu only look at the lower 5/6 bits anyway?
  3457 // the cpu only look at the lower 5/6 bits anyway?
  3459 const bool Matcher::need_masked_shift_count = false;
  3458 const bool Matcher::need_masked_shift_count = false;
  3460 
  3459 
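A minimal sketch of why the flag above can stay false (plain C++ for illustration, not HotSpot code; java_shl is a made-up name): Java already defines long shifts as x << (n & 63), and AArch64's variable shift instructions (lslv, lsrv, asrv) use only the low five or six bits of the count register, so the mask costs no extra instruction.

#include <cstdint>

// Java semantics for a long shift: the count is masked to six bits.
// AArch64's lslv reduces the count modulo the register width anyway,
// so no separate AND has to be emitted for this mask.
int64_t java_shl(int64_t x, int32_t n) {
  return x << (n & 63);
}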
  3461 // This affects two different things:
  3460 // This affects two different things:
  3570 
  3569 
  3571 const RegMask Matcher::method_handle_invoke_SP_save_mask() {
  3570 const RegMask Matcher::method_handle_invoke_SP_save_mask() {
  3572   return FP_REG_mask();
  3571   return FP_REG_mask();
  3573 }
  3572 }
  3574 
  3573 
       
  3574 bool size_fits_all_mem_uses(AddPNode* addp, int shift) {
       
  3575   for (DUIterator_Fast imax, i = addp->fast_outs(imax); i < imax; i++) {
       
  3576     Node* u = addp->fast_out(i);
       
  3577     if (u->is_Mem()) {
       
  3578       int opsize = u->as_Mem()->memory_size();
       
  3579       assert(opsize > 0, "unexpected memory operand size");
       
  3580       if (u->as_Mem()->memory_size() != (1<<shift)) {
       
  3581         return false;
       
  3582       }
       
  3583     }
       
  3584   }
       
  3585   return true;
       
  3586 }
       
  3587 
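A minimal sketch of the invariant the helper above checks (standalone C++, not HotSpot code; MemUse and shift_matches_all_uses are made-up names): an AArch64 register-offset access such as ldr x0, [base, index, lsl #3] only permits a shift equal to log2 of the access size, so the LShiftL can be folded into the addressing mode only if every memory user of the address is exactly 1 << shift bytes wide.

#include <vector>

// Each memory user of the AddP, reduced to its access width in bytes.
struct MemUse { int size_in_bytes; };

// Mirrors size_fits_all_mem_uses: one user with a different width is
// enough to reject folding the shift into the operand.
static bool shift_matches_all_uses(const std::vector<MemUse>& uses, int shift) {
  for (const MemUse& u : uses) {
    if (u.size_in_bytes != (1 << shift)) {
      return false;   // e.g. a 4-byte load reached through an lsl #3 address
    }
  }
  return true;
}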
  3575 const bool Matcher::convi2l_type_required = false;
  3588 const bool Matcher::convi2l_type_required = false;
       
  3589 
       
  3590 // Should the Matcher clone shifts on addressing modes, expecting them
       
  3591 // to be subsumed into complex addressing expressions or compute them
       
  3592 // into registers?
       
  3593 bool Matcher::clone_address_expressions(AddPNode* m, Matcher::MStack& mstack, VectorSet& address_visited) {
       
  3594   if (clone_base_plus_offset_address(m, mstack, address_visited)) {
       
  3595     return true;
       
  3596   }
       
  3597 
       
  3598   Node *off = m->in(AddPNode::Offset);
       
  3599   if (off->Opcode() == Op_LShiftL && off->in(2)->is_Con() &&
       
  3600       size_fits_all_mem_uses(m, off->in(2)->get_int()) &&
       
  3601       // Are there other uses besides address expressions?
       
  3602       !is_visited(off)) {
       
  3603     address_visited.set(off->_idx); // Flag as address_visited
       
  3604     mstack.push(off->in(2), Visit);
       
  3605     Node *conv = off->in(1);
       
  3606     if (conv->Opcode() == Op_ConvI2L &&
       
  3607         // Are there other uses besides address expressions?
       
  3608         !is_visited(conv)) {
       
  3609       address_visited.set(conv->_idx); // Flag as address_visited
       
  3610       mstack.push(conv->in(1), Pre_Visit);
       
  3611     } else {
       
  3612       mstack.push(conv, Pre_Visit);
       
  3613     }
       
  3614     address_visited.test_set(m->_idx); // Flag as address_visited
       
  3615     mstack.push(m->in(AddPNode::Address), Pre_Visit);
       
  3616     mstack.push(m->in(AddPNode::Base), Pre_Visit);
       
  3617     return true;
       
  3618   } else if (off->Opcode() == Op_ConvI2L &&
       
  3619              // Are there other uses besides address expressions?
       
  3620              !is_visited(off)) {
       
  3621     address_visited.test_set(m->_idx); // Flag as address_visited
       
  3622     address_visited.set(off->_idx); // Flag as address_visited
       
  3623     mstack.push(off->in(1), Pre_Visit);
       
  3624     mstack.push(m->in(AddPNode::Address), Pre_Visit);
       
  3625     mstack.push(m->in(AddPNode::Base), Pre_Visit);
       
  3626     return true;
       
  3627   }
       
  3628   return false;
       
  3629 }
       
  3630 
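A minimal sketch of the address shape this routine clones (plain C++ for illustration, not HotSpot code; load_elem is a made-up name): indexing with an int produces base + ((long)i << log2(element size)), i.e. an ideal-graph subtree of the form (AddP base (LShiftL (ConvI2L i) 3)) for an 8-byte element. Cloning the conversion and the shift into each memory user lets operands such as indIndexScaledI2L below match the whole subtree as one sign-extending, scaled register-offset access, roughly ldr x0, [x1, w2, sxtw #3], instead of first materializing the scaled index in a register.

#include <cstdint>

// The address of base[i] is base + ((int64_t)i << 3); with the shift and
// the i2l conversion cloned into the load, the matcher can emit a single
// scaled, sign-extended register-offset load for it.
int64_t load_elem(const int64_t* base, int32_t i) {
  return base[i];
}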
       
  3631 // Transform:
       
  3632 // (AddP base (AddP base address (LShiftL index con)) offset)
       
  3633 // into:
       
   3634 // (AddP base (AddP base address offset) (LShiftL index con))
       
  3635 // to take full advantage of ARM's addressing modes
       
  3636 void Compile::reshape_address(AddPNode* addp) {
       
  3637   Node *addr = addp->in(AddPNode::Address);
       
  3638   if (addr->is_AddP() && addr->in(AddPNode::Base) == addp->in(AddPNode::Base)) {
       
  3639     const AddPNode *addp2 = addr->as_AddP();
       
  3640     if ((addp2->in(AddPNode::Offset)->Opcode() == Op_LShiftL &&
       
  3641          addp2->in(AddPNode::Offset)->in(2)->is_Con() &&
       
  3642          size_fits_all_mem_uses(addp, addp2->in(AddPNode::Offset)->in(2)->get_int())) ||
       
  3643         addp2->in(AddPNode::Offset)->Opcode() == Op_ConvI2L) {
       
  3644 
       
  3645       // Any use that can't embed the address computation?
       
  3646       for (DUIterator_Fast imax, i = addp->fast_outs(imax); i < imax; i++) {
       
  3647         Node* u = addp->fast_out(i);
       
  3648         if (!u->is_Mem() || u->is_LoadVector() || u->is_StoreVector() || u->Opcode() == Op_StoreCM) {
       
  3649           return;
       
  3650         }
       
  3651       }
       
  3652       
       
  3653       Node* off = addp->in(AddPNode::Offset);
       
  3654       Node* addr2 = addp2->in(AddPNode::Address);
       
  3655       Node* base = addp->in(AddPNode::Base);
       
  3656       
       
  3657       Node* new_addr = NULL;
       
  3658       // Check whether the graph already has the new AddP we need
       
  3659       // before we create one (no GVN available here).
       
  3660       for (DUIterator_Fast imax, i = addr2->fast_outs(imax); i < imax; i++) {
       
  3661         Node* u = addr2->fast_out(i);
       
  3662         if (u->is_AddP() &&
       
  3663             u->in(AddPNode::Base) == base &&
       
  3664             u->in(AddPNode::Address) == addr2 &&
       
  3665             u->in(AddPNode::Offset) == off) {
       
  3666           new_addr = u;
       
  3667           break;
       
  3668         }
       
  3669       }
       
  3670       
       
  3671       if (new_addr == NULL) {
       
  3672         new_addr = new AddPNode(base, addr2, off);
       
  3673       }
       
  3674       Node* new_off = addp2->in(AddPNode::Offset);
       
  3675       addp->set_req(AddPNode::Address, new_addr);
       
  3676       if (addr->outcnt() == 0) {
       
  3677         addr->disconnect_inputs(NULL, this);
       
  3678       }
       
  3679       addp->set_req(AddPNode::Offset, new_off);
       
  3680       if (off->outcnt() == 0) {
       
  3681         off->disconnect_inputs(NULL, this);
       
  3682       }
       
  3683     }
       
  3684   }
       
  3685 }
  3576 
  3686 
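A minimal sketch of the re-association performed by reshape_address (plain C++ for illustration, not HotSpot code; element_addr is a made-up name): register-offset forms such as ldr x0, [base, index, lsl #3] carry no immediate displacement, so a constant offset has to be pre-added into the base, leaving the scaled index as the outermost term of the address.

#include <cstdint>

// before reshaping:  base + ((index << 3) + disp)
// after reshaping:  (base + disp) + (index << 3)
// Both compute the same address; only the second shape fits a scaled
// register-offset access directly.
const int64_t* element_addr(const char* base, int64_t index, int64_t disp) {
  return reinterpret_cast<const int64_t*>(base + disp) + index;
}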
  3577 // helper for encoding java_to_runtime calls on sim
  3687 // helper for encoding java_to_runtime calls on sim
  3578 //
  3688 //
  3579 // this is needed to compute the extra arguments required when
  3689 // this is needed to compute the extra arguments required when
  3580 // planting a call to the simulator blrt instruction. the TypeFunc
  3690 // planting a call to the simulator blrt instruction. the TypeFunc
  3641 
  3751 
  3642     // Hooboy, this is fugly.  We need a way to communicate to the
  3752     // Hooboy, this is fugly.  We need a way to communicate to the
  3643     // encoder that the index needs to be sign extended, so we have to
  3753     // encoder that the index needs to be sign extended, so we have to
  3644     // enumerate all the cases.
  3754     // enumerate all the cases.
  3645     switch (opcode) {
  3755     switch (opcode) {
  3646     case INDINDEXSCALEDOFFSETI2L:
       
  3647     case INDINDEXSCALEDI2L:
  3756     case INDINDEXSCALEDI2L:
  3648     case INDINDEXSCALEDOFFSETI2LN:
       
  3649     case INDINDEXSCALEDI2LN:
  3757     case INDINDEXSCALEDI2LN:
  3650     case INDINDEXOFFSETI2L:
  3758     case INDINDEXI2L:
  3651     case INDINDEXOFFSETI2LN:
  3759     case INDINDEXI2LN:
  3652       scale = Address::sxtw(size);
  3760       scale = Address::sxtw(size);
  3653       break;
  3761       break;
  3654     default:
  3762     default:
  3655       scale = Address::lsl(size);
  3763       scale = Address::lsl(size);
  3656     }
  3764     }
  3657 
  3765 
  3658     if (index == -1) {
  3766     if (index == -1) {
  3659       (masm.*insn)(reg, Address(base, disp));
  3767       (masm.*insn)(reg, Address(base, disp));
  3660     } else {
  3768     } else {
  3661       if (disp == 0) {
  3769       assert(disp == 0, "unsupported address mode: disp = %d", disp);
  3662         (masm.*insn)(reg, Address(base, as_Register(index), scale));
  3770       (masm.*insn)(reg, Address(base, as_Register(index), scale));
  3663       } else {
       
  3664         masm.lea(rscratch1, Address(base, disp));
       
  3665         (masm.*insn)(reg, Address(rscratch1, as_Register(index), scale));
       
  3666       }
       
  3667     }
  3771     }
  3668   }
  3772   }
  3669 
  3773 
  3670   static void loadStore(MacroAssembler masm, mem_float_insn insn,
  3774   static void loadStore(MacroAssembler masm, mem_float_insn insn,
  3671                          FloatRegister reg, int opcode,
  3775                          FloatRegister reg, int opcode,
  3672                          Register base, int index, int size, int disp)
  3776                          Register base, int index, int size, int disp)
  3673   {
  3777   {
  3674     Address::extend scale;
  3778     Address::extend scale;
  3675 
  3779 
  3676     switch (opcode) {
  3780     switch (opcode) {
  3677     case INDINDEXSCALEDOFFSETI2L:
       
  3678     case INDINDEXSCALEDI2L:
  3781     case INDINDEXSCALEDI2L:
  3679     case INDINDEXSCALEDOFFSETI2LN:
       
  3680     case INDINDEXSCALEDI2LN:
  3782     case INDINDEXSCALEDI2LN:
  3681       scale = Address::sxtw(size);
  3783       scale = Address::sxtw(size);
  3682       break;
  3784       break;
  3683     default:
  3785     default:
  3684       scale = Address::lsl(size);
  3786       scale = Address::lsl(size);
  3685     }
  3787     }
  3686 
  3788 
  3687      if (index == -1) {
  3789      if (index == -1) {
  3688       (masm.*insn)(reg, Address(base, disp));
  3790       (masm.*insn)(reg, Address(base, disp));
  3689     } else {
  3791     } else {
  3690       if (disp == 0) {
  3792       assert(disp == 0, "unsupported address mode: disp = %d", disp);
  3691         (masm.*insn)(reg, Address(base, as_Register(index), scale));
  3793       (masm.*insn)(reg, Address(base, as_Register(index), scale));
  3692       } else {
       
  3693         masm.lea(rscratch1, Address(base, disp));
       
  3694         (masm.*insn)(reg, Address(rscratch1, as_Register(index), scale));
       
  3695       }
       
  3696     }
  3794     }
  3697   }
  3795   }
  3698 
  3796 
  3699   static void loadStore(MacroAssembler masm, mem_vector_insn insn,
  3797   static void loadStore(MacroAssembler masm, mem_vector_insn insn,
  3700                          FloatRegister reg, MacroAssembler::SIMD_RegVariant T,
  3798                          FloatRegister reg, MacroAssembler::SIMD_RegVariant T,
  6104     scale(0x0);
  6202     scale(0x0);
  6105     disp(0x0);
  6203     disp(0x0);
  6106   %}
  6204   %}
  6107 %}
  6205 %}
  6108 
  6206 
  6109 operand indIndexScaledOffsetI(iRegP reg, iRegL lreg, immIScale scale, immIU12 off)
  6207 operand indIndexScaledI2L(iRegP reg, iRegI ireg, immIScale scale)
  6110 %{
  6208 %{
  6111   constraint(ALLOC_IN_RC(ptr_reg));
  6209   constraint(ALLOC_IN_RC(ptr_reg));
  6112   match(AddP (AddP reg (LShiftL lreg scale)) off);
  6210   predicate(size_fits_all_mem_uses(n->as_AddP(), n->in(AddPNode::Offset)->in(2)->get_int()));
  6113   op_cost(INSN_COST);
       
  6114   format %{ "$reg, $lreg lsl($scale), $off" %}
       
  6115   interface(MEMORY_INTER) %{
       
  6116     base($reg);
       
  6117     index($lreg);
       
  6118     scale($scale);
       
  6119     disp($off);
       
  6120   %}
       
  6121 %}
       
  6122 
       
  6123 operand indIndexScaledOffsetL(iRegP reg, iRegL lreg, immIScale scale, immLU12 off)
       
  6124 %{
       
  6125   constraint(ALLOC_IN_RC(ptr_reg));
       
  6126   match(AddP (AddP reg (LShiftL lreg scale)) off);
       
  6127   op_cost(INSN_COST);
       
  6128   format %{ "$reg, $lreg lsl($scale), $off" %}
       
  6129   interface(MEMORY_INTER) %{
       
  6130     base($reg);
       
  6131     index($lreg);
       
  6132     scale($scale);
       
  6133     disp($off);
       
  6134   %}
       
  6135 %}
       
  6136 
       
  6137 operand indIndexOffsetI2L(iRegP reg, iRegI ireg, immLU12 off)
       
  6138 %{
       
  6139   constraint(ALLOC_IN_RC(ptr_reg));
       
  6140   match(AddP (AddP reg (ConvI2L ireg)) off);
       
  6141   op_cost(INSN_COST);
       
  6142   format %{ "$reg, $ireg, $off I2L" %}
       
  6143   interface(MEMORY_INTER) %{
       
  6144     base($reg);
       
  6145     index($ireg);
       
  6146     scale(0x0);
       
  6147     disp($off);
       
  6148   %}
       
  6149 %}
       
  6150 
       
  6151 operand indIndexScaledOffsetI2L(iRegP reg, iRegI ireg, immIScale scale, immLU12 off)
       
  6152 %{
       
  6153   constraint(ALLOC_IN_RC(ptr_reg));
       
  6154   match(AddP (AddP reg (LShiftL (ConvI2L ireg) scale)) off);
       
  6155   op_cost(INSN_COST);
       
  6156   format %{ "$reg, $ireg sxtw($scale), $off I2L" %}
       
  6157   interface(MEMORY_INTER) %{
       
  6158     base($reg);
       
  6159     index($ireg);
       
  6160     scale($scale);
       
  6161     disp($off);
       
  6162   %}
       
  6163 %}
       
  6164 
       
  6165 operand indIndexScaledI2L(iRegP reg, iRegI ireg, immIScale scale)
       
  6166 %{
       
  6167   constraint(ALLOC_IN_RC(ptr_reg));
       
  6168   match(AddP reg (LShiftL (ConvI2L ireg) scale));
  6211   match(AddP reg (LShiftL (ConvI2L ireg) scale));
  6169   op_cost(0);
  6212   op_cost(0);
  6170   format %{ "$reg, $ireg sxtw($scale), 0, I2L" %}
  6213   format %{ "$reg, $ireg sxtw($scale), 0, I2L" %}
  6171   interface(MEMORY_INTER) %{
  6214   interface(MEMORY_INTER) %{
  6172     base($reg);
  6215     base($reg);
  6177 %}
  6220 %}
  6178 
  6221 
  6179 operand indIndexScaled(iRegP reg, iRegL lreg, immIScale scale)
  6222 operand indIndexScaled(iRegP reg, iRegL lreg, immIScale scale)
  6180 %{
  6223 %{
  6181   constraint(ALLOC_IN_RC(ptr_reg));
  6224   constraint(ALLOC_IN_RC(ptr_reg));
       
  6225   predicate(size_fits_all_mem_uses(n->as_AddP(), n->in(AddPNode::Offset)->in(2)->get_int()));
  6182   match(AddP reg (LShiftL lreg scale));
  6226   match(AddP reg (LShiftL lreg scale));
  6183   op_cost(0);
  6227   op_cost(0);
  6184   format %{ "$reg, $lreg lsl($scale)" %}
  6228   format %{ "$reg, $lreg lsl($scale)" %}
  6185   interface(MEMORY_INTER) %{
  6229   interface(MEMORY_INTER) %{
  6186     base($reg);
  6230     base($reg);
  6187     index($lreg);
  6231     index($lreg);
  6188     scale($scale);
  6232     scale($scale);
       
  6233     disp(0x0);
       
  6234   %}
       
  6235 %}
       
  6236 
       
  6237 operand indIndexI2L(iRegP reg, iRegI ireg)
       
  6238 %{
       
  6239   constraint(ALLOC_IN_RC(ptr_reg));
       
  6240   match(AddP reg (ConvI2L ireg));
       
  6241   op_cost(0);
       
  6242   format %{ "$reg, $ireg, 0, I2L" %}
       
  6243   interface(MEMORY_INTER) %{
       
  6244     base($reg);
       
  6245     index($ireg);
       
  6246     scale(0x0);
  6189     disp(0x0);
  6247     disp(0x0);
  6190   %}
  6248   %}
  6191 %}
  6249 %}
  6192 
  6250 
  6193 operand indIndex(iRegP reg, iRegL lreg)
  6251 operand indIndex(iRegP reg, iRegL lreg)
  6329     scale(0x0);
  6387     scale(0x0);
  6330     disp(0x0);
  6388     disp(0x0);
  6331   %}
  6389   %}
  6332 %}
  6390 %}
  6333 
  6391 
  6334 operand indIndexScaledOffsetIN(iRegN reg, iRegL lreg, immIScale scale, immIU12 off)
       
  6335 %{
       
  6336   predicate(Universe::narrow_oop_shift() == 0);
       
  6337   constraint(ALLOC_IN_RC(ptr_reg));
       
  6338   match(AddP (AddP (DecodeN reg) (LShiftL lreg scale)) off);
       
  6339   op_cost(0);
       
  6340   format %{ "$reg, $lreg lsl($scale), $off\t# narrow" %}
       
  6341   interface(MEMORY_INTER) %{
       
  6342     base($reg);
       
  6343     index($lreg);
       
  6344     scale($scale);
       
  6345     disp($off);
       
  6346   %}
       
  6347 %}
       
  6348 
       
  6349 operand indIndexScaledOffsetLN(iRegN reg, iRegL lreg, immIScale scale, immLU12 off)
       
  6350 %{
       
  6351   predicate(Universe::narrow_oop_shift() == 0);
       
  6352   constraint(ALLOC_IN_RC(ptr_reg));
       
  6353   match(AddP (AddP (DecodeN reg) (LShiftL lreg scale)) off);
       
  6354   op_cost(INSN_COST);
       
  6355   format %{ "$reg, $lreg lsl($scale), $off\t# narrow" %}
       
  6356   interface(MEMORY_INTER) %{
       
  6357     base($reg);
       
  6358     index($lreg);
       
  6359     scale($scale);
       
  6360     disp($off);
       
  6361   %}
       
  6362 %}
       
  6363 
       
  6364 operand indIndexOffsetI2LN(iRegN reg, iRegI ireg, immLU12 off)
       
  6365 %{
       
  6366   predicate(Universe::narrow_oop_shift() == 0);
       
  6367   constraint(ALLOC_IN_RC(ptr_reg));
       
  6368   match(AddP (AddP (DecodeN reg) (ConvI2L ireg)) off);
       
  6369   op_cost(INSN_COST);
       
  6370   format %{ "$reg, $ireg, $off I2L\t# narrow" %}
       
  6371   interface(MEMORY_INTER) %{
       
  6372     base($reg);
       
  6373     index($ireg);
       
  6374     scale(0x0);
       
  6375     disp($off);
       
  6376   %}
       
  6377 %}
       
  6378 
       
  6379 operand indIndexScaledOffsetI2LN(iRegN reg, iRegI ireg, immIScale scale, immLU12 off)
       
  6380 %{
       
  6381   predicate(Universe::narrow_oop_shift() == 0);
       
  6382   constraint(ALLOC_IN_RC(ptr_reg));
       
  6383   match(AddP (AddP (DecodeN reg) (LShiftL (ConvI2L ireg) scale)) off);
       
  6384   op_cost(INSN_COST);
       
  6385   format %{ "$reg, $ireg sxtw($scale), $off I2L\t# narrow" %}
       
  6386   interface(MEMORY_INTER) %{
       
  6387     base($reg);
       
  6388     index($ireg);
       
  6389     scale($scale);
       
  6390     disp($off);
       
  6391   %}
       
  6392 %}
       
  6393 
       
  6394 operand indIndexScaledI2LN(iRegN reg, iRegI ireg, immIScale scale)
  6392 operand indIndexScaledI2LN(iRegN reg, iRegI ireg, immIScale scale)
  6395 %{
  6393 %{
  6396   predicate(Universe::narrow_oop_shift() == 0);
  6394   predicate(Universe::narrow_oop_shift() == 0 && size_fits_all_mem_uses(n->as_AddP(), n->in(AddPNode::Offset)->in(2)->get_int()));
  6397   constraint(ALLOC_IN_RC(ptr_reg));
  6395   constraint(ALLOC_IN_RC(ptr_reg));
  6398   match(AddP (DecodeN reg) (LShiftL (ConvI2L ireg) scale));
  6396   match(AddP (DecodeN reg) (LShiftL (ConvI2L ireg) scale));
  6399   op_cost(0);
  6397   op_cost(0);
  6400   format %{ "$reg, $ireg sxtw($scale), 0, I2L\t# narrow" %}
  6398   format %{ "$reg, $ireg sxtw($scale), 0, I2L\t# narrow" %}
  6401   interface(MEMORY_INTER) %{
  6399   interface(MEMORY_INTER) %{
  6406   %}
  6404   %}
  6407 %}
  6405 %}
  6408 
  6406 
  6409 operand indIndexScaledN(iRegN reg, iRegL lreg, immIScale scale)
  6407 operand indIndexScaledN(iRegN reg, iRegL lreg, immIScale scale)
  6410 %{
  6408 %{
  6411   predicate(Universe::narrow_oop_shift() == 0);
  6409   predicate(Universe::narrow_oop_shift() == 0 && size_fits_all_mem_uses(n->as_AddP(), n->in(AddPNode::Offset)->in(2)->get_int()));
  6412   constraint(ALLOC_IN_RC(ptr_reg));
  6410   constraint(ALLOC_IN_RC(ptr_reg));
  6413   match(AddP (DecodeN reg) (LShiftL lreg scale));
  6411   match(AddP (DecodeN reg) (LShiftL lreg scale));
  6414   op_cost(0);
  6412   op_cost(0);
  6415   format %{ "$reg, $lreg lsl($scale)\t# narrow" %}
  6413   format %{ "$reg, $lreg lsl($scale)\t# narrow" %}
  6416   interface(MEMORY_INTER) %{
  6414   interface(MEMORY_INTER) %{
  6417     base($reg);
  6415     base($reg);
  6418     index($lreg);
  6416     index($lreg);
  6419     scale($scale);
  6417     scale($scale);
       
  6418     disp(0x0);
       
  6419   %}
       
  6420 %}
       
  6421 
       
  6422 operand indIndexI2LN(iRegN reg, iRegI ireg)
       
  6423 %{
       
  6424   predicate(Universe::narrow_oop_shift() == 0);
       
  6425   constraint(ALLOC_IN_RC(ptr_reg));
       
  6426   match(AddP (DecodeN reg) (ConvI2L ireg));
       
  6427   op_cost(0);
       
  6428   format %{ "$reg, $ireg, 0, I2L\t# narrow" %}
       
  6429   interface(MEMORY_INTER) %{
       
  6430     base($reg);
       
  6431     index($ireg);
       
  6432     scale(0x0);
  6420     disp(0x0);
  6433     disp(0x0);
  6421   %}
  6434   %}
  6422 %}
  6435 %}
  6423 
  6436 
  6424 operand indIndexN(iRegN reg, iRegL lreg)
  6437 operand indIndexN(iRegN reg, iRegL lreg)
  6639 // encoding and format. The classic case of this is memory operands.
  6652 // encoding and format. The classic case of this is memory operands.
  6640 
  6653 
  6641 // memory is used to define read/write location for load/store
  6654 // memory is used to define read/write location for load/store
  6642 // instruction defs. we can turn a memory op into an Address
  6655 // instruction defs. we can turn a memory op into an Address
  6643 
  6656 
  6644 opclass memory(indirect, indIndexScaledOffsetI, indIndexScaledOffsetL, indIndexOffsetI2L, indIndexScaledOffsetI2L, indIndexScaled, indIndexScaledI2L, indIndex, indOffI, indOffL,
  6657 opclass memory(indirect, indIndexScaled, indIndexScaledI2L, indIndexI2L, indIndex, indOffI, indOffL,
  6645                indirectN, indIndexScaledOffsetIN, indIndexScaledOffsetLN, indIndexOffsetI2LN, indIndexScaledOffsetI2LN, indIndexScaledN, indIndexScaledI2LN, indIndexN, indOffIN, indOffLN);
  6658                indirectN, indIndexScaledN, indIndexScaledI2LN, indIndexI2LN, indIndexN, indOffIN, indOffLN);
  6646 
       
  6647 
  6659 
  6648 // iRegIorL2I is used for src inputs in rules for 32 bit int (I)
  6660 // iRegIorL2I is used for src inputs in rules for 32 bit int (I)
  6649 // operations. it allows the src to be either an iRegI or a (ConvL2I
  6661 // operations. it allows the src to be either an iRegI or a (ConvL2I
  6650 // iRegL). in the latter case the l2i normally planted for a ConvL2I
  6662 // iRegL). in the latter case the l2i normally planted for a ConvL2I
  6651 // can be elided because the 32-bit instruction will just employ the
  6663 // can be elided because the 32-bit instruction will just employ the