hotspot/src/share/vm/opto/matcher.cpp
changeset 594 9f4474e5dbaf
parent 590 2954744d7bba
child 595 a2be4c89de81
--- a/hotspot/src/share/vm/opto/matcher.cpp	593:803947e176bd
+++ b/hotspot/src/share/vm/opto/matcher.cpp	594:9f4474e5dbaf
@@ -50,11 +50,11 @@
 Matcher::Matcher( Node_List &proj_list ) :
   PhaseTransform( Phase::Ins_Select ),
 #ifdef ASSERT
   _old2new_map(C->comp_arena()),
 #endif
-  _shared_constants(C->comp_arena()),
+  _shared_nodes(C->comp_arena()),
   _reduceOp(reduceOp), _leftOp(leftOp), _rightOp(rightOp),
   _swallowed(swallowed),
   _begin_inst_chain_rule(_BEGIN_INST_CHAIN_RULE),
   _end_inst_chain_rule(_END_INST_CHAIN_RULE),
   _must_clone(must_clone), _proj_list(proj_list),
@@ -1189,11 +1189,11 @@
 
   // Add any Matcher-ignored edges
   uint cnt = n->req();
   uint start = 1;
   if( mem != (Node*)1 ) start = MemNode::Memory+1;
-  if( n->Opcode() == Op_AddP ) {
+  if( n->is_AddP() ) {
     assert( mem == (Node*)1, "" );
     start = AddPNode::Base+1;
   }
   for( i = start; i < cnt; i++ ) {
     if( !n->match_edge(i) ) {
@@ -1217,11 +1217,11 @@
   const Type *t = m->bottom_type();
 
   if( t->singleton() ) {
     // Never force constants into registers.  Allow them to match as
     // constants or registers.  Copies of the same value will share
-    // the same register.  See find_shared_constant.
+    // the same register.  See find_shared_node.
     return false;
   } else {                      // Not a constant
     // Stop recursion if they have different Controls.
     // Slot 0 of constants is not really a Control.
     if( control && m->in(0) && control != m->in(0) ) {
@@ -1241,16 +1241,14 @@
           break;                // m->in(0)?  If so, we can use it
       }
       if( j == max_scan )       // No post-domination before scan end?
         return true;            // Then break the match tree up
     }
-
-    if (m->Opcode() == Op_DecodeN && m->outcnt() == 2) {
-      // These are commonly used in address expressions and can
-      // efficiently fold into them in some cases but because they are
-      // consumed by AddP they commonly have two users.
-      if (m->raw_out(0) == m->raw_out(1) && m->raw_out(0)->Opcode() == Op_AddP) return false;
+    if (m->is_DecodeN() && Matcher::clone_shift_expressions) {
+      // These are commonly used in address expressions and can
+      // efficiently fold into them on X64 in some cases.
+      return false;
     }
   }
 
   // Not forceably cloning.  If shared, put it into a register.
   return shared;
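
For context on the DecodeN hunk above: a compressed reference is decoded as heap_base + (narrow << shift), and on x86_64 that computation can fold straight into a load's addressing mode, which is why cloning the decode at each use can be cheaper than keeping one shared copy in a register. A rough standalone illustration follows; the names, the fake heap, and the 3-bit shift are assumptions made for the sketch, not HotSpot code.

#include <cstdint>

// Stand-in heap and base pointer; the real VM uses its own heap base and
// object-alignment shift (8-byte alignment gives the typical shift of 3).
static char  g_fake_heap[1u << 20];
static char* g_heap_base = g_fake_heap;

// Decode a 32-bit narrow reference and load a 4-byte field at offset 16.
// The effective address is g_heap_base + narrow*8 + 16, which an x86_64
// compiler can usually emit as a single memory operand
// (base + index*scale + disp), so the decode costs no extra instruction
// at this use.
int load_field_at_16(uint32_t narrow) {
  char* obj = g_heap_base + (static_cast<uintptr_t>(narrow) << 3);
  return *reinterpret_cast<int*>(obj + 16);
}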
@@ -1366,17 +1364,20 @@
 
 // Con nodes reduced using the same rule can share their MachNode
 // which reduces the number of copies of a constant in the final
 // program.  The register allocator is free to split uses later to
 // split live ranges.
-MachNode* Matcher::find_shared_constant(Node* leaf, uint rule) {
-  if (!leaf->is_Con()) return NULL;
+MachNode* Matcher::find_shared_node(Node* leaf, uint rule) {
+  if (!leaf->is_Con() && !leaf->is_DecodeN()) return NULL;
 
   // See if this Con has already been reduced using this rule.
-  if (_shared_constants.Size() <= leaf->_idx) return NULL;
-  MachNode* last = (MachNode*)_shared_constants.at(leaf->_idx);
+  if (_shared_nodes.Size() <= leaf->_idx) return NULL;
+  MachNode* last = (MachNode*)_shared_nodes.at(leaf->_idx);
   if (last != NULL && rule == last->rule()) {
+    // Don't expect control change for DecodeN
+    if (leaf->is_DecodeN())
+      return last;
     // Get the new space root.
     Node* xroot = new_node(C->root());
     if (xroot == NULL) {
       // This shouldn't happen given the order of matching.
       return NULL;
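
The find_shared_node hunk above extends a simple memoization scheme from constants to DecodeN nodes: the machine node already produced for a leaf is cached under the leaf's index and reused only when the same reduction rule applies. A minimal standalone sketch of that pattern, using simplified stand-in types rather than HotSpot's Node/MachNode classes and its arena-allocated, index-addressed _shared_nodes table:

#include <cstdint>
#include <unordered_map>

// Illustrative stand-ins for an ideal node and a selected machine instruction.
struct IdealNode { uint32_t idx; bool is_con; bool is_decode; };
struct MachInst  { int rule; };

// One machine node per (leaf, reduction rule): look the leaf up by its index
// and reuse the cached instruction only if it came from the same rule.
class SharedNodeCache {
  std::unordered_map<uint32_t, MachInst*> _map;
 public:
  MachInst* find_shared(const IdealNode& leaf, int rule) const {
    if (!leaf.is_con && !leaf.is_decode) return nullptr;  // only constants and decodes are shared
    auto it = _map.find(leaf.idx);
    if (it == _map.end()) return nullptr;
    return (it->second->rule == rule) ? it->second : nullptr;
  }
  void record_shared(const IdealNode& leaf, MachInst* mach) {
    if (leaf.is_con || leaf.is_decode) _map[leaf.idx] = mach;
  }
};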
@@ -1418,13 +1419,13 @@
 //     Build the operand, place it inside the instruction
 //     Call ReduceOper.
 MachNode *Matcher::ReduceInst( State *s, int rule, Node *&mem ) {
   assert( rule >= NUM_OPERANDS, "called with operand rule" );
 
-  MachNode* shared_con = find_shared_constant(s->_leaf, rule);
-  if (shared_con != NULL) {
-    return shared_con;
+  MachNode* shared_node = find_shared_node(s->_leaf, rule);
+  if (shared_node != NULL) {
+    return shared_node;
   }
 
   // Build the object to represent this state & prepare for recursive calls
   MachNode *mach = s->MachNodeGenerator( rule, C );
   mach->_opnds[0] = s->MachOperGenerator( _reduceOp[rule], C );
@@ -1445,11 +1446,11 @@
   // If a Memory was used, insert a Memory edge
   if( mem != (Node*)1 )
     mach->ins_req(MemNode::Memory,mem);
 
   // If the _leaf is an AddP, insert the base edge
-  if( leaf->Opcode() == Op_AddP )
+  if( leaf->is_AddP() )
     mach->ins_req(AddPNode::Base,leaf->in(AddPNode::Base));
 
   uint num_proj = _proj_list.size();
 
   // Perform any 1-to-many expansions required
@@ -1473,13 +1474,13 @@
   if (_allocation_started) {
     guarantee(ex == mach, "no expand rules during spill generation");
     guarantee(_proj_list.size() == num_proj, "no allocation during spill generation");
   }
 
-  if (leaf->is_Con()) {
+  if (leaf->is_Con() || leaf->is_DecodeN()) {
     // Record the con for sharing
-    _shared_constants.map(leaf->_idx, ex);
+    _shared_nodes.map(leaf->_idx, ex);
   }
 
   return ex;
 }
 
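
ReduceInst above is the other half of that mechanism: it consults the cache before building anything and, after reduction and expansion, records the result only for Con and (with this change) DecodeN leaves. Continuing the hypothetical sketch from the previous block, the flow is roughly:

MachInst* reduce_leaf(SharedNodeCache& cache, const IdealNode& leaf, int rule) {
  if (MachInst* shared = cache.find_shared(leaf, rule))
    return shared;                       // reuse a previously reduced instruction
  MachInst* mach = new MachInst{rule};   // stand-in for the real rule reduction
  cache.record_shared(leaf, mach);       // no-op unless the leaf is a Con or DecodeN
  return mach;
}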
@@ -1824,11 +1825,11 @@
           if( off->is_Con() ) {
             set_visited(m);  // Flag as visited now
             Node *adr = m->in(AddPNode::Address);
 
             // Intel, ARM and friends can handle 2 adds in addressing mode
-            if( clone_shift_expressions && adr->Opcode() == Op_AddP &&
+            if( clone_shift_expressions && adr->is_AddP() &&
                 // AtomicAdd is not an addressing expression.
                 // Cheap to find it by looking for screwy base.
                 !adr->in(AddPNode::Base)->is_top() ) {
               set_visited(adr);  // Flag as visited now
               Node *shift = adr->in(AddPNode::Offset);
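
On the last hunk, "Intel, ARM and friends can handle 2 adds in addressing mode" refers to address expressions of the form base + index*scale + displacement collapsing into a single memory operand, which is why the matcher prefers to clone such AddP chains into each address rather than share them through a register. A plain C++ illustration (the suggested assembly is indicative only, not taken from the changeset):

#include <cstdint>

// base[i + 2] computes base + i*8 + 16: two additions plus a scaled index,
// yet an x86_64 compiler can usually fold the whole expression into one
// operand, e.g.  mov rax, qword ptr [rdi + rsi*8 + 16],
// instead of materializing the address in a register first.
int64_t load_elem(const int64_t* base, int64_t i) {
  return base[i + 2];
}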