hotspot/src/share/vm/opto/matcher.cpp
changeset 23220 fc827339dc37
parent 22911 ff49c48c887d
child 23528 8f1a7f5e8066
  calling_convention(&sig_bt, &regs, 1, is_outgoing);
  // Return argument 0 register.  In the LP64 build pointers
  // take 2 registers, but the VM wants only the 'main' name.
  return OptoReg::as_OptoReg(regs.first());
}

// This function identifies sub-graphs in which a 'load' node is
// input to two different nodes, such that the sub-graph can be matched
// with BMI instructions like blsi, blsr, etc.
// Example: b = -a[i] & a[i] can be matched to blsi r32, m32.
// The graph is (AndL (SubL Con0 LoadL*) LoadL*), where LoadL*
// refers to the same node.
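// The related blsr and blsmsk idioms, b = a[i] & (a[i] - 1) and
// b = a[i] ^ (a[i] - 1), use (AddL LoadL* -1) in place of the SubL and are
// matched by is_bmi_pattern() below as well.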
       
#ifdef X86
// Match the generic fused operations pattern (op1 (op2 Con{ConType} mop) mop)
// This is a temporary solution until we make DAGs expressible in ADL.
template<typename ConType>
class FusedPatternMatcher {
  Node* _op1_node;
  Node* _mop_node;
  int _con_op;

  static int match_next(Node* n, int next_op, int next_op_idx) {
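    // Returns the index (1 or 2) of the input of 'n' whose Opcode() equals
    // next_op, or -1 if no such input is found.  A next_op_idx of -1 means
    // 'n' is commutative and both inputs are tried; otherwise only the input
    // at next_op_idx is checked.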
       
    if (n->in(1) == NULL || n->in(2) == NULL) {
      return -1;
    }

    if (next_op_idx == -1) { // n is commutative, try rotations
      if (n->in(1)->Opcode() == next_op) {
        return 1;
      } else if (n->in(2)->Opcode() == next_op) {
        return 2;
      }
    } else {
      assert(next_op_idx > 0 && next_op_idx <= 2, "Bad argument index");
      if (n->in(next_op_idx)->Opcode() == next_op) {
        return next_op_idx;
      }
    }
    return -1;
  }
public:
  FusedPatternMatcher(Node* op1_node, Node *mop_node, int con_op) :
    _op1_node(op1_node), _mop_node(mop_node), _con_op(con_op) { }

  bool match(int op1, int op1_op2_idx,  // op1 and the index of the op1->op2 edge, -1 if op1 is commutative
             int op2, int op2_con_idx,  // op2 and the index of the op2->con edge, -1 if op2 is commutative
             typename ConType::NativeType con_value) {
    if (_op1_node->Opcode() != op1) {
      return false;
    }
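    // The shared load may be used at most twice: by _op1_node and by the op2 node.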
       
    if (_mop_node->outcnt() > 2) {
      return false;
    }
    op1_op2_idx = match_next(_op1_node, op2, op1_op2_idx);
    if (op1_op2_idx == -1) {
      return false;
    }
    // Memory operation must be the other edge
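    // ((idx & 1) + 1 maps input index 1 to 2 and 2 to 1)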
       
    int op1_mop_idx = (op1_op2_idx & 1) + 1;

    // Check that the mop node is really what we want
    if (_op1_node->in(op1_mop_idx) == _mop_node) {
      Node *op2_node = _op1_node->in(op1_op2_idx);
      if (op2_node->outcnt() > 1) {
        return false;
      }
      assert(op2_node->Opcode() == op2, "Should be");
      op2_con_idx = match_next(op2_node, _con_op, op2_con_idx);
      if (op2_con_idx == -1) {
        return false;
      }
      // Memory operation must be the other edge
      int op2_mop_idx = (op2_con_idx & 1) + 1;
      // Check that the memory operation is the same node
      if (op2_node->in(op2_mop_idx) == _mop_node) {
        // Now check the constant
        const Type* con_type = op2_node->in(op2_con_idx)->bottom_type();
        if (con_type != Type::TOP && ConType::as_self(con_type)->get_con() == con_value) {
          return true;
        }
      }
    }
    return false;
  }
};

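// Checks whether the pair (n, m) forms one of the sub-graphs described above,
// where 'm' is the load that feeds both 'n' and the Sub/Add node under 'n':
//   (And (Sub 0 load) load)    -- the blsi idiom
//   (And (Add load -1) load)   -- the blsr idiom
//   (Xor (Add load -1) load)   -- the blsmsk idiom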
       
bool Matcher::is_bmi_pattern(Node *n, Node *m) {
  if (n != NULL && m != NULL) {
    if (m->Opcode() == Op_LoadI) {
      FusedPatternMatcher<TypeInt> bmii(n, m, Op_ConI);
      return bmii.match(Op_AndI, -1, Op_SubI,  1,  0)  ||
             bmii.match(Op_AndI, -1, Op_AddI, -1, -1)  ||
             bmii.match(Op_XorI, -1, Op_AddI, -1, -1);
    } else if (m->Opcode() == Op_LoadL) {
      FusedPatternMatcher<TypeLong> bmil(n, m, Op_ConL);
      return bmil.match(Op_AndL, -1, Op_SubL,  1,  0) ||
             bmil.match(Op_AndL, -1, Op_AddL, -1, -1) ||
             bmil.match(Op_XorL, -1, Op_AddL, -1, -1);
    }
  }
  return false;
}
#endif // X86

// A method-klass-holder may be passed in the inline_cache_reg
// and then expanded into the inline_cache_reg and a method_oop register
//   defined in ad_<arch>.cpp

[...]

          // they are shared through a DecodeN they may appear
          // to have a single use so force sharing here.
          set_shared(m->in(AddPNode::Base)->in(1));
        }

        // If 'n' and 'm' are part of a graph for a BMI instruction, clone this node.
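        // Pushing 'm' with Visit and continuing skips the sharing logic below, so the
        // load is not forced into a register and can still be folded into the BMI
        // instruction's memory operand.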
       
#ifdef X86
        if (UseBMI1Instructions && is_bmi_pattern(n, m)) {
          mstack.push(m, Visit);
          continue;
        }
#endif

        // Clone addressing expressions as they are "free" in memory access instructions
        if( mem_op && i == MemNode::Address && mop == Op_AddP ) {
          // Some inputs for address expression are not put on stack
          // to avoid marking them as shared and forcing them into register
          // if they are used only in address expressions.