41 class MemNode : public Node { |
41 class MemNode : public Node { |
42 private: |
42 private: |
43 bool _unaligned_access; // Unaligned access from unsafe |
43 bool _unaligned_access; // Unaligned access from unsafe |
44 bool _mismatched_access; // Mismatched access from unsafe: byte read in integer array for instance |
44 bool _mismatched_access; // Mismatched access from unsafe: byte read in integer array for instance |
45 bool _unsafe_access; // Access of unsafe origin. |
45 bool _unsafe_access; // Access of unsafe origin. |
|
46 uint8_t _barrier; // Bit field with barrier information |
|
47 |
46 protected: |
48 protected: |
47 #ifdef ASSERT |
49 #ifdef ASSERT |
48 const TypePtr* _adr_type; // What kind of memory is being addressed? |
50 const TypePtr* _adr_type; // What kind of memory is being addressed? |
49 #endif |
51 #endif |
50 virtual uint size_of() const; |
52 virtual uint size_of() const; |
60 release, // Store has to release or be preceded by MemBarRelease. |
62 release, // Store has to release or be preceded by MemBarRelease. |
61 seqcst, // LoadStore has to have both acquire and release semantics. |
63 seqcst, // LoadStore has to have both acquire and release semantics. |
62 unset // The memory ordering is not set (used for testing) |
64 unset // The memory ordering is not set (used for testing) |
63 } MemOrd; |
65 } MemOrd; |
64 protected: |
66 protected: |
65 MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at ) |
67 MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at ) : |
66 : Node(c0,c1,c2 ), _unaligned_access(false), _mismatched_access(false), _unsafe_access(false) { |
68 Node(c0,c1,c2), |
|
69 _unaligned_access(false), |
|
70 _mismatched_access(false), |
|
71 _unsafe_access(false), |
|
72 _barrier(0) { |
67 init_class_id(Class_Mem); |
73 init_class_id(Class_Mem); |
68 debug_only(_adr_type=at; adr_type();) |
74 debug_only(_adr_type=at; adr_type();) |
69 } |
75 } |
70 MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3 ) |
76 MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3 ) : |
71 : Node(c0,c1,c2,c3), _unaligned_access(false), _mismatched_access(false), _unsafe_access(false) { |
77 Node(c0,c1,c2,c3), |
|
78 _unaligned_access(false), |
|
79 _mismatched_access(false), |
|
80 _unsafe_access(false), |
|
81 _barrier(0) { |
72 init_class_id(Class_Mem); |
82 init_class_id(Class_Mem); |
73 debug_only(_adr_type=at; adr_type();) |
83 debug_only(_adr_type=at; adr_type();) |
74 } |
84 } |
75 MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3, Node *c4) |
85 MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3, Node *c4) : |
76 : Node(c0,c1,c2,c3,c4), _unaligned_access(false), _mismatched_access(false), _unsafe_access(false) { |
86 Node(c0,c1,c2,c3,c4), |
|
87 _unaligned_access(false), |
|
88 _mismatched_access(false), |
|
89 _unsafe_access(false), |
|
90 _barrier(0) { |
77 init_class_id(Class_Mem); |
91 init_class_id(Class_Mem); |
78 debug_only(_adr_type=at; adr_type();) |
92 debug_only(_adr_type=at; adr_type();) |
79 } |
93 } |
80 |
94 |
81 virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const { return NULL; } |
95 virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const { return NULL; } |
152 class LoadNode : public MemNode { |
169 class LoadNode : public MemNode { |
153 public: |
170 public: |
154 // Some loads (from unsafe) should be pinned: they don't depend only |
171 // Some loads (from unsafe) should be pinned: they don't depend only |
155 // on the dominating test. The field _control_dependency below records |
172 // on the dominating test. The field _control_dependency below records |
156 // whether that node depends only on the dominating test. |
173 // whether that node depends only on the dominating test. |
157 // Methods used to build LoadNodes pass an argument of type enum |
174 // Pinned and UnknownControl are similar, but differ in that Pinned |
158 // ControlDependency instead of a boolean because those methods |
175 // loads are not allowed to float across safepoints, whereas UnknownControl |
159 // typically have multiple boolean parameters with default values: |
176 // loads are allowed to do that. Therefore, Pinned is stricter. |
160 // passing the wrong boolean to one of these parameters by mistake |
|
161 // goes easily unnoticed. Using an enum, the compiler can check that |
|
162 // the type of a value and the type of the parameter match. |
|
163 enum ControlDependency { |
177 enum ControlDependency { |
164 Pinned, |
178 Pinned, |
|
179 UnknownControl, |
165 DependsOnlyOnTest |
180 DependsOnlyOnTest |
166 }; |
181 }; |
|
182 |
167 private: |
183 private: |
168 // LoadNode::hash() doesn't take the _control_dependency field |
184 // LoadNode::hash() doesn't take the _control_dependency field |
169 // into account: If the graph already has a non-pinned LoadNode and |
185 // into account: If the graph already has a non-pinned LoadNode and |
170 // we add a pinned LoadNode with the same inputs, it's safe for GVN |
186 // we add a pinned LoadNode with the same inputs, it's safe for GVN |
171 // to replace the pinned LoadNode with the non-pinned LoadNode, |
187 // to replace the pinned LoadNode with the non-pinned LoadNode, |
180 // loads that can be reordered, and such requiring acquire semantics to |
196 // loads that can be reordered, and such requiring acquire semantics to |
181 // adhere to the Java specification. The required behaviour is stored in |
197 // adhere to the Java specification. The required behaviour is stored in |
182 // this field. |
198 // this field. |
183 const MemOrd _mo; |
199 const MemOrd _mo; |
184 |
200 |
|
201 AllocateNode* is_new_object_mark_load(PhaseGVN *phase) const; |
|
202 |
185 protected: |
203 protected: |
186 virtual bool cmp(const Node &n) const; |
204 virtual bool cmp(const Node &n) const; |
187 virtual uint size_of() const; // Size is bigger |
205 virtual uint size_of() const; // Size is bigger |
188 // Should LoadNode::Ideal() attempt to remove control edges? |
206 // Should LoadNode::Ideal() attempt to remove control edges? |
189 virtual bool can_remove_control() const; |
207 virtual bool can_remove_control() const; |
808 // Note: is_Mem() method returns 'true' for this class. |
829 // Note: is_Mem() method returns 'true' for this class. |
809 class LoadStoreNode : public Node { |
830 class LoadStoreNode : public Node { |
810 private: |
831 private: |
811 const Type* const _type; // What kind of value is loaded? |
832 const Type* const _type; // What kind of value is loaded? |
812 const TypePtr* _adr_type; // What kind of memory is being addressed? |
833 const TypePtr* _adr_type; // What kind of memory is being addressed? |
|
834 uint8_t _barrier; // Bit field with barrier information |
813 virtual uint size_of() const; // Size is bigger |
835 virtual uint size_of() const; // Size is bigger |
814 public: |
836 public: |
815 LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required ); |
837 LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required ); |
816 virtual bool depends_only_on_test() const { return false; } |
838 virtual bool depends_only_on_test() const { return false; } |
817 virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; } |
839 virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; } |
871 public: |
896 public: |
872 CompareAndSwapNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : LoadStoreConditionalNode(c, mem, adr, val, ex), _mem_ord(mem_ord) {} |
897 CompareAndSwapNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : LoadStoreConditionalNode(c, mem, adr, val, ex), _mem_ord(mem_ord) {} |
873 MemNode::MemOrd order() const { |
898 MemNode::MemOrd order() const { |
874 return _mem_ord; |
899 return _mem_ord; |
875 } |
900 } |
|
901 virtual uint size_of() const { return sizeof(*this); } |
876 }; |
902 }; |
877 |
903 |
878 class CompareAndExchangeNode : public LoadStoreNode { |
904 class CompareAndExchangeNode : public LoadStoreNode { |
879 private: |
905 private: |
880 const MemNode::MemOrd _mem_ord; |
906 const MemNode::MemOrd _mem_ord; |
1629 } |
1656 } |
1630 return false; |
1657 return false; |
1631 } |
1658 } |
1632 }; |
1659 }; |
1633 |
1660 |
|
// cachewb node for guaranteeing writeback of the cache line at a
// given address to (non-volatile) RAM
class CacheWBNode : public Node {
public:
  // ctrl: control input; mem: memory input; addr: address whose cache
  // line is to be written back.
  CacheWBNode(Node *ctrl, Node *mem, Node *addr) : Node(ctrl, mem, addr) {}
  virtual int Opcode() const;
  // Produces no value in a machine register; exists for its memory effect.
  virtual uint ideal_reg() const { return NotAMachineReg; }
  // Only the address input (edge 2, the third constructor argument)
  // participates in matching.
  virtual uint match_edge(uint idx) const { return (idx == 2); }
  // Conservatively aliases with all of memory.
  virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
  virtual const Type *bottom_type() const { return Type::MEMORY; }
};
|
1672 |
|
// cachewb pre sync node for ensuring that writebacks are serialised
// relative to preceding or following stores
class CacheWBPreSyncNode : public Node {
public:
  // ctrl: control input; mem: memory input.
  CacheWBPreSyncNode(Node *ctrl, Node *mem) : Node(ctrl, mem) {}
  virtual int Opcode() const;
  // Produces no value in a machine register; exists for its memory effect.
  virtual uint ideal_reg() const { return NotAMachineReg; }
  // No input participates in matching (always returns false, i.e. 0).
  virtual uint match_edge(uint idx) const { return false; }
  // Conservatively aliases with all of memory.
  virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
  virtual const Type *bottom_type() const { return Type::MEMORY; }
};
|
1684 |
|
// cachewb post sync node for ensuring that writebacks are serialised
// relative to preceding or following stores
// (NOTE(review): original comment said "pre sync" — copy-paste from
// CacheWBPreSyncNode; this class is the post-sync counterpart.)
class CacheWBPostSyncNode : public Node {
public:
  // ctrl: control input; mem: memory input.
  CacheWBPostSyncNode(Node *ctrl, Node *mem) : Node(ctrl, mem) {}
  virtual int Opcode() const;
  // Produces no value in a machine register; exists for its memory effect.
  virtual uint ideal_reg() const { return NotAMachineReg; }
  // No input participates in matching (always returns false, i.e. 0).
  virtual uint match_edge(uint idx) const { return false; }
  // Conservatively aliases with all of memory.
  virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
  virtual const Type *bottom_type() const { return Type::MEMORY; }
};
|
1696 |
1634 //------------------------------Prefetch--------------------------------------- |
1697 //------------------------------Prefetch--------------------------------------- |
1635 |
1698 |
1636 // Allocation prefetch which may fault, TLAB size have to be adjusted. |
1699 // Allocation prefetch which may fault, TLAB size have to be adjusted. |
1637 class PrefetchAllocationNode : public Node { |
1700 class PrefetchAllocationNode : public Node { |
1638 public: |
1701 public: |