50180
|
1 |
/*
|
|
2 |
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
|
|
3 |
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
|
4 |
*
|
|
5 |
* This code is free software; you can redistribute it and/or modify it
|
|
6 |
* under the terms of the GNU General Public License version 2 only, as
|
|
7 |
* published by the Free Software Foundation.
|
|
8 |
*
|
|
9 |
* This code is distributed in the hope that it will be useful, but WITHOUT
|
|
10 |
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
11 |
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
12 |
* version 2 for more details (a copy is included in the LICENSE file that
|
|
13 |
* accompanied this code).
|
|
14 |
*
|
|
15 |
* You should have received a copy of the GNU General Public License version
|
|
16 |
* 2 along with this work; if not, write to the Free Software Foundation,
|
|
17 |
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
|
18 |
*
|
|
19 |
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
|
20 |
* or visit www.oracle.com if you need additional information or have any
|
|
21 |
* questions.
|
|
22 |
*
|
|
23 |
*/
|
|
24 |
|
|
25 |
#ifndef SHARE_GC_SHARED_C2_BARRIERSETC2_HPP
|
|
26 |
#define SHARE_GC_SHARED_C2_BARRIERSETC2_HPP
|
|
27 |
|
|
28 |
#include "memory/allocation.hpp"
|
|
29 |
#include "oops/accessDecorators.hpp"
|
|
30 |
#include "opto/loopnode.hpp"
|
|
31 |
#include "opto/memnode.hpp"
|
|
32 |
#include "utilities/globalDefinitions.hpp"
|
|
33 |
|
|
34 |
// This means the access is mismatched, i.e. the value of the access is not
// equivalent to the value pointed to by the address.
const DecoratorSet C2_MISMATCHED = DECORATOR_LAST << 1;
// The access may not be aligned to its natural size.
const DecoratorSet C2_UNALIGNED = DECORATOR_LAST << 2;
// The atomic cmpxchg is weak, meaning that spurious false negatives are allowed,
// but never false positives.
const DecoratorSet C2_WEAK_CMPXCHG = DECORATOR_LAST << 3;
// This denotes that a load has a control dependency.
const DecoratorSet C2_CONTROL_DEPENDENT_LOAD = DECORATOR_LAST << 4;
// This denotes a load that must be pinned.
const DecoratorSet C2_PINNED_LOAD = DECORATOR_LAST << 5;
// This denotes that the access is produced from the sun.misc.Unsafe intrinsics.
const DecoratorSet C2_UNSAFE_ACCESS = DECORATOR_LAST << 6;
// This denotes that the access mutates state.
const DecoratorSet C2_WRITE_ACCESS = DECORATOR_LAST << 7;
// This denotes that the access reads state.
const DecoratorSet C2_READ_ACCESS = DECORATOR_LAST << 8;
|
|
52 |
|
|
53 |
class GraphKit;
|
|
54 |
class IdealKit;
|
|
55 |
class Node;
|
|
56 |
class Type;
|
|
57 |
class TypePtr;
|
|
58 |
class PhaseMacroExpand;
|
|
59 |
|
|
60 |
// This class wraps a node and a type.
|
|
61 |
class C2AccessValue: public StackObj {
|
|
62 |
protected:
|
|
63 |
Node* _node;
|
|
64 |
const Type* _type;
|
|
65 |
|
|
66 |
public:
|
|
67 |
C2AccessValue(Node* node, const Type* type) :
|
|
68 |
_node(node),
|
|
69 |
_type(type) {}
|
|
70 |
|
|
71 |
Node* node() const { return _node; }
|
|
72 |
const Type* type() const { return _type; }
|
|
73 |
|
|
74 |
void set_node(Node* node) { _node = node; }
|
|
75 |
};
|
|
76 |
|
|
77 |
// This class wraps a node and a pointer type.
|
|
78 |
class C2AccessValuePtr: public C2AccessValue {
|
|
79 |
int _alias_idx;
|
|
80 |
|
|
81 |
public:
|
|
82 |
C2AccessValuePtr(Node* node, const TypePtr* type) :
|
|
83 |
C2AccessValue(node, reinterpret_cast<const Type*>(type)) {}
|
|
84 |
|
|
85 |
const TypePtr* type() const { return reinterpret_cast<const TypePtr*>(_type); }
|
|
86 |
int alias_idx() const { return _alias_idx; }
|
|
87 |
};
|
|
88 |
|
|
89 |
// This class wraps a bunch of context parameters that are passed around in the
// BarrierSetC2 backend hierarchy, for loads and stores, to reduce boilerplate.
class C2Access: public StackObj {
protected:
  GraphKit* _kit;            // graph construction context the access is emitted into
  DecoratorSet _decorators;  // semantic properties of the access (see accessDecorators.hpp and C2_* above)
  BasicType _type;           // basic type of the accessed value
  Node* _base;               // base node of the access
  C2AccessValuePtr& _addr;   // address (with pointer type) the access applies to
  Node* _raw_access;         // the raw memory node emitted for this access, once created

  // Called from the constructor; completes/canonicalizes _decorators.
  void fixup_decorators();
  // Per-compilation-unit state kept by the active barrier set
  // (see BarrierSetC2::create_barrier_state()).
  void* barrier_set_state() const;

public:
  C2Access(GraphKit* kit, DecoratorSet decorators,
           BasicType type, Node* base, C2AccessValuePtr& addr) :
    _kit(kit),
    _decorators(decorators),
    _type(type),
    _base(base),
    _addr(addr),
    _raw_access(NULL)
  {
    fixup_decorators();
  }

  GraphKit* kit() const           { return _kit; }
  DecoratorSet decorators() const { return _decorators; }
  Node* base() const              { return _base; }
  C2AccessValuePtr& addr() const  { return _addr; }
  BasicType type() const          { return _type; }
  // An access is an oop access when its basic type is an object or array reference.
  bool is_oop() const             { return _type == T_OBJECT || _type == T_ARRAY; }
  // Raw accesses bypass GC barriers entirely.
  bool is_raw() const             { return (_decorators & AS_RAW) != 0; }
  Node* raw_access() const        { return _raw_access; }

  void set_raw_access(Node* raw_access) { _raw_access = raw_access; }
  virtual void set_memory() {} // no-op for normal accesses, but not for atomic accesses.

  // Memory ordering to use for the memory node implementing this access
  // (derived from the decorators; defined out-of-line).
  MemNode::MemOrd mem_node_mo() const;
  // Whether the access needs explicit CPU membars around it (defined out-of-line).
  bool needs_cpu_membar() const;

  // Convenience: barrier_set_state() cast to the concrete backend state type.
  template <typename T>
  T barrier_set_state_as() const {
    return reinterpret_cast<T>(barrier_set_state());
  }
};
|
|
136 |
|
|
137 |
// This class wraps a bunch of context parameters that are passed around in the
// BarrierSetC2 backend hierarchy, for atomic accesses, to reduce boilerplate.
class C2AtomicAccess: public C2Access {
  Node* _memory;        // memory state the atomic operation applies to; NULL until set_memory() runs
  uint _alias_idx;      // alias index of the accessed memory slice
  bool _needs_pinning;  // defaults to true; cleared via set_needs_pinning()

public:
  C2AtomicAccess(GraphKit* kit, DecoratorSet decorators, BasicType type,
                 Node* base, C2AccessValuePtr& addr, uint alias_idx) :
    C2Access(kit, decorators, type, base, addr),
    _memory(NULL),
    _alias_idx(alias_idx),
    _needs_pinning(true) {}

  // Set the memory node based on the current memory slice
  // (overrides the no-op C2Access::set_memory(); defined out-of-line).
  virtual void set_memory();

  Node* memory() const       { return _memory; }
  uint alias_idx() const     { return _alias_idx; }
  bool needs_pinning() const { return _needs_pinning; }

  void set_needs_pinning(bool value) { _needs_pinning = value; }
};
|
|
161 |
|
|
162 |
// This is the top-level class for the backend of the Access API in C2.
// The top-level class is responsible for performing raw accesses. The
// various GC barrier sets inherit from the BarrierSetC2 class to sprinkle
// barriers into the accesses.
class BarrierSetC2: public CHeapObj<mtGC> {
protected:
  // Resolve the address of an access. GC backends may override this to
  // decorate/replace the address node.
  virtual void resolve_address(C2Access& access) const;
  // The *_at_resolved methods perform the access once the address has been
  // resolved; barrier sets override these to wrap the raw access in barriers.
  // NOTE(review): presumably invoked by the corresponding public *_at
  // entry points below — confirm against the out-of-line definitions.
  virtual Node* store_at_resolved(C2Access& access, C2AccessValue& val) const;
  virtual Node* load_at_resolved(C2Access& access, const Type* val_type) const;

  virtual Node* atomic_cmpxchg_val_at_resolved(C2AtomicAccess& access, Node* expected_val,
                                               Node* new_val, const Type* val_type) const;
  virtual Node* atomic_cmpxchg_bool_at_resolved(C2AtomicAccess& access, Node* expected_val,
                                                Node* new_val, const Type* value_type) const;
  virtual Node* atomic_xchg_at_resolved(C2AtomicAccess& access, Node* new_val, const Type* val_type) const;
  virtual Node* atomic_add_at_resolved(C2AtomicAccess& access, Node* new_val, const Type* val_type) const;

public:
  // This is the entry-point for the backend to perform accesses through the Access API.
  virtual Node* store_at(C2Access& access, C2AccessValue& val) const;
  virtual Node* load_at(C2Access& access, const Type* val_type) const;

  virtual Node* atomic_cmpxchg_val_at(C2AtomicAccess& access, Node* expected_val,
                                      Node* new_val, const Type* val_type) const;
  virtual Node* atomic_cmpxchg_bool_at(C2AtomicAccess& access, Node* expected_val,
                                       Node* new_val, const Type* val_type) const;
  virtual Node* atomic_xchg_at(C2AtomicAccess& access, Node* new_val, const Type* value_type) const;
  virtual Node* atomic_add_at(C2AtomicAccess& access, Node* new_val, const Type* value_type) const;

  // Emit IR to clone src into dst (is_array distinguishes array clones),
  // overridable so barrier sets can add the barriers they require.
  virtual void clone(GraphKit* kit, Node* src, Node* dst, Node* size, bool is_array) const;

  // These are general helper methods used by C2
  virtual bool array_copy_requires_gc_barriers(BasicType type) const { return false; }

  // Support for GC barriers emitted during parsing
  virtual bool is_gc_barrier_node(Node* node) const { return false; }
  // Returns the node on the other side of a barrier (identity by default).
  virtual Node* step_over_gc_barrier(Node* c) const { return c; }

  // Support for macro expanded GC barriers
  virtual void register_potential_barrier_node(Node* node) const { }
  virtual void unregister_potential_barrier_node(Node* node) const { }
  virtual void eliminate_gc_barrier(PhaseMacroExpand* macro, Node* node) const { }
  virtual void enqueue_useful_gc_barrier(Unique_Node_List &worklist, Node* node) const {}
  virtual void eliminate_useless_gc_barriers(Unique_Node_List &useful) const {}
  virtual void add_users_to_worklist(Unique_Node_List* worklist) const {}

  // Allow barrier sets to have shared state that is preserved across a compilation unit.
  // This could for example comprise macro nodes to be expanded during macro expansion.
  virtual void* create_barrier_state(Arena* comp_arena) const { return NULL; }
  // If the BarrierSetC2 state has kept macro nodes in its compilation unit state to be
  // expanded later, then now is the time to do so.
  virtual bool expand_macro_nodes(PhaseMacroExpand* macro) const { return false; }
  // Debug-time verification hook; post_parse distinguishes the verification pass
  // run right after parsing from later passes.
  virtual void verify_gc_barriers(bool post_parse) const {}
};
|
|
216 |
|
|
217 |
#endif // SHARE_GC_SHARED_C2_BARRIERSETC2_HPP
|