45 #include "utilities/pair.hpp" |
45 #include "utilities/pair.hpp" |
46 #include "utilities/resourceHash.hpp" |
46 #include "utilities/resourceHash.hpp" |
47 |
47 |
// Qualification state of a candidate method while walking the type
// hierarchy: QUALIFIED methods are still candidates for selection,
// DISQUALIFIED methods have been shadowed along some path.
enum QualifiedState { QUALIFIED, DISQUALIFIED };
49 |
49 |
50 // Because we use an iterative algorithm when iterating over the type |
|
51 // hierarchy, we can't use traditional scoped objects which automatically do |
|
52 // cleanup in the destructor when the scope is exited. PseudoScope (and |
|
53 // PseudoScopeMark) provides a similar functionality, but for when you want a |
|
54 // scoped object in non-stack memory (such as in resource memory, as we do |
|
55 // here). You've just got to remember to call 'destroy()' on the scope when |
|
56 // leaving it (and marks have to be explicitly added). |
|
// A cleanup action registered with a PseudoScope. Subclasses implement
// destroy(), which the owning scope invokes when it is destroyed.
class PseudoScopeMark : public ResourceObj {
 public:
  virtual void destroy() = 0;
};
|
61 |
|
// Holds a list of PseudoScopeMarks; calling destroy() runs each registered
// mark's cleanup. See the comment above PseudoScopeMark for why this exists
// instead of an ordinary stack-scoped RAII object.
class PseudoScope : public ResourceObj {
 private:
  GrowableArray<PseudoScopeMark*> _marks;
 public:

  // Reinterpret the opaque per-node data pointer carried by the hierarchy
  // visitor back into a PseudoScope.
  static PseudoScope* cast(void* data) {
    return static_cast<PseudoScope*>(data);
  }

  // Register a mark whose destroy() runs when this scope is destroyed.
  void add_mark(PseudoScopeMark* psm) {
    _marks.append(psm);
  }

  // Run every registered mark's cleanup, in insertion order.
  void destroy() {
    for (int i = 0; i < _marks.length(); ++i) {
      _marks.at(i)->destroy();
    }
  }
};
|
81 |
|
82 static void print_slot(outputStream* str, Symbol* name, Symbol* signature) { |
50 static void print_slot(outputStream* str, Symbol* name, Symbol* signature) { |
83 str->print("%s%s", name->as_C_string(), signature->as_C_string()); |
51 str->print("%s%s", name->as_C_string(), signature->as_C_string()); |
84 } |
52 } |
85 |
53 |
86 static void print_method(outputStream* str, Method* mo, bool with_class=true) { |
54 static void print_method(outputStream* str, Method* mo, bool with_class=true) { |
106 * encountered (visit()), The algorithm can cut-off further exploration of a |
74 * encountered (visit()), The algorithm can cut-off further exploration of a |
107 * particular branch by returning 'false' from a visit() call. |
75 * particular branch by returning 'false' from a visit() call. |
108 * |
76 * |
109 * The ALGO class, must provide a visit() method, which each of which will be |
77 * The ALGO class, must provide a visit() method, which each of which will be |
110 * called once for each node in the inheritance tree during the iteration. In |
78 * called once for each node in the inheritance tree during the iteration. In |
111 * addition, it can provide a memory block via new_node_data(InstanceKlass*), |
79 * addition, it can provide a memory block via new_node_data(), which it can |
112 * which it can use for node-specific storage (and access via the |
80 * use for node-specific storage (and access via the current_data() and |
113 * current_data() and data_at_depth(int) methods). |
81 * data_at_depth(int) methods). |
114 * |
82 * |
115 * Bare minimum needed to be an ALGO class: |
83 * Bare minimum needed to be an ALGO class: |
116 * class Algo : public HierarchyVisitor<Algo> { |
84 * class Algo : public HierarchyVisitor<Algo> { |
117 * void* new_node_data(InstanceKlass* cls) { return NULL; } |
85 * void* new_node_data() { return NULL; } |
118 * void free_node_data(void* data) { return; } |
86 * void free_node_data(void* data) { return; } |
119 * bool visit() { return true; } |
87 * bool visit() { return true; } |
120 * }; |
88 * }; |
121 */ |
89 */ |
122 template <class ALGO> |
90 template <class ALGO> |
132 |
100 |
133 Node(InstanceKlass* cls, void* data, bool visit_super) |
101 Node(InstanceKlass* cls, void* data, bool visit_super) |
134 : _class(cls), _super_was_visited(!visit_super), |
102 : _class(cls), _super_was_visited(!visit_super), |
135 _interface_index(0), _algorithm_data(data) {} |
103 _interface_index(0), _algorithm_data(data) {} |
136 |
104 |
|
105 void update(InstanceKlass* cls, void* data, bool visit_super) { |
|
106 _class = cls; |
|
107 _super_was_visited = !visit_super; |
|
108 _interface_index = 0; |
|
109 _algorithm_data = data; |
|
110 } |
137 int number_of_interfaces() { return _class->local_interfaces()->length(); } |
111 int number_of_interfaces() { return _class->local_interfaces()->length(); } |
138 int interface_index() { return _interface_index; } |
112 int interface_index() { return _interface_index; } |
139 void set_super_visited() { _super_was_visited = true; } |
113 void set_super_visited() { _super_was_visited = true; } |
140 void increment_visited_interface() { ++_interface_index; } |
114 void increment_visited_interface() { ++_interface_index; } |
141 void set_all_interfaces_visited() { |
115 void set_all_interfaces_visited() { |
153 return interface_at(interface_index()); |
127 return interface_at(interface_index()); |
154 } |
128 } |
155 }; |
129 }; |
156 |
130 |
157 bool _visited_Object; |
131 bool _visited_Object; |
|
132 |
158 GrowableArray<Node*> _path; |
133 GrowableArray<Node*> _path; |
|
134 GrowableArray<Node*> _free_nodes; |
159 |
135 |
160 Node* current_top() const { return _path.top(); } |
136 Node* current_top() const { return _path.top(); } |
161 bool has_more_nodes() const { return !_path.is_empty(); } |
137 bool has_more_nodes() const { return _path.length() > 0; } |
162 void push(InstanceKlass* cls, void* data) { |
138 void push(InstanceKlass* cls, ALGO* algo) { |
163 assert(cls != NULL, "Requires a valid instance class"); |
139 assert(cls != NULL, "Requires a valid instance class"); |
164 Node* node = new Node(cls, data, has_super(cls)); |
|
165 if (cls == SystemDictionary::Object_klass()) { |
140 if (cls == SystemDictionary::Object_klass()) { |
166 _visited_Object = true; |
141 _visited_Object = true; |
167 } |
142 } |
|
143 void* data = algo->new_node_data(); |
|
144 Node* node; |
|
145 if (_free_nodes.is_empty()) { // Add a new node |
|
146 node = new Node(cls, data, has_super(cls)); |
|
147 } else { // Reuse existing node and data |
|
148 node = _free_nodes.pop(); |
|
149 node->update(cls, data, has_super(cls)); |
|
150 } |
168 _path.push(node); |
151 _path.push(node); |
169 } |
152 } |
170 void pop() { _path.pop(); } |
153 void pop() { |
|
154 Node* node = _path.pop(); |
|
155 // Make the node available for reuse |
|
156 _free_nodes.push(node); |
|
157 } |
171 |
158 |
172 // Since the starting point can be an interface, we must ensure we catch |
159 // Since the starting point can be an interface, we must ensure we catch |
173 // j.l.Object as the super once in those cases. The _visited_Object flag |
160 // j.l.Object as the super once in those cases. The _visited_Object flag |
174 // only ensures we don't then repeatedly enqueue Object for each interface |
161 // only ensures we don't then repeatedly enqueue Object for each interface |
175 // in the class hierarchy. |
162 // in the class hierarchy. |
197 return n == NULL ? NULL : n->_algorithm_data; |
189 return n == NULL ? NULL : n->_algorithm_data; |
198 } |
190 } |
199 void* current_data() { return data_at_depth(0); } |
191 void* current_data() { return data_at_depth(0); } |
200 |
192 |
201 public: |
193 public: |
|
  // Start with an empty traversal path; Object has not been seen yet.
  HierarchyVisitor() : _visited_Object(false), _path() {}
202 |
195 |
203 void run(InstanceKlass* root) { |
196 void run(InstanceKlass* root) { |
204 ALGO* algo = static_cast<ALGO*>(this); |
197 ALGO* algo = static_cast<ALGO*>(this); |
205 |
198 |
206 void* algo_data = algo->new_node_data(root); |
199 push(root, algo); |
207 push(root, algo_data); |
|
208 bool top_needs_visit = true; |
200 bool top_needs_visit = true; |
209 |
|
210 do { |
201 do { |
211 Node* top = current_top(); |
202 Node* top = current_top(); |
212 if (top_needs_visit) { |
203 if (top_needs_visit) { |
213 if (algo->visit() == false) { |
204 if (algo->visit() == false) { |
214 // algorithm does not want to continue along this path. Arrange |
205 // algorithm does not want to continue along this path. Arrange |
314 // hierarchy, they are tagged with a qualification state. The qualification |
304 // hierarchy, they are tagged with a qualification state. The qualification |
315 // state for an erased method is set to disqualified if there exists a path |
305 // state for an erased method is set to disqualified if there exists a path |
316 // from the root of hierarchy to the method that contains an interleaving |
306 // from the root of hierarchy to the method that contains an interleaving |
317 // erased method defined in an interface. |
307 // erased method defined in an interface. |
318 |
308 |
|
// One member of a MethodFamily: a candidate method together with its
// current qualification state in the hierarchy walk.
class MethodState {
 public:
  Method* _method;
  QualifiedState _state;

  MethodState() : _method(NULL), _state(DISQUALIFIED) {}
  MethodState(Method* method, QualifiedState state) : _method(method), _state(state) {}
};
|
317 |
319 class MethodFamily : public ResourceObj { |
318 class MethodFamily : public ResourceObj { |
320 private: |
319 private: |
321 |
320 |
322 GrowableArray<Pair<Method*,QualifiedState> > _members; |
321 GrowableArray<MethodState> _members; |
323 ResourceHashtable<Method*, int> _member_index; |
|
324 |
322 |
325 Method* _selected_target; // Filled in later, if a unique target exists |
323 Method* _selected_target; // Filled in later, if a unique target exists |
326 Symbol* _exception_message; // If no unique target is found |
324 Symbol* _exception_message; // If no unique target is found |
327 Symbol* _exception_name; // If no unique target is found |
325 Symbol* _exception_name; // If no unique target is found |
328 |
326 |
329 bool contains_method(Method* method) { |
327 MethodState* find_method(Method* method) { |
330 int* lookup = _member_index.get(method); |
328 for (int i = 0; i < _members.length(); i++) { |
331 return lookup != NULL; |
329 if (_members.at(i)._method == method) { |
|
330 return &_members.at(i); |
|
331 } |
|
332 } |
|
333 return NULL; |
332 } |
334 } |
333 |
335 |
334 void add_method(Method* method, QualifiedState state) { |
336 void add_method(Method* method, QualifiedState state) { |
335 Pair<Method*,QualifiedState> entry(method, state); |
337 MethodState method_state(method, state); |
336 _member_index.put(method, _members.length()); |
338 _members.append(method_state); |
337 _members.append(entry); |
|
338 } |
|
339 |
|
340 void disqualify_method(Method* method) { |
|
341 int* index = _member_index.get(method); |
|
342 guarantee(index != NULL && *index >= 0 && *index < _members.length(), "bad index"); |
|
343 _members.at(*index).second = DISQUALIFIED; |
|
344 } |
339 } |
345 |
340 |
346 Symbol* generate_no_defaults_message(TRAPS) const; |
341 Symbol* generate_no_defaults_message(TRAPS) const; |
347 Symbol* generate_method_message(Symbol *klass_name, Method* method, TRAPS) const; |
342 Symbol* generate_method_message(Symbol *klass_name, Method* method, TRAPS) const; |
348 Symbol* generate_conflicts_message(GrowableArray<Method*>* methods, TRAPS) const; |
343 Symbol* generate_conflicts_message(GrowableArray<MethodState>* methods, TRAPS) const; |
349 |
344 |
350 public: |
345 public: |
351 |
346 |
352 MethodFamily() |
347 MethodFamily() |
353 : _selected_target(NULL), _exception_message(NULL), _exception_name(NULL) {} |
348 : _selected_target(NULL), _exception_message(NULL), _exception_name(NULL) {} |
356 if (_selected_target == NULL && !m->is_overpass()) { |
351 if (_selected_target == NULL && !m->is_overpass()) { |
357 _selected_target = m; |
352 _selected_target = m; |
358 } |
353 } |
359 } |
354 } |
360 |
355 |
361 void record_qualified_method(Method* m) { |
356 void record_method(Method* m, QualifiedState state) { |
362 // If the method already exists in the set as qualified, this operation is |
357 // If not in the set, add it. If it's already in the set, then leave it |
363 // redundant. If it already exists as disqualified, then we leave it as |
358 // as is if state is qualified, or set it to disqualified if state is |
364 // disqualfied. Thus we only add to the set if it's not already in the |
359 // disqualified. |
365 // set. |
360 MethodState* method_state = find_method(m); |
366 if (!contains_method(m)) { |
361 if (method_state == NULL) { |
367 add_method(m, QUALIFIED); |
362 add_method(m, state); |
368 } |
363 } else if (state == DISQUALIFIED) { |
369 } |
364 method_state->_state = DISQUALIFIED; |
370 |
|
371 void record_disqualified_method(Method* m) { |
|
372 // If not in the set, add it as disqualified. If it's already in the set, |
|
373 // then set the state to disqualified no matter what the previous state was. |
|
374 if (!contains_method(m)) { |
|
375 add_method(m, DISQUALIFIED); |
|
376 } else { |
|
377 disqualify_method(m); |
|
378 } |
365 } |
379 } |
366 } |
380 |
367 |
381 bool has_target() const { return _selected_target != NULL; } |
368 bool has_target() const { return _selected_target != NULL; } |
382 bool throws_exception() { return _exception_message != NULL; } |
369 bool throws_exception() { return _exception_message != NULL; } |
384 Method* get_selected_target() { return _selected_target; } |
371 Method* get_selected_target() { return _selected_target; } |
385 Symbol* get_exception_message() { return _exception_message; } |
372 Symbol* get_exception_message() { return _exception_message; } |
386 Symbol* get_exception_name() { return _exception_name; } |
373 Symbol* get_exception_name() { return _exception_name; } |
387 |
374 |
388 // Either sets the target or the exception error message |
375 // Either sets the target or the exception error message |
389 void determine_target(InstanceKlass* root, TRAPS) { |
376 void determine_target_or_set_exception_message(InstanceKlass* root, TRAPS) { |
390 if (has_target() || throws_exception()) { |
377 if (has_target() || throws_exception()) { |
391 return; |
378 return; |
392 } |
379 } |
393 |
380 |
394 // Qualified methods are maximally-specific methods |
381 // Qualified methods are maximally-specific methods |
395 // These include public, instance concrete (=default) and abstract methods |
382 // These include public, instance concrete (=default) and abstract methods |
396 GrowableArray<Method*> qualified_methods; |
|
397 int num_defaults = 0; |
383 int num_defaults = 0; |
398 int default_index = -1; |
384 int default_index = -1; |
399 int qualified_index = -1; |
385 for (int i = 0; i < _members.length(); i++) { |
400 for (int i = 0; i < _members.length(); ++i) { |
386 MethodState &member = _members.at(i); |
401 Pair<Method*,QualifiedState> entry = _members.at(i); |
387 if (member._state == QUALIFIED) { |
402 if (entry.second == QUALIFIED) { |
388 if (member._method->is_default_method()) { |
403 qualified_methods.append(entry.first); |
|
404 qualified_index++; |
|
405 if (entry.first->is_default_method()) { |
|
406 num_defaults++; |
389 num_defaults++; |
407 default_index = qualified_index; |
390 default_index = i; |
408 |
|
409 } |
391 } |
410 } |
392 } |
411 } |
393 } |
412 |
394 |
|
395 if (num_defaults == 1) { |
|
396 assert(_members.at(default_index)._state == QUALIFIED, ""); |
|
397 _selected_target = _members.at(default_index)._method; |
|
398 } else { |
|
399 generate_and_set_exception_message(root, num_defaults, default_index, CHECK); |
|
400 } |
|
401 } |
|
402 |
|
403 void generate_and_set_exception_message(InstanceKlass* root, int num_defaults, int default_index, TRAPS) { |
|
404 assert(num_defaults != 1, "invariant - should've been handled calling method"); |
|
405 |
|
406 GrowableArray<Method*> qualified_methods; |
|
407 for (int i = 0; i < _members.length(); i++) { |
|
408 MethodState& member = _members.at(i); |
|
409 if (member._state == QUALIFIED) { |
|
410 qualified_methods.push(member._method); |
|
411 } |
|
412 } |
413 if (num_defaults == 0) { |
413 if (num_defaults == 0) { |
414 // If the root klass has a static method with matching name and signature |
414 // If the root klass has a static method with matching name and signature |
415 // then do not generate an overpass method because it will hide the |
415 // then do not generate an overpass method because it will hide the |
416 // static method during resolution. |
416 // static method during resolution. |
417 if (qualified_methods.length() == 0) { |
417 if (qualified_methods.length() == 0) { |
419 } else { |
419 } else { |
420 assert(root != NULL, "Null root class"); |
420 assert(root != NULL, "Null root class"); |
421 _exception_message = generate_method_message(root->name(), qualified_methods.at(0), CHECK); |
421 _exception_message = generate_method_message(root->name(), qualified_methods.at(0), CHECK); |
422 } |
422 } |
423 _exception_name = vmSymbols::java_lang_AbstractMethodError(); |
423 _exception_name = vmSymbols::java_lang_AbstractMethodError(); |
424 |
424 } else { |
425 // If only one qualified method is default, select that |
425 _exception_message = generate_conflicts_message(&_members,CHECK); |
426 } else if (num_defaults == 1) { |
|
427 _selected_target = qualified_methods.at(default_index); |
|
428 |
|
429 } else if (num_defaults > 1) { |
|
430 _exception_message = generate_conflicts_message(&qualified_methods,CHECK); |
|
431 _exception_name = vmSymbols::java_lang_IncompatibleClassChangeError(); |
426 _exception_name = vmSymbols::java_lang_IncompatibleClassChangeError(); |
432 LogTarget(Debug, defaultmethods) lt; |
427 LogTarget(Debug, defaultmethods) lt; |
433 if (lt.is_enabled()) { |
428 if (lt.is_enabled()) { |
434 LogStream ls(lt); |
429 LogStream ls(lt); |
435 _exception_message->print_value_on(&ls); |
430 _exception_message->print_value_on(&ls); |
473 ss.write((const char*)signature->bytes(), signature->utf8_length()); |
468 ss.write((const char*)signature->bytes(), signature->utf8_length()); |
474 ss.print(" is abstract"); |
469 ss.print(" is abstract"); |
475 return SymbolTable::new_symbol(ss.base(), (int)ss.size()); |
470 return SymbolTable::new_symbol(ss.base(), (int)ss.size()); |
476 } |
471 } |
477 |
472 |
478 Symbol* MethodFamily::generate_conflicts_message(GrowableArray<Method*>* methods, TRAPS) const { |
473 Symbol* MethodFamily::generate_conflicts_message(GrowableArray<MethodState>* methods, TRAPS) const { |
479 stringStream ss; |
474 stringStream ss; |
480 ss.print("Conflicting default methods:"); |
475 ss.print("Conflicting default methods:"); |
481 for (int i = 0; i < methods->length(); ++i) { |
476 for (int i = 0; i < methods->length(); ++i) { |
482 Method* method = methods->at(i); |
477 Method *method = methods->at(i)._method; |
483 Symbol* klass = method->klass_name(); |
478 Symbol *klass = method->klass_name(); |
484 Symbol* name = method->name(); |
479 Symbol *name = method->name(); |
485 ss.print(" "); |
480 ss.print(" "); |
486 ss.write((const char*)klass->bytes(), klass->utf8_length()); |
481 ss.write((const char*) klass->bytes(), klass->utf8_length()); |
487 ss.print("."); |
482 ss.print("."); |
488 ss.write((const char*)name->bytes(), name->utf8_length()); |
483 ss.write((const char*) name->bytes(), name->utf8_length()); |
489 } |
484 } |
490 return SymbolTable::new_symbol(ss.base(), (int)ss.size()); |
485 return SymbolTable::new_symbol(ss.base(), (int)ss.size()); |
491 } |
486 } |
492 |
487 |
493 |
488 |
494 class StateRestorer; |
489 class StateRestorerScope; |
495 |
490 |
496 // StatefulMethodFamily is a wrapper around a MethodFamily that maintains the |
491 // StatefulMethodFamily is a wrapper around a MethodFamily that maintains the |
497 // qualification state during hierarchy visitation, and applies that state |
492 // qualification state during hierarchy visitation, and applies that state |
498 // when adding members to the MethodFamily |
493 // when adding members to the MethodFamily |
499 class StatefulMethodFamily : public ResourceObj { |
494 class StatefulMethodFamily : public ResourceObj { |
515 |
510 |
516 void set_target_if_empty(Method* m) { _method_family.set_target_if_empty(m); } |
511 void set_target_if_empty(Method* m) { _method_family.set_target_if_empty(m); } |
517 |
512 |
518 MethodFamily* get_method_family() { return &_method_family; } |
513 MethodFamily* get_method_family() { return &_method_family; } |
519 |
514 |
520 StateRestorer* record_method_and_dq_further(Method* mo); |
515 void record_method_and_dq_further(StateRestorerScope* scope, Method* mo); |
521 }; |
516 }; |
522 |
517 |
523 class StateRestorer : public PseudoScopeMark { |
518 // Because we use an iterative algorithm when iterating over the type |
524 private: |
519 // hierarchy, we can't use traditional scoped objects which automatically do |
|
520 // cleanup in the destructor when the scope is exited. StateRestorerScope (and |
|
521 // StateRestorer) provides a similar functionality, but for when you want a |
|
522 // scoped object in non-stack memory (such as in resource memory, as we do |
|
523 // here). You've just got to remember to call 'restore_state()' on the scope when |
|
524 // leaving it (and marks have to be explicitly added). The scope is reusable after |
|
525 // 'restore_state()' has been called. |
|
526 class StateRestorer : public ResourceObj { |
|
527 public: |
525 StatefulMethodFamily* _method; |
528 StatefulMethodFamily* _method; |
526 QualifiedState _state_to_restore; |
529 QualifiedState _state_to_restore; |
|
530 |
|
531 StateRestorer() : _method(NULL), _state_to_restore(DISQUALIFIED) {} |
|
532 |
|
533 void restore_state() { _method->set_qualification_state(_state_to_restore); } |
|
534 }; |
|
535 |
|
536 class StateRestorerScope : public ResourceObj { |
|
537 private: |
|
538 GrowableArray<StateRestorer*> _marks; |
|
539 GrowableArray<StateRestorer*>* _free_list; // Shared between scopes |
527 public: |
540 public: |
528 StateRestorer(StatefulMethodFamily* dm, QualifiedState state) |
541 StateRestorerScope(GrowableArray<StateRestorer*>* free_list) : _marks(), _free_list(free_list) {} |
529 : _method(dm), _state_to_restore(state) {} |
542 |
530 ~StateRestorer() { destroy(); } |
543 static StateRestorerScope* cast(void* data) { |
531 void restore_state() { _method->set_qualification_state(_state_to_restore); } |
544 return static_cast<StateRestorerScope*>(data); |
532 virtual void destroy() { restore_state(); } |
545 } |
|
546 |
|
547 void mark(StatefulMethodFamily* family, QualifiedState qualification_state) { |
|
548 StateRestorer* restorer; |
|
549 if (!_free_list->is_empty()) { |
|
550 restorer = _free_list->pop(); |
|
551 } else { |
|
552 restorer = new StateRestorer(); |
|
553 } |
|
554 restorer->_method = family; |
|
555 restorer->_state_to_restore = qualification_state; |
|
556 _marks.append(restorer); |
|
557 } |
|
558 |
|
559 #ifdef ASSERT |
|
560 bool is_empty() { |
|
561 return _marks.is_empty(); |
|
562 } |
|
563 #endif |
|
564 |
|
565 void restore_state() { |
|
566 while(!_marks.is_empty()) { |
|
567 StateRestorer* restorer = _marks.pop(); |
|
568 restorer->restore_state(); |
|
569 _free_list->push(restorer); |
|
570 } |
|
571 } |
533 }; |
572 }; |
534 |
573 |
535 StateRestorer* StatefulMethodFamily::record_method_and_dq_further(Method* mo) { |
574 void StatefulMethodFamily::record_method_and_dq_further(StateRestorerScope* scope, Method* mo) { |
536 StateRestorer* mark = new StateRestorer(this, _qualification_state); |
575 scope->mark(this, _qualification_state); |
537 if (_qualification_state == QUALIFIED) { |
576 _method_family.record_method(mo, _qualification_state); |
538 _method_family.record_qualified_method(mo); |
577 |
539 } else { |
|
540 _method_family.record_disqualified_method(mo); |
|
541 } |
|
542 // Everything found "above"??? this method in the hierarchy walk is set to |
578 // Everything found "above"??? this method in the hierarchy walk is set to |
543 // disqualified |
579 // disqualified |
544 set_qualification_state(DISQUALIFIED); |
580 set_qualification_state(DISQUALIFIED); |
545 return mark; |
|
546 } |
581 } |
547 |
582 |
548 // Represents a location corresponding to a vtable slot for methods that |
583 // Represents a location corresponding to a vtable slot for methods that |
549 // neither the class nor any of it's ancestors provide an implementaion. |
584 // neither the class nor any of it's ancestors provide an implementaion. |
550 // Default methods may be present to fill this slot. |
585 // Default methods may be present to fill this slot. |
658 // Context data |
693 // Context data |
659 Symbol* _method_name; |
694 Symbol* _method_name; |
660 Symbol* _method_signature; |
695 Symbol* _method_signature; |
661 StatefulMethodFamily* _family; |
696 StatefulMethodFamily* _family; |
662 bool _cur_class_is_interface; |
697 bool _cur_class_is_interface; |
663 |
698 // Free lists, used as an optimization |
|
699 GrowableArray<StateRestorerScope*> _free_scopes; |
|
700 GrowableArray<StateRestorer*> _free_restorers; |
664 public: |
701 public: |
665 FindMethodsByErasedSig(Symbol* name, Symbol* signature, bool is_interf) : |
702 FindMethodsByErasedSig() : _free_scopes(6), _free_restorers(6) {}; |
666 _method_name(name), _method_signature(signature), _family(NULL), |
703 |
667 _cur_class_is_interface(is_interf) {} |
704 void prepare(Symbol* name, Symbol* signature, bool is_interf) { |
|
705 reset(); |
|
706 _method_name = name; |
|
707 _method_signature = signature; |
|
708 _family = NULL; |
|
709 _cur_class_is_interface = is_interf; |
|
710 } |
668 |
711 |
669 void get_discovered_family(MethodFamily** family) { |
712 void get_discovered_family(MethodFamily** family) { |
670 if (_family != NULL) { |
713 if (_family != NULL) { |
671 *family = _family->get_method_family(); |
714 *family = _family->get_method_family(); |
672 } else { |
715 } else { |
673 *family = NULL; |
716 *family = NULL; |
674 } |
717 } |
675 } |
718 } |
676 |
719 |
677 void* new_node_data(InstanceKlass* cls) { return new PseudoScope(); } |
720 void* new_node_data() { |
|
721 if (!_free_scopes.is_empty()) { |
|
722 StateRestorerScope* free_scope = _free_scopes.pop(); |
|
723 assert(free_scope->is_empty(), "StateRestorerScope::_marks array not empty"); |
|
724 return free_scope; |
|
725 } |
|
726 return new StateRestorerScope(&_free_restorers); |
|
727 } |
678 void free_node_data(void* node_data) { |
728 void free_node_data(void* node_data) { |
679 PseudoScope::cast(node_data)->destroy(); |
729 StateRestorerScope* scope = StateRestorerScope::cast(node_data); |
|
730 scope->restore_state(); |
|
731 // Reuse scopes |
|
732 _free_scopes.push(scope); |
680 } |
733 } |
681 |
734 |
682 // Find all methods on this hierarchy that match this |
735 // Find all methods on this hierarchy that match this |
683 // method's erased (name, signature) |
736 // method's erased (name, signature) |
684 bool visit() { |
737 bool visit() { |
685 PseudoScope* scope = PseudoScope::cast(current_data()); |
738 StateRestorerScope* scope = StateRestorerScope::cast(current_data()); |
686 InstanceKlass* iklass = current_class(); |
739 InstanceKlass* iklass = current_class(); |
687 |
740 |
688 Method* m = iklass->find_method(_method_name, _method_signature); |
741 Method* m = iklass->find_method(_method_name, _method_signature); |
689 // Private interface methods are not candidates for default methods. |
742 // Private interface methods are not candidates for default methods. |
690 // invokespecial to private interface methods doesn't use default method logic. |
743 // invokespecial to private interface methods doesn't use default method logic. |
700 if (_family == NULL) { |
753 if (_family == NULL) { |
701 _family = new StatefulMethodFamily(); |
754 _family = new StatefulMethodFamily(); |
702 } |
755 } |
703 |
756 |
704 if (iklass->is_interface()) { |
757 if (iklass->is_interface()) { |
705 StateRestorer* restorer = _family->record_method_and_dq_further(m); |
758 _family->record_method_and_dq_further(scope, m); |
706 scope->add_mark(restorer); |
|
707 } else { |
759 } else { |
708 // This is the rule that methods in classes "win" (bad word) over |
760 // This is the rule that methods in classes "win" (bad word) over |
709 // methods in interfaces. This works because of single inheritance. |
761 // methods in interfaces. This works because of single inheritance. |
710 // Private methods in classes do not "win", they will be found |
762 // Private methods in classes do not "win", they will be found |
711 // first on searching, but overriding for invokevirtual needs |
763 // first on searching, but overriding for invokevirtual needs |
722 |
774 |
723 static void create_defaults_and_exceptions( |
775 static void create_defaults_and_exceptions( |
724 GrowableArray<EmptyVtableSlot*>* slots, InstanceKlass* klass, TRAPS); |
776 GrowableArray<EmptyVtableSlot*>* slots, InstanceKlass* klass, TRAPS); |
725 |
777 |
726 static void generate_erased_defaults( |
778 static void generate_erased_defaults( |
727 InstanceKlass* klass, EmptyVtableSlot* slot, bool is_intf, TRAPS) { |
779 FindMethodsByErasedSig* visitor, |
728 |
780 InstanceKlass* klass, EmptyVtableSlot* slot, bool is_intf, TRAPS) { |
|
781 |
|
782 // the visitor needs to be initialized or re-initialized before use |
|
783 // - this facilitates reusing the same visitor instance on multiple |
|
784 // generation passes as an optimization |
|
785 visitor->prepare(slot->name(), slot->signature(), is_intf); |
729 // sets up a set of methods with the same exact erased signature |
786 // sets up a set of methods with the same exact erased signature |
730 FindMethodsByErasedSig visitor(slot->name(), slot->signature(), is_intf); |
787 visitor->run(klass); |
731 visitor.run(klass); |
|
732 |
788 |
733 MethodFamily* family; |
789 MethodFamily* family; |
734 visitor.get_discovered_family(&family); |
790 visitor->get_discovered_family(&family); |
735 if (family != NULL) { |
791 if (family != NULL) { |
736 family->determine_target(klass, CHECK); |
792 family->determine_target_or_set_exception_message(klass, CHECK); |
737 slot->bind_family(family); |
793 slot->bind_family(family); |
738 } |
794 } |
739 } |
795 } |
740 |
796 |
741 static void merge_in_new_methods(InstanceKlass* klass, |
797 static void merge_in_new_methods(InstanceKlass* klass, |
786 |
842 |
787 GrowableArray<EmptyVtableSlot*> empty_slots; |
843 GrowableArray<EmptyVtableSlot*> empty_slots; |
788 find_empty_vtable_slots(&empty_slots, klass, mirandas, CHECK); |
844 find_empty_vtable_slots(&empty_slots, klass, mirandas, CHECK); |
789 |
845 |
790 if (empty_slots.length() > 0) { |
846 if (empty_slots.length() > 0) { |
|
847 FindMethodsByErasedSig findMethodsByErasedSig; |
791 for (int i = 0; i < empty_slots.length(); ++i) { |
848 for (int i = 0; i < empty_slots.length(); ++i) { |
792 EmptyVtableSlot* slot = empty_slots.at(i); |
849 EmptyVtableSlot* slot = empty_slots.at(i); |
793 LogTarget(Debug, defaultmethods) lt; |
850 LogTarget(Debug, defaultmethods) lt; |
794 if (lt.is_enabled()) { |
851 if (lt.is_enabled()) { |
795 LogStream ls(lt); |
852 LogStream ls(lt); |
796 streamIndentor si(&ls, 2); |
853 streamIndentor si(&ls, 2); |
797 ls.indent().print("Looking for default methods for slot "); |
854 ls.indent().print("Looking for default methods for slot "); |
798 slot->print_on(&ls); |
855 slot->print_on(&ls); |
799 ls.cr(); |
856 ls.cr(); |
800 } |
857 } |
801 generate_erased_defaults(klass, slot, klass->is_interface(), CHECK); |
858 generate_erased_defaults(&findMethodsByErasedSig, klass, slot, klass->is_interface(), CHECK); |
802 } |
859 } |
803 log_debug(defaultmethods)("Creating defaults and overpasses..."); |
860 log_debug(defaultmethods)("Creating defaults and overpasses..."); |
804 create_defaults_and_exceptions(&empty_slots, klass, CHECK); |
861 create_defaults_and_exceptions(&empty_slots, klass, CHECK); |
805 } |
862 } |
806 log_debug(defaultmethods)("Default method processing complete"); |
863 log_debug(defaultmethods)("Default method processing complete"); |
896 |
953 |
897 GrowableArray<Method*> overpasses; |
954 GrowableArray<Method*> overpasses; |
898 GrowableArray<Method*> defaults; |
955 GrowableArray<Method*> defaults; |
899 BytecodeConstantPool bpool(klass->constants()); |
956 BytecodeConstantPool bpool(klass->constants()); |
900 |
957 |
|
958 BytecodeBuffer* buffer = NULL; // Lazily create a reusable buffer |
901 for (int i = 0; i < slots->length(); ++i) { |
959 for (int i = 0; i < slots->length(); ++i) { |
902 EmptyVtableSlot* slot = slots->at(i); |
960 EmptyVtableSlot* slot = slots->at(i); |
903 |
961 |
904 if (slot->is_bound()) { |
962 if (slot->is_bound()) { |
905 MethodFamily* method = slot->get_binding(); |
963 MethodFamily* method = slot->get_binding(); |
906 BytecodeBuffer buffer; |
|
907 |
964 |
908 LogTarget(Debug, defaultmethods) lt; |
965 LogTarget(Debug, defaultmethods) lt; |
909 if (lt.is_enabled()) { |
966 if (lt.is_enabled()) { |
910 ResourceMark rm(THREAD); |
967 ResourceMark rm(THREAD); |
911 LogStream ls(lt); |
968 LogStream ls(lt); |
924 if (selected->method_holder()->is_interface()) { |
981 if (selected->method_holder()->is_interface()) { |
925 assert(!selected->is_private(), "pushing private interface method as default"); |
982 assert(!selected->is_private(), "pushing private interface method as default"); |
926 defaults.push(selected); |
983 defaults.push(selected); |
927 } |
984 } |
928 } else if (method->throws_exception()) { |
985 } else if (method->throws_exception()) { |
929 int max_stack = assemble_method_error(&bpool, &buffer, |
986 if (buffer == NULL) { |
|
987 buffer = new BytecodeBuffer(); |
|
988 } else { |
|
989 buffer->clear(); |
|
990 } |
|
991 int max_stack = assemble_method_error(&bpool, buffer, |
930 method->get_exception_name(), method->get_exception_message(), CHECK); |
992 method->get_exception_name(), method->get_exception_message(), CHECK); |
931 AccessFlags flags = accessFlags_from( |
993 AccessFlags flags = accessFlags_from( |
932 JVM_ACC_PUBLIC | JVM_ACC_SYNTHETIC | JVM_ACC_BRIDGE); |
994 JVM_ACC_PUBLIC | JVM_ACC_SYNTHETIC | JVM_ACC_BRIDGE); |
933 Method* m = new_method(&bpool, &buffer, slot->name(), slot->signature(), |
995 Method* m = new_method(&bpool, buffer, slot->name(), slot->signature(), |
934 flags, max_stack, slot->size_of_parameters(), |
996 flags, max_stack, slot->size_of_parameters(), |
935 ConstMethod::OVERPASS, CHECK); |
997 ConstMethod::OVERPASS, CHECK); |
936 // We push to the methods list: |
998 // We push to the methods list: |
937 // overpass methods which are exception throwing methods |
999 // overpass methods which are exception throwing methods |
938 if (m != NULL) { |
1000 if (m != NULL) { |