@@ -23,10 +23,11 @@
  */
 
 #include "precompiled.hpp"
 #include "ci/bcEscapeAnalyzer.hpp"
 #include "compiler/compileLog.hpp"
+#include "gc/shared/barrierSet.hpp"
 #include "gc/shared/c2/barrierSetC2.hpp"
 #include "libadt/vectset.hpp"
 #include "memory/allocation.hpp"
 #include "memory/resourceArea.hpp"
 #include "opto/c2compiler.hpp"
@@ -37,16 +38,10 @@
 #include "opto/escape.hpp"
 #include "opto/phaseX.hpp"
 #include "opto/movenode.hpp"
 #include "opto/rootnode.hpp"
 #include "utilities/macros.hpp"
-#if INCLUDE_G1GC
-#include "gc/g1/g1ThreadLocalData.hpp"
-#endif // INCLUDE_G1GC
-#if INCLUDE_ZGC
-#include "gc/z/c2/zBarrierSetC2.hpp"
-#endif
 
 ConnectionGraph::ConnectionGraph(Compile * C, PhaseIterGVN *igvn) :
   _nodes(C->comp_arena(), C->unique(), C->unique(), NULL),
   _in_worklist(C->comp_arena()),
   _next_pidx(0),
@@ -386,10 +381,14 @@
   // point to phantom_obj.
   if (n_ptn == phantom_obj || n_ptn == null_obj)
     return; // Skip predefined nodes.
 
   int opcode = n->Opcode();
+  bool gc_handled = BarrierSet::barrier_set()->barrier_set_c2()->escape_add_to_con_graph(this, igvn, delayed_worklist, n, opcode);
+  if (gc_handled) {
+    return; // Ignore node if already handled by GC.
+  }
   switch (opcode) {
     case Op_AddP: {
       Node* base = get_addp_base(n);
       PointsToNode* ptn_base = ptnode_adr(base->_idx);
       // Field nodes are created for all field types. They are used in
@@ -489,17 +484,10 @@
       if (n->as_Proj()->_con == TypeFunc::Parms && n->in(0)->is_Call() &&
           n->in(0)->as_Call()->returns_pointer()) {
         add_local_var_and_edge(n, PointsToNode::NoEscape,
                                n->in(0), delayed_worklist);
       }
-#if INCLUDE_ZGC
-      else if (UseZGC) {
-        if (n->as_Proj()->_con == LoadBarrierNode::Oop && n->in(0)->is_LoadBarrier()) {
-          add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(0)->in(LoadBarrierNode::Oop), delayed_worklist);
-        }
-      }
-#endif
       break;
     }
     case Op_Rethrow: // Exception object escapes
     case Op_Return: {
       if (n->req() > TypeFunc::Parms &&
@@ -523,66 +511,11 @@
     case Op_StorePConditional:
     case Op_WeakCompareAndSwapP:
     case Op_WeakCompareAndSwapN:
     case Op_CompareAndSwapP:
     case Op_CompareAndSwapN: {
-      Node* adr = n->in(MemNode::Address);
-      const Type *adr_type = igvn->type(adr);
-      adr_type = adr_type->make_ptr();
-      if (adr_type == NULL) {
-        break; // skip dead nodes
-      }
-      if (   adr_type->isa_oopptr()
-          || (   (opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass)
-              && adr_type == TypeRawPtr::NOTNULL
-              && adr->in(AddPNode::Address)->is_Proj()
-              && adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
-        delayed_worklist->push(n); // Process it later.
-#ifdef ASSERT
-        assert(adr->is_AddP(), "expecting an AddP");
-        if (adr_type == TypeRawPtr::NOTNULL) {
-          // Verify a raw address for a store captured by Initialize node.
-          int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
-          assert(offs != Type::OffsetBot, "offset must be a constant");
-        }
-#endif
-      } else {
-        // Ignore copy the displaced header to the BoxNode (OSR compilation).
-        if (adr->is_BoxLock())
-          break;
-        // Stored value escapes in unsafe access.
-        if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
-          // Pointer stores in G1 barriers looks like unsafe access.
-          // Ignore such stores to be able scalar replace non-escaping
-          // allocations.
-#if INCLUDE_G1GC
-          if (UseG1GC && adr->is_AddP()) {
-            Node* base = get_addp_base(adr);
-            if (base->Opcode() == Op_LoadP &&
-                base->in(MemNode::Address)->is_AddP()) {
-              adr = base->in(MemNode::Address);
-              Node* tls = get_addp_base(adr);
-              if (tls->Opcode() == Op_ThreadLocal) {
-                int offs = (int)igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
-                if (offs == in_bytes(G1ThreadLocalData::satb_mark_queue_buffer_offset())) {
-                  break; // G1 pre barrier previous oop value store.
-                }
-                if (offs == in_bytes(G1ThreadLocalData::dirty_card_queue_buffer_offset())) {
-                  break; // G1 post barrier card address store.
-                }
-              }
-            }
-          }
-#endif
-          delayed_worklist->push(n); // Process unsafe access later.
-          break;
-        }
-#ifdef ASSERT
-        n->dump(1);
-        assert(false, "not unsafe or G1 barrier raw StoreP");
-#endif
-      }
+      add_to_congraph_unsafe_access(n, opcode, delayed_worklist);
       break;
     }
     case Op_AryEq:
     case Op_HasNegatives:
     case Op_StrComp:
@@ -631,10 +564,14 @@
   }
   assert(n->is_Store() || n->is_LoadStore() ||
          (n_ptn != NULL) && (n_ptn->ideal_node() != NULL),
          "node should be registered already");
   int opcode = n->Opcode();
+  bool gc_handled = BarrierSet::barrier_set()->barrier_set_c2()->escape_add_final_edges(this, _igvn, n, opcode);
+  if (gc_handled) {
+    return; // Ignore node if already handled by GC.
+  }
   switch (opcode) {
     case Op_AddP: {
       Node* base = get_addp_base(n);
       PointsToNode* ptn_base = ptnode_adr(base->_idx);
       assert(ptn_base != NULL, "field's base should be registered");
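Both connection-graph passes now give the active collector's BarrierSetC2 a chance to claim GC-specific nodes before the generic opcode switch runs, replacing the #if INCLUDE_ZGC and #if INCLUDE_G1GC special cases removed in this change. The fragment below is only a sketch of the hook shape implied by these two call sites; the parameter types and the conservative defaults are assumptions, not text taken from gc/shared/c2/barrierSetC2.hpp.

// Sketch only: shape inferred from the escape.cpp call sites above.
class BarrierSetC2 /* sketch */ {
public:
  // Returning true claims the node, so ConnectionGraph skips its generic switch.
  virtual bool escape_add_to_con_graph(ConnectionGraph* conn_graph, PhaseGVN* gvn,
                                       Unique_Node_List* delayed_worklist,
                                       Node* n, uint opcode) const {
    return false; // assumed default: not a GC-specific node
  }
  virtual bool escape_add_final_edges(ConnectionGraph* conn_graph, PhaseGVN* gvn,
                                      Node* n, uint opcode) const {
    return false; // assumed default
  }
};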
@@ -707,18 +640,10 @@
       if (n->as_Proj()->_con == TypeFunc::Parms && n->in(0)->is_Call() &&
           n->in(0)->as_Call()->returns_pointer()) {
         add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(0), NULL);
         break;
       }
-#if INCLUDE_ZGC
-      else if (UseZGC) {
-        if (n->as_Proj()->_con == LoadBarrierNode::Oop && n->in(0)->is_LoadBarrier()) {
-          add_local_var_and_edge(n, PointsToNode::NoEscape, n->in(0)->in(LoadBarrierNode::Oop), NULL);
-          break;
-        }
-      }
-#endif
       ELSE_FAIL("Op_Proj");
     }
     case Op_Rethrow: // Exception object escapes
     case Op_Return: {
       if (n->req() > TypeFunc::Parms &&
@@ -740,51 +665,11 @@
     case Op_CompareAndSwapN:
     case Op_WeakCompareAndSwapP:
     case Op_WeakCompareAndSwapN:
     case Op_GetAndSetP:
     case Op_GetAndSetN: {
-      Node* adr = n->in(MemNode::Address);
-      const Type *adr_type = _igvn->type(adr);
-      adr_type = adr_type->make_ptr();
-#ifdef ASSERT
-      if (adr_type == NULL) {
-        n->dump(1);
-        assert(adr_type != NULL, "dead node should not be on list");
-        break;
-      }
-#endif
-      if (opcode == Op_GetAndSetP || opcode == Op_GetAndSetN ||
-          opcode == Op_CompareAndExchangeN || opcode == Op_CompareAndExchangeP) {
-        add_local_var_and_edge(n, PointsToNode::NoEscape, adr, NULL);
-      }
-      if (   adr_type->isa_oopptr()
-          || (   (opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass)
-              && adr_type == TypeRawPtr::NOTNULL
-              && adr->in(AddPNode::Address)->is_Proj()
-              && adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
-        // Point Address to Value
-        PointsToNode* adr_ptn = ptnode_adr(adr->_idx);
-        assert(adr_ptn != NULL &&
-               adr_ptn->as_Field()->is_oop(), "node should be registered");
-        Node *val = n->in(MemNode::ValueIn);
-        PointsToNode* ptn = ptnode_adr(val->_idx);
-        assert(ptn != NULL, "node should be registered");
-        add_edge(adr_ptn, ptn);
-        break;
-      } else if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
-        // Stored value escapes in unsafe access.
-        Node *val = n->in(MemNode::ValueIn);
-        PointsToNode* ptn = ptnode_adr(val->_idx);
-        assert(ptn != NULL, "node should be registered");
-        set_escape_state(ptn, PointsToNode::GlobalEscape);
-        // Add edge to object for unsafe access with offset.
-        PointsToNode* adr_ptn = ptnode_adr(adr->_idx);
-        assert(adr_ptn != NULL, "node should be registered");
-        if (adr_ptn->is_Field()) {
-          assert(adr_ptn->as_Field()->is_oop(), "should be oop field");
-          add_edge(adr_ptn, ptn);
-        }
+      if (add_final_edges_unsafe_access(n, opcode)) {
         break;
       }
       ELSE_FAIL("Op_StoreP");
     }
     case Op_AryEq:
@@ -823,10 +708,97 @@
 #endif
       guarantee(false, "unknown node");
     }
   }
   return;
+}
+
+void ConnectionGraph::add_to_congraph_unsafe_access(Node* n, uint opcode, Unique_Node_List* delayed_worklist) {
+  Node* adr = n->in(MemNode::Address);
+  const Type* adr_type = _igvn->type(adr);
+  adr_type = adr_type->make_ptr();
+  if (adr_type == NULL) {
+    return; // skip dead nodes
+  }
+  if (adr_type->isa_oopptr()
+      || ((opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass)
+          && adr_type == TypeRawPtr::NOTNULL
+          && adr->in(AddPNode::Address)->is_Proj()
+          && adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
+    delayed_worklist->push(n); // Process it later.
+#ifdef ASSERT
+    assert (adr->is_AddP(), "expecting an AddP");
+    if (adr_type == TypeRawPtr::NOTNULL) {
+      // Verify a raw address for a store captured by Initialize node.
+      int offs = (int) _igvn->find_intptr_t_con(adr->in(AddPNode::Offset), Type::OffsetBot);
+      assert(offs != Type::OffsetBot, "offset must be a constant");
+    }
+#endif
+  } else {
+    // Ignore copy the displaced header to the BoxNode (OSR compilation).
+    if (adr->is_BoxLock()) {
+      return;
+    }
+    // Stored value escapes in unsafe access.
+    if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
+      delayed_worklist->push(n); // Process unsafe access later.
+      return;
+    }
+#ifdef ASSERT
+    n->dump(1);
+    assert(false, "not unsafe");
+#endif
+  }
+}
+
+bool ConnectionGraph::add_final_edges_unsafe_access(Node* n, uint opcode) {
+  Node* adr = n->in(MemNode::Address);
+  const Type *adr_type = _igvn->type(adr);
+  adr_type = adr_type->make_ptr();
+#ifdef ASSERT
+  if (adr_type == NULL) {
+    n->dump(1);
+    assert(adr_type != NULL, "dead node should not be on list");
+    return true;
+  }
+#endif
+
+  if (opcode == Op_GetAndSetP || opcode == Op_GetAndSetN ||
+      opcode == Op_CompareAndExchangeN || opcode == Op_CompareAndExchangeP) {
+    add_local_var_and_edge(n, PointsToNode::NoEscape, adr, NULL);
+  }
+
+  if (adr_type->isa_oopptr()
+      || ((opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass)
+          && adr_type == TypeRawPtr::NOTNULL
+          && adr->in(AddPNode::Address)->is_Proj()
+          && adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
+    // Point Address to Value
+    PointsToNode* adr_ptn = ptnode_adr(adr->_idx);
+    assert(adr_ptn != NULL &&
+           adr_ptn->as_Field()->is_oop(), "node should be registered");
+    Node* val = n->in(MemNode::ValueIn);
+    PointsToNode* ptn = ptnode_adr(val->_idx);
+    assert(ptn != NULL, "node should be registered");
+    add_edge(adr_ptn, ptn);
+    return true;
+  } else if ((opcode == Op_StoreP) && adr_type->isa_rawptr()) {
+    // Stored value escapes in unsafe access.
+    Node* val = n->in(MemNode::ValueIn);
+    PointsToNode* ptn = ptnode_adr(val->_idx);
+    assert(ptn != NULL, "node should be registered");
+    set_escape_state(ptn, PointsToNode::GlobalEscape);
+    // Add edge to object for unsafe access with offset.
+    PointsToNode* adr_ptn = ptnode_adr(adr->_idx);
+    assert(adr_ptn != NULL, "node should be registered");
+    if (adr_ptn->is_Field()) {
+      assert(adr_ptn->as_Field()->is_oop(), "should be oop field");
+      add_edge(adr_ptn, ptn);
+    }
+    return true;
+  }
+  return false;
 }
 
 void ConnectionGraph::add_call_node(CallNode* call) {
   assert(call->returns_pointer(), "only for call which returns pointer");
   uint call_idx = call->_idx;
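The two helpers above factor out the unsafe-access handling that previously appeared, in near-identical form, inline in both switch statements. Since they are defined as ConnectionGraph members, matching declarations presumably accompany this change in opto/escape.hpp; that header is not part of this section, so the lines below are only a sketch, with the signatures copied from the definitions above.

// Assumed companion declarations in class ConnectionGraph (opto/escape.hpp):
void add_to_congraph_unsafe_access(Node* n, uint opcode,
                                   Unique_Node_List* delayed_worklist);
bool add_final_edges_unsafe_access(Node* n, uint opcode);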
@@ -2098,11 +2070,12 @@
         bt = field->layout_type();
       } else {
         // Check for unsafe oop field access
         if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
             n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
-            n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
+            n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN) ||
+            BarrierSet::barrier_set()->barrier_set_c2()->escape_has_out_with_unsafe_object(n)) {
           bt = T_OBJECT;
           (*unsafe) = true;
         }
       }
     } else if (adr_type->isa_aryptr()) {
@@ -2116,11 +2089,12 @@
       }
     } else if (adr_type->isa_rawptr() || adr_type->isa_klassptr()) {
       // Allocation initialization, ThreadLocal field access, unsafe access
       if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
           n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
-          n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN)) {
+          n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN) ||
+          BarrierSet::barrier_set()->barrier_set_c2()->escape_has_out_with_unsafe_object(n)) {
         bt = T_OBJECT;
       }
     }
   }
   return (bt == T_OBJECT || bt == T_NARROWOOP || bt == T_ARRAY);
@@ -3090,10 +3065,11 @@
       if (!split_AddP(n, base)) continue; // wrong type from dead path
     } else if (n->is_Phi() ||
                n->is_CheckCastPP() ||
                n->is_EncodeP() ||
                n->is_DecodeN() ||
+               BarrierSet::barrier_set()->barrier_set_c2()->escape_is_barrier_node(n) ||
               (n->is_ConstraintCast() && n->Opcode() == Op_CastPP)) {
       if (visited.test_set(n->_idx)) {
         assert(n->is_Phi(), "loops only through Phi's");
         continue;  // already processed
       }
@@ -3160,10 +3136,11 @@
           alloc_worklist.append_if_missing(use);
         } else if (use->is_Phi() ||
                    use->is_CheckCastPP() ||
                    use->is_EncodeNarrowPtr() ||
                    use->is_DecodeNarrowPtr() ||
+                   BarrierSet::barrier_set()->barrier_set_c2()->escape_is_barrier_node(use) ||
                   (use->is_ConstraintCast() && use->Opcode() == Op_CastPP)) {
           alloc_worklist.append_if_missing(use);
 #ifdef ASSERT
         } else if (use->is_Mem()) {
           assert(use->in(MemNode::Address) != n, "EA: missing allocation reference path");
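The remaining hunks route two more queries through the barrier set: escape_has_out_with_unsafe_object() when deciding whether a raw or unsafe memory access touches an oop field, and escape_is_barrier_node() when choosing which value uses to follow on alloc_worklist while rewriting non-escaping allocations. Only the names and argument lists are visible in this section; the fragment below assumes conservative defaults and is not the actual gc/shared/c2/barrierSetC2.hpp text.

// Sketch only: names and arguments taken from the call sites above.
class BarrierSetC2 /* sketch, continued */ {
public:
  virtual bool escape_has_out_with_unsafe_object(Node* n) const {
    return false; // override if the collector's barrier nodes can consume oops
  }
  virtual bool escape_is_barrier_node(Node* n) const {
    return false; // override so escape analysis walks through GC barrier nodes
  }
};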