774 // Check control edge of raw loads |
775 // Check control edge of raw loads |
775 assert( ctl != NULL || C->get_alias_index(adr_type) != Compile::AliasIdxRaw || |
776 assert( ctl != NULL || C->get_alias_index(adr_type) != Compile::AliasIdxRaw || |
776 // oop will be recorded in oop map if load crosses safepoint |
777 // oop will be recorded in oop map if load crosses safepoint |
777 rt->isa_oopptr() || is_immutable_value(adr), |
778 rt->isa_oopptr() || is_immutable_value(adr), |
778 "raw memory operations should have control edge"); |
779 "raw memory operations should have control edge"); |
|
780 LoadNode* load = NULL; |
779 switch (bt) { |
781 switch (bt) { |
780 case T_BOOLEAN: return new LoadUBNode(ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); |
782 case T_BOOLEAN: load = new LoadUBNode(ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break; |
781 case T_BYTE: return new LoadBNode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); |
783 case T_BYTE: load = new LoadBNode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break; |
782 case T_INT: return new LoadINode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); |
784 case T_INT: load = new LoadINode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break; |
783 case T_CHAR: return new LoadUSNode(ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); |
785 case T_CHAR: load = new LoadUSNode(ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break; |
784 case T_SHORT: return new LoadSNode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); |
786 case T_SHORT: load = new LoadSNode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break; |
785 case T_LONG: return new LoadLNode (ctl, mem, adr, adr_type, rt->is_long(), mo, control_dependency); |
787 case T_LONG: load = new LoadLNode (ctl, mem, adr, adr_type, rt->is_long(), mo, control_dependency); break; |
786 case T_FLOAT: return new LoadFNode (ctl, mem, adr, adr_type, rt, mo, control_dependency); |
788 case T_FLOAT: load = new LoadFNode (ctl, mem, adr, adr_type, rt, mo, control_dependency); break; |
787 case T_DOUBLE: return new LoadDNode (ctl, mem, adr, adr_type, rt, mo, control_dependency); |
789 case T_DOUBLE: load = new LoadDNode (ctl, mem, adr, adr_type, rt, mo, control_dependency); break; |
788 case T_ADDRESS: return new LoadPNode (ctl, mem, adr, adr_type, rt->is_ptr(), mo, control_dependency); |
790 case T_ADDRESS: load = new LoadPNode (ctl, mem, adr, adr_type, rt->is_ptr(), mo, control_dependency); break; |
789 case T_OBJECT: |
791 case T_OBJECT: |
790 #ifdef _LP64 |
792 #ifdef _LP64 |
791 if (adr->bottom_type()->is_ptr_to_narrowoop()) { |
793 if (adr->bottom_type()->is_ptr_to_narrowoop()) { |
792 Node* load = gvn.transform(new LoadNNode(ctl, mem, adr, adr_type, rt->make_narrowoop(), mo, control_dependency)); |
794 load = new LoadNNode(ctl, mem, adr, adr_type, rt->make_narrowoop(), mo, control_dependency); |
793 return new DecodeNNode(load, load->bottom_type()->make_ptr()); |
|
794 } else |
795 } else |
795 #endif |
796 #endif |
796 { |
797 { |
797 assert(!adr->bottom_type()->is_ptr_to_narrowoop() && !adr->bottom_type()->is_ptr_to_narrowklass(), "should have got back a narrow oop"); |
798 assert(!adr->bottom_type()->is_ptr_to_narrowoop() && !adr->bottom_type()->is_ptr_to_narrowklass(), "should have got back a narrow oop"); |
798 return new LoadPNode(ctl, mem, adr, adr_type, rt->is_oopptr(), mo, control_dependency); |
799 load = new LoadPNode(ctl, mem, adr, adr_type, rt->is_oopptr(), mo, control_dependency); |
799 } |
800 } |
800 } |
801 break; |
801 ShouldNotReachHere(); |
802 } |
802 return (LoadNode*)NULL; |
803 assert(load != NULL, "LoadNode should have been created"); |
803 } |
804 if (unaligned) { |
804 |
805 load->set_unaligned_access(); |
805 LoadLNode* LoadLNode::make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, const Type* rt, MemOrd mo, ControlDependency control_dependency) { |
806 } |
|
807 if (mismatched) { |
|
808 load->set_mismatched_access(); |
|
809 } |
|
810 if (load->Opcode() == Op_LoadN) { |
|
811 Node* ld = gvn.transform(load); |
|
812 return new DecodeNNode(ld, ld->bottom_type()->make_ptr()); |
|
813 } |
|
814 |
|
815 return load; |
|
816 } |
|
817 |
|
818 LoadLNode* LoadLNode::make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, const Type* rt, MemOrd mo, |
|
819 ControlDependency control_dependency, bool unaligned, bool mismatched) { |
806 bool require_atomic = true; |
820 bool require_atomic = true; |
807 return new LoadLNode(ctl, mem, adr, adr_type, rt->is_long(), mo, control_dependency, require_atomic); |
821 LoadLNode* load = new LoadLNode(ctl, mem, adr, adr_type, rt->is_long(), mo, control_dependency, require_atomic); |
808 } |
822 if (unaligned) { |
809 |
823 load->set_unaligned_access(); |
810 LoadDNode* LoadDNode::make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, const Type* rt, MemOrd mo, ControlDependency control_dependency) { |
824 } |
|
825 if (mismatched) { |
|
826 load->set_mismatched_access(); |
|
827 } |
|
828 return load; |
|
829 } |
|
830 |
|
831 LoadDNode* LoadDNode::make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, const Type* rt, MemOrd mo, |
|
832 ControlDependency control_dependency, bool unaligned, bool mismatched) { |
811 bool require_atomic = true; |
833 bool require_atomic = true; |
812 return new LoadDNode(ctl, mem, adr, adr_type, rt, mo, control_dependency, require_atomic); |
834 LoadDNode* load = new LoadDNode(ctl, mem, adr, adr_type, rt, mo, control_dependency, require_atomic); |
|
835 if (unaligned) { |
|
836 load->set_unaligned_access(); |
|
837 } |
|
838 if (mismatched) { |
|
839 load->set_mismatched_access(); |
|
840 } |
|
841 return load; |
813 } |
842 } |



//------------------------------hash-------------------------------------------