177 assert(vf->is_compiled_frame(), "Wrong frame type"); |
188 assert(vf->is_compiled_frame(), "Wrong frame type"); |
178 chunk->push(compiledVFrame::cast(vf)); |
189 chunk->push(compiledVFrame::cast(vf)); |
179 |
190 |
180 bool realloc_failures = false; |
191 bool realloc_failures = false; |
181 |
192 |
182 #ifdef COMPILER2 |
193 #if defined(COMPILER2) || INCLUDE_JVMCI |
183 // Reallocate the non-escaping objects and restore their fields. Then |
194 // Reallocate the non-escaping objects and restore their fields. Then |
184 // relock objects if synchronization on them was eliminated. |
195 // relock objects if synchronization on them was eliminated. |
|
196 #ifndef INCLUDE_JVMCI |
185 if (DoEscapeAnalysis || EliminateNestedLocks) { |
197 if (DoEscapeAnalysis || EliminateNestedLocks) { |
186 if (EliminateAllocations) { |
198 if (EliminateAllocations) { |
|
199 #endif // INCLUDE_JVMCI |
187 assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames"); |
200 assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames"); |
188 GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects(); |
201 GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects(); |
189 |
202 |
190 // The flag return_oop() indicates call sites which return oop |
203 // The flag return_oop() indicates call sites which return oop |
191 // in compiled code. Such sites include java method calls, |
204 // in compiled code. Such sites include java method calls, |
224 } |
237 } |
225 if (save_oop_result) { |
238 if (save_oop_result) { |
226 // Restore result. |
239 // Restore result. |
227 deoptee.set_saved_oop_result(&map, return_value()); |
240 deoptee.set_saved_oop_result(&map, return_value()); |
228 } |
241 } |
|
242 #ifndef INCLUDE_JVMCI |
229 } |
243 } |
230 if (EliminateLocks) { |
244 if (EliminateLocks) { |
|
245 #endif // INCLUDE_JVMCI |
231 #ifndef PRODUCT |
246 #ifndef PRODUCT |
232 bool first = true; |
247 bool first = true; |
233 #endif |
248 #endif |
234 for (int i = 0; i < chunk->length(); i++) { |
249 for (int i = 0; i < chunk->length(); i++) { |
235 compiledVFrame* cvf = chunk->at(i); |
250 compiledVFrame* cvf = chunk->at(i); |
236 assert (cvf->scope() != NULL,"expect only compiled java frames"); |
251 assert (cvf->scope() != NULL,"expect only compiled java frames"); |
237 GrowableArray<MonitorInfo*>* monitors = cvf->monitors(); |
252 GrowableArray<MonitorInfo*>* monitors = cvf->monitors(); |
238 if (monitors->is_nonempty()) { |
253 if (monitors->is_nonempty()) { |
239 relock_objects(monitors, thread, realloc_failures); |
254 relock_objects(monitors, thread, realloc_failures); |
240 #ifndef PRODUCT |
255 #ifndef PRODUCT |
241 if (TraceDeoptimization) { |
256 if (PrintDeoptimizationDetails) { |
242 ttyLocker ttyl; |
257 ttyLocker ttyl; |
243 for (int j = 0; j < monitors->length(); j++) { |
258 for (int j = 0; j < monitors->length(); j++) { |
244 MonitorInfo* mi = monitors->at(j); |
259 MonitorInfo* mi = monitors->at(j); |
245 if (mi->eliminated()) { |
260 if (mi->eliminated()) { |
246 if (first) { |
261 if (first) { |
766 thread->set_pending_exception(pending_exception(), exception_file, exception_line); |
788 thread->set_pending_exception(pending_exception(), exception_file, exception_line); |
767 } |
789 } |
768 |
790 |
769 return failures; |
791 return failures; |
770 } |
792 } |
771 |
|
772 // This assumes that the fields are stored in ObjectValue in the same order |
|
773 // they are yielded by do_nonstatic_fields. |
|
774 class FieldReassigner: public FieldClosure { |
|
775 frame* _fr; |
|
776 RegisterMap* _reg_map; |
|
777 ObjectValue* _sv; |
|
778 InstanceKlass* _ik; |
|
779 oop _obj; |
|
780 |
|
781 int _i; |
|
782 public: |
|
783 FieldReassigner(frame* fr, RegisterMap* reg_map, ObjectValue* sv, oop obj) : |
|
784 _fr(fr), _reg_map(reg_map), _sv(sv), _obj(obj), _i(0) {} |
|
785 |
|
786 int i() const { return _i; } |
|
787 |
|
788 |
|
789 void do_field(fieldDescriptor* fd) { |
|
790 intptr_t val; |
|
791 StackValue* value = |
|
792 StackValue::create_stack_value(_fr, _reg_map, _sv->field_at(i())); |
|
793 int offset = fd->offset(); |
|
794 switch (fd->field_type()) { |
|
795 case T_OBJECT: case T_ARRAY: |
|
796 assert(value->type() == T_OBJECT, "Agreement."); |
|
797 _obj->obj_field_put(offset, value->get_obj()()); |
|
798 break; |
|
799 |
|
800 case T_LONG: case T_DOUBLE: { |
|
801 assert(value->type() == T_INT, "Agreement."); |
|
802 StackValue* low = |
|
803 StackValue::create_stack_value(_fr, _reg_map, _sv->field_at(++_i)); |
|
804 #ifdef _LP64 |
|
805 jlong res = (jlong)low->get_int(); |
|
806 #else |
|
807 #ifdef SPARC |
|
808 // For SPARC we have to swap high and low words. |
|
809 jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int()); |
|
810 #else |
|
811 jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int()); |
|
812 #endif //SPARC |
|
813 #endif |
|
814 _obj->long_field_put(offset, res); |
|
815 break; |
|
816 } |
|
817 // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem. |
|
818 case T_INT: case T_FLOAT: // 4 bytes. |
|
819 assert(value->type() == T_INT, "Agreement."); |
|
820 val = value->get_int(); |
|
821 _obj->int_field_put(offset, (jint)*((jint*)&val)); |
|
822 break; |
|
823 |
|
824 case T_SHORT: case T_CHAR: // 2 bytes |
|
825 assert(value->type() == T_INT, "Agreement."); |
|
826 val = value->get_int(); |
|
827 _obj->short_field_put(offset, (jshort)*((jint*)&val)); |
|
828 break; |
|
829 |
|
830 case T_BOOLEAN: case T_BYTE: // 1 byte |
|
831 assert(value->type() == T_INT, "Agreement."); |
|
832 val = value->get_int(); |
|
833 _obj->bool_field_put(offset, (jboolean)*((jint*)&val)); |
|
834 break; |
|
835 |
|
836 default: |
|
837 ShouldNotReachHere(); |
|
838 } |
|
839 _i++; |
|
840 } |
|
841 }; |
|
842 |
793 |
843 // restore elements of an eliminated type array |
794 // restore elements of an eliminated type array |
844 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) { |
795 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) { |
845 int index = 0; |
796 int index = 0; |
846 intptr_t val; |
797 intptr_t val; |
865 obj->long_at_put(index, res); |
816 obj->long_at_put(index, res); |
866 break; |
817 break; |
867 } |
818 } |
868 |
819 |
869 // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem. |
820 // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem. |
870 case T_INT: case T_FLOAT: // 4 bytes. |
821 case T_INT: case T_FLOAT: { // 4 bytes. |
871 assert(value->type() == T_INT, "Agreement."); |
822 assert(value->type() == T_INT, "Agreement."); |
872 val = value->get_int(); |
823 bool big_value = false; |
873 obj->int_at_put(index, (jint)*((jint*)&val)); |
824 if (i + 1 < sv->field_size() && type == T_INT) { |
|
825 if (sv->field_at(i)->is_location()) { |
|
826 Location::Type type = ((LocationValue*) sv->field_at(i))->location().type(); |
|
827 if (type == Location::dbl || type == Location::lng) { |
|
828 big_value = true; |
|
829 } |
|
830 } else if (sv->field_at(i)->is_constant_int()) { |
|
831 ScopeValue* next_scope_field = sv->field_at(i + 1); |
|
832 if (next_scope_field->is_constant_long() || next_scope_field->is_constant_double()) { |
|
833 big_value = true; |
|
834 } |
|
835 } |
|
836 } |
|
837 |
|
838 if (big_value) { |
|
839 StackValue* low = StackValue::create_stack_value(fr, reg_map, sv->field_at(++i)); |
|
840 #ifdef _LP64 |
|
841 jlong res = (jlong)low->get_int(); |
|
842 #else |
|
843 #ifdef SPARC |
|
844 // For SPARC we have to swap high and low words. |
|
845 jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int()); |
|
846 #else |
|
847 jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int()); |
|
848 #endif //SPARC |
|
849 #endif |
|
850 obj->int_at_put(index, (jint)*((jint*)&res)); |
|
851 obj->int_at_put(++index, (jint)*(((jint*)&res) + 1)); |
|
852 } else { |
|
853 val = value->get_int(); |
|
854 obj->int_at_put(index, (jint)*((jint*)&val)); |
|
855 } |
874 break; |
856 break; |
|
857 } |
875 |
858 |
876 case T_SHORT: case T_CHAR: // 2 bytes |
859 case T_SHORT: case T_CHAR: // 2 bytes |
877 assert(value->type() == T_INT, "Agreement."); |
860 assert(value->type() == T_INT, "Agreement."); |
878 val = value->get_int(); |
861 val = value->get_int(); |
879 obj->short_at_put(index, (jshort)*((jint*)&val)); |
862 obj->short_at_put(index, (jshort)*((jint*)&val)); |
900 assert(value->type() == T_OBJECT, "object element expected"); |
883 assert(value->type() == T_OBJECT, "object element expected"); |
901 obj->obj_at_put(i, value->get_obj()()); |
884 obj->obj_at_put(i, value->get_obj()()); |
902 } |
885 } |
903 } |
886 } |
904 |
887 |
|
888 class ReassignedField { |
|
889 public: |
|
890 int _offset; |
|
891 BasicType _type; |
|
892 public: |
|
893 ReassignedField() { |
|
894 _offset = 0; |
|
895 _type = T_ILLEGAL; |
|
896 } |
|
897 }; |
|
898 |
|
899 int compare(ReassignedField* left, ReassignedField* right) { |
|
900 return left->_offset - right->_offset; |
|
901 } |
|
902 |
|
903 // Restore fields of an eliminated instance object using the same field order |
|
904 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true) |
|
905 static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) { |
|
906 if (klass->superklass() != NULL) { |
|
907 svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal); |
|
908 } |
|
909 |
|
910 GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>(); |
|
911 for (AllFieldStream fs(klass); !fs.done(); fs.next()) { |
|
912 if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) { |
|
913 ReassignedField field; |
|
914 field._offset = fs.offset(); |
|
915 field._type = FieldType::basic_type(fs.signature()); |
|
916 fields->append(field); |
|
917 } |
|
918 } |
|
919 fields->sort(compare); |
|
920 for (int i = 0; i < fields->length(); i++) { |
|
921 intptr_t val; |
|
922 ScopeValue* scope_field = sv->field_at(svIndex); |
|
923 StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field); |
|
924 int offset = fields->at(i)._offset; |
|
925 BasicType type = fields->at(i)._type; |
|
926 switch (type) { |
|
927 case T_OBJECT: case T_ARRAY: |
|
928 assert(value->type() == T_OBJECT, "Agreement."); |
|
929 obj->obj_field_put(offset, value->get_obj()()); |
|
930 break; |
|
931 |
|
932 // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem. |
|
933 case T_INT: case T_FLOAT: { // 4 bytes. |
|
934 assert(value->type() == T_INT, "Agreement."); |
|
935 bool big_value = false; |
|
936 if (i+1 < fields->length() && fields->at(i+1)._type == T_INT) { |
|
937 if (scope_field->is_location()) { |
|
938 Location::Type type = ((LocationValue*) scope_field)->location().type(); |
|
939 if (type == Location::dbl || type == Location::lng) { |
|
940 big_value = true; |
|
941 } |
|
942 } |
|
943 if (scope_field->is_constant_int()) { |
|
944 ScopeValue* next_scope_field = sv->field_at(svIndex + 1); |
|
945 if (next_scope_field->is_constant_long() || next_scope_field->is_constant_double()) { |
|
946 big_value = true; |
|
947 } |
|
948 } |
|
949 } |
|
950 |
|
951 if (big_value) { |
|
952 i++; |
|
953 assert(i < fields->length(), "second T_INT field needed"); |
|
954 assert(fields->at(i)._type == T_INT, "T_INT field needed"); |
|
955 } else { |
|
956 val = value->get_int(); |
|
957 obj->int_field_put(offset, (jint)*((jint*)&val)); |
|
958 break; |
|
959 } |
|
960 } |
|
961 /* no break */ |
|
962 |
|
963 case T_LONG: case T_DOUBLE: { |
|
964 assert(value->type() == T_INT, "Agreement."); |
|
965 StackValue* low = StackValue::create_stack_value(fr, reg_map, sv->field_at(++svIndex)); |
|
966 #ifdef _LP64 |
|
967 jlong res = (jlong)low->get_int(); |
|
968 #else |
|
969 #ifdef SPARC |
|
970 // For SPARC we have to swap high and low words. |
|
971 jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int()); |
|
972 #else |
|
973 jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int()); |
|
974 #endif //SPARC |
|
975 #endif |
|
976 obj->long_field_put(offset, res); |
|
977 break; |
|
978 } |
|
979 |
|
980 case T_SHORT: case T_CHAR: // 2 bytes |
|
981 assert(value->type() == T_INT, "Agreement."); |
|
982 val = value->get_int(); |
|
983 obj->short_field_put(offset, (jshort)*((jint*)&val)); |
|
984 break; |
|
985 |
|
986 case T_BOOLEAN: case T_BYTE: // 1 byte |
|
987 assert(value->type() == T_INT, "Agreement."); |
|
988 val = value->get_int(); |
|
989 obj->bool_field_put(offset, (jboolean)*((jint*)&val)); |
|
990 break; |
|
991 |
|
992 default: |
|
993 ShouldNotReachHere(); |
|
994 } |
|
995 svIndex++; |
|
996 } |
|
997 return svIndex; |
|
998 } |
905 |
999 |
906 // restore fields of all eliminated objects and arrays |
1000 // restore fields of all eliminated objects and arrays |
907 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures) { |
1001 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) { |
908 for (int i = 0; i < objects->length(); i++) { |
1002 for (int i = 0; i < objects->length(); i++) { |
909 ObjectValue* sv = (ObjectValue*) objects->at(i); |
1003 ObjectValue* sv = (ObjectValue*) objects->at(i); |
910 KlassHandle k(java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()())); |
1004 KlassHandle k(java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()())); |
911 Handle obj = sv->value(); |
1005 Handle obj = sv->value(); |
912 assert(obj.not_null() || realloc_failures, "reallocation was missed"); |
1006 assert(obj.not_null() || realloc_failures, "reallocation was missed"); |
|
1007 if (PrintDeoptimizationDetails) { |
|
1008 tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string()); |
|
1009 } |
913 if (obj.is_null()) { |
1010 if (obj.is_null()) { |
914 continue; |
1011 continue; |
915 } |
1012 } |
916 |
1013 |
917 if (k->oop_is_instance()) { |
1014 if (k->oop_is_instance()) { |
918 InstanceKlass* ik = InstanceKlass::cast(k()); |
1015 InstanceKlass* ik = InstanceKlass::cast(k()); |
919 FieldReassigner reassign(fr, reg_map, sv, obj()); |
1016 reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal); |
920 ik->do_nonstatic_fields(&reassign); |
|
921 } else if (k->oop_is_typeArray()) { |
1017 } else if (k->oop_is_typeArray()) { |
922 TypeArrayKlass* ak = TypeArrayKlass::cast(k()); |
1018 TypeArrayKlass* ak = TypeArrayKlass::cast(k()); |
923 reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type()); |
1019 reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type()); |
924 } else if (k->oop_is_objArray()) { |
1020 } else if (k->oop_is_objArray()) { |
925 reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj()); |
1021 reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj()); |
1148 } |
1244 } |
1149 BiasedLocking::revoke_at_safepoint(objects_to_revoke); |
1245 BiasedLocking::revoke_at_safepoint(objects_to_revoke); |
1150 } |
1246 } |
1151 |
1247 |
1152 |
1248 |
1153 void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr) { |
1249 void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) { |
1154 assert(fr.can_be_deoptimized(), "checking frame type"); |
1250 assert(fr.can_be_deoptimized(), "checking frame type"); |
1155 |
1251 |
1156 gather_statistics(Reason_constraint, Action_none, Bytecodes::_illegal); |
1252 gather_statistics(reason, Action_none, Bytecodes::_illegal); |
1157 |
1253 |
1158 // Patch the nmethod so that when execution returns to it we will |
1254 if (LogCompilation && xtty != NULL) { |
|
1255 nmethod* nm = fr.cb()->as_nmethod_or_null(); |
|
1256 assert(nm != NULL, "only compiled methods can deopt"); |
|
1257 |
|
1258 ttyLocker ttyl; |
|
1259 xtty->begin_head("deoptimized thread='" UINTX_FORMAT "'", thread->osthread()->thread_id()); |
|
1260 nm->log_identity(xtty); |
|
1261 xtty->end_head(); |
|
1262 for (ScopeDesc* sd = nm->scope_desc_at(fr.pc()); ; sd = sd->sender()) { |
|
1263 xtty->begin_elem("jvms bci='%d'", sd->bci()); |
|
1264 xtty->method(sd->method()); |
|
1265 xtty->end_elem(); |
|
1266 if (sd->is_top()) break; |
|
1267 } |
|
1268 xtty->tail("deoptimized"); |
|
1269 } |
|
1270 |
|
1271 // Patch the compiled method so that when execution returns to it we will |
1159 // deopt the execution state and return to the interpreter. |
1272 // deopt the execution state and return to the interpreter. |
1160 fr.deoptimize(thread); |
1273 fr.deoptimize(thread); |
1161 } |
1274 } |
1162 |
1275 |
1163 void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map) { |
1276 void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map) { |
|
1277 deoptimize(thread, fr, map, Reason_constraint); |
|
1278 } |
|
1279 |
|
1280 void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map, DeoptReason reason) { |
1164 // Deoptimize only if the frame comes from compile code. |
1281 // Deoptimize only if the frame comes from compile code. |
1165 // Do not deoptimize the frame which is already patched |
1282 // Do not deoptimize the frame which is already patched |
1166 // during the execution of the loops below. |
1283 // during the execution of the loops below. |
1167 if (!fr.is_compiled_frame() || fr.is_deoptimized_frame()) { |
1284 if (!fr.is_compiled_frame() || fr.is_deoptimized_frame()) { |
1168 return; |
1285 return; |
1170 ResourceMark rm; |
1287 ResourceMark rm; |
1171 DeoptimizationMarker dm; |
1288 DeoptimizationMarker dm; |
1172 if (UseBiasedLocking) { |
1289 if (UseBiasedLocking) { |
1173 revoke_biases_of_monitors(thread, fr, map); |
1290 revoke_biases_of_monitors(thread, fr, map); |
1174 } |
1291 } |
1175 deoptimize_single_frame(thread, fr); |
1292 deoptimize_single_frame(thread, fr, reason); |
1176 |
1293 |
1177 } |
1294 } |
1178 |
1295 |
1179 |
1296 |
1180 void Deoptimization::deoptimize_frame_internal(JavaThread* thread, intptr_t* id) { |
1297 void Deoptimization::deoptimize_frame_internal(JavaThread* thread, intptr_t* id, DeoptReason reason) { |
1181 assert(thread == Thread::current() || SafepointSynchronize::is_at_safepoint(), |
1298 assert(thread == Thread::current() || SafepointSynchronize::is_at_safepoint(), |
1182 "can only deoptimize other thread at a safepoint"); |
1299 "can only deoptimize other thread at a safepoint"); |
1183 // Compute frame and register map based on thread and sp. |
1300 // Compute frame and register map based on thread and sp. |
1184 RegisterMap reg_map(thread, UseBiasedLocking); |
1301 RegisterMap reg_map(thread, UseBiasedLocking); |
1185 frame fr = thread->last_frame(); |
1302 frame fr = thread->last_frame(); |
1186 while (fr.id() != id) { |
1303 while (fr.id() != id) { |
1187 fr = fr.sender(®_map); |
1304 fr = fr.sender(®_map); |
1188 } |
1305 } |
1189 deoptimize(thread, fr, ®_map); |
1306 deoptimize(thread, fr, ®_map, reason); |
1190 } |
1307 } |
1191 |
1308 |
|
1309 |
|
1310 void Deoptimization::deoptimize_frame(JavaThread* thread, intptr_t* id, DeoptReason reason) { |
|
1311 if (thread == Thread::current()) { |
|
1312 Deoptimization::deoptimize_frame_internal(thread, id, reason); |
|
1313 } else { |
|
1314 VM_DeoptimizeFrame deopt(thread, id, reason); |
|
1315 VMThread::execute(&deopt); |
|
1316 } |
|
1317 } |
1192 |
1318 |
1193 void Deoptimization::deoptimize_frame(JavaThread* thread, intptr_t* id) { |
1319 void Deoptimization::deoptimize_frame(JavaThread* thread, intptr_t* id) { |
1194 if (thread == Thread::current()) { |
1320 deoptimize_frame(thread, id, Reason_constraint); |
1195 Deoptimization::deoptimize_frame_internal(thread, id); |
1321 } |
1196 } else { |
|
1197 VM_DeoptimizeFrame deopt(thread, id); |
|
1198 VMThread::execute(&deopt); |
|
1199 } |
|
1200 } |
|
1201 |
|
1202 |
1322 |
1203 // JVMTI PopFrame support |
1323 // JVMTI PopFrame support |
1204 JRT_LEAF(void, Deoptimization::popframe_preserve_args(JavaThread* thread, int bytes_to_save, void* start_address)) |
1324 JRT_LEAF(void, Deoptimization::popframe_preserve_args(JavaThread* thread, int bytes_to_save, void* start_address)) |
1205 { |
1325 { |
1206 thread->popframe_preserve_args(in_ByteSize(bytes_to_save), start_address); |
1326 thread->popframe_preserve_args(in_ByteSize(bytes_to_save), start_address); |
1286 // that can confuse an asynchronous stack walker. This counter is |
1406 // that can confuse an asynchronous stack walker. This counter is |
1287 // decremented at the end of unpack_frames(). |
1407 // decremented at the end of unpack_frames(). |
1288 thread->inc_in_deopt_handler(); |
1408 thread->inc_in_deopt_handler(); |
1289 |
1409 |
1290 // We need to update the map if we have biased locking. |
1410 // We need to update the map if we have biased locking. |
|
1411 #if INCLUDE_JVMCI |
|
1412 // JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid |
|
1413 RegisterMap reg_map(thread, true); |
|
1414 #else |
1291 RegisterMap reg_map(thread, UseBiasedLocking); |
1415 RegisterMap reg_map(thread, UseBiasedLocking); |
|
1416 #endif |
1292 frame stub_frame = thread->last_frame(); |
1417 frame stub_frame = thread->last_frame(); |
1293 frame fr = stub_frame.sender(®_map); |
1418 frame fr = stub_frame.sender(®_map); |
1294 // Make sure the calling nmethod is not getting deoptimized and removed |
1419 // Make sure the calling nmethod is not getting deoptimized and removed |
1295 // before we are done with it. |
1420 // before we are done with it. |
1296 nmethodLocker nl(fr.pc()); |
1421 nmethodLocker nl(fr.pc()); |
1297 |
1422 |
1298 // Log a message |
1423 // Log a message |
1299 Events::log(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT, |
1424 Events::log(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT, |
1300 trap_request, fr.pc()); |
1425 trap_request, fr.pc(), fr.pc() - fr.cb()->code_begin()); |
1301 |
1426 |
1302 { |
1427 { |
1303 ResourceMark rm; |
1428 ResourceMark rm; |
1304 |
1429 |
1305 // Revoke biases of any monitors in the frame to ensure we can migrate them |
1430 // Revoke biases of any monitors in the frame to ensure we can migrate them |
1306 revoke_biases_of_monitors(thread, fr, ®_map); |
1431 revoke_biases_of_monitors(thread, fr, ®_map); |
1307 |
1432 |
1308 DeoptReason reason = trap_request_reason(trap_request); |
1433 DeoptReason reason = trap_request_reason(trap_request); |
1309 DeoptAction action = trap_request_action(trap_request); |
1434 DeoptAction action = trap_request_action(trap_request); |
|
1435 #if INCLUDE_JVMCI |
|
1436 int debug_id = trap_request_debug_id(trap_request); |
|
1437 #endif |
1310 jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1 |
1438 jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1 |
1311 |
1439 |
1312 vframe* vf = vframe::new_vframe(&fr, ®_map, thread); |
1440 vframe* vf = vframe::new_vframe(&fr, ®_map, thread); |
1313 compiledVFrame* cvf = compiledVFrame::cast(vf); |
1441 compiledVFrame* cvf = compiledVFrame::cast(vf); |
1314 |
1442 |
1315 nmethod* nm = cvf->code(); |
1443 nmethod* nm = cvf->code(); |
1316 |
1444 |
1317 ScopeDesc* trap_scope = cvf->scope(); |
1445 ScopeDesc* trap_scope = cvf->scope(); |
|
1446 |
|
1447 if (TraceDeoptimization) { |
|
1448 ttyLocker ttyl; |
|
1449 tty->print_cr(" bci=%d pc=" INTPTR_FORMAT ", relative_pc=%d, method=%s" JVMCI_ONLY(", debug_id=%d"), trap_scope->bci(), fr.pc(), fr.pc() - nm->code_begin(), trap_scope->method()->name_and_sig_as_C_string() |
|
1450 #if INCLUDE_JVMCI |
|
1451 , debug_id |
|
1452 #endif |
|
1453 ); |
|
1454 } |
|
1455 |
1318 methodHandle trap_method = trap_scope->method(); |
1456 methodHandle trap_method = trap_scope->method(); |
1319 int trap_bci = trap_scope->bci(); |
1457 int trap_bci = trap_scope->bci(); |
|
1458 #if INCLUDE_JVMCI |
|
1459 oop speculation = thread->pending_failed_speculation(); |
|
1460 if (nm->is_compiled_by_jvmci()) { |
|
1461 if (speculation != NULL) { |
|
1462 oop speculation_log = nm->speculation_log(); |
|
1463 if (speculation_log != NULL) { |
|
1464 if (TraceDeoptimization || TraceUncollectedSpeculations) { |
|
1465 if (SpeculationLog::lastFailed(speculation_log) != NULL) { |
|
1466 tty->print_cr("A speculation that was not collected by the compiler is being overwritten"); |
|
1467 } |
|
1468 } |
|
1469 if (TraceDeoptimization) { |
|
1470 tty->print_cr("Saving speculation to speculation log"); |
|
1471 } |
|
1472 SpeculationLog::set_lastFailed(speculation_log, speculation); |
|
1473 } else { |
|
1474 if (TraceDeoptimization) { |
|
1475 tty->print_cr("Speculation present but no speculation log"); |
|
1476 } |
|
1477 } |
|
1478 thread->set_pending_failed_speculation(NULL); |
|
1479 } else { |
|
1480 if (TraceDeoptimization) { |
|
1481 tty->print_cr("No speculation"); |
|
1482 } |
|
1483 } |
|
1484 } else { |
|
1485 assert(speculation == NULL, "There should not be a speculation for method compiled by non-JVMCI compilers"); |
|
1486 } |
|
1487 |
|
1488 if (trap_bci == SynchronizationEntryBCI) { |
|
1489 trap_bci = 0; |
|
1490 thread->set_pending_monitorenter(true); |
|
1491 } |
|
1492 |
|
1493 if (reason == Deoptimization::Reason_transfer_to_interpreter) { |
|
1494 thread->set_pending_transfer_to_interpreter(true); |
|
1495 } |
|
1496 #endif |
|
1497 |
1320 Bytecodes::Code trap_bc = trap_method->java_code_at(trap_bci); |
1498 Bytecodes::Code trap_bc = trap_method->java_code_at(trap_bci); |
|
1499 |
|
1500 if (trap_scope->rethrow_exception()) { |
|
1501 if (PrintDeoptimizationDetails) { |
|
1502 tty->print_cr("Exception to be rethrown in the interpreter for method %s::%s at bci %d", trap_method->method_holder()->name()->as_C_string(), trap_method->name()->as_C_string(), trap_bci); |
|
1503 } |
|
1504 GrowableArray<ScopeValue*>* expressions = trap_scope->expressions(); |
|
1505 guarantee(expressions != NULL, "must have exception to throw"); |
|
1506 ScopeValue* topOfStack = expressions->top(); |
|
1507 Handle topOfStackObj = StackValue::create_stack_value(&fr, ®_map, topOfStack)->get_obj(); |
|
1508 THREAD->set_pending_exception(topOfStackObj(), NULL, 0); |
|
1509 } |
1321 |
1510 |
1322 // Record this event in the histogram. |
1511 // Record this event in the histogram. |
1323 gather_statistics(reason, action, trap_bc); |
1512 gather_statistics(reason, action, trap_bc); |
1324 |
1513 |
1325 // Ensure that we can record deopt. history: |
1514 // Ensure that we can record deopt. history: |
1326 // Need MDO to record RTM code generation state. |
1515 // Need MDO to record RTM code generation state. |
1327 bool create_if_missing = ProfileTraps || UseCodeAging RTM_OPT_ONLY( || UseRTMLocking ); |
1516 bool create_if_missing = ProfileTraps || UseCodeAging RTM_OPT_ONLY( || UseRTMLocking ); |
1328 |
1517 |
|
1518 methodHandle profiled_method; |
|
1519 #if INCLUDE_JVMCI |
|
1520 if (nm->is_compiled_by_jvmci()) { |
|
1521 profiled_method = nm->method(); |
|
1522 } else { |
|
1523 profiled_method = trap_method; |
|
1524 } |
|
1525 #else |
|
1526 profiled_method = trap_method; |
|
1527 #endif |
|
1528 |
1329 MethodData* trap_mdo = |
1529 MethodData* trap_mdo = |
1330 get_method_data(thread, trap_method, create_if_missing); |
1530 get_method_data(thread, profiled_method, create_if_missing); |
1331 |
1531 |
1332 // Log a message |
1532 // Log a message |
1333 Events::log_deopt_message(thread, "Uncommon trap: reason=%s action=%s pc=" INTPTR_FORMAT " method=%s @ %d", |
1533 Events::log_deopt_message(thread, "Uncommon trap: reason=%s action=%s pc=" INTPTR_FORMAT " method=%s @ %d", |
1334 trap_reason_name(reason), trap_action_name(action), fr.pc(), |
1534 trap_reason_name(reason), trap_action_name(action), fr.pc(), |
1335 trap_method->name_and_sig_as_C_string(), trap_bci); |
1535 trap_method->name_and_sig_as_C_string(), trap_bci); |
1383 xtty->end_head(); |
1583 xtty->end_head(); |
1384 } |
1584 } |
1385 if (TraceDeoptimization) { // make noise on the tty |
1585 if (TraceDeoptimization) { // make noise on the tty |
1386 tty->print("Uncommon trap occurred in"); |
1586 tty->print("Uncommon trap occurred in"); |
1387 nm->method()->print_short_name(tty); |
1587 nm->method()->print_short_name(tty); |
1388 tty->print(" (@" INTPTR_FORMAT ") thread=" UINTX_FORMAT " reason=%s action=%s unloaded_class_index=%d", |
1588 tty->print(" compiler=%s compile_id=%d", nm->compiler() == NULL ? "" : nm->compiler()->name(), nm->compile_id()); |
|
1589 #if INCLUDE_JVMCI |
|
1590 oop installedCode = nm->jvmci_installed_code(); |
|
1591 if (installedCode != NULL) { |
|
1592 oop installedCodeName = NULL; |
|
1593 if (installedCode->is_a(InstalledCode::klass())) { |
|
1594 installedCodeName = InstalledCode::name(installedCode); |
|
1595 } |
|
1596 if (installedCodeName != NULL) { |
|
1597 tty->print(" (JVMCI: installedCodeName=%s) ", java_lang_String::as_utf8_string(installedCodeName)); |
|
1598 } else { |
|
1599 tty->print(" (JVMCI: installed code has no name) "); |
|
1600 } |
|
1601 } else if (nm->is_compiled_by_jvmci()) { |
|
1602 tty->print(" (JVMCI: no installed code) "); |
|
1603 } |
|
1604 #endif |
|
1605 tty->print(" (@" INTPTR_FORMAT ") thread=" UINTX_FORMAT " reason=%s action=%s unloaded_class_index=%d" JVMCI_ONLY(" debug_id=%d"), |
1389 fr.pc(), |
1606 fr.pc(), |
1390 os::current_thread_id(), |
1607 os::current_thread_id(), |
1391 trap_reason_name(reason), |
1608 trap_reason_name(reason), |
1392 trap_action_name(action), |
1609 trap_action_name(action), |
1393 unloaded_class_index); |
1610 unloaded_class_index |
|
1611 #if INCLUDE_JVMCI |
|
1612 , debug_id |
|
1613 #endif |
|
1614 ); |
1394 if (class_name != NULL) { |
1615 if (class_name != NULL) { |
1395 tty->print(unresolved ? " unresolved class: " : " symbol: "); |
1616 tty->print(unresolved ? " unresolved class: " : " symbol: "); |
1396 class_name->print_symbol_on(tty); |
1617 class_name->print_symbol_on(tty); |
1397 } |
1618 } |
1398 tty->cr(); |
1619 tty->cr(); |
1522 // to use the MDO to detect hot deoptimization points and control |
1743 // to use the MDO to detect hot deoptimization points and control |
1523 // aggressive optimization. |
1744 // aggressive optimization. |
1524 bool inc_recompile_count = false; |
1745 bool inc_recompile_count = false; |
1525 ProfileData* pdata = NULL; |
1746 ProfileData* pdata = NULL; |
1526 if (ProfileTraps && update_trap_state && trap_mdo != NULL) { |
1747 if (ProfileTraps && update_trap_state && trap_mdo != NULL) { |
1527 assert(trap_mdo == get_method_data(thread, trap_method, false), "sanity"); |
1748 assert(trap_mdo == get_method_data(thread, profiled_method, false), "sanity"); |
1528 uint this_trap_count = 0; |
1749 uint this_trap_count = 0; |
1529 bool maybe_prior_trap = false; |
1750 bool maybe_prior_trap = false; |
1530 bool maybe_prior_recompile = false; |
1751 bool maybe_prior_recompile = false; |
1531 pdata = query_update_method_data(trap_mdo, trap_bci, reason, |
1752 pdata = query_update_method_data(trap_mdo, trap_bci, reason, true, |
|
1753 #if INCLUDE_JVMCI |
|
1754 nm->is_compiled_by_jvmci() && nm->is_osr_method(), |
|
1755 #endif |
1532 nm->method(), |
1756 nm->method(), |
1533 //outputs: |
1757 //outputs: |
1534 this_trap_count, |
1758 this_trap_count, |
1535 maybe_prior_trap, |
1759 maybe_prior_trap, |
1536 maybe_prior_recompile); |
1760 maybe_prior_recompile); |
1658 |
1882 |
1659 ProfileData* |
1883 ProfileData* |
1660 Deoptimization::query_update_method_data(MethodData* trap_mdo, |
1884 Deoptimization::query_update_method_data(MethodData* trap_mdo, |
1661 int trap_bci, |
1885 int trap_bci, |
1662 Deoptimization::DeoptReason reason, |
1886 Deoptimization::DeoptReason reason, |
|
1887 bool update_total_trap_count, |
|
1888 #if INCLUDE_JVMCI |
|
1889 bool is_osr, |
|
1890 #endif |
1663 Method* compiled_method, |
1891 Method* compiled_method, |
1664 //outputs: |
1892 //outputs: |
1665 uint& ret_this_trap_count, |
1893 uint& ret_this_trap_count, |
1666 bool& ret_maybe_prior_trap, |
1894 bool& ret_maybe_prior_trap, |
1667 bool& ret_maybe_prior_recompile) { |
1895 bool& ret_maybe_prior_recompile) { |
1668 uint prior_trap_count = trap_mdo->trap_count(reason); |
1896 bool maybe_prior_trap = false; |
1669 uint this_trap_count = trap_mdo->inc_trap_count(reason); |
1897 bool maybe_prior_recompile = false; |
1670 |
1898 uint this_trap_count = 0; |
1671 // If the runtime cannot find a place to store trap history, |
1899 if (update_total_trap_count) { |
1672 // it is estimated based on the general condition of the method. |
1900 uint idx = reason; |
1673 // If the method has ever been recompiled, or has ever incurred |
1901 #if INCLUDE_JVMCI |
1674 // a trap with the present reason, then this BCI is assumed |
1902 if (is_osr) { |
1675 // (pessimistically) to be the culprit. |
1903 idx += Reason_LIMIT; |
1676 bool maybe_prior_trap = (prior_trap_count != 0); |
1904 } |
1677 bool maybe_prior_recompile = (trap_mdo->decompile_count() != 0); |
1905 #endif |
|
1906 uint prior_trap_count = trap_mdo->trap_count(idx); |
|
1907 this_trap_count = trap_mdo->inc_trap_count(idx); |
|
1908 |
|
1909 // If the runtime cannot find a place to store trap history, |
|
1910 // it is estimated based on the general condition of the method. |
|
1911 // If the method has ever been recompiled, or has ever incurred |
|
1912 // a trap with the present reason, then this BCI is assumed |
|
1913 // (pessimistically) to be the culprit. |
|
1914 maybe_prior_trap = (prior_trap_count != 0); |
|
1915 maybe_prior_recompile = (trap_mdo->decompile_count() != 0); |
|
1916 } |
1678 ProfileData* pdata = NULL; |
1917 ProfileData* pdata = NULL; |
1679 |
1918 |
1680 |
1919 |
1681 // For reasons which are recorded per bytecode, we check per-BCI data. |
1920 // For reasons which are recorded per bytecode, we check per-BCI data. |
1682 DeoptReason per_bc_reason = reason_recorded_per_bytecode_if_any(reason); |
1921 DeoptReason per_bc_reason = reason_recorded_per_bytecode_if_any(reason); |
|
1922 assert(per_bc_reason != Reason_none || update_total_trap_count, "must be"); |
1683 if (per_bc_reason != Reason_none) { |
1923 if (per_bc_reason != Reason_none) { |
1684 // Find the profile data for this BCI. If there isn't one, |
1924 // Find the profile data for this BCI. If there isn't one, |
1685 // try to allocate one from the MDO's set of spares. |
1925 // try to allocate one from the MDO's set of spares. |
1686 // This will let us detect a repeated trap at this point. |
1926 // This will let us detect a repeated trap at this point. |
1687 pdata = trap_mdo->allocate_bci_to_data(trap_bci, reason_is_speculate(reason) ? compiled_method : NULL); |
1927 pdata = trap_mdo->allocate_bci_to_data(trap_bci, reason_is_speculate(reason) ? compiled_method : NULL); |
1730 // Ignored outputs: |
1970 // Ignored outputs: |
1731 uint ignore_this_trap_count; |
1971 uint ignore_this_trap_count; |
1732 bool ignore_maybe_prior_trap; |
1972 bool ignore_maybe_prior_trap; |
1733 bool ignore_maybe_prior_recompile; |
1973 bool ignore_maybe_prior_recompile; |
1734 assert(!reason_is_speculate(reason), "reason speculate only used by compiler"); |
1974 assert(!reason_is_speculate(reason), "reason speculate only used by compiler"); |
|
1975 // JVMCI uses the total counts to determine if deoptimizations are happening too frequently -> do not adjust total counts |
|
1976 bool update_total_counts = JVMCI_ONLY(false) NOT_JVMCI(true); |
1735 query_update_method_data(trap_mdo, trap_bci, |
1977 query_update_method_data(trap_mdo, trap_bci, |
1736 (DeoptReason)reason, |
1978 (DeoptReason)reason, |
|
1979 update_total_counts, |
|
1980 #if INCLUDE_JVMCI |
|
1981 false, |
|
1982 #endif |
1737 NULL, |
1983 NULL, |
1738 ignore_this_trap_count, |
1984 ignore_this_trap_count, |
1739 ignore_maybe_prior_trap, |
1985 ignore_maybe_prior_trap, |
1740 ignore_maybe_prior_recompile); |
1986 ignore_maybe_prior_recompile); |
1741 } |
1987 } |
1742 |
1988 |
1743 Deoptimization::UnrollBlock* Deoptimization::uncommon_trap(JavaThread* thread, jint trap_request) { |
1989 Deoptimization::UnrollBlock* Deoptimization::uncommon_trap(JavaThread* thread, jint trap_request) { |
1744 |
1990 if (TraceDeoptimization) { |
|
1991 tty->print("Uncommon trap "); |
|
1992 } |
1745 // Still in Java no safepoints |
1993 // Still in Java no safepoints |
1746 { |
1994 { |
1747 // This enters VM and may safepoint |
1995 // This enters VM and may safepoint |
1748 uncommon_trap_inner(thread, trap_request); |
1996 uncommon_trap_inner(thread, trap_request); |
1749 } |
1997 } |
1903 const char* Deoptimization::format_trap_request(char* buf, size_t buflen, |
2158 const char* Deoptimization::format_trap_request(char* buf, size_t buflen, |
1904 int trap_request) { |
2159 int trap_request) { |
1905 jint unloaded_class_index = trap_request_index(trap_request); |
2160 jint unloaded_class_index = trap_request_index(trap_request); |
1906 const char* reason = trap_reason_name(trap_request_reason(trap_request)); |
2161 const char* reason = trap_reason_name(trap_request_reason(trap_request)); |
1907 const char* action = trap_action_name(trap_request_action(trap_request)); |
2162 const char* action = trap_action_name(trap_request_action(trap_request)); |
|
2163 #if INCLUDE_JVMCI |
|
2164 int debug_id = trap_request_debug_id(trap_request); |
|
2165 #endif |
1908 size_t len; |
2166 size_t len; |
1909 if (unloaded_class_index < 0) { |
2167 if (unloaded_class_index < 0) { |
1910 len = jio_snprintf(buf, buflen, "reason='%s' action='%s'", |
2168 len = jio_snprintf(buf, buflen, "reason='%s' action='%s'" JVMCI_ONLY(" debug_id='%d'"), |
1911 reason, action); |
2169 reason, action |
|
2170 #if INCLUDE_JVMCI |
|
2171 ,debug_id |
|
2172 #endif |
|
2173 ); |
1912 } else { |
2174 } else { |
1913 len = jio_snprintf(buf, buflen, "reason='%s' action='%s' index='%d'", |
2175 len = jio_snprintf(buf, buflen, "reason='%s' action='%s' index='%d'" JVMCI_ONLY(" debug_id='%d'"), |
1914 reason, action, unloaded_class_index); |
2176 reason, action, unloaded_class_index |
|
2177 #if INCLUDE_JVMCI |
|
2178 ,debug_id |
|
2179 #endif |
|
2180 ); |
1915 } |
2181 } |
1916 if (len >= buflen) |
2182 if (len >= buflen) |
1917 buf[buflen-1] = '\0'; |
2183 buf[buflen-1] = '\0'; |
1918 return buf; |
2184 return buf; |
1919 } |
2185 } |