hotspot/src/share/vm/c1/c1_LIRGenerator.cpp
changeset 6745 a34ef8968a84
parent 6742 81ef369b8fc7
child 6751 b399fd234e47
comparing 6743:ef1795cd50a7 with 6745:a34ef8968a84
@@ -384,22 +384,30 @@
   }
 }
 
 
 CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ignore_xhandler) {
-  int index;
-  Value value;
-  for_each_stack_value(state, index, value) {
-    assert(value->subst() == value, "missed substition");
-    if (!value->is_pinned() && value->as_Constant() == NULL && value->as_Local() == NULL) {
-      walk(value);
-      assert(value->operand()->is_valid(), "must be evaluated now");
-    }
-  }
+  assert(state != NULL, "state must be defined");
+
   ValueStack* s = state;
-  int bci = x->bci();
   for_each_state(s) {
+    if (s->kind() == ValueStack::EmptyExceptionState) {
+      assert(s->stack_size() == 0 && s->locals_size() == 0 && (s->locks_size() == 0 || s->locks_size() == 1), "state must be empty");
+      continue;
+    }
+
+    int index;
+    Value value;
+    for_each_stack_value(s, index, value) {
+      assert(value->subst() == value, "missed substitution");
+      if (!value->is_pinned() && value->as_Constant() == NULL && value->as_Local() == NULL) {
+        walk(value);
+        assert(value->operand()->is_valid(), "must be evaluated now");
+      }
+    }
+
+    int bci = s->bci();
     IRScope* scope = s->scope();
     ciMethod* method = scope->method();
 
     MethodLivenessResult liveness = method->liveness_at_bci(bci);
     if (bci == SynchronizationEntryBCI) {
@@ -426,19 +434,18 @@
           // NULL out this local so that linear scan can assume that all non-NULL values are live.
           s->invalidate_local(index);
         }
       }
     }
-    bci = scope->caller_bci();
   }
 
-  return new CodeEmitInfo(x->bci(), state, ignore_xhandler ? NULL : x->exception_handlers());
+  return new CodeEmitInfo(state, ignore_xhandler ? NULL : x->exception_handlers());
 }
 
 
 CodeEmitInfo* LIRGenerator::state_for(Instruction* x) {
-  return state_for(x, x->lock_stack());
+  return state_for(x, x->exception_state());
 }
 
 
 void LIRGenerator::jobject2reg_with_patching(LIR_Opr r, ciObject* obj, CodeEmitInfo* info) {
   if (!obj->is_loaded() || PatchALot) {
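
These first two hunks restructure state_for(): previously the caller threaded a single bci through the inline chain by hand (bci = scope->caller_bci()) and passed it separately to CodeEmitInfo; now every ValueStack in the chain records its own bci, each non-empty state has its stack values walked, and EmptyExceptionState frames are skipped outright. The one-argument overload likewise switches from x->lock_stack() to x->exception_state(). Below is a minimal standalone sketch of the per-state bci idea; the types are mocks invented for illustration, not the HotSpot classes:

// Sketch only: mock types standing in for HotSpot's ValueStack, showing why
// state_for() no longer needs a separate running 'bci' variable once each
// state in the inlining chain records its own bci.
#include <cstdio>

struct MockValueStack {
  int             _bci;    // bci now stored per frame
  MockValueStack* _caller; // next state up the inlining chain

  int bci() const                      { return _bci; }
  MockValueStack* caller_state() const { return _caller; }
};

int main() {
  MockValueStack outer = { 17, nullptr };  // caller frame at bci 17
  MockValueStack inner = {  4, &outer };   // inlined frame at bci 4

  // New style: read the bci out of each state while walking the chain,
  // instead of threading 'bci = scope->caller_bci()' alongside the walk.
  for (MockValueStack* s = &inner; s != nullptr; s = s->caller_state()) {
    printf("frame bci = %d\n", s->bci());
  }
  return 0;
}

The real CodeEmitInfo constructor correspondingly drops its bci parameter, as the second hunk shows.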
@@ -898,20 +905,16 @@
 
       ValueStack* sux_state = sux->state();
       Value sux_value;
       int index;
 
+      assert(cur_state->scope() == sux_state->scope(), "not matching");
+      assert(cur_state->locals_size() == sux_state->locals_size(), "not matching");
+      assert(cur_state->stack_size() == sux_state->stack_size(), "not matching");
+
       for_each_stack_value(sux_state, index, sux_value) {
         move_to_phi(&resolver, cur_state->stack_at(index), sux_value);
       }
-
-      // Inlining may cause the local state not to match up, so walk up
-      // the caller state until we get to the same scope as the
-      // successor and then start processing from there.
-      while (cur_state->scope() != sux_state->scope()) {
-        cur_state = cur_state->caller_state();
-        assert(cur_state != NULL, "scopes don't match up");
-      }
 
       for_each_local_value(sux_state, index, sux_value) {
         move_to_phi(&resolver, cur_state->local_at(index), sux_value);
       }
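
Here the successor's phi moves no longer walk up cur_state->caller_state() to find a matching scope; with states now kept consistent across inlining, the hunk simply asserts that the current and successor states already agree in scope, locals size, and stack size.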
@@ -1021,14 +1024,14 @@
 }
 
 
 // Code for a constant is generated lazily unless the constant is frequently used and can't be inlined.
 void LIRGenerator::do_Constant(Constant* x) {
-  if (x->state() != NULL) {
+  if (x->state_before() != NULL) {
     // Any constant with a ValueStack requires patching so emit the patch here
     LIR_Opr reg = rlock_result(x);
-    CodeEmitInfo* info = state_for(x, x->state());
+    CodeEmitInfo* info = state_for(x, x->state_before());
     __ oop2reg_patch(NULL, reg, info);
   } else if (x->use_count() > 1 && !can_inline_as_constant(x)) {
     if (!x->is_pinned()) {
       // unpinned constants are handled specially so that they can be
       // put into registers when they are used multiple times within a
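
do_Constant() switches from state() to state_before() for the patching case, matching the state-accessor renaming applied throughout this changeset.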
@@ -1100,11 +1103,11 @@
   LIR_Opr result = rlock_result(x);
 
   // need to perform the null check on the rcvr
   CodeEmitInfo* info = NULL;
   if (x->needs_null_check()) {
-    info = state_for(x, x->state()->copy_locks());
+    info = state_for(x);
   }
   __ move(new LIR_Address(rcvr.result(), oopDesc::klass_offset_in_bytes(), T_OBJECT), result, info);
   __ move(new LIR_Address(result, Klass::java_mirror_offset_in_bytes() +
                           klassOopDesc::klass_part_offset_in_bytes(), T_OBJECT), result);
 }
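
In this receiver null check (the getClass() intrinsic path), the hand-built x->state()->copy_locks() state is replaced by the plain state_for(x) overload, which after this change supplies x->exception_state().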
@@ -1479,11 +1482,11 @@
     assert(x->explicit_null_check() == NULL, "can't fold null check into patching field access");
     info = state_for(x, x->state_before());
   } else if (x->needs_null_check()) {
     NullCheck* nc = x->explicit_null_check();
     if (nc == NULL) {
-      info = state_for(x, x->lock_stack());
+      info = state_for(x);
     } else {
       info = state_for(nc);
     }
   }
 
@@ -1507,14 +1510,16 @@
     value.load_for_store(field_type);
   }
 
   set_no_result(x);
 
+#ifndef PRODUCT
   if (PrintNotLoaded && needs_patching) {
     tty->print_cr("   ###class not loaded at store_%s bci %d",
-                  x->is_static() ?  "static" : "field", x->bci());
+                  x->is_static() ?  "static" : "field", x->printable_bci());
   }
+#endif
 
   if (x->needs_null_check() &&
       (needs_patching ||
        MacroAssembler::needs_explicit_null_check(x->offset()))) {
     // emit an explicit null check because the offset is too large
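
In the field-store path (and the matching field-load hunk below), the PrintNotLoaded diagnostic is now wrapped in #ifndef PRODUCT so it is compiled out of product builds, and it reports x->printable_bci() instead of x->bci(), presumably because instructions no longer carry a bci of their own after this refactoring.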
@@ -1573,24 +1578,26 @@
     assert(x->explicit_null_check() == NULL, "can't fold null check into patching field access");
     info = state_for(x, x->state_before());
   } else if (x->needs_null_check()) {
     NullCheck* nc = x->explicit_null_check();
     if (nc == NULL) {
-      info = state_for(x, x->lock_stack());
+      info = state_for(x);
     } else {
       info = state_for(nc);
     }
   }
 
   LIRItem object(x->obj(), this);
 
   object.load_item();
 
+#ifndef PRODUCT
   if (PrintNotLoaded && needs_patching) {
     tty->print_cr("   ###class not loaded at load_%s bci %d",
-                  x->is_static() ?  "static" : "field", x->bci());
+                  x->is_static() ?  "static" : "field", x->printable_bci());
   }
+#endif
 
   if (x->needs_null_check() &&
       (needs_patching ||
        MacroAssembler::needs_explicit_null_check(x->offset()))) {
     // emit an explicit null check because the offset is too large
@@ -1779,11 +1786,11 @@
   // Use a copy of the CodeEmitInfo because debug information is
   // different for null_check and throw.
   if (GenerateCompilerNullChecks &&
       (x->exception()->as_NewInstance() == NULL && x->exception()->as_ExceptionObject() == NULL)) {
     // if the exception object wasn't created using new then it might be null.
-    __ null_check(exception_opr, new CodeEmitInfo(info, true));
+    __ null_check(exception_opr, new CodeEmitInfo(info, x->state()->copy(ValueStack::ExceptionState, x->state()->bci())));
   }
 
   if (compilation()->env()->jvmti_can_post_on_exceptions()) {
     // we need to go through the exception lookup path to get JVMTI
     // notification done
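
For an explicit throw, the null check's CodeEmitInfo previously took a bare boolean; it now receives its own state, a copy of x->state() tagged ValueStack::ExceptionState at the same bci, so the null_check and the throw keep distinct debug information as the comment above demands. A small mock sketch of that copy-with-kind pattern (invented types, not the HotSpot API):

// Sketch only: a mock 'state' that can be copied under a different kind,
// mirroring x->state()->copy(ValueStack::ExceptionState, x->state()->bci()).
#include <cstdio>

struct MockState {
  enum Kind { StateBefore, ExceptionState };
  Kind _kind;
  int  _bci;

  MockState(Kind k, int b) : _kind(k), _bci(b) {}
  int bci() const { return _bci; }

  // An independent copy tagged with the requested kind and bci, so two
  // consumers (null_check vs. throw) can hold different debug info.
  MockState* copy(Kind k, int b) const { return new MockState(k, b); }
};

int main() {
  MockState throw_state(MockState::StateBefore, 42);
  MockState* nc_state = throw_state.copy(MockState::ExceptionState, throw_state.bci());
  printf("throw bci = %d, null-check copy bci = %d\n", throw_state.bci(), nc_state->bci());
  delete nc_state;
  return 0;
}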
@@ -2125,11 +2132,10 @@
   move_to_phi(x->state());
 
   int lo_key = x->lo_key();
   int hi_key = x->hi_key();
   int len = x->length();
-  CodeEmitInfo* info = state_for(x, x->state());
   LIR_Opr value = tag.result();
   if (UseTableRanges) {
     do_SwitchRanges(create_lookup_ranges(x), value, x->default_sux());
   } else {
     for (int i = 0; i < len; i++) {
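
The switch lowering drops a state_for() call whose resulting CodeEmitInfo is not referenced anywhere in the surrounding code shown here; it appears to have been dead.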
@@ -2184,11 +2190,11 @@
   if (x->is_safepoint()) {
     ValueStack* state = x->state_before() ? x->state_before() : x->state();
 
     // increment backedge counter if needed
     CodeEmitInfo* info = state_for(x, state);
-    increment_backedge_counter(info, info->bci());
+    increment_backedge_counter(info, info->stack()->bci());
     CodeEmitInfo* safepoint_info = state_for(x, state);
     __ safepoint(safepoint_poll_register(), safepoint_info);
   }
 
   // Gotos can be folded Ifs, handle this case.
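
Backedge counting now pulls the bci from the info's ValueStack (info->stack()->bci()) rather than from the CodeEmitInfo itself, consistent with bci having moved into ValueStack.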
@@ -2291,21 +2297,21 @@
 
     if (method()->is_synchronized() && GenerateSynchronizationCode) {
       LIR_Opr lock = new_register(T_INT);
       __ load_stack_address_monitor(0, lock);
 
-      CodeEmitInfo* info = new CodeEmitInfo(SynchronizationEntryBCI, scope()->start()->state(), NULL);
+      CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL);
       CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);
 
       // receiver is guaranteed non-NULL so don't need CodeEmitInfo
       __ lock_object(syncTempOpr(), obj, lock, new_register(T_OBJECT), slow_path, NULL);
     }
   }
 
   // increment invocation counters if needed
   if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting.
-    CodeEmitInfo* info = new CodeEmitInfo(InvocationEntryBci, scope()->start()->state(), NULL);
+    CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state(), NULL);
     increment_invocation_counter(info);
   }
 
   // all blocks with a successor must end with an unconditional jump
   // to the successor even if they are consecutive
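
Both entry-sequence CodeEmitInfo constructions lose the explicit leading bci argument: the monitor-enter info copies the method's start state as a ValueStack::StateBefore tagged with SynchronizationEntryBCI, and the invocation-counter info passes the start state through unchanged.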
@@ -2461,11 +2467,11 @@
         __ call_virtual(target, receiver, result_register, vtable_offset, arg_list, info);
       }
       break;
     case Bytecodes::_invokedynamic: {
       ciBytecodeStream bcs(x->scope()->method());
-      bcs.force_bci(x->bci());
+      bcs.force_bci(x->state()->bci());
       assert(bcs.cur_bc() == Bytecodes::_invokedynamic, "wrong stream");
       ciCPCache* cpcache = bcs.get_cpcache();
 
       // Get CallSite offset from constant pool cache pointer.
       int index = bcs.get_method_index();
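
For invokedynamic, the bytecode stream is repositioned with x->state()->bci() in place of x->bci(), in line with the other bci() removals in this changeset.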