hotspot/src/cpu/ppc/vm/nativeInst_ppc.cpp
       
/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * Copyright 2012, 2013 SAP AG. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "memory/resourceArea.hpp"
#include "nativeInst_ppc.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/handles.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/ostream.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif

// We use an illtrap for marking a method as not_entrant or zombie iff !UseSIGTRAP
// Work around a C++ compiler bug which changes 'this'
bool NativeInstruction::is_sigill_zombie_not_entrant_at(address addr) {
  assert(!UseSIGTRAP, "precondition");
  if (*(int*)addr != 0 /*illtrap*/) return false;
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (cb == NULL || !cb->is_nmethod()) return false;
  nmethod *nm = (nmethod *)cb;
  // This method is not_entrant or zombie iff the illtrap instruction is
  // located at the verified entry point.
  return nm->verified_entry_point() == addr;
}
       
#ifdef ASSERT
void NativeInstruction::verify() {
  // Make sure code pattern is actually an instruction address.
  address addr = addr_at(0);
  if (addr == 0 || ((intptr_t)addr & 3) != 0) {
    fatal("not an instruction address");
  }
}
#endif // ASSERT

// Extract call destination from a NativeCall. The call might use a trampoline stub.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // Else we get assertion if nmethod is zombie.
  assert(cb && cb->is_nmethod(), "sanity");
  nmethod *nm = (nmethod *)cb;
  if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
    // Yes we do, so get the destination from the trampoline stub.
    const address trampoline_stub_addr = destination;
    destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination();
  }

  return destination;
}

// Similar to replace_mt_safe, but just changes the destination. The
// important thing is that free-running threads are able to execute this
// call instruction at all times. Thus, the displacement field must be
// instruction-word-aligned.
//
// Used in the runtime linkage of calls; see class CompiledIC.
//
// Add parameter assert_lock to switch off assertion
// during code generation, where no patching lock is needed.
void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  assert(!assert_lock ||
         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
         "concurrent code patching");

  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  address addr_call = addr_at(0);
  assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");

  CodeBuffer cb(addr_call, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);

  // Patch the call.
  if (ReoptimizeCallSequences &&
      a->is_within_range_of_b(dest, addr_call)) {
    a->bl(dest);
  } else {
    address trampoline_stub_addr = get_trampoline();

    // We did not find a trampoline stub because the current codeblob
    // does not provide this information. The branch will be patched
    // later during a final fixup, when all necessary information is
    // available.
    if (trampoline_stub_addr == 0)
      return;

    // Patch the constant in the call's trampoline stub.
    NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);

    a->bl(trampoline_stub_addr);
  }
  ICache::invalidate_range(addr_call, code_size);
}

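// Find the trampoline stub used by this call, if any: first try to decode
// the branch target directly, then fall back to the trampoline_stub
// relocations of the containing blob. Returns NULL when no relocation
// information is available (yet), e.g. during CodeBuffer expansion.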
       
address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != NULL, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return NULL;

  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->content_contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the code blob is not an nmethod, we get here from the CodeBlob
  // constructor, which is called from within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}
       
#ifdef ASSERT
void NativeCall::verify() {
  address addr = addr_at(0);

  if (!NativeCall::is_call_at(addr)) {
    tty->print_cr("not a NativeCall at " PTR_FORMAT, addr);
    // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
    fatal(err_msg("not a NativeCall at " PTR_FORMAT, addr));
  }
}
#endif // ASSERT

#ifdef ASSERT
void NativeFarCall::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeFarCall::is_far_call_at(addr)) {
    tty->print_cr("not a NativeFarCall at " PTR_FORMAT, addr);
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal(err_msg("not a NativeFarCall at " PTR_FORMAT, addr));
  }
}
#endif // ASSERT

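// Address of the instruction following this load-constant sequence. The
// length of the sequence depends on whether the constant was loaded from
// the method's TOC or materialized inline.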
       
address NativeMovConstReg::next_instruction_address() const {
#ifdef ASSERT
  CodeBlob* nm = CodeCache::find_blob(instruction_address());
  assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
#endif

  if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
    return addr_at(load_const_from_method_toc_instruction_size);
  } else {
    return addr_at(load_const_instruction_size);
  }
}

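// Read back the value this instruction sequence materializes: an inline
// load_const, a set_narrow_oop (decoded to the full oop), or a load from
// the method's TOC (constant table).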
       
intptr_t NativeMovConstReg::data() const {
  address   addr = addr_at(0);
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);

  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  } else if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    narrowOop no = (narrowOop)MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    return (intptr_t)oopDesc::decode_heap_oop(no);
  } else {
    assert(MacroAssembler::is_load_const_from_method_toc_at(addr), "must be load_const_from_pool");

    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  }
}

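// Patch the value encoded at this location (TOC entry, global-TOC
// calculation, inline load_const sequence, or a single bl) without
// updating any relocation records. Returns an address past the patched
// instruction(s); set_data() uses it to bound its relocation scan.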
       
address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr         = instruction_address();
  address next_address = NULL;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != NULL && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != NULL &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    const int invalidated_range =
      MacroAssembler::patch_calculate_address_from_global_toc_at(addr, cb->content_begin(),
                                                                 (address)data);
    const address start = invalidated_range < 0 ? addr + invalidated_range : addr;
    // FIXME:
    const int range = invalidated_range < 0 ? 4 - invalidated_range : 8;
    ICache::invalidate_range(start, range);
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5-instruction load_const code sequence.
    // This is not MT-safe, which is ok in methods like CodeBuffer::copy_code().
    MacroAssembler::patch_const(addr, (long)data);
    ICache::invalidate_range(addr, load_const_instruction_size);
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(* (int*) addr)) {
    // A single branch-and-link instruction.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer cb(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&cb);
    a->bl((address) data);
    ICache::invalidate_range(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}

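// Patch the value and keep the nmethod's relocation records (oop and
// metadata cells) consistent with it.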
       
void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = NULL;
    Metadata** metadata_addr = NULL;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == NULL) {
          oop_addr = r->oop_addr();
          *oop_addr = (oop)data;
        } else {
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == NULL) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}

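// Patch the compressed (narrow) oop encoded in this set_narrow_oop
// sequence and flush the modified instructions from the ICache.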
       
void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = NULL */) {
  address   addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  const int invalidated_range =
    MacroAssembler::patch_set_narrow_oop(addr, cb->content_begin(), (long)data);
  const address start = invalidated_range < 0 ? addr + invalidated_range : addr;
  // FIXME:
  const int range = invalidated_range < 0 ? 4 - invalidated_range : 8;
  ICache::invalidate_range(start, range);
}

// Do not use an assertion here. Let clients decide whether they only
// want this when assertions are enabled.
#ifdef ASSERT
void NativeMovConstReg::verify() {
  address   addr = addr_at(0);
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // find_nmethod() asserts if nmethod is zombie.
  if (! MacroAssembler::is_load_const_at(addr) &&
      ! MacroAssembler::is_load_const_from_method_toc_at(addr) &&
      ! (cb != NULL && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
      ! (cb != NULL && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
      ! MacroAssembler::is_bl(*((int*) addr))) {
    tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, addr);
    // TODO: PPC port Disassembler::decode(addr, 20, 20, tty);
    fatal(err_msg("not a NativeMovConstReg at " PTR_FORMAT, addr));
  }
}
#endif // ASSERT

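// Patch the verified entry point of an nmethod: either branch directly to
// dest when it is in reach (never in debug builds), or plant a trap/illtrap
// that the signal handler resolves to dest (used when making a method
// not_entrant or zombie).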
       
void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    if (TrapBasedNotEntrantChecks) {
      // We use a special trap for marking a method as not_entrant or zombie.
      a->trap_zombie_not_entrant();
    } else {
      // We use an illtrap for marking a method as not_entrant or zombie.
      a->illtrap();
    }
  }
  ICache::invalidate_range(verified_entry, code_size);
}

#ifdef ASSERT
void NativeJump::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeJump::is_jump_at(addr)) {
    tty->print_cr("not a NativeJump at " PTR_FORMAT, addr);
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal(err_msg("not a NativeJump at " PTR_FORMAT, addr));
  }
}
#endif // ASSERT

//-------------------------------------------------------------------

// Call trampoline stubs.
//
// Layout and instructions of a call trampoline stub:
//    0:  load the TOC (part 1)
//    4:  load the TOC (part 2)
//    8:  load the call target from the constant pool (part 1)
//  [12:  load the call target from the constant pool (part 2, optional)]
//   ..:  branch via CTR
//
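// For illustration only (the exact registers and instruction forms are
// chosen by the macro assembler when the stub is emitted), the stub
// corresponds roughly to:
//   <load TOC base into a temp register>      (two instructions)
//   ld     Rtmp, toc_offset(Rtoc)             (load the call target)
//   mtctr  Rtmp
//   bctr                                      (branch via CTR)
//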
       
address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(2 * BytesPerInstWord);
  assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
         "must be a ld with large offset (from the constant pool)");

  return instruction_addr;
}

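// Read the current call target from the constant-table slot addressed by
// this stub; destination_toc_offset() extracts that slot's offset from the
// ld instruction.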
       
address NativeCallTrampolineStub::destination() const {
  CodeBlob* cb = CodeCache::find_blob(addr_at(0));
  address ctable = cb->content_begin();

  return *(address*)(ctable + destination_toc_offset());
}

int NativeCallTrampolineStub::destination_toc_offset() const {
  return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
}

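// Redirect the stub by rewriting its 64-bit constant-table slot. Only data
// is modified, not instructions, so no ICache flush is needed here.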
       
void NativeCallTrampolineStub::set_destination(address new_destination) {
  CodeBlob* cb = CodeCache::find_blob(addr_at(0));
  address ctable = cb->content_begin();

  *(address*)(ctable + destination_toc_offset()) = new_destination;
}