hotspot/src/cpu/ppc/vm/nativeInst_ppc.cpp
changeset 35085 839c8ba29724
parent 33105 294e48b4f704
child 35594 cc13089c6327
equal deleted inserted replaced
35084:5b34a4ae0f58 35085:839c8ba29724
     1 /*
     1 /*
     2  * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
     2  * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
     3  * Copyright 2012, 2014 SAP AG. All rights reserved.
     3  * Copyright 2012, 2015 SAP AG. All rights reserved.
     4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     5  *
     5  *
     6  * This code is free software; you can redistribute it and/or modify it
     6  * This code is free software; you can redistribute it and/or modify it
     7  * under the terms of the GNU General Public License version 2 only, as
     7  * under the terms of the GNU General Public License version 2 only, as
     8  * published by the Free Software Foundation.
     8  * published by the Free Software Foundation.
// Resolve the ultimate target of this call instruction.
// The raw branch may go to a trampoline stub (used when the callee is out of
// reach of a pc-relative bxx); in that case the real destination is read from
// the stub's constant-pool slot.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  // Trampoline stubs are located behind the main code.
  if (destination > addr) {
    // Filter out recursive method invocation (call to verified/unverified entry point).
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // Else we get assertion if nmethod is zombie.
    assert(cb && cb->is_nmethod(), "sanity");
    nmethod *nm = (nmethod *)cb;
    if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
      // Yes we do, so get the destination from the trampoline stub.
      const address trampoline_stub_addr = destination;
      destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
    }
  }

  return destination;
}
    79 
    83 
   265         oop_Relocation *r = iter.oop_reloc();
   269         oop_Relocation *r = iter.oop_reloc();
   266         if (oop_addr == NULL) {
   270         if (oop_addr == NULL) {
   267           oop_addr = r->oop_addr();
   271           oop_addr = r->oop_addr();
   268           *oop_addr = cast_to_oop(data);
   272           *oop_addr = cast_to_oop(data);
   269         } else {
   273         } else {
   270           assert(oop_addr == r->oop_addr(), "must be only one set-oop here") ;
   274           assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
   271         }
   275         }
   272       }
   276       }
   273       if (iter.type() == relocInfo::metadata_type) {
   277       if (iter.type() == relocInfo::metadata_type) {
   274         metadata_Relocation *r = iter.metadata_reloc();
   278         metadata_Relocation *r = iter.metadata_reloc();
   275         if (metadata_addr == NULL) {
   279         if (metadata_addr == NULL) {
   349     fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
   353     fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
   350   }
   354   }
   351 }
   355 }
   352 #endif // ASSERT
   356 #endif // ASSERT
   353 
   357 
       
   358 
       
// Emit an unconditional branch to 'entry' at 'code_pos' and flush the
// instruction cache so the new instruction becomes visible to execution.
void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
  // Buffer sized for a single instruction word (+1 so the assembler has room to emit it).
  CodeBuffer cb(code_pos, BytesPerInstWord + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
  a->b(entry);
  ICache::ppc64_flush_icache_bytes(code_pos, NativeGeneralJump::instruction_size);
}
       
   365 
       
// MT-safe patching of a jmp instruction.
// Overwrites exactly one instruction word at 'instr_addr' with the first word
// of 'code_buffer'; a single aligned word store is atomic on PPC, so
// concurrently executing threads see either the old or the new instruction.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
  // Bytes beyond offset NativeGeneralJump::instruction_size are copied by caller.

  // Finally patch out the jump.
  volatile juint *jump_addr = (volatile juint*)instr_addr;
  // Release not needed because caller uses invalidate_range after copying the remaining bytes.
  //OrderAccess::release_store(jump_addr, *((juint*)code_buffer));
  *jump_addr = *((juint*)code_buffer); // atomically store code over branch instruction
  ICache::ppc64_flush_icache_bytes(instr_addr, NativeGeneralJump::instruction_size);
}
       
   377 
       
   378 
   354 //-------------------------------------------------------------------
   379 //-------------------------------------------------------------------
   355 
   380 
   356 // Call trampoline stubs.
   381 // Call trampoline stubs.
   357 //
   382 //
   358 // Layout and instructions of a call trampoline stub:
   383 // Layout and instructions of a call trampoline stub:
   362 //  [12:  load the call target from the constant pool (part 2, optional)]
   387 //  [12:  load the call target from the constant pool (part 2, optional)]
   363 //   ..:  branch via CTR
   388 //   ..:  branch via CTR
   364 //
   389 //
   365 
   390 
// Locate the 'ld' instruction inside this trampoline stub that loads the call
// target from the constant pool. The load sits either at offset 0 or, when the
// stub begins with a toc-setup sequence, at offset 2 instructions; probe
// offset 0 first and fall back to offset 2.
address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(0 * BytesPerInstWord);
  if (!MacroAssembler::is_ld_largeoffset(instruction_addr)) {
    instruction_addr = addr_at(2 * BytesPerInstWord);
    assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
           "must be a ld with large offset (from the constant pool)");
  }
  return instruction_addr;
}
   373 
   400 
   374 address NativeCallTrampolineStub::destination(nmethod *nm) const {
   401 address NativeCallTrampolineStub::destination(nmethod *nm) const {
   375   CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));
   402   CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));