src/cpu/ppc/vm/nativeInst_ppc.cpp

/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * Copyright 2012, 2014 SAP AG. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "memory/resourceArea.hpp"
#include "nativeInst_ppc.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/handles.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/ostream.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif

// We use an illtrap for marking a method as not_entrant or zombie iff !UseSIGTRAP
// Work around a C++ compiler bug which changes 'this'
bool NativeInstruction::is_sigill_zombie_not_entrant_at(address addr) {
  assert(!UseSIGTRAP, "precondition");
  if (*(int*)addr != 0 /*illtrap*/) return false;
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (cb == NULL || !cb->is_nmethod()) return false;
  nmethod *nm = (nmethod *)cb;
  // This method is not_entrant or zombie iff the illtrap instruction is
  // located at the verified entry point.
  return nm->verified_entry_point() == addr;
}

#ifdef ASSERT
void NativeInstruction::verify() {
  // Make sure code pattern is actually an instruction address.
  address addr = addr_at(0);
  if (addr == 0 || ((intptr_t)addr & 3) != 0) {
    fatal("not an instruction address");
  }
}
#endif // ASSERT

// Extract call destination from a NativeCall. The call might use a trampoline stub.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // Else we get assertion if nmethod is zombie.
  assert(cb && cb->is_nmethod(), "sanity");
  nmethod *nm = (nmethod *)cb;
  if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
    // Yes we do, so get the destination from the trampoline stub.
    const address trampoline_stub_addr = destination;
    destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
  }

  return destination;
}

// Similar to replace_mt_safe, but just changes the destination. The
// important thing is that free-running threads are able to execute this
// call instruction at all times. Thus, the displacement field must be
// instruction-word-aligned.
//
// Used in the runtime linkage of calls; see class CompiledIC.
//
// Add parameter assert_lock to switch off assertion
// during code generation, where no patching lock is needed.
void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  assert(!assert_lock ||
         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
         "concurrent code patching");

  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  address addr_call = addr_at(0);
  assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");

  CodeBuffer cb(addr_call, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);

  // Patch the call.
  if (!ReoptimizeCallSequences || !a->is_within_range_of_b(dest, addr_call)) {
    address trampoline_stub_addr = get_trampoline();

    // We did not find a trampoline stub because the current codeblob
    // does not provide this information. The branch will be patched
    // later during a final fixup, when all necessary information is
    // available.
    if (trampoline_stub_addr == 0)
      return;

    // Patch the constant in the call's trampoline stub.
    NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
    dest = trampoline_stub_addr;
  }

  OrderAccess::release();
  a->bl(dest);

  ICache::ppc64_flush_icache_bytes(addr_call, code_size);
}

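// Locate the trampoline stub used by this call site, or return NULL if none
// can be found (e.g. because no relocation information is available yet).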
address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != NULL, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return NULL;

  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->content_contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the codeBlob is not an nmethod, this is because we get here from the
  // CodeBlob constructor, which is called within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}

#ifdef ASSERT
void NativeCall::verify() {
  address addr = addr_at(0);

  if (!NativeCall::is_call_at(addr)) {
    tty->print_cr("not a NativeCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
    fatal(err_msg("not a NativeCall at " PTR_FORMAT, p2i(addr)));
  }
}
#endif // ASSERT

#ifdef ASSERT
void NativeFarCall::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeFarCall::is_far_call_at(addr)) {
    tty->print_cr("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal(err_msg("not a NativeFarCall at " PTR_FORMAT, p2i(addr)));
  }
}
#endif // ASSERT

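// The next instruction address depends on which load pattern was emitted:
// a (shorter) load from the method's TOC or a plain load_const sequence.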
address NativeMovConstReg::next_instruction_address() const {
#ifdef ASSERT
  CodeBlob* nm = CodeCache::find_blob(instruction_address());
  assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
#endif

  if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
    return addr_at(load_const_from_method_toc_instruction_size);
  } else {
    return addr_at(load_const_instruction_size);
  }
}

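// Read the constant this instruction materializes. Three patterns are
// recognized: a plain load_const, a set_narrow_oop (whose narrow oop is
// decoded to a full oop), and a load from the method's TOC (ctable).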
intptr_t NativeMovConstReg::data() const {
  address   addr = addr_at(0);

  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  }

  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    narrowOop no = (narrowOop)MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    return cast_from_oop<intptr_t>(oopDesc::decode_heap_oop(no));
  } else {
    assert(MacroAssembler::is_load_const_from_method_toc_at(addr), "must be load_const_from_pool");

    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  }
}

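// Patch the constant in place: either directly in the instruction stream or
// in the TOC/constant-table slot the code loads from. Relocation entries are
// not updated here (see set_data() below); returns the address behind the
// patched location.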
address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr         = instruction_address();
  address next_address = NULL;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != NULL && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != NULL &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    if (MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, cb->content_begin()) !=
        (address)data) {
      const int invalidated_range =
        MacroAssembler::patch_calculate_address_from_global_toc_at(addr, cb->content_begin(),
                                                                   (address)data);
      const address start = invalidated_range < 0 ? addr + invalidated_range : addr;
      // FIXME:
      const int range = invalidated_range < 0 ? 4 - invalidated_range : 8;
      ICache::ppc64_flush_icache_bytes(start, range);
    }
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5 instruction load_const code sequence.
    if (MacroAssembler::get_const(addr) != (long)data) {
      // This is not mt safe, ok in methods like CodeBuffer::copy_code().
      MacroAssembler::patch_const(addr, (long)data);
      ICache::ppc64_flush_icache_bytes(addr, load_const_instruction_size);
    }
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(* (int*) addr)) {
    // A single branch-and-link instruction.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer cb(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&cb);
    a->bl((address) data);
    ICache::ppc64_flush_icache_bytes(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}

void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = NULL;
    Metadata** metadata_addr = NULL;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == NULL) {
          oop_addr = r->oop_addr();
          *oop_addr = cast_to_oop(data);
        } else {
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == NULL) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}

void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = NULL */) {
  address   addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  if (MacroAssembler::get_narrow_oop(addr, cb->content_begin()) == (long)data) return;
  const int invalidated_range =
    MacroAssembler::patch_set_narrow_oop(addr, cb->content_begin(), (long)data);
  const address start = invalidated_range < 0 ? addr + invalidated_range : addr;
  // FIXME:
  const int range = invalidated_range < 0 ? 4 - invalidated_range : 8;
  ICache::ppc64_flush_icache_bytes(start, range);
}

// Do not use an assertion here. Let clients decide whether they only
// want this when assertions are enabled.
#ifdef ASSERT
void NativeMovConstReg::verify() {
  address   addr = addr_at(0);
  if (! MacroAssembler::is_load_const_at(addr) &&
      ! MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // find_nmethod() asserts if nmethod is zombie.
    if (! (cb != NULL && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
        ! (cb != NULL && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
        ! MacroAssembler::is_bl(*((int*) addr))) {
      tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
      // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
      fatal(err_msg("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr)));
    }
  }
}
#endif // ASSERT

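// Make the nmethod non-entrant by overwriting the instruction at its verified
// entry point: either a direct branch to the handle_wrong_method stub or a
// trap/illtrap that is resolved by the signal handler.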
void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    if (TrapBasedNotEntrantChecks) {
      // We use a special trap for marking a method as not_entrant or zombie.
      a->trap_zombie_not_entrant();
    } else {
      // We use an illtrap for marking a method as not_entrant or zombie.
      a->illtrap();
    }
  }
  ICache::ppc64_flush_icache_bytes(verified_entry, code_size);
}

#ifdef ASSERT
void NativeJump::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeJump::is_jump_at(addr)) {
    tty->print_cr("not a NativeJump at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal(err_msg("not a NativeJump at " PTR_FORMAT, p2i(addr)));
  }
}
#endif // ASSERT

//-------------------------------------------------------------------

// Call trampoline stubs.
//
// Layout and instructions of a call trampoline stub:
//    0:  load the TOC (part 1)
//    4:  load the TOC (part 2)
//    8:  load the call target from the constant pool (part 1)
//  [12:  load the call target from the constant pool (part 2, optional)]
//   ..:  branch via CTR
//

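// Returns the address of the instruction loading the call target from the
// constant pool (offset 8 in the layout above). Its large-offset displacement
// identifies the TOC slot that holds the destination.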
address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(2 * BytesPerInstWord);
  assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
         "must be a ld with large offset (from the constant pool)");

  return instruction_addr;
}

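// Read the current call target from the TOC slot referenced by this stub. The
// owning nmethod may be passed in to avoid an unsafe CodeCache lookup.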
address NativeCallTrampolineStub::destination(nmethod *nm) const {
  CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));
  address ctable = cb->content_begin();

  return *(address*)(ctable + destination_toc_offset());
}

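// Offset (within the constant table) of the slot holding the call target,
// extracted from the displacement of the stub's ld instruction.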
int NativeCallTrampolineStub::destination_toc_offset() const {
  return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
}

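// Redirect the stub by storing the new target into its TOC slot; the stub's
// instructions themselves are not modified.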
void NativeCallTrampolineStub::set_destination(address new_destination) {
  CodeBlob* cb = CodeCache::find_blob(addr_at(0));
  address ctable = cb->content_begin();

  *(address*)(ctable + destination_toc_offset()) = new_destination;
}
