src/cpu/x86/vm/compiledIC_x86.cpp

author       aoqi
date         Tue, 08 Aug 2017 15:57:29 +0800
changeset    6876   710a3c8b516e
parent 6680  78bbf4d43a14
parent 0     f90c822e73f8
child 7535   7ae4e26cb1e0
permissions  -rw-r--r--

merge
/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}
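
// Note: queue_for_release() defers freeing the CompiledICHolder instead of deleting
// it here, since other threads may still be executing through this (possibly stale)
// call site.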

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}
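
// The two relocation shapes handled above:
//   virtual_call_type     - the reloc's cached_value() points at the mov-immediate
//                           that holds the inline-cache value for the call, so
//                           _value wraps that instruction for later inspection and
//                           patching.
//   opt_virtual_call_type - a statically bound (optimized) call with no cached
//                           value; only the call destination itself gets patched,
//                           so _value stays NULL.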

// ----------------------------------------------------------------------------

#define __ _masm.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // movq rbx, 0
  // jmp -5 # to self

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base =
  __ start_a_stub(to_interp_stub_size()*2);
  if (base == NULL) return;  // CodeBuffer::expand failed.
  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
  // Static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rbx, (Metadata*) NULL);  // Method is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ jump(RuntimeAddress(__ pc()));

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __
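
// Shape of the stub emitted above, before and after it is fixed up by
// set_to_interpreted() below:
//
//   as emitted (clean)         after set_to_interpreted(callee, entry)
//   --------------------       ---------------------------------------
//   movq rbx, 0                movq rbx, <Method* of callee>
//   jmp  <self>                jmp  <entry, typically a c2i adapter>
//
// The zero Method* plus jump-to-self is the pattern the reloc/nativeinst/ic code
// treats as unresolved (see the comments in the stub emitter above).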

int CompiledStaticCall::to_interp_stub_size() {
  return NOT_LP64(10)    // movl; jmp
         LP64_ONLY(15);  // movq (1+1+8); jmp (1+4)
}
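
// Byte accounting for the sizes above (standard x86 encodings):
//   LP64:  movabs rbx, imm64 = REX.W (1) + opcode (1) + imm64 (8) = 10 bytes
//          jmp rel32         = opcode (1) + rel32 (4)             =  5 bytes  -> 15
//   !LP64: mov ebx, imm32    = opcode (1) + imm32 (4)             =  5 bytes
//          jmp rel32         = opcode (1) + rel32 (4)             =  5 bytes  -> 10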

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 4; // 3 in emit_to_interp_stub + 1 in emit_call
}
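
// The three stub relocations appear to be: the static_stub_Relocation recorded
// explicitly, the metadata relocation produced by mov_metadata(), and the
// runtime-call relocation produced by jump() in emit_to_interp_stub(); the fourth
// entry covers the call instruction itself in the matching emit_call.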

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}
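
// The patch order above matters: the stub's Method* and jump target are written
// first, and only then is the call redirected to the stub via
// set_destination_mt_safe(). A thread racing through the call therefore sees either
// the old destination or a fully initialized stub; the asserts check that the stub
// was still in its clean (0 / -1) state, or already held the expected values.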

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}
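
// Writing (0, -1) restores the unresolved sentinel state for the stub: it is the
// same "clean" pattern that the asserts in set_to_interpreted() accept before a
// new Method* and entry are installed.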

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT
