/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    // The cached value (an IC holder) lives in the set-register instruction
    // recorded by the relocation; hand it to the InlineCacheBuffer, which
    // frees it once it is safe to do so.
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

// Returns true if the call at this site currently targets an IC-holder entry.
bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

// Constructs a CompiledIC view over the NativeCall at 'call' inside 'nm'.
// Classifies the site as optimized (opt_virtual_call) or not (virtual_call)
// from its relocation record; for the non-optimized case, also locates the
// instruction holding the cached value.
CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    // Plain virtual call: the relocation records where the cached value
    // (Klass* or CompiledICHolder*) is materialized.
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    // Optimized (statically bindable) virtual call: no cached value.
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

#define __ _masm.
// Emits the compiled-to-interpreter stub for a static call. The stub body is
// a placeholder (empty metadata constant + jump to -1) that is patched later
// by set_to_interpreted() when the call is bound to an interpreter entry.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  MacroAssembler _masm(&cbuf);

  // NOTE(review): stub space is requested at twice to_interp_stub_size() —
  // presumably headroom on top of an already-conservative estimate; confirm.
  address base =
  __ start_a_stub(to_interp_stub_size()*2);
  if (base == NULL) return;  // CodeBuffer::expand failed.

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark));

  // Materialize a NULL Method* into the inline-cache register; patched by
  // set_to_interpreted() with the real callee.
  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));

  __ set_inst_mark();
  // Jump target -1 marks the stub as "clean"; patched with the interpreter
  // entry later. G3 is the scratch register for the long jump.
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);

  // Fill the SPARC branch delay slot.
  __ delayed()->nop();

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
#else
  // Only C2 emits these stubs in this build configuration.
  ShouldNotReachHere();
#endif
}
#undef __

// Conservative upper bound (in bytes) on the size of the stub emitted above.
int CompiledStaticCall::to_interp_stub_size() {
  // This doesn't need to be accurate but it must be larger or equal to
  // the real size of the stub.
  return (NativeMovConstReg::instruction_size +  // sethi/setlo;
          NativeJump::instruction_size +          // sethi; jmp; nop
          (TraceJumps ? 20 * BytesPerInstWord : 0) );
}

// Relocation entries for call stub, compiled java to interpreter.
// Number of relocation entries needed for the call stub.
int CompiledStaticCall::reloc_to_interp_stub() {
  // NOTE(review): the breakdown comment ("4 + 1") does not add up to the
  // returned value of 10 — presumably 10 is a safe over-estimate; confirm.
  return 10; // 4 in emit_java_to_interp + 1 in Java_Static_Call
}

// Binds this static call to the interpreter: patches the stub's Method*
// constant and jump destination, then redirects the call to the stub.
// The patch order (data, then jump target, then call destination) is what
// keeps concurrent executors safe; do not reorder.
void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // The stub must either still be clean (0 / -1 as emitted) or already hold
  // exactly the values we are about to install; anything else means another
  // thread raced us here.
  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

// Resets the stub back to its freshly-emitted "clean" state (0 / -1).
// Caller must hold CompiledIC_lock or be at a safepoint.
void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

// Sanity-checks the call instruction, its stub, and the overall IC state.
void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT