/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2015, 2016, Loongson Technology. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  // Decode the native call at the relocation address and check whether its
  // destination is an ICHolder entry; only then is there a holder to release.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    // The cached value is materialized by a mov-constant instruction; its
    // immediate data is the CompiledICHolder* to hand back to the IC buffer.
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

// Returns true if the call site currently dispatches through a
// CompiledICHolder (i.e. a megamorphic/transitional inline-cache state).
bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

// Construct a CompiledIC wrapper for the inline-cache call at 'call' inside
// nmethod 'nm'.  Classifies the site from its relocation record:
//  - virtual_call_type:     a true inline cache; _value points at the
//                           mov-constant instruction holding the cached value.
//  - opt_virtual_call_type: an optimized (statically bound) virtual call;
//                           there is no cached value (_value == NULL).
CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

#define __ _masm.
// Emit the out-of-line stub used to dispatch a compiled static call to the
// interpreter.  The stub consists of:
//   1. a mov-constant (li48) into S3 that will later be patched with the
//      callee methodOop/Method*, and
//   2. an absolute jump (li48 AT / jr AT) whose target will be patched with
//      the interpreter entry.
// Both start out with "unresolved" marker values (0 and (address)-1) that the
// relocation / nativeInst / IC machinery recognizes.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {

  address mark = cbuf.insts_mark();  // get mark within main instrs section

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base =
  __ start_a_stub(Compile::MAX_stubs_size);
  if (base == NULL) return;  // CodeBuffer::expand failed

  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark), 0);

  /* 2012/10/29 Jin: Rmethod contains methodOop, it should be relocated for GC */
/*
  int oop_index = __ oop_recorder()->allocate_index(NULL);
  RelocationHolder rspec = oop_Relocation::spec(oop_index);
  __ relocate(rspec);
*/

  // static stub relocation also tags the methodOop in the code-stream.
  __ li48(S3, (long)0);
  // This is recognized as unresolved by relocs/nativeInst/ic code

  __ relocate(relocInfo::runtime_call_type);

  cbuf.set_insts_mark();
  // (address)-1 is the "clean"/unresolved jump-target marker; see the
  // matching checks in set_to_interpreted() and set_stub_to_clean().
  address call_pc = (address)-1;
  __ li48(AT, (long)call_pc);
  __ jr(AT);
  __ nop();
  // Keep the stub 16-byte aligned; to_interp_stub_size() rounds to match.
  __ align(16);
  __ end_a_stub();
  // Update current stubs pointer and restore code_end.
}
#undef __

// Size in bytes of the to-interpreter stub emitted above:
// one li48 sequence (4 instructions of 4 bytes) plus the jump/call part,
// rounded up to the 16-byte alignment used by emit_to_interp_stub().
int CompiledStaticCall::to_interp_stub_size() {
  int size = 4 * 4 + NativeCall::instruction_size; // sizeof(li48) + NativeCall::instruction_size
  return round_to(size, 16);
}

// Relocation entries for call stub, compiled java to interpreter.
// Number of relocation entries needed for the compiled-java-to-interpreter
// call stub.  NOTE(review): the constant 16 appears chosen generously for
// the stub emitted by emit_to_interp_stub() — confirm against the MIPS
// relocation records actually produced.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 16;
}

// Bind this static call site to interpreted code: patch the stub's
// method-holder mov-constant with 'callee' and its jump with 'entry',
// then redirect the main call instruction to the stub (MT-safely).
void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
#ifndef MIPS64
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
#else
  NativeGeneralJump* jump          = nativeGeneralJump_at(method_holder->next_instruction_address());
#endif

  // The stub must either still be clean (0 / (address)-1, the markers laid
  // down by emit_to_interp_stub) or already hold exactly the values we are
  // about to write; anything else means a concurrent unsafe modification.
  assert(method_holder->data()    == 0           || method_holder->data()    == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

// Reset the stub to its clean (unresolved) state: zero method holder and
// the (address)-1 jump-destination marker.  Caller must hold the
// CompiledIC_lock or be at a safepoint.
void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
#ifndef MIPS64
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
#else
  NativeGeneralJump* jump          = nativeGeneralJump_at(method_holder->next_instruction_address());
#endif
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

// Debug-only consistency check of a compiled static call site and its stub.
void CompiledStaticCall::verify() {
  // Verify call (CompiledStaticCall derives from NativeCall).
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object (the values themselves are unused here).
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
#ifndef MIPS64
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
#else
  NativeGeneralJump* jump          = nativeGeneralJump_at(method_holder->next_instruction_address());
#endif

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT