// Wed, 03 Jul 2019 20:42:37 +0800
// Merge
1 /*
2 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 /*
26 * This file has been modified by Loongson Technology in 2015. These
27 * modifications are Copyright (c) 2015 Loongson Technology, and are made
28 * available on the same license terms set forth above.
29 */
31 #include "precompiled.hpp"
32 #include "asm/macroAssembler.hpp"
33 #include "asm/macroAssembler.inline.hpp"
34 #include "compiler/disassembler.hpp"
35 #include "interpreter/bytecodeHistogram.hpp"
36 #include "interpreter/bytecodeInterpreter.hpp"
37 #include "interpreter/interpreter.hpp"
38 #include "interpreter/interpreterRuntime.hpp"
39 #include "interpreter/templateTable.hpp"
40 #include "memory/allocation.inline.hpp"
41 #include "memory/resourceArea.hpp"
42 #include "oops/arrayOop.hpp"
43 #include "oops/methodData.hpp"
44 #include "oops/method.hpp"
45 #include "oops/oop.inline.hpp"
46 #include "prims/forte.hpp"
47 #include "prims/jvmtiExport.hpp"
48 #include "prims/methodHandles.hpp"
49 #include "runtime/handles.inline.hpp"
50 #include "runtime/sharedRuntime.hpp"
51 #include "runtime/stubRoutines.hpp"
52 #include "runtime/timer.hpp"
54 # define __ _masm->
57 //------------------------------------------------------------------------------------------------------------------------
58 // Implementation of InterpreterCodelet
60 void InterpreterCodelet::initialize(const char* description, Bytecodes::Code bytecode) {
61 _description = description;
62 _bytecode = bytecode;
63 }
// No codelet-specific invariants to check; present to satisfy the Stub
// verification interface.
void InterpreterCodelet::verify() {
}
// Print a one-line summary of this codelet on st: description, bytecode
// (if it implements one), and the [begin, end] code address range. With
// -XX:+PrintInterpreter the generated code is also disassembled.
void InterpreterCodelet::print_on(outputStream* st) const {
  ttyLocker ttyl;  // keep this codelet's multi-line output together

  if (PrintInterpreter) {
    st->cr();
    st->print_cr("----------------------------------------------------------------------");
  }

  if (description() != NULL) st->print("%s ", description());
  if (bytecode() >= 0 ) st->print("%d %s ", bytecode(), Bytecodes::name(bytecode()));
  st->print_cr("[" INTPTR_FORMAT ", " INTPTR_FORMAT "] %d bytes",
  p2i(code_begin()), p2i(code_end()), code_size());

  if (PrintInterpreter) {
    st->cr();
#ifndef MIPS
    // DEBUG_ONLY/NOT_DEBUG pick the debug code-strings argument in debug
    // builds and an empty CodeStrings() otherwise.
    Disassembler::decode(code_begin(), code_end(), st, DEBUG_ONLY(_strings) NOT_DEBUG(CodeStrings()));
#else
    // disassembler_mips.cpp does not implement the overload
    // decode(address start, address end, outputStream* st, CodeStrings c),
    // so use the three-argument form. (Loongson note, 2013/02/25.)
    Disassembler::decode(code_begin(), code_end(), st);
#endif
  }
}
94 //------------------------------------------------------------------------------------------------------------------------
95 // Implementation of platform independent aspects of Interpreter
97 void AbstractInterpreter::initialize() {
98 if (_code != NULL) return;
100 // make sure 'imported' classes are initialized
101 if (CountBytecodes || TraceBytecodes || StopInterpreterAt) BytecodeCounter::reset();
102 if (PrintBytecodeHistogram) BytecodeHistogram::reset();
103 if (PrintBytecodePairHistogram) BytecodePairHistogram::reset();
105 InvocationCounter::reinitialize(DelayCompilationDuringStartup);
107 }
109 void AbstractInterpreter::print() {
110 tty->cr();
111 tty->print_cr("----------------------------------------------------------------------");
112 tty->print_cr("Interpreter");
113 tty->cr();
114 tty->print_cr("code size = %6dK bytes", (int)_code->used_space()/1024);
115 tty->print_cr("total space = %6dK bytes", (int)_code->total_space()/1024);
116 tty->print_cr("wasted space = %6dK bytes", (int)_code->available_space()/1024);
117 tty->cr();
118 tty->print_cr("# of codelets = %6d" , _code->number_of_stubs());
119 tty->print_cr("avg codelet size = %6d bytes", _code->used_space() / _code->number_of_stubs());
120 tty->cr();
121 _code->print();
122 tty->print_cr("----------------------------------------------------------------------");
123 tty->cr();
124 }
127 void interpreter_init() {
128 Interpreter::initialize();
129 #ifndef PRODUCT
130 if (TraceBytecodes) BytecodeTracer::set_closure(BytecodeTracer::std_closure());
131 #endif // PRODUCT
132 // need to hit every safepoint in order to call zapping routine
133 // register the interpreter
134 Forte::register_stub(
135 "Interpreter",
136 AbstractInterpreter::code()->code_start(),
137 AbstractInterpreter::code()->code_end()
138 );
140 // notify JVMTI profiler
141 if (JvmtiExport::should_post_dynamic_code_generated()) {
142 JvmtiExport::post_dynamic_code_generated("Interpreter",
143 AbstractInterpreter::code()->code_start(),
144 AbstractInterpreter::code()->code_end());
145 }
146 }
148 //------------------------------------------------------------------------------------------------------------------------
149 // Implementation of interpreter
151 StubQueue* AbstractInterpreter::_code = NULL;
152 bool AbstractInterpreter::_notice_safepoints = false;
153 address AbstractInterpreter::_rethrow_exception_entry = NULL;
155 address AbstractInterpreter::_native_entry_begin = NULL;
156 address AbstractInterpreter::_native_entry_end = NULL;
157 address AbstractInterpreter::_slow_signature_handler;
158 address AbstractInterpreter::_entry_table [AbstractInterpreter::number_of_method_entries];
159 address AbstractInterpreter::_native_abi_to_tosca [AbstractInterpreter::number_of_result_handlers];
161 //------------------------------------------------------------------------------------------------------------------------
162 // Generation of complete interpreter
164 AbstractInterpreterGenerator::AbstractInterpreterGenerator(StubQueue* _code) {
165 _masm = NULL;
166 }
// Result types for which native-ABI-to-tosca result handlers are generated
// (one entry per handler, Interpreter::number_of_result_handlers total).
// NOTE(review): the order presumably mirrors the result-handler indexing
// used by the platform interpreter generators — confirm before reordering.
static const BasicType types[Interpreter::number_of_result_handlers] = {
  T_BOOLEAN,
  T_CHAR ,
  T_BYTE ,
  T_SHORT ,
  T_INT ,
  T_LONG ,
  T_VOID ,
  T_FLOAT ,
  T_DOUBLE ,
  T_OBJECT
};
// Generate the platform-independent interpreter pieces this generator is
// responsible for. Currently only the slow signature handler.
void AbstractInterpreterGenerator::generate_all() {
  // The CodeletMark scope registers the emitted code as a named
  // InterpreterCodelet; it must enclose the generation call.
  { CodeletMark cm(_masm, "slow signature handler");
  Interpreter::_slow_signature_handler = generate_slow_signature_handler();
  }

}
191 //------------------------------------------------------------------------------------------------------------------------
192 // Entry points
// Map a method to the MethodKind that selects its interpreter entry point.
// The order of the tests below is significant — see the inline comments.
AbstractInterpreter::MethodKind AbstractInterpreter::method_kind(methodHandle m) {
  // Abstract method?
  if (m->is_abstract()) return abstract;

  // Method handle primitive?
  if (m->is_method_handle_intrinsic()) {
    vmIntrinsics::ID id = m->intrinsic_id();
    assert(MethodHandles::is_signature_polymorphic(id), "must match an intrinsic");
    // MethodKind and the signature-polymorphic intrinsic IDs are parallel
    // enum ranges, so the kind is computed by offsetting.
    MethodKind kind = (MethodKind)( method_handle_invoke_FIRST +
    ((int)id - vmIntrinsics::FIRST_MH_SIG_POLY) );
    assert(kind <= method_handle_invoke_LAST, "parallel enum ranges");
    return kind;
  }

#ifndef CC_INTERP
  if (UseCRC32Intrinsics && m->is_native()) {
    // Use optimized stub code for CRC32 native methods.
    switch (m->intrinsic_id()) {
      case vmIntrinsics::_updateCRC32 : return java_util_zip_CRC32_update;
      case vmIntrinsics::_updateBytesCRC32 : return java_util_zip_CRC32_updateBytes;
      case vmIntrinsics::_updateByteBufferCRC32 : return java_util_zip_CRC32_updateByteBuffer;
    }
  }
#endif

  // Native method?
  // Note: This test must come _before_ the test for intrinsic
  // methods. See also comments below.
  if (m->is_native()) {
    assert(!m->is_method_handle_intrinsic(), "overlapping bits here, watch out");
    return m->is_synchronized() ? native_synchronized : native;
  }

  // Synchronized?
  if (m->is_synchronized()) {
    return zerolocals_synchronized;
  }

  if (RegisterFinalizersAtInit && m->code_size() == 1 &&
      m->intrinsic_id() == vmIntrinsics::_Object_init) {
    // We need to execute the special return bytecode to check for
    // finalizer registration so create a normal frame.
    return zerolocals;
  }

  // Empty method?
  if (m->is_empty_method()) {
    return empty;
  }

  // Special intrinsic method?
  // Note: This test must come _after_ the test for native methods,
  // otherwise we will run into problems with JDK 1.2, see also
  // AbstractInterpreterGenerator::generate_method_entry() for details.
  switch (m->intrinsic_id()) {
    case vmIntrinsics::_dsin : return java_lang_math_sin ;
    case vmIntrinsics::_dcos : return java_lang_math_cos ;
    case vmIntrinsics::_dtan : return java_lang_math_tan ;
    case vmIntrinsics::_dabs : return java_lang_math_abs ;
    case vmIntrinsics::_dsqrt : return java_lang_math_sqrt ;
    case vmIntrinsics::_dlog : return java_lang_math_log ;
    case vmIntrinsics::_dlog10: return java_lang_math_log10;
    case vmIntrinsics::_dpow : return java_lang_math_pow ;
    case vmIntrinsics::_dexp : return java_lang_math_exp ;

    case vmIntrinsics::_Reference_get:
      return java_lang_ref_reference_get;
  }

  // Accessor method?
  if (m->is_accessor()) {
    assert(m->size_of_parameters() == 1, "fast code for accessors assumes parameter size = 1");
    return accessor;
  }

  // Note: for now: zero locals for all non-empty methods
  return zerolocals;
}
// Install a late-generated interpreter entry point. Only method-handle
// invoke kinds may be patched this way, and only while the slot still
// holds the abstract-method (AME) entry used as a placeholder by
// initialize_method_handle_entries().
void AbstractInterpreter::set_entry_for_kind(AbstractInterpreter::MethodKind kind, address entry) {
  assert(kind >= method_handle_invoke_FIRST &&
  kind <= method_handle_invoke_LAST, "late initialization only for MH entry points");
  assert(_entry_table[kind] == _entry_table[abstract], "previous value must be AME entry");
  _entry_table[kind] = entry;
}
283 // Return true if the interpreter can prove that the given bytecode has
284 // not yet been executed (in Java semantics, not in actual operation).
285 bool AbstractInterpreter::is_not_reached(methodHandle method, int bci) {
286 Bytecodes::Code code = method()->code_at(bci);
288 if (!Bytecodes::must_rewrite(code)) {
289 // might have been reached
290 return false;
291 }
293 // the bytecode might not be rewritten if the method is an accessor, etc.
294 address ientry = method->interpreter_entry();
295 if (ientry != entry_for_kind(AbstractInterpreter::zerolocals) &&
296 ientry != entry_for_kind(AbstractInterpreter::zerolocals_synchronized))
297 return false; // interpreter does not run this method!
299 // otherwise, we can be sure this bytecode has never been executed
300 return true;
301 }
304 #ifndef PRODUCT
305 void AbstractInterpreter::print_method_kind(MethodKind kind) {
306 switch (kind) {
307 case zerolocals : tty->print("zerolocals" ); break;
308 case zerolocals_synchronized: tty->print("zerolocals_synchronized"); break;
309 case native : tty->print("native" ); break;
310 case native_synchronized : tty->print("native_synchronized" ); break;
311 case empty : tty->print("empty" ); break;
312 case accessor : tty->print("accessor" ); break;
313 case abstract : tty->print("abstract" ); break;
314 case java_lang_math_sin : tty->print("java_lang_math_sin" ); break;
315 case java_lang_math_cos : tty->print("java_lang_math_cos" ); break;
316 case java_lang_math_tan : tty->print("java_lang_math_tan" ); break;
317 case java_lang_math_abs : tty->print("java_lang_math_abs" ); break;
318 case java_lang_math_sqrt : tty->print("java_lang_math_sqrt" ); break;
319 case java_lang_math_log : tty->print("java_lang_math_log" ); break;
320 case java_lang_math_log10 : tty->print("java_lang_math_log10" ); break;
321 case java_util_zip_CRC32_update : tty->print("java_util_zip_CRC32_update"); break;
322 case java_util_zip_CRC32_updateBytes : tty->print("java_util_zip_CRC32_updateBytes"); break;
323 case java_util_zip_CRC32_updateByteBuffer : tty->print("java_util_zip_CRC32_updateByteBuffer"); break;
324 default:
325 if (kind >= method_handle_invoke_FIRST &&
326 kind <= method_handle_invoke_LAST) {
327 const char* kind_name = vmIntrinsics::name_at(method_handle_intrinsic(kind));
328 if (kind_name[0] == '_') kind_name = &kind_name[1]; // '_invokeExact' => 'invokeExact'
329 tty->print("method_handle_%s", kind_name);
330 break;
331 }
332 ShouldNotReachHere();
333 break;
334 }
335 }
336 #endif // PRODUCT
339 //------------------------------------------------------------------------------------------------------------------------
340 // Deoptimization support
/**
 * If a deoptimization happens, this function returns the interpreter entry
 * point at which execution continues AFTER the bytecode at bcp (which has
 * already completed in compiled code). For invokes it also makes sure the
 * callee parameter size is recorded in the (possibly uninitialized)
 * constant-pool cache entry of non-top frames.
 */
address AbstractInterpreter::deopt_continue_after_entry(Method* method, address bcp, int callee_parameters, bool is_top_frame) {
  assert(method->contains(bcp), "just checkin'");

  // Get the original and rewritten bytecode.
  Bytecodes::Code code = Bytecodes::java_code_at(method, bcp);
  assert(!Interpreter::bytecode_should_reexecute(code), "should not reexecute");

  const int bci = method->bci_from(bcp);

  // compute continuation length
  const int length = Bytecodes::length_at(method, bcp);

  // compute result type
  BasicType type = T_ILLEGAL;

  switch (code) {
    case Bytecodes::_invokevirtual :
    case Bytecodes::_invokespecial :
    case Bytecodes::_invokestatic :
    case Bytecodes::_invokeinterface: {
      Thread *thread = Thread::current();
      ResourceMark rm(thread);
      methodHandle mh(thread, method);
      type = Bytecode_invoke(mh, bci).result_type();
      // since the cache entry might not be initialized:
      // (NOT needed for the old calling convention)
      if (!is_top_frame) {
        // u2 cp-cache index follows the invoke opcode
        int index = Bytes::get_native_u2(bcp+1);
        method->constants()->cache()->entry_at(index)->set_parameter_size(callee_parameters);
      }
      break;
    }

    case Bytecodes::_invokedynamic: {
      Thread *thread = Thread::current();
      ResourceMark rm(thread);
      methodHandle mh(thread, method);
      type = Bytecode_invoke(mh, bci).result_type();
      // since the cache entry might not be initialized:
      // (NOT needed for the old calling convention)
      if (!is_top_frame) {
        // invokedynamic carries a u4 index into the invokedynamic cp cache
        int index = Bytes::get_native_u4(bcp+1);
        method->constants()->invokedynamic_cp_cache_entry_at(index)->set_parameter_size(callee_parameters);
      }
      break;
    }

    case Bytecodes::_ldc :
    case Bytecodes::_ldc_w : // fall through
    case Bytecodes::_ldc2_w:
    {
      // Result type of a constant load depends on the constant loaded.
      Thread *thread = Thread::current();
      ResourceMark rm(thread);
      methodHandle mh(thread, method);
      type = Bytecode_loadconstant(mh, bci).result_type();
      break;
    }

    default:
      type = Bytecodes::result_type(code);
      break;
  }

  // return entry point for computed continuation state & bytecode length
  return
    is_top_frame
    ? Interpreter::deopt_entry (as_TosState(type), length)
    : Interpreter::return_entry(as_TosState(type), length, code);
}
// If deoptimization happens, this function returns the point where the
// interpreter reexecutes the bytecode at bcp.
// Note: Bytecodes::_athrow is a special case in that it does not return
// Interpreter::deopt_entry(vtos, 0) like the others.
address AbstractInterpreter::deopt_reexecute_entry(Method* method, address bcp) {
  assert(method->contains(bcp), "just checkin'");
  Bytecodes::Code code = Bytecodes::java_code_at(method, bcp);
#ifdef COMPILER1
  // C1 reexecutes athrow by rethrowing the pending exception instead.
  if(code == Bytecodes::_athrow ) {
    return Interpreter::rethrow_exception_entry();
  }
#endif /* COMPILER1 */
  return Interpreter::deopt_entry(vtos, 0);
}
// If deoptimization happens, the interpreter should reexecute these bytecodes.
// This function mainly helps the compilers to set up the reexecute bit.
// Everything not listed here continues AFTER the bytecode instead (see
// deopt_continue_after_entry above).
bool AbstractInterpreter::bytecode_should_reexecute(Bytecodes::Code code) {
  switch (code) {
    case Bytecodes::_lookupswitch:
    case Bytecodes::_tableswitch:
    case Bytecodes::_fast_binaryswitch:
    case Bytecodes::_fast_linearswitch:
    // recompute conditional expression folded into _if<cond>
    case Bytecodes::_lcmp :
    case Bytecodes::_fcmpl :
    case Bytecodes::_fcmpg :
    case Bytecodes::_dcmpl :
    case Bytecodes::_dcmpg :
    case Bytecodes::_ifnull :
    case Bytecodes::_ifnonnull :
    case Bytecodes::_goto :
    case Bytecodes::_goto_w :
    case Bytecodes::_ifeq :
    case Bytecodes::_ifne :
    case Bytecodes::_iflt :
    case Bytecodes::_ifge :
    case Bytecodes::_ifgt :
    case Bytecodes::_ifle :
    case Bytecodes::_if_icmpeq :
    case Bytecodes::_if_icmpne :
    case Bytecodes::_if_icmplt :
    case Bytecodes::_if_icmpge :
    case Bytecodes::_if_icmpgt :
    case Bytecodes::_if_icmple :
    case Bytecodes::_if_acmpeq :
    case Bytecodes::_if_acmpne :
    // special cases
    case Bytecodes::_getfield :
    case Bytecodes::_putfield :
    case Bytecodes::_getstatic :
    case Bytecodes::_putstatic :
    case Bytecodes::_aastore :
#ifdef COMPILER1
    // special case of reexecution (matches deopt_reexecute_entry above)
    case Bytecodes::_athrow :
#endif
      return true;

    default:
      return false;
  }
}
479 void AbstractInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
480 // Quick & dirty stack overflow checking: bang the stack & handle trap.
481 // Note that we do the banging after the frame is setup, since the exception
482 // handling code expects to find a valid interpreter frame on the stack.
483 // Doing the banging earlier fails if the caller frame is not an interpreter
484 // frame.
485 // (Also, the exception throwing code expects to unlock any synchronized
486 // method receiever, so do the banging after locking the receiver.)
488 // Bang each page in the shadow zone. We can't assume it's been done for
489 // an interpreter frame with greater than a page of locals, so each page
490 // needs to be checked. Only true for non-native.
491 if (UseStackBanging) {
492 const int start_page = native_call ? StackShadowPages : 1;
493 const int page_size = os::vm_page_size();
494 for (int pages = start_page; pages <= StackShadowPages ; pages++) {
495 __ bang_stack_with_offset(pages*page_size);
496 }
497 }
498 }
500 void AbstractInterpreterGenerator::initialize_method_handle_entries() {
501 // method handle entry kinds are generated later in MethodHandlesAdapterGenerator::generate:
502 for (int i = Interpreter::method_handle_invoke_FIRST; i <= Interpreter::method_handle_invoke_LAST; i++) {
503 Interpreter::MethodKind kind = (Interpreter::MethodKind) i;
504 Interpreter::_entry_table[kind] = Interpreter::_entry_table[Interpreter::abstract];
505 }
506 }