src/cpu/mips/vm/methodHandles_mips.cpp

/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2015, 2016, Loongson Technology. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "memory/allocation.inline.hpp"
#include "prims/methodHandles.hpp"

#define __ _masm->

#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#define STOP(error) stop(error)
#else
#define BLOCK_COMMENT(str) __ block_comment(str)
#define STOP(error) block_comment(error); __ stop(error)
#endif

#define BIND(label) bind(label); BLOCK_COMMENT(#label ":")

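// MIPS note: each conditional branch or jump-register instruction emitted in
// this file is followed by an explicit nop() that fills the branch delay slot.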
void MethodHandles::load_klass_from_Class(MacroAssembler* _masm, Register klass_reg) {
  if (VerifyMethodHandles)
    verify_klass(_masm, klass_reg, SystemDictionary::WK_KLASS_ENUM_NAME(java_lang_Class),
                 "MH argument is a Class");
  __ ld(klass_reg, Address(klass_reg, java_lang_Class::klass_offset_in_bytes()));
}

#ifdef ASSERT
static int check_nonzero(const char* xname, int x) {
  assert(x != 0, err_msg("%s should be nonzero", xname));
  return x;
}
#define NONZERO(x) check_nonzero(#x, x)
#else //ASSERT
#define NONZERO(x) (x)
#endif //ASSERT

#ifdef ASSERT
void MethodHandles::verify_klass(MacroAssembler* _masm,
                                 Register obj, SystemDictionary::WKID klass_id,
                                 const char* error_message) {
/*
  Klass** klass_addr = SystemDictionary::well_known_klass_addr(klass_id);
  KlassHandle klass = SystemDictionary::well_known_klass(klass_id);
  Register temp = S0;
// Register temp2 = noreg;
// LP64_ONLY(temp2 = S1);   // used by MacroAssembler::cmpptr
  Label L_ok, L_bad;
  BLOCK_COMMENT("verify_klass {");
  __ verify_oop(obj);
//  __ testptr(obj, obj);
//  __ jcc(Assembler::zero, L_bad);
  __ beq(obj, R0, L_bad);
  __ nop();
  __ push(temp); //if (temp2 != noreg)  __ push(temp2);
#define UNPUSH { __ pop(temp); }
  __ load_klass(temp, obj);
//  __ cmpptr(temp, ExternalAddress((address) klass_addr));
//  __ jcc(Assembler::equal, L_ok);
  __ li(AT, (long)&klass_addr);
  __ ld(AT, AT, 0);
  __ beq(temp, AT, L_ok);
  __ nop();
  intptr_t super_check_offset = klass->super_check_offset();
  __ ld(temp, Address(temp, super_check_offset));
//  __ cmpptr(temp, ExternalAddress((address) klass_addr));
//  __ jcc(Assembler::equal, L_ok);
  __ li(AT, (long)&klass_addr);
  __ ld(AT, AT, 0);
  __ beq(AT, temp, L_ok);
  __ nop();
  UNPUSH;
  __ bind(L_bad);
  __ STOP(error_message);
  __ BIND(L_ok);
  UNPUSH;
  BLOCK_COMMENT("} verify_klass");
*/
}

void MethodHandles::verify_ref_kind(MacroAssembler* _masm, int ref_kind, Register member_reg, Register temp) {
  Label L;
  BLOCK_COMMENT("verify_ref_kind {");
  __ lw(temp, Address(member_reg, NONZERO(java_lang_invoke_MemberName::flags_offset_in_bytes())));
//  __ shrl(temp, java_lang_invoke_MemberName::MN_REFERENCE_KIND_SHIFT);
  __ sra(temp, temp, java_lang_invoke_MemberName::MN_REFERENCE_KIND_SHIFT);
//  __ andl(temp, java_lang_invoke_MemberName::MN_REFERENCE_KIND_MASK);
  __ move(AT, java_lang_invoke_MemberName::MN_REFERENCE_KIND_MASK);
  __ andr(temp, temp, AT);
//  __ cmpl(temp, ref_kind);
//  __ jcc(Assembler::equal, L);
  __ move(AT, ref_kind);
  __ beq(temp, AT, L);
  __ nop();
  { char* buf = NEW_C_HEAP_ARRAY(char, 100, mtInternal);
    jio_snprintf(buf, 100, "verify_ref_kind expected %x", ref_kind);
    if (ref_kind == JVM_REF_invokeVirtual ||
        ref_kind == JVM_REF_invokeSpecial)
      // could do this for all ref_kinds, but would explode assembly code size
      trace_method_handle(_masm, buf);
    __ STOP(buf);
  }
  BLOCK_COMMENT("} verify_ref_kind");
  __ bind(L);
}

#endif //ASSERT

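// Jump through the Method* in 'method' to its interpreted or compiled entry
// point; a NULL method branches to the AbstractMethodError stub instead.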
void MethodHandles::jump_from_method_handle(MacroAssembler* _masm, Register method, Register temp,
                                            bool for_compiler_entry) {
  assert(method == Rmethod, "interpreter calling convention");

  Label L_no_such_method;
  __ beq(method, R0, L_no_such_method);
  __ nop();

  __ verify_method_ptr(method);

  if (!for_compiler_entry && JvmtiExport::can_post_interpreter_events()) {
    Label run_compiled_code;
    // JVMTI events, such as single-stepping, are implemented partly by avoiding running
    // compiled code in threads for which the event is enabled. Check here for
    // interp_only_mode if these events CAN be enabled.
#ifdef _LP64
    Register rthread = TREG;
#else
    Register rthread = temp;
    __ get_thread(rthread);
#endif
    // interp_only is an int, on little endian it is sufficient to test the byte only
    // Is a cmpl faster?
    // __ cmpb(Address(rthread, JavaThread::interp_only_mode_offset()), 0);
    __ lbu(AT, rthread, in_bytes(JavaThread::interp_only_mode_offset()));
    // __ jccb(Assembler::zero, run_compiled_code);
    __ beq(AT, R0, run_compiled_code);
    __ nop();
    // __ jmp(Address(method, Method::interpreter_entry_offset()));
    __ ld(T9, method, in_bytes(Method::interpreter_entry_offset()));
    __ jr(T9);
    __ nop();
    __ BIND(run_compiled_code);
  }

  const ByteSize entry_offset = for_compiler_entry ? Method::from_compiled_offset() :
                                                     Method::from_interpreted_offset();
  // __ jmp(Address(method, entry_offset));
  __ ld(T9, method, in_bytes(entry_offset));
  __ jr(T9);
  __ nop();

  __ bind(L_no_such_method);
  // __ jump(RuntimeAddress(StubRoutines::throw_AbstractMethodError_entry()));
  address wrong_method = StubRoutines::throw_AbstractMethodError_entry();
  __ jmp(wrong_method, relocInfo::runtime_call_type);
  __ nop();
}

void MethodHandles::jump_to_lambda_form(MacroAssembler* _masm,
                                        Register recv, Register method_temp,
                                        Register temp2,
                                        bool for_compiler_entry) {
  BLOCK_COMMENT("jump_to_lambda_form {");
  // This is the initial entry point of a lazy method handle.
  // After type checking, it picks up the invoker from the LambdaForm.
  assert_different_registers(recv, method_temp, temp2);
  assert(recv != noreg, "required register");
  assert(method_temp == Rmethod, "required register for loading method");

  //NOT_PRODUCT({ FlagSetting fs(TraceMethodHandles, true); trace_method_handle(_masm, "LZMH"); });

  // Load the invoker, as MH -> MH.form -> LF.vmentry
  __ verify_oop(recv);
  __ load_heap_oop(method_temp, Address(recv, NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())));
  __ verify_oop(method_temp);
  __ load_heap_oop(method_temp, Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())));
  __ verify_oop(method_temp);
  // the following assumes that a Method* is normally compressed in the vmtarget field:
  // __ movptr(method_temp, Address(method_temp, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes())));
  __ ld(method_temp, Address(method_temp, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes())));

  if (VerifyMethodHandles && !for_compiler_entry) {
    // make sure recv is already on stack
    // __ movptr(temp2, Address(method_temp, Method::const_offset()));
    __ ld(temp2, Address(method_temp, Method::const_offset()));
    __ load_sized_value(temp2,
                        Address(temp2, ConstMethod::size_of_parameters_offset()),
                        sizeof(u2), false);
    // assert(sizeof(u2) == sizeof(Method::_size_of_parameters), "");
    Label L;
    // __ cmpptr(recv, __ argument_address(temp2, -1));
    // __ jcc(Assembler::equal, L);
    Address recv_addr = __ argument_address(temp2, -1);
    __ ld(AT, recv_addr);
    __ beq(recv, AT, L);
    __ nop();

    // __ movptr(rax, __ argument_address(temp2, -1));
    recv_addr = __ argument_address(temp2, -1);
    __ ld(V0, recv_addr);
    __ STOP("receiver not on stack");
    __ BIND(L);
  }

  jump_from_method_handle(_masm, method_temp, temp2, for_compiler_entry);
  BLOCK_COMMENT("} jump_to_lambda_form");
}


// Code generation
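// Generate the interpreter entry point for a signature-polymorphic intrinsic
// (invokeBasic or one of the linkTo* linkers): locate the argument list on the
// interpreter stack, pick up the MH receiver or the trailing MemberName, and
// hand off to generate_method_handle_dispatch.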
address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm,
                                                                vmIntrinsics::ID iid) {
  const bool not_for_compiler_entry = false;  // this is the interpreter entry
  assert(is_signature_polymorphic(iid), "expected invoke iid");
  if (iid == vmIntrinsics::_invokeGeneric ||
      iid == vmIntrinsics::_compiledLambdaForm) {
    // Perhaps surprisingly, the symbolic references visible to Java are not directly used.
    // They are linked to Java-generated adapters via MethodHandleNatives.linkMethod.
    // They all allow an appendix argument.
    // __ hlt();           // empty stubs make SG sick
    __ stop("empty stubs make SG sick");
    return NULL;
  }

  // rsi/r13: sender SP (must preserve; see prepare_to_jump_from_interpreted)
  // rbx: Method*
  // rdx: argument locator (parameter slot count, added to rsp)
  // rcx: used as temp to hold mh or receiver
  // rax, rdi: garbage temps, blown away
  Register rdx_argp   = T9;        // argument list ptr, live on error paths
  // Register rax_temp   = rax;
  Register rcx_mh     = S7;        // MH receiver; dies quickly and is recycled
  Register rbx_method = Rmethod;   // eventual target of this invocation

  // here's where control starts out:
  __ align(CodeEntryAlignment);
  address entry_point = __ pc();

  if (VerifyMethodHandles) {
    Label L;
    BLOCK_COMMENT("verify_intrinsic_id {");
    // __ cmpb(Address(rbx_method, Method::intrinsic_id_offset_in_bytes()), (int) iid);
    // __ jcc(Assembler::equal, L);
    __ lbu(AT, rbx_method, Method::intrinsic_id_offset_in_bytes());
    guarantee(Assembler::is_simm16(iid), "Oops, iid is not simm16! Change the instructions.");
    __ addiu(AT, AT, -1 * (int) iid);
    __ beq(AT, R0, L);
    __ nop();
    if (iid == vmIntrinsics::_linkToVirtual ||
        iid == vmIntrinsics::_linkToSpecial) {
      // could do this for all kinds, but would explode assembly code size
      trace_method_handle(_masm, "bad Method*::intrinsic_id");
    }
    __ STOP("bad Method*::intrinsic_id");
    __ bind(L);
    BLOCK_COMMENT("} verify_intrinsic_id");
  }

  // First task: Find out how big the argument list is.
  Address rdx_first_arg_addr;
  int ref_kind = signature_polymorphic_intrinsic_ref_kind(iid);
  assert(ref_kind != 0 || iid == vmIntrinsics::_invokeBasic, "must be _invokeBasic or a linkTo intrinsic");
  if (ref_kind == 0 || MethodHandles::ref_kind_has_receiver(ref_kind)) {
    __ ld(rdx_argp, Address(rbx_method, Method::const_offset()));
    __ load_sized_value(rdx_argp,
                        Address(rdx_argp, ConstMethod::size_of_parameters_offset()),
                        sizeof(u2), false);
    // assert(sizeof(u2) == sizeof(Method::_size_of_parameters), "");
    rdx_first_arg_addr = __ argument_address(rdx_argp, -1);
  } else {
    DEBUG_ONLY(rdx_argp = noreg);
  }

  if (!is_signature_polymorphic_static(iid)) {
    __ ld(rcx_mh, rdx_first_arg_addr);
    DEBUG_ONLY(rdx_argp = noreg);
  }

  // rdx_first_arg_addr is live!

  trace_method_handle_interpreter_entry(_masm, iid);

  if (iid == vmIntrinsics::_invokeBasic) {
    generate_method_handle_dispatch(_masm, iid, rcx_mh, noreg, not_for_compiler_entry);

  } else {
    // Adjust argument list by popping the trailing MemberName argument.
    Register rcx_recv = noreg;
    if (MethodHandles::ref_kind_has_receiver(ref_kind)) {
      // Load the receiver (not the MH; the actual MemberName's receiver) up from the interpreter stack.
      __ ld(rcx_recv = T2, rdx_first_arg_addr);
    }
    DEBUG_ONLY(rdx_argp = noreg);
    Register rbx_member = rbx_method;  // MemberName ptr; incoming method ptr is dead now
    // __ pop(AT);         // return address
    __ pop(rbx_member);    // extract last argument
    // __ push(AT);        // re-push return address
    generate_method_handle_dispatch(_masm, iid, rcx_recv, rbx_member, not_for_compiler_entry);
  }

  return entry_point;
}

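// Resolve the signature-polymorphic intrinsic to a concrete Method* and jump to it:
// _invokeBasic goes indirectly through MH.form.vmentry.vmtarget, while the linkTo*
// linkers take the target (or a vtable/itable index) from the trailing MemberName.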
void MethodHandles::generate_method_handle_dispatch(MacroAssembler* _masm,
                                                    vmIntrinsics::ID iid,
                                                    Register receiver_reg,
                                                    Register member_reg,
                                                    bool for_compiler_entry) {
  assert(is_signature_polymorphic(iid), "expected invoke iid");
  Register rbx_method = Rmethod;   // eventual target of this invocation
  // temps used in this code are not used in *either* compiled or interpreted calling sequences
#ifdef _LP64
  Register j_rarg0 = T0;
  Register j_rarg1 = A0;
  Register j_rarg2 = A1;
  Register j_rarg3 = A2;
  Register j_rarg4 = A3;
  Register j_rarg5 = A4;

  Register temp1 = T8;
  Register temp2 = T9;
  Register temp3 = V0;
  if (for_compiler_entry) {
    assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic ? noreg : j_rarg0), "only valid assignment");
    assert_different_registers(temp1, j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
    assert_different_registers(temp2, j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
    assert_different_registers(temp3, j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
  }
#else
  /*
  Register temp1 = (for_compiler_entry ? rsi : rdx);
  Register temp2 = rdi;
  Register temp3 = rax;
  if (for_compiler_entry) {
    assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic ? noreg : rcx), "only valid assignment");
    assert_different_registers(temp1, rcx, rdx);
    assert_different_registers(temp2, rcx, rdx);
    assert_different_registers(temp3, rcx, rdx);
  }
  */
#endif
  else {
    assert_different_registers(temp1, temp2, temp3, saved_last_sp_register());  // don't trash lastSP
  }
  assert_different_registers(temp1, temp2, temp3, receiver_reg);
  assert_different_registers(temp1, temp2, temp3, member_reg);

  if (iid == vmIntrinsics::_invokeBasic) {
    // indirect through MH.form.vmentry.vmtarget
    jump_to_lambda_form(_masm, receiver_reg, rbx_method, temp1, for_compiler_entry);

  } else {
    // The method is a member invoker used by direct method handles.
    if (VerifyMethodHandles) {
      // make sure the trailing argument really is a MemberName (caller responsibility)
      verify_klass(_masm, member_reg, SystemDictionary::WK_KLASS_ENUM_NAME(java_lang_invoke_MemberName),
                   "MemberName required for invokeVirtual etc.");
    }

    Address member_clazz(    member_reg, NONZERO(java_lang_invoke_MemberName::clazz_offset_in_bytes()));
    Address member_vmindex(  member_reg, NONZERO(java_lang_invoke_MemberName::vmindex_offset_in_bytes()));
    Address member_vmtarget( member_reg, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes()));

    Register temp1_recv_klass = temp1;
    if (iid != vmIntrinsics::_linkToStatic) {
      __ verify_oop(receiver_reg);
      if (iid == vmIntrinsics::_linkToSpecial) {
        // Don't actually load the klass; just null-check the receiver.
        __ null_check(receiver_reg);
      } else {
        // load receiver klass itself
        __ null_check(receiver_reg, oopDesc::klass_offset_in_bytes());
        __ load_klass(temp1_recv_klass, receiver_reg);
        __ verify_klass_ptr(temp1_recv_klass);
      }
      BLOCK_COMMENT("check_receiver {");
      // The receiver for the MemberName must be in receiver_reg.
      // Check the receiver against the MemberName.clazz
      if (VerifyMethodHandles && iid == vmIntrinsics::_linkToSpecial) {
        // Did not load it above...
        __ load_klass(temp1_recv_klass, receiver_reg);
        __ verify_klass_ptr(temp1_recv_klass);
      }
      if (VerifyMethodHandles && iid != vmIntrinsics::_linkToInterface) {
        Label L_ok;
        Register temp2_defc = temp2;
        __ load_heap_oop(temp2_defc, member_clazz);
        load_klass_from_Class(_masm, temp2_defc);
        __ verify_klass_ptr(temp2_defc);
        __ check_klass_subtype(temp1_recv_klass, temp2_defc, temp3, L_ok);
        // If we get here, the type check failed!
        __ STOP("receiver class disagrees with MemberName.clazz");
        __ bind(L_ok);
      }
      BLOCK_COMMENT("} check_receiver");
    }
    if (iid == vmIntrinsics::_linkToSpecial ||
        iid == vmIntrinsics::_linkToStatic) {
      DEBUG_ONLY(temp1_recv_klass = noreg);  // these guys didn't load the recv_klass
    }

    // Live registers at this point:
    //  member_reg - MemberName that was the trailing argument
    //  temp1_recv_klass - klass of stacked receiver, if needed
    //  rsi/r13 - interpreter linkage (if interpreted)
    //  rcx, rdx, rsi, rdi, r8, r9 - compiler arguments (if compiled)

    Label L_incompatible_class_change_error;
    switch (iid) {
    case vmIntrinsics::_linkToSpecial:
      if (VerifyMethodHandles) {
        verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp3);
      }
      __ ld(rbx_method, member_vmtarget);
      break;

    case vmIntrinsics::_linkToStatic:
      if (VerifyMethodHandles) {
        verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp3);
      }
      __ ld(rbx_method, member_vmtarget);
      break;

    case vmIntrinsics::_linkToVirtual:
    {
      // same as TemplateTable::invokevirtual,
      // minus the CP setup and profiling:

      if (VerifyMethodHandles) {
        verify_ref_kind(_masm, JVM_REF_invokeVirtual, member_reg, temp3);
      }

      // pick out the vtable index from the MemberName, and then we can discard it:
      Register temp2_index = temp2;
      __ ld(temp2_index, member_vmindex);

      if (VerifyMethodHandles) {
        Label L_index_ok;
        // __ cmpl(temp2_index, 0);
        // __ jcc(Assembler::greaterEqual, L_index_ok);

        __ slt(AT, R0, temp2_index);
        __ bne(AT, R0, L_index_ok);
        __ nop();
        __ STOP("no virtual index");
        __ BIND(L_index_ok);
      }

      // Note: The verifier invariants allow us to ignore MemberName.clazz and vmtarget
      // at this point. And VerifyMethodHandles has already checked clazz, if needed.

      // get target Method* & entry point
      __ lookup_virtual_method(temp1_recv_klass, temp2_index, rbx_method);
      break;
    }

    case vmIntrinsics::_linkToInterface:
    {
      // same as TemplateTable::invokeinterface
      // (minus the CP setup and profiling, with different argument motion)
      if (VerifyMethodHandles) {
        verify_ref_kind(_masm, JVM_REF_invokeInterface, member_reg, temp3);
      }

      Register temp3_intf = temp3;
      __ load_heap_oop(temp3_intf, member_clazz);
      load_klass_from_Class(_masm, temp3_intf);
      __ verify_klass_ptr(temp3_intf);

      Register rbx_index = rbx_method;
      __ ld(rbx_index, member_vmindex);
      if (VerifyMethodHandles) {
        Label L;
        // __ cmpl(rbx_index, 0);
        // __ jcc(Assembler::greaterEqual, L);
        __ slt(AT, R0, rbx_index);
        __ bne(AT, R0, L);
        __ nop();
        __ STOP("invalid vtable index for MH.invokeInterface");
        __ bind(L);
      }

      // given intf, index, and recv klass, dispatch to the implementation method
      __ lookup_interface_method(temp1_recv_klass, temp3_intf,
                                 // note: next two args must be the same:
                                 rbx_index, rbx_method,
                                 temp2,
                                 L_incompatible_class_change_error);
      break;
    }

    default:
      fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
      break;
    }

    // Live at this point:
    //   rbx_method
    //   rsi/r13 (if interpreted)

    // After figuring out which concrete method to call, jump into it.
    // Note that this works in the interpreter with no data motion.
    // But the compiled version will require that rcx_recv be shifted out.
    __ verify_method_ptr(rbx_method);
    jump_from_method_handle(_masm, rbx_method, temp1, for_compiler_entry);

    if (iid == vmIntrinsics::_linkToInterface) {
      __ bind(L_incompatible_class_change_error);
      // __ jump(RuntimeAddress(StubRoutines::throw_IncompatibleClassChangeError_entry()));
      address icce_entry = StubRoutines::throw_IncompatibleClassChangeError_entry();
      __ jmp(icce_entry, relocInfo::runtime_call_type);
      __ nop();
    }
  }
}

#ifndef PRODUCT
void trace_method_handle_stub(const char* adaptername,
                              oop mh,
                              intptr_t* saved_regs,
                              intptr_t* entry_sp) {
  // called as a leaf from native code: do not block the JVM!
  bool has_mh = (strstr(adaptername, "/static") == NULL &&
                 strstr(adaptername, "linkTo") == NULL);  // static linkers don't have MH
  const char* mh_reg_name = has_mh ? "rcx_mh" : "rcx";
  tty->print_cr("MH %s %s="PTR_FORMAT" sp="PTR_FORMAT,
                adaptername, mh_reg_name,
                (void *)mh, entry_sp);

  if (Verbose) {
    tty->print_cr("Registers:");
    const int saved_regs_count = RegisterImpl::number_of_registers;
    for (int i = 0; i < saved_regs_count; i++) {
      Register r = as_Register(i);
      // The registers are stored in reverse order on the stack (by pusha).
      tty->print("%3s=" PTR_FORMAT, r->name(), saved_regs[((saved_regs_count - 1) - i)]);
      if ((i + 1) % 4 == 0) {
        tty->cr();
      } else {
        tty->print(", ");
      }
    }
    tty->cr();

    {
      // dumping last frame with frame::describe

      JavaThread* p = JavaThread::active();

      ResourceMark rm;
      PRESERVE_EXCEPTION_MARK; // may not be needed, but safer and inexpensive here
      FrameValues values;

      // Note: We want to allow trace_method_handle from any call site.
      // While trace_method_handle creates a frame, it may be entered
      // without a PC on the stack top (e.g. not just after a call).
      // Walking that frame could lead to failures due to that invalid PC.
      // => carefully detect that frame when doing the stack walking

      // Current C frame
      frame cur_frame = os::current_frame();

      // Robust search of trace_calling_frame (independent of inlining).
      // Assumes saved_regs comes from a pusha in the trace_calling_frame.
      assert(cur_frame.sp() < saved_regs, "registers not saved on stack ?");
      frame trace_calling_frame = os::get_sender_for_C_frame(&cur_frame);
      while (trace_calling_frame.fp() < saved_regs) {
        trace_calling_frame = os::get_sender_for_C_frame(&trace_calling_frame);
      }

      // safely create a frame and call frame::describe
      intptr_t *dump_sp = trace_calling_frame.sender_sp();
      intptr_t *dump_fp = trace_calling_frame.link();

      bool walkable = has_mh; // whether the traced frame should be walkable

      if (walkable) {
        // The previous definition of walkable may have to be refined
        // if new call sites cause the next frame constructor to start
        // failing. Alternatively, frame constructors could be
        // modified to support the current or future non walkable
        // frames (but this is more intrusive and is not considered as
        // part of this RFE, which will instead use a simpler output).
        frame dump_frame = frame(dump_sp, dump_fp);
        dump_frame.describe(values, 1);
      } else {
        // Stack may not be walkable (invalid PC above FP):
        // Add descriptions without building a Java frame to avoid issues
        values.describe(-1, dump_fp, "fp for #1 <not parsed, cannot trust pc>");
        values.describe(-1, dump_sp, "sp for #1");
      }
      values.describe(-1, entry_sp, "raw top of stack");

      tty->print_cr("Stack layout:");
      values.print(p);
    }
    if (has_mh && mh->is_oop()) {
      mh->print();
      if (java_lang_invoke_MethodHandle::is_instance(mh)) {
        if (java_lang_invoke_MethodHandle::form_offset_in_bytes() != 0)
          java_lang_invoke_MethodHandle::form(mh)->print();
      }
    }
  }
}

// The stub wraps the arguments in a struct on the stack to avoid
// dealing with the different calling conventions for passing 6
// arguments.
struct MethodHandleStubArguments {
  const char* adaptername;
  oopDesc* mh;
  intptr_t* saved_regs;
  intptr_t* entry_sp;
};
void trace_method_handle_stub_wrapper(MethodHandleStubArguments* args) {
  trace_method_handle_stub(args->adaptername,
                           args->mh,
                           args->saved_regs,
                           args->entry_sp);
}

void MethodHandles::trace_method_handle(MacroAssembler* _masm, const char* adaptername) {
/*
  if (!TraceMethodHandles) return;
  BLOCK_COMMENT("trace_method_handle {");
  __ enter();
  __ andptr(rsp, -16); // align stack if needed for FPU state
  __ pusha();
  __ mov(rbx, rsp); // for retrieving saved_regs
  // Note: saved_regs must be in the entered frame for the
  // robust stack walking implemented in trace_method_handle_stub.

  // save FP result, valid at some call sites (adapter_opt_return_float, ...)
  __ increment(rsp, -2 * wordSize);
  if (UseSSE >= 2) {
    __ movdbl(Address(rsp, 0), xmm0);
  } else if (UseSSE == 1) {
    __ movflt(Address(rsp, 0), xmm0);
  } else {
    __ fst_d(Address(rsp, 0));
  }

  // Incoming state:
  // rcx: method handle
  //
  // To avoid calling convention issues, build a record on the stack
  // and pass the pointer to that instead.
  __ push(rbp);               // entry_sp (with extra align space)
  __ push(rbx);               // pusha saved_regs
  __ push(rcx);               // mh
  __ push(rcx);               // slot for adaptername
  __ movptr(Address(rsp, 0), (intptr_t) adaptername);
  __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, trace_method_handle_stub_wrapper), rsp);
  __ increment(rsp, sizeof(MethodHandleStubArguments));

  if (UseSSE >= 2) {
    __ movdbl(xmm0, Address(rsp, 0));
  } else if (UseSSE == 1) {
    __ movflt(xmm0, Address(rsp, 0));
  } else {
    __ fld_d(Address(rsp, 0));
  }
  __ increment(rsp, 2 * wordSize);

  __ popa();
  __ leave();
  BLOCK_COMMENT("} trace_method_handle");
*/
}
#endif //PRODUCT
