src/cpu/sparc/vm/methodHandles_sparc.cpp

changeset 4101:2cb2f30450c7
parent    4052:75f33eecc1b3
child     4158:65d07d9ee446
--- a/src/cpu/sparc/vm/methodHandles_sparc.cpp	Fri Sep 14 22:00:41 2012 -0700
+++ b/src/cpu/sparc/vm/methodHandles_sparc.cpp	Mon Sep 17 12:57:58 2012 -0700
@@ -121,6 +121,7 @@
 void MethodHandles::jump_from_method_handle(MacroAssembler* _masm, Register method, Register target, Register temp,
                                             bool for_compiler_entry) {
   assert(method == G5_method, "interpreter calling convention");
+  assert_different_registers(method, target, temp);
 
   if (!for_compiler_entry && JvmtiExport::can_post_interpreter_events()) {
     Label run_compiled_code;
@@ -153,19 +154,19 @@
   BLOCK_COMMENT("jump_to_lambda_form {");
   // This is the initial entry point of a lazy method handle.
   // After type checking, it picks up the invoker from the LambdaForm.
-  assert_different_registers(recv, method_temp, temp2, temp3);
+  assert_different_registers(recv, method_temp, temp2);  // temp3 is only passed on
   assert(method_temp == G5_method, "required register for loading method");
 
   //NOT_PRODUCT({ FlagSetting fs(TraceMethodHandles, true); trace_method_handle(_masm, "LZMH"); });
 
   // Load the invoker, as MH -> MH.form -> LF.vmentry
   __ verify_oop(recv);
-  __ load_heap_oop(Address(recv,        NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())),       method_temp);
+  __ load_heap_oop(Address(recv,        NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())),   method_temp);
   __ verify_oop(method_temp);
-  __ load_heap_oop(Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())), method_temp);
+  __ load_heap_oop(Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())),  method_temp);
   __ verify_oop(method_temp);
   // the following assumes that a Method* is normally compressed in the vmtarget field:
-  __ ld_ptr(Address(method_temp, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes())),     method_temp);
+  __ ld_ptr(       Address(method_temp, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes())), method_temp);
 
   if (VerifyMethodHandles && !for_compiler_entry) {
     // make sure recv is already on stack
@@ -303,25 +304,25 @@
                                                     Register member_reg,
                                                     bool for_compiler_entry) {
   assert(is_signature_polymorphic(iid), "expected invoke iid");
-  // temps used in this code are not used in *either* compiled or interpreted calling sequences
   Register temp1 = (for_compiler_entry ? G1_scratch : O1);
-  Register temp2 = (for_compiler_entry ? G4_scratch : O4);
-  Register temp3 = G3_scratch;
-  Register temp4 = (for_compiler_entry ? noreg      : O2);
+  Register temp2 = (for_compiler_entry ? G3_scratch : O2);
+  Register temp3 = (for_compiler_entry ? G4_scratch : O3);
+  Register temp4 = (for_compiler_entry ? noreg      : O4);
   if (for_compiler_entry) {
     assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic ? noreg : O0), "only valid assignment");
-    assert_different_registers(temp1,      O0, O1, O2, O3, O4, O5);
-    assert_different_registers(temp2,      O0, O1, O2, O3, O4, O5);
-    assert_different_registers(temp3,      O0, O1, O2, O3, O4, O5);
-    assert_different_registers(temp4,      O0, O1, O2, O3, O4, O5);
+    assert_different_registers(temp1, O0, O1, O2, O3, O4, O5);
+    assert_different_registers(temp2, O0, O1, O2, O3, O4, O5);
+    assert_different_registers(temp3, O0, O1, O2, O3, O4, O5);
+    assert_different_registers(temp4, O0, O1, O2, O3, O4, O5);
+  } else {
+    assert_different_registers(temp1, temp2, temp3, temp4, O5_savedSP);  // don't trash lastSP
   }
   if (receiver_reg != noreg)  assert_different_registers(temp1, temp2, temp3, temp4, receiver_reg);
   if (member_reg   != noreg)  assert_different_registers(temp1, temp2, temp3, temp4, member_reg);
-  if (!for_compiler_entry)    assert_different_registers(temp1, temp2, temp3, temp4, O5_savedSP);  // don't trash lastSP
 
   if (iid == vmIntrinsics::_invokeBasic) {
     // indirect through MH.form.vmentry.vmtarget
-    jump_to_lambda_form(_masm, receiver_reg, G5_method, temp2, temp3, for_compiler_entry);
+    jump_to_lambda_form(_masm, receiver_reg, G5_method, temp1, temp2, for_compiler_entry);
 
   } else {
     // The method is a member invoker used by direct method handles.
@@ -378,24 +379,22 @@
     //  member_reg - MemberName that was the trailing argument
     //  temp1_recv_klass - klass of stacked receiver, if needed
     //  O5_savedSP - interpreter linkage (if interpreted)
-    //  O0..O7,G1,G4 - compiler arguments (if compiled)
+    //  O0..O5 - compiler arguments (if compiled)
 
-    bool method_is_live = false;
+    Label L_incompatible_class_change_error;
     switch (iid) {
     case vmIntrinsics::_linkToSpecial:
       if (VerifyMethodHandles) {
-        verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp3);
+        verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp2);
       }
       __ ld_ptr(member_vmtarget, G5_method);
-      method_is_live = true;
       break;
 
     case vmIntrinsics::_linkToStatic:
       if (VerifyMethodHandles) {
-        verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp3);
+        verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp2);
       }
       __ ld_ptr(member_vmtarget, G5_method);
-      method_is_live = true;
       break;
 
     case vmIntrinsics::_linkToVirtual:
@@ -404,7 +403,7 @@
       // minus the CP setup and profiling:
 
       if (VerifyMethodHandles) {
-        verify_ref_kind(_masm, JVM_REF_invokeVirtual, member_reg, temp3);
+        verify_ref_kind(_masm, JVM_REF_invokeVirtual, member_reg, temp2);
       }
 
       // pick out the vtable index from the MemberName, and then we can discard it:
@@ -423,7 +422,6 @@
 
       // get target Method* & entry point
       __ lookup_virtual_method(temp1_recv_klass, temp2_index, G5_method);
-      method_is_live = true;
       break;
     }
 
@@ -432,13 +430,13 @@
       // same as TemplateTable::invokeinterface
       // (minus the CP setup and profiling, with different argument motion)
       if (VerifyMethodHandles) {
-        verify_ref_kind(_masm, JVM_REF_invokeInterface, member_reg, temp3);
+        verify_ref_kind(_masm, JVM_REF_invokeInterface, member_reg, temp2);
       }
 
-      Register temp3_intf = temp3;
-      __ load_heap_oop(member_clazz, temp3_intf);
-      load_klass_from_Class(_masm, temp3_intf, temp2, temp4);
-      __ verify_klass_ptr(temp3_intf);
+      Register temp2_intf = temp2;
+      __ load_heap_oop(member_clazz, temp2_intf);
+      load_klass_from_Class(_masm, temp2_intf, temp3, temp4);
+      __ verify_klass_ptr(temp2_intf);
 
       Register G5_index = G5_method;
       __ ld_ptr(member_vmindex, G5_index);
@@ -450,37 +448,34 @@
       }
 
       // given intf, index, and recv klass, dispatch to the implementation method
-      Label L_no_such_interface;
-      Register no_sethi_temp = noreg;
-      __ lookup_interface_method(temp1_recv_klass, temp3_intf,
+      __ lookup_interface_method(temp1_recv_klass, temp2_intf,
                                  // note: next two args must be the same:
                                  G5_index, G5_method,
-                                 temp2, no_sethi_temp,
-                                 L_no_such_interface);
-
-      __ verify_method_ptr(G5_method);
-      jump_from_method_handle(_masm, G5_method, temp2, temp3, for_compiler_entry);
-
-      __ bind(L_no_such_interface);
-      AddressLiteral icce(StubRoutines::throw_IncompatibleClassChangeError_entry());
-      __ jump_to(icce, temp3);
-      __ delayed()->nop();
+                                 temp3, temp4,
+                                 L_incompatible_class_change_error);
       break;
     }
 
     default:
-      fatal(err_msg("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
+      fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
       break;
     }
 
-    if (method_is_live) {
-      // live at this point:  G5_method, O5_savedSP (if interpreted)
+    // Live at this point:
+    //   G5_method
+    //   O5_savedSP (if interpreted)
 
-      // After figuring out which concrete method to call, jump into it.
-      // Note that this works in the interpreter with no data motion.
-      // But the compiled version will require that rcx_recv be shifted out.
-      __ verify_method_ptr(G5_method);
-      jump_from_method_handle(_masm, G5_method, temp1, temp3, for_compiler_entry);
+    // After figuring out which concrete method to call, jump into it.
+    // Note that this works in the interpreter with no data motion.
+    // But the compiled version will require that rcx_recv be shifted out.
+    __ verify_method_ptr(G5_method);
+    jump_from_method_handle(_masm, G5_method, temp1, temp2, for_compiler_entry);
+
+    if (iid == vmIntrinsics::_linkToInterface) {
+      __ BIND(L_incompatible_class_change_error);
+      AddressLiteral icce(StubRoutines::throw_IncompatibleClassChangeError_entry());
+      __ jump_to(icce, temp1);
+      __ delayed()->nop();
    }
  }
 }
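
The main structural change in this diff is that the per-case method_is_live flag and the inline IncompatibleClassChangeError stub are gone: every linkTo* case now loads G5_method and falls through to a single jump_from_method_handle call, and only the linkToInterface path can branch to the one shared L_incompatible_class_change_error label bound after that common tail. Below is a minimal, standalone C++ sketch of that control-flow shape only; it is not HotSpot code, and the names (Intrinsic, Method, lookup_direct, dispatch, etc.) are illustrative stand-ins for what the generated SPARC stub does.

#include <cstdio>
#include <stdexcept>

enum class Intrinsic { LinkToSpecial, LinkToStatic, LinkToVirtual, LinkToInterface };

struct Method { const char* name; };

// Illustrative stand-ins for the lookups performed by the generated stub.
static Method* lookup_direct(Intrinsic)              { static Method m{"vmtarget"}; return &m; }
static Method* lookup_virtual_method(int)            { static Method m{"vtable"};   return &m; }
// Returns nullptr when the receiver class does not implement the interface,
// which corresponds to taking L_incompatible_class_change_error in the stub.
static Method* lookup_interface_method(int, bool ok) { static Method m{"itable"};   return ok ? &m : nullptr; }

static void jump_from_method_handle(Method* m)       { std::printf("jump to %s method\n", m->name); }

// No method_is_live flag: every case either produces a method and falls
// through to the single common jump, or (interface case only) branches to
// the one shared error label bound after that common tail.
void dispatch(Intrinsic iid, int index, bool interface_ok) {
  Method* method = nullptr;
  switch (iid) {
  case Intrinsic::LinkToSpecial:
  case Intrinsic::LinkToStatic:
    method = lookup_direct(iid);            // cf. __ ld_ptr(member_vmtarget, G5_method)
    break;
  case Intrinsic::LinkToVirtual:
    method = lookup_virtual_method(index);  // cf. __ lookup_virtual_method(...)
    break;
  case Intrinsic::LinkToInterface:
    method = lookup_interface_method(index, interface_ok);
    if (method == nullptr) goto incompatible_class_change_error;
    break;
  }
  jump_from_method_handle(method);          // common tail shared by all live paths
  return;

incompatible_class_change_error:            // analogous to __ BIND(L_incompatible_class_change_error)
  throw std::runtime_error("IncompatibleClassChangeError");
}

int main() {
  dispatch(Intrinsic::LinkToVirtual, 5, true);
  try {
    dispatch(Intrinsic::LinkToInterface, 3, false);
  } catch (const std::exception& e) {
    std::printf("thrown: %s\n", e.what());
  }
  return 0;
}

Folding the error stub into one shared out-of-line label is what lets the patch drop the method_is_live bookkeeping and use the freed temp registers (temp1/temp2) uniformly in the verify_ref_kind calls and the final jump.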
