--- a/src/cpu/sparc/vm/sharedRuntime_sparc.cpp	Fri Apr 30 04:27:25 2010 -0700
+++ b/src/cpu/sparc/vm/sharedRuntime_sparc.cpp	Fri Apr 30 08:37:24 2010 -0700
@@ -547,17 +547,11 @@
   void set_Rdisp(Register r) { Rdisp = r; }
 
   void patch_callers_callsite();
-  void tag_c2i_arg(frame::Tag t, Register base, int st_off, Register scratch);
 
   // base+st_off points to top of argument
-  int arg_offset(const int st_off) { return st_off + Interpreter::value_offset_in_bytes(); }
+  int arg_offset(const int st_off) { return st_off; }
   int next_arg_offset(const int st_off) {
-    return st_off - Interpreter::stackElementSize() + Interpreter::value_offset_in_bytes();
-  }
-
-  int tag_offset(const int st_off) { return st_off + Interpreter::tag_offset_in_bytes(); }
-  int next_tag_offset(const int st_off) {
-    return st_off - Interpreter::stackElementSize() + Interpreter::tag_offset_in_bytes();
+    return st_off - Interpreter::stackElementSize;
   }
 
   // Argument slot values may be loaded first into a register because
@@ -565,9 +559,6 @@
   RegisterOrConstant arg_slot(const int st_off);
   RegisterOrConstant next_arg_slot(const int st_off);
 
-  RegisterOrConstant tag_slot(const int st_off);
-  RegisterOrConstant next_tag_slot(const int st_off);
-
   // Stores long into offset pointed to by base
   void store_c2i_long(Register r, Register base,
                       const int st_off, bool is_stack);
@@ -653,23 +644,6 @@
   __ bind(L);
 }
 
-void AdapterGenerator::tag_c2i_arg(frame::Tag t, Register base, int st_off,
-                                   Register scratch) {
-  if (TaggedStackInterpreter) {
-    RegisterOrConstant slot = tag_slot(st_off);
-    // have to store zero because local slots can be reused (rats!)
-    if (t == frame::TagValue) {
-      __ st_ptr(G0, base, slot);
-    } else if (t == frame::TagCategory2) {
-      __ st_ptr(G0, base, slot);
-      __ st_ptr(G0, base, next_tag_slot(st_off));
-    } else {
-      __ mov(t, scratch);
-      __ st_ptr(scratch, base, slot);
-    }
-  }
-}
-
 
 RegisterOrConstant AdapterGenerator::arg_slot(const int st_off) {
   RegisterOrConstant roc(arg_offset(st_off));
@@ -682,17 +656,6 @@
 }
 
 
-RegisterOrConstant AdapterGenerator::tag_slot(const int st_off) {
-  RegisterOrConstant roc(tag_offset(st_off));
-  return __ ensure_simm13_or_reg(roc, Rdisp);
-}
-
-RegisterOrConstant AdapterGenerator::next_tag_slot(const int st_off) {
-  RegisterOrConstant roc(next_tag_offset(st_off));
-  return __ ensure_simm13_or_reg(roc, Rdisp);
-}
-
-
 // Stores long into offset pointed to by base
 void AdapterGenerator::store_c2i_long(Register r, Register base,
                                       const int st_off, bool is_stack) {
@@ -718,19 +681,16 @@
   }
 #endif // COMPILER2
 #endif // _LP64
-  tag_c2i_arg(frame::TagCategory2, base, st_off, r);
 }
 
 void AdapterGenerator::store_c2i_object(Register r, Register base,
                                         const int st_off) {
   __ st_ptr (r, base, arg_slot(st_off));
-  tag_c2i_arg(frame::TagReference, base, st_off, r);
 }
 
 void AdapterGenerator::store_c2i_int(Register r, Register base,
                                      const int st_off) {
   __ st (r, base, arg_slot(st_off));
-  tag_c2i_arg(frame::TagValue, base, st_off, r);
 }
 
 // Stores into offset pointed to by base
@@ -745,13 +705,11 @@
   __ stf(FloatRegisterImpl::S, r_1->as_FloatRegister(), base, next_arg_slot(st_off));
   __ stf(FloatRegisterImpl::S, r_2->as_FloatRegister(), base, arg_slot(st_off) );
 #endif
-  tag_c2i_arg(frame::TagCategory2, base, st_off, G1_scratch);
 }
 
 void AdapterGenerator::store_c2i_float(FloatRegister f, Register base,
                                        const int st_off) {
   __ stf(FloatRegisterImpl::S, f, base, arg_slot(st_off));
-  tag_c2i_arg(frame::TagValue, base, st_off, G1_scratch);
 }
 
 void AdapterGenerator::gen_c2i_adapter(
@@ -786,14 +744,14 @@
   // Since all args are passed on the stack, total_args_passed*wordSize is the
   // space we need.  Add in varargs area needed by the interpreter. Round up
   // to stack alignment.
-  const int arg_size = total_args_passed * Interpreter::stackElementSize();
+  const int arg_size = total_args_passed * Interpreter::stackElementSize;
   const int varargs_area =
     (frame::varargs_offset - frame::register_save_words)*wordSize;
   const int extraspace = round_to(arg_size + varargs_area, 2*wordSize);
 
   int bias = STACK_BIAS;
   const int interp_arg_offset = frame::varargs_offset*wordSize +
-                        (total_args_passed-1)*Interpreter::stackElementSize();
+                        (total_args_passed-1)*Interpreter::stackElementSize;
 
   Register base = SP;
 
@@ -814,7 +772,7 @@
 
   // First write G1 (if used) to where ever it must go
   for (int i=0; i<total_args_passed; i++) {
-    const int st_off = interp_arg_offset - (i*Interpreter::stackElementSize()) + bias;
+    const int st_off = interp_arg_offset - (i*Interpreter::stackElementSize) + bias;
     VMReg r_1 = regs[i].first();
     VMReg r_2 = regs[i].second();
     if (r_1 == G1_scratch->as_VMReg()) {
@@ -831,7 +789,7 @@
 
   // Now write the args into the outgoing interpreter space
   for (int i=0; i<total_args_passed; i++) {
-    const int st_off = interp_arg_offset - (i*Interpreter::stackElementSize()) + bias;
+    const int st_off = interp_arg_offset - (i*Interpreter::stackElementSize) + bias;
     VMReg r_1 = regs[i].first();
     VMReg r_2 = regs[i].second();
     if (!r_1->is_valid()) {
@@ -900,7 +858,7 @@
 #endif // _LP64
 
   __ mov((frame::varargs_offset)*wordSize -
-         1*Interpreter::stackElementSize()+bias+BytesPerWord, G1);
+         1*Interpreter::stackElementSize+bias+BytesPerWord, G1);
   // Jump to the interpreter just as if interpreter was doing it.
   __ jmpl(G3_scratch, 0, G0);
   // Setup Lesp for the call.  Cannot actually set Lesp as the current Lesp
@@ -1051,7 +1009,7 @@
     // ldx/lddf optimizations.
 
     // Load in argument order going down.
-    const int ld_off = (total_args_passed-i)*Interpreter::stackElementSize();
+    const int ld_off = (total_args_passed-i)*Interpreter::stackElementSize;
     set_Rdisp(G1_scratch);
 
     VMReg r_1 = regs[i].first();
@@ -1120,7 +1078,7 @@
   for (int i=0; i<total_args_passed; i++) {
     if (regs[i].first()->is_Register() && regs[i].second()->is_valid()) {
       // Load in argument order going down
-      int ld_off = (total_args_passed-i)*Interpreter::stackElementSize();
+      int ld_off = (total_args_passed-i)*Interpreter::stackElementSize;
       // Need to marshal 64-bit value from misaligned Lesp loads
       Register r = regs[i].first()->as_Register()->after_restore();
       if (r == G1 || r == G4) {
@@ -3062,7 +3020,7 @@
          "test and remove; got more parms than locals");
   if (callee_locals < callee_parameters)
     return 0;                     // No adjustment for negative locals
-  int diff = (callee_locals - callee_parameters) * Interpreter::stackElementWords();
+  int diff = (callee_locals - callee_parameters) * Interpreter::stackElementWords;
   return round_to(diff, WordsPerLong);
 }
 