src/cpu/sparc/vm/templateTable_sparc.cpp

Thu, 07 Apr 2011 09:53:20 -0700

author
johnc
date
Thu, 07 Apr 2011 09:53:20 -0700
changeset 2781
e1162778c1c8
parent 2639
8033953d67ff
child 2784
92add02409c9
permissions
-rw-r--r--

7009266: G1: assert(obj->is_oop_or_null(true )) failed: Error
Summary: A referent object that is only weakly reachable at the start of concurrent marking but is re-attached to the strongly reachable object graph during marking may not be marked as live. This can cause the reference object to be processed prematurely and leave dangling pointers to the referent object. Implement a read barrier for the java.lang.ref.Reference::referent field by intrinsifying the Reference.get() method, and intercepting accesses through JNI, reflection, and Unsafe, so that when a non-null referent object is read it is also logged in an SATB buffer.
Reviewed-by: kvn, iveresov, never, tonyp, dholmes

duke@435 1 /*
phh@2423 2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "interpreter/interpreter.hpp"
stefank@2314 27 #include "interpreter/interpreterRuntime.hpp"
stefank@2314 28 #include "interpreter/templateTable.hpp"
stefank@2314 29 #include "memory/universe.inline.hpp"
stefank@2314 30 #include "oops/methodDataOop.hpp"
stefank@2314 31 #include "oops/objArrayKlass.hpp"
stefank@2314 32 #include "oops/oop.inline.hpp"
stefank@2314 33 #include "prims/methodHandles.hpp"
stefank@2314 34 #include "runtime/sharedRuntime.hpp"
stefank@2314 35 #include "runtime/stubRoutines.hpp"
stefank@2314 36 #include "runtime/synchronizer.hpp"
duke@435 37
duke@435 38 #ifndef CC_INTERP
duke@435 39 #define __ _masm->
duke@435 40
ysr@777 41 // Misc helpers
ysr@777 42 
ysr@777 43 // Do an oop store like *(base + index + offset) = val
ysr@777 44 // index can be noreg,
// If index == noreg the (simm13) immediate offset is used; otherwise the
// offset must be 0 and base+index addresses the slot.  'tmp' is scratch and
// must not alias val/base/index.  'precise' requests the post barrier /
// card mark on the exact field or element address (required for array
// stores); otherwise the object start address is marked.  NOTE: base may be
// clobbered when precise and val != G0.
ysr@777 45 static void do_oop_store(InterpreterMacroAssembler* _masm,
ysr@777 46 Register base,
ysr@777 47 Register index,
ysr@777 48 int offset,
ysr@777 49 Register val,
ysr@777 50 Register tmp,
ysr@777 51 BarrierSet::Name barrier,
ysr@777 52 bool precise) {
ysr@777 53 assert(tmp != val && tmp != base && tmp != index, "register collision");
ysr@777 54 assert(index == noreg || offset == 0, "only one offset");
ysr@777 55 switch (barrier) {
ysr@777 56 #ifndef SERIALGC
ysr@777 57 case BarrierSet::G1SATBCT:
ysr@777 58 case BarrierSet::G1SATBCTLogging:
ysr@777 59 {
// G1 SATB pre-barrier: read the old field value and log it, so the
// snapshot-at-the-beginning invariant is preserved before overwriting.
johnc@2781 60 // Load and record the previous value.
johnc@2781 61 __ g1_write_barrier_pre(base, index, offset,
johnc@2781 62 noreg /* pre_val */,
johnc@2781 63 tmp, true /*preserve_o_regs*/);
johnc@2781 64 
ysr@777 65 if (index == noreg ) {
ysr@777 66 assert(Assembler::is_simm13(offset), "fix this code");
ysr@777 67 __ store_heap_oop(val, base, offset);
ysr@777 68 } else {
ysr@777 69 __ store_heap_oop(val, base, index);
ysr@777 70 }
ysr@777 71 
ysr@777 72 // No need for post barrier if storing NULL
ysr@777 73 if (val != G0) {
ysr@777 74 if (precise) {
// Form the exact destination address for the post barrier.
ysr@777 75 if (index == noreg) {
ysr@777 76 __ add(base, offset, base);
ysr@777 77 } else {
ysr@777 78 __ add(base, index, base);
ysr@777 79 }
ysr@777 80 }
ysr@777 81 __ g1_write_barrier_post(base, val, tmp);
ysr@777 82 }
ysr@777 83 }
ysr@777 84 break;
ysr@777 85 #endif // SERIALGC
ysr@777 86 case BarrierSet::CardTableModRef:
ysr@777 87 case BarrierSet::CardTableExtension:
ysr@777 88 {
// Card-table collectors: plain store followed by a card dirtying.
ysr@777 89 if (index == noreg ) {
ysr@777 90 assert(Assembler::is_simm13(offset), "fix this code");
ysr@777 91 __ store_heap_oop(val, base, offset);
ysr@777 92 } else {
ysr@777 93 __ store_heap_oop(val, base, index);
ysr@777 94 }
ysr@777 95 // No need for post barrier if storing NULL
ysr@777 96 if (val != G0) {
ysr@777 97 if (precise) {
ysr@777 98 if (index == noreg) {
ysr@777 99 __ add(base, offset, base);
ysr@777 100 } else {
ysr@777 101 __ add(base, index, base);
ysr@777 102 }
ysr@777 103 }
ysr@777 104 __ card_write_barrier_post(base, val, tmp);
ysr@777 105 }
ysr@777 106 }
ysr@777 107 break;
ysr@777 108 case BarrierSet::ModRef:
ysr@777 109 case BarrierSet::Other:
ysr@777 110 ShouldNotReachHere();
ysr@777 111 break;
ysr@777 112 default :
ysr@777 113 ShouldNotReachHere();
ysr@777 114 
ysr@777 115 }
ysr@777 116 }
ysr@777 117
duke@435 118
duke@435 119 //----------------------------------------------------------------------------------------------------
duke@435 120 // Platform-dependent initialization
duke@435 121
// Platform-dependent one-time template-table setup; SPARC requires none.
duke@435 122 void TemplateTable::pd_initialize() {
duke@435 123 // (none)
duke@435 124 }
duke@435 125
duke@435 126
duke@435 127 //----------------------------------------------------------------------------------------------------
duke@435 128 // Condition conversion
duke@435 129 Assembler::Condition ccNot(TemplateTable::Condition cc) {
duke@435 130 switch (cc) {
duke@435 131 case TemplateTable::equal : return Assembler::notEqual;
duke@435 132 case TemplateTable::not_equal : return Assembler::equal;
duke@435 133 case TemplateTable::less : return Assembler::greaterEqual;
duke@435 134 case TemplateTable::less_equal : return Assembler::greater;
duke@435 135 case TemplateTable::greater : return Assembler::lessEqual;
duke@435 136 case TemplateTable::greater_equal: return Assembler::less;
duke@435 137 }
duke@435 138 ShouldNotReachHere();
duke@435 139 return Assembler::zero;
duke@435 140 }
duke@435 141
duke@435 142 //----------------------------------------------------------------------------------------------------
duke@435 143 // Miscellaneous helper routines
duke@435 144
duke@435 145
// Address of the byte at Lbcp + offset (current bytecode stream position);
// only legal for templates that declare they maintain the bcp.
duke@435 146 Address TemplateTable::at_bcp(int offset) {
duke@435 147 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
twisti@1162 148 return Address(Lbcp, offset);
duke@435 149 }
duke@435 150
duke@435 151
// Rewrite the bytecode at the current bcp to the faster variant in
// Rbyte_code (quickening).  If JVMTI breakpoints may be present, a
// _breakpoint opcode at bcp forces the slow path through the VM so the
// original bytecode stored in the breakpoint table is updated instead.
duke@435 152 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register Rbyte_code,
duke@435 153 Register Rscratch,
duke@435 154 bool load_bc_into_scratch /*=true*/) {
duke@435 155 // With sharing on, may need to test methodOop flag.
duke@435 156 if (!RewriteBytecodes) return;
duke@435 157 if (load_bc_into_scratch) __ set(bc, Rbyte_code);
duke@435 158 Label patch_done;
duke@435 159 if (JvmtiExport::can_post_breakpoint()) {
duke@435 160 Label fast_patch;
duke@435 161 __ ldub(at_bcp(0), Rscratch);
duke@435 162 __ cmp(Rscratch, Bytecodes::_breakpoint);
duke@435 163 __ br(Assembler::notEqual, false, Assembler::pt, fast_patch);
duke@435 164 __ delayed()->nop(); // don't bother to hoist the stb here
duke@435 165 // perform the quickening, slowly, in the bowels of the breakpoint table
duke@435 166 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), Lmethod, Lbcp, Rbyte_code);
duke@435 167 __ ba(false, patch_done);
duke@435 168 __ delayed()->nop();
duke@435 169 __ bind(fast_patch);
duke@435 170 }
duke@435 171 #ifdef ASSERT
// Debug check: the byte being replaced must be either the original java
// bytecode or the fast bytecode we are (re-)installing.
duke@435 172 Bytecodes::Code orig_bytecode = Bytecodes::java_code(bc);
duke@435 173 Label okay;
duke@435 174 __ ldub(at_bcp(0), Rscratch);
duke@435 175 __ cmp(Rscratch, orig_bytecode);
duke@435 176 __ br(Assembler::equal, false, Assembler::pt, okay);
duke@435 177 __ delayed() ->cmp(Rscratch, Rbyte_code);
duke@435 178 __ br(Assembler::equal, false, Assembler::pt, okay);
duke@435 179 __ delayed()->nop();
duke@435 180 __ stop("Rewriting wrong bytecode location");
duke@435 181 __ bind(okay);
duke@435 182 #endif
// Single-byte store performs the actual rewrite.
duke@435 183 __ stb(Rbyte_code, at_bcp(0));
duke@435 184 __ bind(patch_done);
duke@435 185 }
duke@435 186
duke@435 187 //----------------------------------------------------------------------------------------------------
duke@435 188 // Individual instructions
duke@435 189
// nop: no machine state changes.
duke@435 190 void TemplateTable::nop() {
duke@435 191 transition(vtos, vtos);
duke@435 192 // nothing to do
duke@435 193 }
duke@435 194 
// Placeholder template for unassigned bytecodes; traps if ever executed.
duke@435 195 void TemplateTable::shouldnotreachhere() {
duke@435 196 transition(vtos, vtos);
duke@435 197 __ stop("shouldnotreachhere bytecode");
duke@435 198 }
duke@435 199 
// aconst_null: push a null reference (clear the tos register).
duke@435 200 void TemplateTable::aconst_null() {
duke@435 201 transition(vtos, atos);
duke@435 202 __ clr(Otos_i);
duke@435 203 }
duke@435 204 
duke@435 205 
// iconst_<i>: push the int constant 'value'.
duke@435 206 void TemplateTable::iconst(int value) {
duke@435 207 transition(vtos, itos);
duke@435 208 __ set(value, Otos_i);
duke@435 209 }
duke@435 210 
duke@435 211 
// lconst_<l>: push the long constant 'value' (only 0 and 1 exist).
duke@435 212 void TemplateTable::lconst(int value) {
duke@435 213 transition(vtos, ltos);
duke@435 214 assert(value >= 0, "check this code");
duke@435 215 #ifdef _LP64
duke@435 216 __ set(value, Otos_l);
duke@435 217 #else
duke@435 218 __ set(value, Otos_l2);
duke@435 219 __ clr( Otos_l1);
duke@435 220 #endif
duke@435 221 }
duke@435 222 
duke@435 223 
// fconst_<f>: load one of the static float constants 0.0/1.0/2.0 from
// memory (SPARC has no float-immediate load).
duke@435 224 void TemplateTable::fconst(int value) {
duke@435 225 transition(vtos, ftos);
duke@435 226 static float zero = 0.0, one = 1.0, two = 2.0;
duke@435 227 float* p;
duke@435 228 switch( value ) {
duke@435 229 default: ShouldNotReachHere();
duke@435 230 case 0: p = &zero; break;
duke@435 231 case 1: p = &one; break;
duke@435 232 case 2: p = &two; break;
duke@435 233 }
twisti@1162 234 AddressLiteral a(p);
twisti@1162 235 __ sethi(a, G3_scratch);
twisti@1162 236 __ ldf(FloatRegisterImpl::S, G3_scratch, a.low10(), Ftos_f);
duke@435 237 }
duke@435 238 
duke@435 239 
// dconst_<d>: load one of the static double constants 0.0/1.0 from memory.
duke@435 240 void TemplateTable::dconst(int value) {
duke@435 241 transition(vtos, dtos);
duke@435 242 static double zero = 0.0, one = 1.0;
duke@435 243 double* p;
duke@435 244 switch( value ) {
duke@435 245 default: ShouldNotReachHere();
duke@435 246 case 0: p = &zero; break;
duke@435 247 case 1: p = &one; break;
duke@435 248 }
twisti@1162 249 AddressLiteral a(p);
twisti@1162 250 __ sethi(a, G3_scratch);
twisti@1162 251 __ ldf(FloatRegisterImpl::D, G3_scratch, a.low10(), Ftos_d);
duke@435 252 }
duke@435 253 
duke@435 254 
duke@435 255 // %%%%% Should factor most snippet templates across platforms
duke@435 256 
// bipush: sign-extend the single operand byte onto the stack.
duke@435 257 void TemplateTable::bipush() {
duke@435 258 transition(vtos, itos);
duke@435 259 __ ldsb( at_bcp(1), Otos_i );
duke@435 260 }
duke@435 261 
// sipush: sign-extend the two-byte operand onto the stack.
duke@435 262 void TemplateTable::sipush() {
duke@435 263 transition(vtos, itos);
duke@435 264 __ get_2_byte_integer_at_bcp(1, G3_scratch, Otos_i, InterpreterMacroAssembler::Signed);
duke@435 265 }
duke@435 266
// ldc / ldc_w: push a single-word constant-pool entry.  Dispatches on the
// pool tag: unresolved String/Class entries go to the VM (call_ldc); a
// resolved Class also goes to the VM to fetch the java mirror; Integer,
// Float and resolved String are loaded inline from the pool.
duke@435 267 void TemplateTable::ldc(bool wide) {
duke@435 268 transition(vtos, vtos);
duke@435 269 Label call_ldc, notInt, notString, notClass, exit;
duke@435 270 
duke@435 271 if (wide) {
duke@435 272 __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
duke@435 273 } else {
duke@435 274 __ ldub(Lbcp, 1, O1);
duke@435 275 }
duke@435 276 __ get_cpool_and_tags(O0, O2);
duke@435 277 
duke@435 278 const int base_offset = constantPoolOopDesc::header_size() * wordSize;
duke@435 279 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
duke@435 280 
duke@435 281 // get type from tags
duke@435 282 __ add(O2, tags_offset, O2);
duke@435 283 __ ldub(O2, O1, O2);
duke@435 284 __ cmp(O2, JVM_CONSTANT_UnresolvedString); // unresolved string? If so, must resolve
duke@435 285 __ brx(Assembler::equal, true, Assembler::pt, call_ldc);
duke@435 286 __ delayed()->nop();
duke@435 287 
duke@435 288 __ cmp(O2, JVM_CONSTANT_UnresolvedClass); // unresolved class? If so, must resolve
duke@435 289 __ brx(Assembler::equal, true, Assembler::pt, call_ldc);
duke@435 290 __ delayed()->nop();
duke@435 291 
duke@435 292 __ cmp(O2, JVM_CONSTANT_UnresolvedClassInError); // unresolved class in error state
duke@435 293 __ brx(Assembler::equal, true, Assembler::pn, call_ldc);
duke@435 294 __ delayed()->nop();
duke@435 295 
duke@435 296 __ cmp(O2, JVM_CONSTANT_Class); // need to call vm to get java mirror of the class
duke@435 297 __ brx(Assembler::notEqual, true, Assembler::pt, notClass);
duke@435 298 __ delayed()->add(O0, base_offset, O0);
duke@435 299 
// Slow path: let the VM resolve/fetch the constant; result is an oop.
duke@435 300 __ bind(call_ldc);
duke@435 301 __ set(wide, O1);
duke@435 302 call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), O1);
duke@435 303 __ push(atos);
duke@435 304 __ ba(false, exit);
duke@435 305 __ delayed()->nop();
duke@435 306 
duke@435 307 __ bind(notClass);
// O0 already biased by base_offset (done in the delay slot above);
// scale the index to a word offset.
duke@435 308 // __ add(O0, base_offset, O0);
duke@435 309 __ sll(O1, LogBytesPerWord, O1);
duke@435 310 __ cmp(O2, JVM_CONSTANT_Integer);
duke@435 311 __ brx(Assembler::notEqual, true, Assembler::pt, notInt);
duke@435 312 __ delayed()->cmp(O2, JVM_CONSTANT_String);
duke@435 313 __ ld(O0, O1, Otos_i);
duke@435 314 __ push(itos);
duke@435 315 __ ba(false, exit);
duke@435 316 __ delayed()->nop();
duke@435 317 
duke@435 318 __ bind(notInt);
// Comparison against JVM_CONSTANT_String was done in the delay slot above.
duke@435 319 // __ cmp(O2, JVM_CONSTANT_String);
duke@435 320 __ brx(Assembler::notEqual, true, Assembler::pt, notString);
duke@435 321 __ delayed()->ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
duke@435 322 __ ld_ptr(O0, O1, Otos_i);
duke@435 323 __ verify_oop(Otos_i);
duke@435 324 __ push(atos);
duke@435 325 __ ba(false, exit);
duke@435 326 __ delayed()->nop();
duke@435 327 
duke@435 328 __ bind(notString);
// Float value was already loaded in the delay slot above.
duke@435 329 // __ ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
duke@435 330 __ push(ftos);
duke@435 331 
duke@435 332 __ bind(exit);
duke@435 333 }
duke@435 334
jrose@1957 335 // Fast path for caching oop constants.
jrose@1957 336 // %%% We should use this to handle Class and String constants also.
jrose@1957 337 // %%% It will simplify the ldc/primitive path considerably.
// Loads a cached oop constant via the constant-pool cache.  If the cached
// value is a non-empty system object array, it wraps a pending exception
// that must be unwrapped and thrown (see the L_done logic below).
jrose@1957 338 void TemplateTable::fast_aldc(bool wide) {
jrose@1957 339 transition(vtos, atos);
jrose@1957 340 
jrose@1957 341 if (!EnableMethodHandles) {
jrose@1957 342 // We should not encounter this bytecode if !EnableMethodHandles.
jrose@1957 343 // The verifier will stop it. However, if we get past the verifier,
jrose@1957 344 // this will stop the thread in a reasonable way, without crashing the JVM.
jrose@1957 345 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
jrose@1957 346 InterpreterRuntime::throw_IncompatibleClassChangeError));
jrose@1957 347 // the call_VM checks for exception, so we should never return here.
jrose@1957 348 __ should_not_reach_here();
jrose@1957 349 return;
jrose@1957 350 }
jrose@1957 351 
jrose@1957 352 Register Rcache = G3_scratch;
jrose@1957 353 Register Rscratch = G4_scratch;
jrose@1957 354 
jrose@1957 355 resolve_cache_and_index(f1_oop, Otos_i, Rcache, Rscratch, wide ? sizeof(u2) : sizeof(u1));
jrose@1957 356 
jrose@1957 357 __ verify_oop(Otos_i);
jrose@2268 358 
// Check whether the constant is an exception wrapped in a systemObjArray.
jrose@2268 359 Label L_done;
jrose@2268 360 const Register Rcon_klass = G3_scratch; // same as Rcache
jrose@2268 361 const Register Rarray_klass = G4_scratch; // same as Rscratch
jrose@2268 362 __ load_klass(Otos_i, Rcon_klass);
jrose@2268 363 AddressLiteral array_klass_addr((address)Universe::systemObjArrayKlassObj_addr());
jrose@2268 364 __ load_contents(array_klass_addr, Rarray_klass);
jrose@2268 365 __ cmp(Rarray_klass, Rcon_klass);
jrose@2268 366 __ brx(Assembler::notEqual, false, Assembler::pt, L_done);
jrose@2268 367 __ delayed()->nop();
jrose@2268 368 __ ld(Address(Otos_i, arrayOopDesc::length_offset_in_bytes()), Rcon_klass);
jrose@2268 369 __ tst(Rcon_klass);
// Zero-length system array encodes a null constant: clear tos and finish.
jrose@2268 370 __ brx(Assembler::zero, true, Assembler::pt, L_done);
jrose@2268 371 __ delayed()->clr(Otos_i); // executed only if branch is taken
jrose@2268 372 
jrose@2268 373 // Load the exception from the system-array which wraps it:
jrose@2268 374 __ load_heap_oop(Otos_i, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i);
jrose@2268 375 __ throw_if_not_x(Assembler::never, Interpreter::throw_exception_entry(), G3_scratch);
jrose@2268 376 
jrose@2268 377 __ bind(L_done);
jrose@1957 378 }
jrose@1957 379
// ldc2_w: push a two-word constant (long or double) from the constant pool.
// The pool is only word-aligned, so doubles are loaded as two singles on
// 32-bit SPARC.
duke@435 380 void TemplateTable::ldc2_w() {
duke@435 381 transition(vtos, vtos);
duke@435 382 Label retry, resolved, Long, exit;
duke@435 383 
duke@435 384 __ bind(retry);
duke@435 385 __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
duke@435 386 __ get_cpool_and_tags(O0, O2);
duke@435 387 
duke@435 388 const int base_offset = constantPoolOopDesc::header_size() * wordSize;
duke@435 389 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
duke@435 390 // get type from tags
duke@435 391 __ add(O2, tags_offset, O2);
duke@435 392 __ ldub(O2, O1, O2);
duke@435 393 
// G3_scratch = address of the constant-pool entry.
duke@435 394 __ sll(O1, LogBytesPerWord, O1);
duke@435 395 __ add(O0, O1, G3_scratch);
duke@435 396 
duke@435 397 __ cmp(O2, JVM_CONSTANT_Double);
duke@435 398 __ brx(Assembler::notEqual, false, Assembler::pt, Long);
duke@435 399 __ delayed()->nop();
duke@435 400 // A double can be placed at word-aligned locations in the constant pool.
duke@435 401 // Check out Conversions.java for an example.
duke@435 402 // Also constantPoolOopDesc::header_size() is 20, which makes it very difficult
duke@435 403 // to double-align double on the constant pool. SG, 11/7/97
duke@435 404 #ifdef _LP64
duke@435 405 __ ldf(FloatRegisterImpl::D, G3_scratch, base_offset, Ftos_d);
duke@435 406 #else
duke@435 407 FloatRegister f = Ftos_d;
duke@435 408 __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset, f);
duke@435 409 __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset + sizeof(jdouble)/2,
duke@435 410 f->successor());
duke@435 411 #endif
duke@435 412 __ push(dtos);
duke@435 413 __ ba(false, exit);
duke@435 414 __ delayed()->nop();
duke@435 415 
duke@435 416 __ bind(Long);
duke@435 417 #ifdef _LP64
duke@435 418 __ ldx(G3_scratch, base_offset, Otos_l);
duke@435 419 #else
// 32-bit: load the long as two adjacent 32-bit halves.
duke@435 420 __ ld(G3_scratch, base_offset, Otos_l);
duke@435 421 __ ld(G3_scratch, base_offset + sizeof(jlong)/2, Otos_l->successor());
duke@435 422 #endif
duke@435 423 __ push(ltos);
duke@435 424 
duke@435 425 __ bind(exit);
duke@435 426 }
duke@435 427
duke@435 428
// Load the (unsigned byte) local-variable index operand at bcp+offset.
duke@435 429 void TemplateTable::locals_index(Register reg, int offset) {
duke@435 430 __ ldub( at_bcp(offset), reg );
duke@435 431 }
duke@435 432 
duke@435 433 
// Load the 16-bit local index of a wide-prefixed bytecode.
duke@435 434 void TemplateTable::locals_index_wide(Register reg) {
duke@435 435 // offset is 2, not 1, because Lbcp points to wide prefix code
duke@435 436 __ get_2_byte_integer_at_bcp(2, G4_scratch, reg, InterpreterMacroAssembler::Unsigned);
duke@435 437 }
duke@435 438
// iload: push a local int; may rewrite itself (quickening) into
// fast_iload2 / fast_icaload / fast_iload depending on the next bytecode.
duke@435 439 void TemplateTable::iload() {
duke@435 440 transition(vtos, itos);
duke@435 441 // Rewrite iload,iload pair into fast_iload2
duke@435 442 // iload,caload pair into fast_icaload
duke@435 443 if (RewriteFrequentPairs) {
duke@435 444 Label rewrite, done;
duke@435 445 
duke@435 446 // get next byte
duke@435 447 __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_iload)), G3_scratch);
duke@435 448 
duke@435 449 // if _iload, wait to rewrite to iload2. We only want to rewrite the
duke@435 450 // last two iloads in a pair. Comparing against fast_iload means that
duke@435 451 // the next bytecode is neither an iload or a caload, and therefore
duke@435 452 // an iload pair.
duke@435 453 __ cmp(G3_scratch, (int)Bytecodes::_iload);
duke@435 454 __ br(Assembler::equal, false, Assembler::pn, done);
duke@435 455 __ delayed()->nop();
duke@435 456 
// The fast-bytecode candidate is set in each branch's delay slot.
duke@435 457 __ cmp(G3_scratch, (int)Bytecodes::_fast_iload);
duke@435 458 __ br(Assembler::equal, false, Assembler::pn, rewrite);
duke@435 459 __ delayed()->set(Bytecodes::_fast_iload2, G4_scratch);
duke@435 460 
duke@435 461 __ cmp(G3_scratch, (int)Bytecodes::_caload);
duke@435 462 __ br(Assembler::equal, false, Assembler::pn, rewrite);
duke@435 463 __ delayed()->set(Bytecodes::_fast_icaload, G4_scratch);
duke@435 464 
duke@435 465 __ set(Bytecodes::_fast_iload, G4_scratch); // don't check again
duke@435 466 // rewrite
duke@435 467 // G4_scratch: fast bytecode
duke@435 468 __ bind(rewrite);
duke@435 469 patch_bytecode(Bytecodes::_iload, G4_scratch, G3_scratch, false);
duke@435 470 __ bind(done);
duke@435 471 }
duke@435 472 
duke@435 473 // Get the local value into tos
duke@435 474 locals_index(G3_scratch);
duke@435 475 __ access_local_int( G3_scratch, Otos_i );
duke@435 476 }
duke@435 477
// fast_iload2: fused iload,iload pair — push the first local, then leave
// the second in tos.
duke@435 478 void TemplateTable::fast_iload2() {
duke@435 479 transition(vtos, itos);
duke@435 480 locals_index(G3_scratch);
duke@435 481 __ access_local_int( G3_scratch, Otos_i );
duke@435 482 __ push_i();
duke@435 483 locals_index(G3_scratch, 3); // get next bytecode's local index.
duke@435 484 __ access_local_int( G3_scratch, Otos_i );
duke@435 485 }
duke@435 486 
// fast_iload: iload without the rewrite check (already quickened).
duke@435 487 void TemplateTable::fast_iload() {
duke@435 488 transition(vtos, itos);
duke@435 489 locals_index(G3_scratch);
duke@435 490 __ access_local_int( G3_scratch, Otos_i );
duke@435 491 }
duke@435 492
// lload / fload / dload / aload and their wide variants: fetch a local of
// the given type into the matching tos register.  Wide forms use the
// 16-bit index after the wide prefix.
duke@435 493 void TemplateTable::lload() {
duke@435 494 transition(vtos, ltos);
duke@435 495 locals_index(G3_scratch);
duke@435 496 __ access_local_long( G3_scratch, Otos_l );
duke@435 497 }
duke@435 498 
duke@435 499 
duke@435 500 void TemplateTable::fload() {
duke@435 501 transition(vtos, ftos);
duke@435 502 locals_index(G3_scratch);
duke@435 503 __ access_local_float( G3_scratch, Ftos_f );
duke@435 504 }
duke@435 505 
duke@435 506 
duke@435 507 void TemplateTable::dload() {
duke@435 508 transition(vtos, dtos);
duke@435 509 locals_index(G3_scratch);
duke@435 510 __ access_local_double( G3_scratch, Ftos_d );
duke@435 511 }
duke@435 512 
duke@435 513 
duke@435 514 void TemplateTable::aload() {
duke@435 515 transition(vtos, atos);
duke@435 516 locals_index(G3_scratch);
duke@435 517 __ access_local_ptr( G3_scratch, Otos_i);
duke@435 518 }
duke@435 519 
duke@435 520 
duke@435 521 void TemplateTable::wide_iload() {
duke@435 522 transition(vtos, itos);
duke@435 523 locals_index_wide(G3_scratch);
duke@435 524 __ access_local_int( G3_scratch, Otos_i );
duke@435 525 }
duke@435 526 
duke@435 527 
duke@435 528 void TemplateTable::wide_lload() {
duke@435 529 transition(vtos, ltos);
duke@435 530 locals_index_wide(G3_scratch);
duke@435 531 __ access_local_long( G3_scratch, Otos_l );
duke@435 532 }
duke@435 533 
duke@435 534 
duke@435 535 void TemplateTable::wide_fload() {
duke@435 536 transition(vtos, ftos);
duke@435 537 locals_index_wide(G3_scratch);
duke@435 538 __ access_local_float( G3_scratch, Ftos_f );
duke@435 539 }
duke@435 540 
duke@435 541 
duke@435 542 void TemplateTable::wide_dload() {
duke@435 543 transition(vtos, dtos);
duke@435 544 locals_index_wide(G3_scratch);
duke@435 545 __ access_local_double( G3_scratch, Ftos_d );
duke@435 546 }
duke@435 547 
duke@435 548 
duke@435 549 void TemplateTable::wide_aload() {
duke@435 550 transition(vtos, atos);
duke@435 551 locals_index_wide(G3_scratch);
duke@435 552 __ access_local_ptr( G3_scratch, Otos_i );
duke@435 553 __ verify_oop(Otos_i);
duke@435 554 }
duke@435 555
duke@435 556
// Array element loads: index_check performs the null check and the bounds
// check (index in Otos_i, array in O2), leaving the scaled element address
// base in O3; the typed load then fetches the element into tos.
duke@435 557 void TemplateTable::iaload() {
duke@435 558 transition(itos, itos);
duke@435 559 // Otos_i: index
duke@435 560 // tos: array
duke@435 561 __ index_check(O2, Otos_i, LogBytesPerInt, G3_scratch, O3);
duke@435 562 __ ld(O3, arrayOopDesc::base_offset_in_bytes(T_INT), Otos_i);
duke@435 563 }
duke@435 564 
duke@435 565 
duke@435 566 void TemplateTable::laload() {
duke@435 567 transition(itos, ltos);
duke@435 568 // Otos_i: index
duke@435 569 // O2: array
duke@435 570 __ index_check(O2, Otos_i, LogBytesPerLong, G3_scratch, O3);
duke@435 571 __ ld_long(O3, arrayOopDesc::base_offset_in_bytes(T_LONG), Otos_l);
duke@435 572 }
duke@435 573 
duke@435 574 
duke@435 575 void TemplateTable::faload() {
duke@435 576 transition(itos, ftos);
duke@435 577 // Otos_i: index
duke@435 578 // O2: array
duke@435 579 __ index_check(O2, Otos_i, LogBytesPerInt, G3_scratch, O3);
duke@435 580 __ ldf(FloatRegisterImpl::S, O3, arrayOopDesc::base_offset_in_bytes(T_FLOAT), Ftos_f);
duke@435 581 }
duke@435 582 
duke@435 583 
duke@435 584 void TemplateTable::daload() {
duke@435 585 transition(itos, dtos);
duke@435 586 // Otos_i: index
duke@435 587 // O2: array
duke@435 588 __ index_check(O2, Otos_i, LogBytesPerLong, G3_scratch, O3);
duke@435 589 __ ldf(FloatRegisterImpl::D, O3, arrayOopDesc::base_offset_in_bytes(T_DOUBLE), Ftos_d);
duke@435 590 }
duke@435 591 
duke@435 592 
duke@435 593 void TemplateTable::aaload() {
duke@435 594 transition(itos, atos);
duke@435 595 // Otos_i: index
duke@435 596 // tos: array
// Element size is 4 bytes with compressed oops, otherwise a full word.
coleenp@548 597 __ index_check(O2, Otos_i, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O3);
coleenp@548 598 __ load_heap_oop(O3, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i);
duke@435 599 __ verify_oop(Otos_i);
duke@435 600 }
duke@435 601 
duke@435 602 
duke@435 603 void TemplateTable::baload() {
duke@435 604 transition(itos, itos);
duke@435 605 // Otos_i: index
duke@435 606 // tos: array
duke@435 607 __ index_check(O2, Otos_i, 0, G3_scratch, O3);
duke@435 608 __ ldsb(O3, arrayOopDesc::base_offset_in_bytes(T_BYTE), Otos_i);
duke@435 609 }
duke@435 610 
duke@435 611 
duke@435 612 void TemplateTable::caload() {
duke@435 613 transition(itos, itos);
duke@435 614 // Otos_i: index
duke@435 615 // tos: array
duke@435 616 __ index_check(O2, Otos_i, LogBytesPerShort, G3_scratch, O3);
duke@435 617 __ lduh(O3, arrayOopDesc::base_offset_in_bytes(T_CHAR), Otos_i);
duke@435 618 }
duke@435 619 
// fast_icaload: fused iload,caload — load the index local first, then do
// a normal caload.
duke@435 620 void TemplateTable::fast_icaload() {
duke@435 621 transition(vtos, itos);
duke@435 622 // Otos_i: index
duke@435 623 // tos: array
duke@435 624 locals_index(G3_scratch);
duke@435 625 __ access_local_int( G3_scratch, Otos_i );
duke@435 626 __ index_check(O2, Otos_i, LogBytesPerShort, G3_scratch, O3);
duke@435 627 __ lduh(O3, arrayOopDesc::base_offset_in_bytes(T_CHAR), Otos_i);
duke@435 628 }
duke@435 629 
duke@435 630 
duke@435 631 void TemplateTable::saload() {
duke@435 632 transition(itos, itos);
duke@435 633 // Otos_i: index
duke@435 634 // tos: array
duke@435 635 __ index_check(O2, Otos_i, LogBytesPerShort, G3_scratch, O3);
duke@435 636 __ ldsh(O3, arrayOopDesc::base_offset_in_bytes(T_SHORT), Otos_i);
duke@435 637 }
duke@435 638
duke@435 639
// <t>load_<n> variants: the local index is baked into the bytecode, so the
// local is addressed directly off Llocals.
duke@435 640 void TemplateTable::iload(int n) {
duke@435 641 transition(vtos, itos);
duke@435 642 __ ld( Llocals, Interpreter::local_offset_in_bytes(n), Otos_i );
duke@435 643 }
duke@435 644 
duke@435 645 
duke@435 646 void TemplateTable::lload(int n) {
duke@435 647 transition(vtos, ltos);
duke@435 648 assert(n+1 < Argument::n_register_parameters, "would need more code");
// Longs occupy slots n and n+1; address via the second (lower) slot.
duke@435 649 __ load_unaligned_long(Llocals, Interpreter::local_offset_in_bytes(n+1), Otos_l);
duke@435 650 }
duke@435 651 
duke@435 652 
duke@435 653 void TemplateTable::fload(int n) {
duke@435 654 transition(vtos, ftos);
duke@435 655 assert(n < Argument::n_register_parameters, "would need more code");
duke@435 656 __ ldf( FloatRegisterImpl::S, Llocals, Interpreter::local_offset_in_bytes(n), Ftos_f );
duke@435 657 }
duke@435 658 
duke@435 659 
duke@435 660 void TemplateTable::dload(int n) {
duke@435 661 transition(vtos, dtos);
duke@435 662 FloatRegister dst = Ftos_d;
duke@435 663 __ load_unaligned_double(Llocals, Interpreter::local_offset_in_bytes(n+1), dst);
duke@435 664 }
duke@435 665 
duke@435 666 
duke@435 667 void TemplateTable::aload(int n) {
duke@435 668 transition(vtos, atos);
duke@435 669 __ ld_ptr( Llocals, Interpreter::local_offset_in_bytes(n), Otos_i );
duke@435 670 }
duke@435 671
duke@435 672
// aload_0: push local 0; with RewriteFrequentPairs it also quickens the
// frequent aload_0,<t>getfield pairs into the _fast_<t>access_0 bytecodes,
// or into _fast_aload_0 when no pairing applies.
duke@435 673 void TemplateTable::aload_0() {
duke@435 674 transition(vtos, atos);
duke@435 675 
duke@435 676 // According to bytecode histograms, the pairs:
duke@435 677 //
duke@435 678 // _aload_0, _fast_igetfield (itos)
duke@435 679 // _aload_0, _fast_agetfield (atos)
duke@435 680 // _aload_0, _fast_fgetfield (ftos)
duke@435 681 //
duke@435 682 // occur frequently. If RewriteFrequentPairs is set, the (slow) _aload_0
duke@435 683 // bytecode checks the next bytecode and then rewrites the current
duke@435 684 // bytecode into a pair bytecode; otherwise it rewrites the current
duke@435 685 // bytecode into _fast_aload_0 that doesn't do the pair check anymore.
duke@435 686 //
duke@435 687 if (RewriteFrequentPairs) {
duke@435 688 Label rewrite, done;
duke@435 689 
duke@435 690 // get next byte
duke@435 691 __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)), G3_scratch);
duke@435 692 
duke@435 693 // do actual aload_0
duke@435 694 aload(0);
duke@435 695 
duke@435 696 // if _getfield then wait with rewrite
// _getfield may itself be quickened later; rewriting now would lose the
// chance to form a pair bytecode.
duke@435 697 __ cmp(G3_scratch, (int)Bytecodes::_getfield);
duke@435 698 __ br(Assembler::equal, false, Assembler::pn, done);
duke@435 699 __ delayed()->nop();
duke@435 700 
duke@435 701 // if _igetfield then rewrite to _fast_iaccess_0
duke@435 702 assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
duke@435 703 __ cmp(G3_scratch, (int)Bytecodes::_fast_igetfield);
duke@435 704 __ br(Assembler::equal, false, Assembler::pn, rewrite);
duke@435 705 __ delayed()->set(Bytecodes::_fast_iaccess_0, G4_scratch);
duke@435 706 
duke@435 707 // if _agetfield then rewrite to _fast_aaccess_0
duke@435 708 assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
duke@435 709 __ cmp(G3_scratch, (int)Bytecodes::_fast_agetfield);
duke@435 710 __ br(Assembler::equal, false, Assembler::pn, rewrite);
duke@435 711 __ delayed()->set(Bytecodes::_fast_aaccess_0, G4_scratch);
duke@435 712 
duke@435 713 // if _fgetfield then rewrite to _fast_faccess_0
duke@435 714 assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
duke@435 715 __ cmp(G3_scratch, (int)Bytecodes::_fast_fgetfield);
duke@435 716 __ br(Assembler::equal, false, Assembler::pn, rewrite);
duke@435 717 __ delayed()->set(Bytecodes::_fast_faccess_0, G4_scratch);
duke@435 718 
duke@435 719 // else rewrite to _fast_aload0
duke@435 720 assert(Bytecodes::java_code(Bytecodes::_fast_aload_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
duke@435 721 __ set(Bytecodes::_fast_aload_0, G4_scratch);
duke@435 722 
duke@435 723 // rewrite
duke@435 724 // G4_scratch: fast bytecode
duke@435 725 __ bind(rewrite);
duke@435 726 patch_bytecode(Bytecodes::_aload_0, G4_scratch, G3_scratch, false);
duke@435 727 __ bind(done);
duke@435 728 } else {
duke@435 729 aload(0);
duke@435 730 }
duke@435 731 }
duke@435 732
duke@435 733
// <t>store: pop tos into the local addressed by the bytecode's index
// operand.
duke@435 734 void TemplateTable::istore() {
duke@435 735 transition(itos, vtos);
duke@435 736 locals_index(G3_scratch);
duke@435 737 __ store_local_int( G3_scratch, Otos_i );
duke@435 738 }
duke@435 739 
duke@435 740 
duke@435 741 void TemplateTable::lstore() {
duke@435 742 transition(ltos, vtos);
duke@435 743 locals_index(G3_scratch);
duke@435 744 __ store_local_long( G3_scratch, Otos_l );
duke@435 745 }
duke@435 746 
duke@435 747 
duke@435 748 void TemplateTable::fstore() {
duke@435 749 transition(ftos, vtos);
duke@435 750 locals_index(G3_scratch);
duke@435 751 __ store_local_float( G3_scratch, Ftos_f );
duke@435 752 }
duke@435 753 
duke@435 754 
duke@435 755 void TemplateTable::dstore() {
duke@435 756 transition(dtos, vtos);
duke@435 757 locals_index(G3_scratch);
duke@435 758 __ store_local_double( G3_scratch, Ftos_d );
duke@435 759 }
duke@435 760 
duke@435 761 
// astore pops manually (vtos transition) because the value may be either
// an oop or a return address (from jsr), which verify_oop_or_return_address
// distinguishes.
duke@435 762 void TemplateTable::astore() {
duke@435 763 transition(vtos, vtos);
twisti@1861 764 __ load_ptr(0, Otos_i);
twisti@1861 765 __ inc(Lesp, Interpreter::stackElementSize);
duke@435 766 __ verify_oop_or_return_address(Otos_i, G3_scratch);
duke@435 767 locals_index(G3_scratch);
twisti@1861 768 __ store_local_ptr(G3_scratch, Otos_i);
duke@435 769 }
duke@435 770
duke@435 771
// wide_istore: like istore, but the local index is the 2-byte wide form.
duke@435 772 void TemplateTable::wide_istore() {
duke@435 773 transition(vtos, vtos);
// vtos entry: value is on the expression stack, not tos-cached.
duke@435 774 __ pop_i();
duke@435 775 locals_index_wide(G3_scratch);
duke@435 776 __ store_local_int( G3_scratch, Otos_i );
duke@435 777 }
duke@435 778
duke@435 779
// wide_lstore: like lstore, but with the 2-byte wide local index.
duke@435 780 void TemplateTable::wide_lstore() {
duke@435 781 transition(vtos, vtos);
duke@435 782 __ pop_l();
duke@435 783 locals_index_wide(G3_scratch);
duke@435 784 __ store_local_long( G3_scratch, Otos_l );
duke@435 785 }
duke@435 786
duke@435 787
// wide_fstore: like fstore, but with the 2-byte wide local index.
duke@435 788 void TemplateTable::wide_fstore() {
duke@435 789 transition(vtos, vtos);
duke@435 790 __ pop_f();
duke@435 791 locals_index_wide(G3_scratch);
duke@435 792 __ store_local_float( G3_scratch, Ftos_f );
duke@435 793 }
duke@435 794
duke@435 795
// wide_dstore: like dstore, but with the 2-byte wide local index.
duke@435 796 void TemplateTable::wide_dstore() {
duke@435 797 transition(vtos, vtos);
duke@435 798 __ pop_d();
duke@435 799 locals_index_wide(G3_scratch);
duke@435 800 __ store_local_double( G3_scratch, Ftos_d );
duke@435 801 }
duke@435 802
duke@435 803
// wide_astore: like astore (reference or ret-address), but with the
// 2-byte wide local index.
duke@435 804 void TemplateTable::wide_astore() {
duke@435 805 transition(vtos, vtos);
twisti@1861 806 __ load_ptr(0, Otos_i);
twisti@1861 807 __ inc(Lesp, Interpreter::stackElementSize);
duke@435 808 __ verify_oop_or_return_address(Otos_i, G3_scratch);
duke@435 809 locals_index_wide(G3_scratch);
twisti@1861 810 __ store_local_ptr(G3_scratch, Otos_i);
duke@435 811 }
duke@435 812
duke@435 813
// iastore: int[] element store. Value is tos-cached; index and array
// come off the expression stack. index_check throws on range error and
// leaves the scaled element address in O2.
duke@435 814 void TemplateTable::iastore() {
duke@435 815 transition(itos, vtos);
duke@435 816 __ pop_i(O2); // index
duke@435 817 // Otos_i: val
duke@435 818 // O3: array
duke@435 819 __ index_check(O3, O2, LogBytesPerInt, G3_scratch, O2);
duke@435 820 __ st(Otos_i, O2, arrayOopDesc::base_offset_in_bytes(T_INT));
duke@435 821 }
duke@435 822
duke@435 823
// lastore: long[] element store; element scaled by LogBytesPerLong.
duke@435 824 void TemplateTable::lastore() {
duke@435 825 transition(ltos, vtos);
duke@435 826 __ pop_i(O2); // index
duke@435 827 // Otos_l: val
duke@435 828 // O3: array
duke@435 829 __ index_check(O3, O2, LogBytesPerLong, G3_scratch, O2);
duke@435 830 __ st_long(Otos_l, O2, arrayOopDesc::base_offset_in_bytes(T_LONG));
duke@435 831 }
duke@435 832
duke@435 833
// fastore: float[] element store from the FP tos cache.
duke@435 834 void TemplateTable::fastore() {
duke@435 835 transition(ftos, vtos);
duke@435 836 __ pop_i(O2); // index
duke@435 837 // Ftos_f: val
duke@435 838 // O3: array
duke@435 839 __ index_check(O3, O2, LogBytesPerInt, G3_scratch, O2);
duke@435 840 __ stf(FloatRegisterImpl::S, Ftos_f, O2, arrayOopDesc::base_offset_in_bytes(T_FLOAT));
duke@435 841 }
duke@435 842
duke@435 843
// dastore: double[] element store from the FP tos cache.
duke@435 844 void TemplateTable::dastore() {
duke@435 845 transition(dtos, vtos);
duke@435 846 __ pop_i(O2); // index
duke@435 847 // Ftos_d: val
duke@435 848 // O3: array
duke@435 849 __ index_check(O3, O2, LogBytesPerLong, G3_scratch, O2);
duke@435 850 __ stf(FloatRegisterImpl::D, Ftos_d, O2, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
duke@435 851 }
duke@435 852
duke@435 853
// aastore: Object[] element store with the full array-store check.
// Peeks value/index/array off the expression stack (vtos entry), range
// checks, then:
//   - null value: store null via the barriered do_oop_store, record the
//     null in the profile;
//   - non-null: subtype-check value klass against the array's element
//     klass, throwing ArrayStoreException on failure, then store with
//     the GC write barrier (_bs->kind()).
// The 3 stack elements are popped on both exit paths.
duke@435 854 void TemplateTable::aastore() {
duke@435 855 Label store_ok, is_null, done;
duke@435 856 transition(vtos, vtos);
// Peek (do not pop yet) value, index, array.
duke@435 857 __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
duke@435 858 __ ld(Lesp, Interpreter::expr_offset_in_bytes(1), O2); // get index
duke@435 859 __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(2), O3); // get array
duke@435 860 // Otos_i: val
duke@435 861 // O2: index
duke@435 862 // O3: array
duke@435 863 __ verify_oop(Otos_i);
// Element scale is 2 (4-byte narrow oops) when compressed oops are on.
coleenp@548 864 __ index_check_without_pop(O3, O2, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O1);
duke@435 865
duke@435 866 // do array store check - check for NULL value first
duke@435 867 __ br_null( Otos_i, false, Assembler::pn, is_null );
coleenp@548 868 __ delayed()->nop();
coleenp@548 869
coleenp@548 870 __ load_klass(O3, O4); // get array klass
coleenp@548 871 __ load_klass(Otos_i, O5); // get value klass
duke@435 872
duke@435 873 // do fast instanceof cache test
duke@435 874
duke@435 875 __ ld_ptr(O4, sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes(), O4);
duke@435 876
duke@435 877 assert(Otos_i == O0, "just checking");
duke@435 878
duke@435 879 // Otos_i: value
duke@435 880 // O1: addr - offset
duke@435 881 // O2: index
duke@435 882 // O3: array
duke@435 883 // O4: array element klass
duke@435 884 // O5: value klass
duke@435 885
ysr@777 886 // Address element(O1, 0, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
ysr@777 887
duke@435 888 // Generate a fast subtype check. Branch to store_ok if no
duke@435 889 // failure. Throw if failure.
duke@435 890 __ gen_subtype_check( O5, O4, G3_scratch, G4_scratch, G1_scratch, store_ok );
duke@435 891
duke@435 892 // Not a subtype; so must throw exception
duke@435 893 __ throw_if_not_x( Assembler::never, Interpreter::_throw_ArrayStoreException_entry, G3_scratch );
duke@435 894
duke@435 895 // Store is OK.
duke@435 896 __ bind(store_ok);
// Barriered oop store; final 'true' presumably selects precise/array
// barrier handling — confirm against do_oop_store's declaration.
ysr@777 897 do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, _bs->kind(), true);
ysr@777 898
// Pop the 3 operands in the branch delay slot.
duke@435 899 __ ba(false,done);
twisti@1861 900 __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value)
duke@435 901
duke@435 902 __ bind(is_null);
// Null path: store G0 (null) — no subtype check needed.
ysr@777 903 do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), G0, G4_scratch, _bs->kind(), true);
ysr@777 904
duke@435 905 __ profile_null_seen(G3_scratch);
twisti@1861 906 __ inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value)
duke@435 907 __ bind(done);
duke@435 908 }
duke@435 909
duke@435 910
// bastore: byte[]/boolean[] element store (scale 0 = 1-byte elements).
duke@435 911 void TemplateTable::bastore() {
duke@435 912 transition(itos, vtos);
duke@435 913 __ pop_i(O2); // index
duke@435 914 // Otos_i: val
duke@435 915 // O3: array
duke@435 916 __ index_check(O3, O2, 0, G3_scratch, O2);
duke@435 917 __ stb(Otos_i, O2, arrayOopDesc::base_offset_in_bytes(T_BYTE));
duke@435 918 }
duke@435 919
duke@435 920
// castore: char[] element store (2-byte elements). Also reused by
// sastore, since short and char stores emit the same code.
duke@435 921 void TemplateTable::castore() {
duke@435 922 transition(itos, vtos);
duke@435 923 __ pop_i(O2); // index
duke@435 924 // Otos_i: val
duke@435 925 // O3: array
duke@435 926 __ index_check(O3, O2, LogBytesPerShort, G3_scratch, O2);
duke@435 927 __ sth(Otos_i, O2, arrayOopDesc::base_offset_in_bytes(T_CHAR));
duke@435 928 }
duke@435 929
duke@435 930
// sastore: short[] element store — identical machine code to castore.
duke@435 931 void TemplateTable::sastore() {
duke@435 932 // %%%%% Factor across platform
duke@435 933 castore();
duke@435 934 }
duke@435 935
duke@435 936
// istore_<n>: store tos-cached int into local slot n (constant offset,
// no index fetch needed).
duke@435 937 void TemplateTable::istore(int n) {
duke@435 938 transition(itos, vtos);
duke@435 939 __ st(Otos_i, Llocals, Interpreter::local_offset_in_bytes(n));
duke@435 940 }
duke@435 941
duke@435 942
// lstore_<n>: store tos-cached long into local pair n/n+1. The store is
// addressed at n+1 (the low word of the pair in the locals layout) and
// may be unaligned.
duke@435 943 void TemplateTable::lstore(int n) {
duke@435 944 transition(ltos, vtos);
duke@435 945 assert(n+1 < Argument::n_register_parameters, "only handle register cases");
duke@435 946 __ store_unaligned_long(Otos_l, Llocals, Interpreter::local_offset_in_bytes(n+1));
duke@435 947
duke@435 948 }
duke@435 949
duke@435 950
// fstore_<n>: store tos-cached float into local slot n.
duke@435 951 void TemplateTable::fstore(int n) {
duke@435 952 transition(ftos, vtos);
duke@435 953 assert(n < Argument::n_register_parameters, "only handle register cases");
duke@435 954 __ stf(FloatRegisterImpl::S, Ftos_f, Llocals, Interpreter::local_offset_in_bytes(n));
duke@435 955 }
duke@435 956
duke@435 957
// dstore_<n>: store tos-cached double into local pair n/n+1; like
// lstore_<n>, addressed at n+1 and possibly unaligned.
duke@435 958 void TemplateTable::dstore(int n) {
duke@435 959 transition(dtos, vtos);
duke@435 960 FloatRegister src = Ftos_d;
duke@435 961 __ store_unaligned_double(src, Llocals, Interpreter::local_offset_in_bytes(n+1));
duke@435 962 }
duke@435 963
duke@435 964
// astore_<n>: pop a reference or ret-address and store into local n.
duke@435 965 void TemplateTable::astore(int n) {
duke@435 966 transition(vtos, vtos);
twisti@1861 967 __ load_ptr(0, Otos_i);
twisti@1861 968 __ inc(Lesp, Interpreter::stackElementSize);
duke@435 969 __ verify_oop_or_return_address(Otos_i, G3_scratch);
twisti@1861 970 __ store_local_ptr(n, Otos_i);
duke@435 971 }
duke@435 972
duke@435 973
// pop: discard one expression-stack element (stack grows down, so the
// pointer is incremented).
duke@435 974 void TemplateTable::pop() {
duke@435 975 transition(vtos, vtos);
twisti@1861 976 __ inc(Lesp, Interpreter::stackElementSize);
duke@435 977 }
duke@435 978
duke@435 979
// pop2: discard two expression-stack elements.
duke@435 980 void TemplateTable::pop2() {
duke@435 981 transition(vtos, vtos);
twisti@1861 982 __ inc(Lesp, 2 * Interpreter::stackElementSize);
duke@435 983 }
duke@435 984
duke@435 985
// dup: duplicate the top stack element.
duke@435 986 void TemplateTable::dup() {
duke@435 987 transition(vtos, vtos);
duke@435 988 // stack: ..., a
duke@435 989 // load a and tag
twisti@1861 990 __ load_ptr(0, Otos_i);
twisti@1861 991 __ push_ptr(Otos_i);
duke@435 992 // stack: ..., a, a
duke@435 993 }
duke@435 994
duke@435 995
// dup_x1: duplicate top element and insert it below the second one
// (..., a, b -> ..., b, a, b). Implemented as a swap plus a push.
duke@435 996 void TemplateTable::dup_x1() {
duke@435 997 transition(vtos, vtos);
duke@435 998 // stack: ..., a, b
twisti@1861 999 __ load_ptr( 1, G3_scratch); // get a
twisti@1861 1000 __ load_ptr( 0, Otos_l1); // get b
twisti@1861 1001 __ store_ptr(1, Otos_l1); // put b
twisti@1861 1002 __ store_ptr(0, G3_scratch); // put a - like swap
twisti@1861 1003 __ push_ptr(Otos_l1); // push b
duke@435 1004 // stack: ..., b, a, b
duke@435 1005 }
duke@435 1006
duke@435 1007
// dup_x2: duplicate top element and insert it three slots down
// (..., a, b, c -> ..., c, a, b, c). Slot offsets shift by one after
// the initial push, as the inline comments note.
duke@435 1008 void TemplateTable::dup_x2() {
duke@435 1009 transition(vtos, vtos);
duke@435 1010 // stack: ..., a, b, c
duke@435 1011 // get c and push on stack, reuse registers
twisti@1861 1012 __ load_ptr( 0, G3_scratch); // get c
twisti@1861 1013 __ push_ptr(G3_scratch); // push c with tag
duke@435 1014 // stack: ..., a, b, c, c (c in reg) (Lesp - 4)
duke@435 1015 // (stack offsets n+1 now)
twisti@1861 1016 __ load_ptr( 3, Otos_l1); // get a
twisti@1861 1017 __ store_ptr(3, G3_scratch); // put c at 3
duke@435 1018 // stack: ..., c, b, c, c (a in reg)
twisti@1861 1019 __ load_ptr( 2, G3_scratch); // get b
twisti@1861 1020 __ store_ptr(2, Otos_l1); // put a at 2
duke@435 1021 // stack: ..., c, a, c, c (b in reg)
twisti@1861 1022 __ store_ptr(1, G3_scratch); // put b at 1
duke@435 1023 // stack: ..., c, a, b, c
duke@435 1024 }
duke@435 1025
duke@435 1026
// dup2: duplicate the top two stack slots (one category-2 value or two
// category-1 values).
duke@435 1027 void TemplateTable::dup2() {
duke@435 1028 transition(vtos, vtos);
twisti@1861 1029 __ load_ptr(1, G3_scratch); // get a
twisti@1861 1030 __ load_ptr(0, Otos_l1); // get b
twisti@1861 1031 __ push_ptr(G3_scratch); // push a
twisti@1861 1032 __ push_ptr(Otos_l1); // push b
duke@435 1033 // stack: ..., a, b, a, b
duke@435 1034 }
duke@435 1035
duke@435 1036
// dup2_x1: duplicate the top two slots and insert them below the third
// (..., a, b, c -> ..., b, c, a, b, c).
duke@435 1037 void TemplateTable::dup2_x1() {
duke@435 1038 transition(vtos, vtos);
duke@435 1039 // stack: ..., a, b, c
twisti@1861 1040 __ load_ptr( 1, Lscratch); // get b
twisti@1861 1041 __ load_ptr( 2, Otos_l1); // get a
twisti@1861 1042 __ store_ptr(2, Lscratch); // put b at a
duke@435 1043 // stack: ..., b, b, c
twisti@1861 1044 __ load_ptr( 0, G3_scratch); // get c
twisti@1861 1045 __ store_ptr(1, G3_scratch); // put c at b
duke@435 1046 // stack: ..., b, c, c
twisti@1861 1047 __ store_ptr(0, Otos_l1); // put a at c
duke@435 1048 // stack: ..., b, c, a
twisti@1861 1049 __ push_ptr(Lscratch); // push b
twisti@1861 1050 __ push_ptr(G3_scratch); // push c
duke@435 1051 // stack: ..., b, c, a, b, c
duke@435 1052 }
duke@435 1053
duke@435 1054
duke@435 1055 // The spec says that these types can be a mixture of category 1 (1 word)
duke@435 1056 // types and/or category 2 types (long and doubles)
// dup2_x2: ..., a, b, c, d -> ..., c, d, a, b, c, d. Done with two
// pairwise swaps followed by pushing the (new) top pair.
duke@435 1057 void TemplateTable::dup2_x2() {
duke@435 1058 transition(vtos, vtos);
duke@435 1059 // stack: ..., a, b, c, d
twisti@1861 1060 __ load_ptr( 1, Lscratch); // get c
twisti@1861 1061 __ load_ptr( 3, Otos_l1); // get a
twisti@1861 1062 __ store_ptr(3, Lscratch); // put c at 3
twisti@1861 1063 __ store_ptr(1, Otos_l1); // put a at 1
duke@435 1064 // stack: ..., c, b, a, d
twisti@1861 1065 __ load_ptr( 2, G3_scratch); // get b
twisti@1861 1066 __ load_ptr( 0, Otos_l1); // get d
twisti@1861 1067 __ store_ptr(0, G3_scratch); // put b at 0
twisti@1861 1068 __ store_ptr(2, Otos_l1); // put d at 2
duke@435 1069 // stack: ..., c, d, a, b
twisti@1861 1070 __ push_ptr(Lscratch); // push c
twisti@1861 1071 __ push_ptr(Otos_l1); // push d
duke@435 1072 // stack: ..., c, d, a, b, c, d
duke@435 1073 }
duke@435 1074
duke@435 1075
// swap: exchange the top two category-1 stack elements.
duke@435 1076 void TemplateTable::swap() {
duke@435 1077 transition(vtos, vtos);
duke@435 1078 // stack: ..., a, b
twisti@1861 1079 __ load_ptr( 1, G3_scratch); // get a
twisti@1861 1080 __ load_ptr( 0, Otos_l1); // get b
twisti@1861 1081 __ store_ptr(0, G3_scratch); // put b
twisti@1861 1082 __ store_ptr(1, Otos_l1); // put a
duke@435 1083 // stack: ..., b, a
duke@435 1084 }
duke@435 1085
duke@435 1086
// iop2: binary int ops (iadd/isub/imul/iand/ior/ixor/ishl/ishr/iushr).
// Pops the first operand into O1; the second is tos-cached in Otos_i.
// Operand order matters for sub and the shifts: result = O1 op Otos_i.
duke@435 1087 void TemplateTable::iop2(Operation op) {
duke@435 1088 transition(itos, itos);
duke@435 1089 __ pop_i(O1);
duke@435 1090 switch (op) {
duke@435 1091 case add: __ add(O1, Otos_i, Otos_i); break;
duke@435 1092 case sub: __ sub(O1, Otos_i, Otos_i); break;
duke@435 1093 // %%%%% Mul may not exist: better to call .mul?
duke@435 1094 case mul: __ smul(O1, Otos_i, Otos_i); break;
twisti@1861 1095 case _and: __ and3(O1, Otos_i, Otos_i); break;
twisti@1861 1096 case _or: __ or3(O1, Otos_i, Otos_i); break;
twisti@1861 1097 case _xor: __ xor3(O1, Otos_i, Otos_i); break;
duke@435 1098 case shl: __ sll(O1, Otos_i, Otos_i); break;
duke@435 1099 case shr: __ sra(O1, Otos_i, Otos_i); break;
duke@435 1100 case ushr: __ srl(O1, Otos_i, Otos_i); break;
duke@435 1101 default: ShouldNotReachHere();
duke@435 1102 }
duke@435 1103 }
duke@435 1104
duke@435 1105
// lop2: binary long ops. On LP64 a single 64-bit instruction; on 32-bit
// the value lives in a register pair (Otos_l1:Otos_l2 / O2:O3) and
// add/sub propagate the carry from the low to the high word.
duke@435 1106 void TemplateTable::lop2(Operation op) {
duke@435 1107 transition(ltos, ltos);
duke@435 1108 __ pop_l(O2);
duke@435 1109 switch (op) {
duke@435 1110 #ifdef _LP64
twisti@1861 1111 case add: __ add(O2, Otos_l, Otos_l); break;
twisti@1861 1112 case sub: __ sub(O2, Otos_l, Otos_l); break;
twisti@1861 1113 case _and: __ and3(O2, Otos_l, Otos_l); break;
twisti@1861 1114 case _or: __ or3(O2, Otos_l, Otos_l); break;
twisti@1861 1115 case _xor: __ xor3(O2, Otos_l, Otos_l); break;
duke@435 1116 #else
duke@435 1117 case add: __ addcc(O3, Otos_l2, Otos_l2); __ addc(O2, Otos_l1, Otos_l1); break;
duke@435 1118 case sub: __ subcc(O3, Otos_l2, Otos_l2); __ subc(O2, Otos_l1, Otos_l1); break;
twisti@1861 1119 case _and: __ and3(O3, Otos_l2, Otos_l2); __ and3(O2, Otos_l1, Otos_l1); break;
twisti@1861 1120 case _or: __ or3(O3, Otos_l2, Otos_l2); __ or3(O2, Otos_l1, Otos_l1); break;
twisti@1861 1121 case _xor: __ xor3(O3, Otos_l2, Otos_l2); __ xor3(O2, Otos_l1, Otos_l1); break;
duke@435 1122 #endif
duke@435 1123 default: ShouldNotReachHere();
duke@435 1124 }
duke@435 1125 }
duke@435 1126
duke@435 1127
// idiv: 32-bit signed division via the V8-style sdiv instruction.
//   dividend: O1 (popped), divisor: Otos_i, result: Otos_i.
// Steps:
//   1. Seed the Y register with the sign-extension of the dividend
//      (0 for non-negative, all ones for negative), since sdiv divides
//      the 64-bit Y:rs1 value.
//   2. Throw ArithmeticException on divisor == 0.
//   3. Special-case min_int / -1, whose true quotient (2^31) overflows
//      sdiv; the JVM-mandated result is min_int itself, so just copy O1.
//   4. Otherwise emit sdiv.
// NOTE: irem() relies on this template leaving the dividend in O1
// after the sdiv — do not clobber O1 here.
// Change vs. original: removed the local 'Label ok;' which was declared
// but never bound or branched to (dead local).
duke@435 1128 void TemplateTable::idiv() {
duke@435 1129 // %%%%% Later: ForSPARC/V7 call .sdiv library routine,
duke@435 1130 // %%%%% Use ldsw...sdivx on pure V9 ABI. 64 bit safe.
duke@435 1131
duke@435 1132 transition(itos, itos);
duke@435 1133 __ pop_i(O1); // get 1st op
duke@435 1134
duke@435 1135 // Y contains upper 32 bits of result, set it to 0 or all ones
duke@435 1136 __ wry(G0);
duke@435 1137 __ mov(~0, G3_scratch);
duke@435 1138
duke@435 1139 __ tst(O1);
duke@435 1140 Label neg;
duke@435 1141 __ br(Assembler::negative, true, Assembler::pn, neg);
// Annulled delay slot: Y := ~0 only executed when the branch is taken
// (dividend negative).
duke@435 1142 __ delayed()->wry(G3_scratch);
duke@435 1143 __ bind(neg);
duke@435 1144
// Divide-by-zero check on the divisor (tos).
duke@435 1146 __ tst(Otos_i);
duke@435 1147 __ throw_if_not_icc( Assembler::notZero, Interpreter::_throw_ArithmeticException_entry, G3_scratch );
duke@435 1148
duke@435 1149 const int min_int = 0x80000000;
duke@435 1150 Label regular;
duke@435 1151 __ cmp(Otos_i, -1);
duke@435 1152 __ br(Assembler::notEqual, false, Assembler::pt, regular);
duke@435 1153 #ifdef _LP64
duke@435 1154 // Don't put set in delay slot
duke@435 1155 // Set will turn into multiple instructions in 64 bit mode
duke@435 1156 __ delayed()->nop();
duke@435 1157 __ set(min_int, G4_scratch);
duke@435 1158 #else
duke@435 1159 __ delayed()->set(min_int, G4_scratch);
duke@435 1160 #endif
duke@435 1161 Label done;
duke@435 1162 __ cmp(O1, G4_scratch);
duke@435 1163 __ br(Assembler::equal, true, Assembler::pt, done);
// min_int / -1 == min_int: the quotient is the dividend itself.
duke@435 1164 __ delayed()->mov(O1, Otos_i); // (mov only executed if branch taken)
duke@435 1165
duke@435 1166 __ bind(regular);
duke@435 1167 __ sdiv(O1, Otos_i, Otos_i); // note: irem uses O1 after this instruction!
duke@435 1168 __ bind(done);
duke@435 1169 }
duke@435 1170
duke@435 1171
// irem: remainder computed as dividend - (dividend/divisor)*divisor.
// Saves the divisor in O2, then reuses idiv(); correctness depends on
// idiv leaving the original dividend in O1.
duke@435 1172 void TemplateTable::irem() {
duke@435 1173 transition(itos, itos);
duke@435 1174 __ mov(Otos_i, O2); // save divisor
duke@435 1175 idiv(); // %%%% Hack: exploits fact that idiv leaves dividend in O1
duke@435 1176 __ smul(Otos_i, O2, Otos_i);
duke@435 1177 __ sub(O1, Otos_i, Otos_i);
duke@435 1178 }
duke@435 1179
duke@435 1180
// lmul: 64-bit multiply. Native mulx on LP64; on 32-bit, call the
// SharedRuntime::lmul stub with the operand pairs.
duke@435 1181 void TemplateTable::lmul() {
duke@435 1182 transition(ltos, ltos);
duke@435 1183 __ pop_l(O2);
duke@435 1184 #ifdef _LP64
duke@435 1185 __ mulx(Otos_l, O2, Otos_l);
duke@435 1186 #else
duke@435 1187 __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::lmul));
duke@435 1188 #endif
duke@435 1189
duke@435 1190 }
duke@435 1191
duke@435 1192
// ldiv: 64-bit signed divide with explicit divide-by-zero check
// (ArithmeticException). LP64 uses sdivx; 32-bit ORs the two halves to
// test for zero, then calls the SharedRuntime::ldiv stub.
duke@435 1193 void TemplateTable::ldiv() {
duke@435 1194 transition(ltos, ltos);
duke@435 1195
duke@435 1196 // check for zero
duke@435 1197 __ pop_l(O2);
duke@435 1198 #ifdef _LP64
duke@435 1199 __ tst(Otos_l);
duke@435 1200 __ throw_if_not_xcc( Assembler::notZero, Interpreter::_throw_ArithmeticException_entry, G3_scratch);
duke@435 1201 __ sdivx(O2, Otos_l, Otos_l);
duke@435 1202 #else
duke@435 1203 __ orcc(Otos_l1, Otos_l2, G0);
duke@435 1204 __ throw_if_not_icc( Assembler::notZero, Interpreter::_throw_ArithmeticException_entry, G3_scratch);
duke@435 1205 __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::ldiv));
duke@435 1206 #endif
duke@435 1207 }
duke@435 1208
duke@435 1209
// lrem: 64-bit remainder. LP64 computes it inline as
// O2 - (O2 sdivx tos) * tos; 32-bit checks for a zero divisor and calls
// the SharedRuntime::lrem stub.
duke@435 1210 void TemplateTable::lrem() {
duke@435 1211 transition(ltos, ltos);
duke@435 1212
duke@435 1213 // check for zero
duke@435 1214 __ pop_l(O2);
duke@435 1215 #ifdef _LP64
duke@435 1216 __ tst(Otos_l);
duke@435 1217 __ throw_if_not_xcc( Assembler::notZero, Interpreter::_throw_ArithmeticException_entry, G3_scratch);
duke@435 1218 __ sdivx(O2, Otos_l, Otos_l2);
duke@435 1219 __ mulx (Otos_l2, Otos_l, Otos_l2);
duke@435 1220 __ sub (O2, Otos_l2, Otos_l);
duke@435 1221 #else
duke@435 1222 __ orcc(Otos_l1, Otos_l2, G0);
duke@435 1223 __ throw_if_not_icc(Assembler::notZero, Interpreter::_throw_ArithmeticException_entry, G3_scratch);
duke@435 1224 __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::lrem));
duke@435 1225 #endif
duke@435 1226 }
duke@435 1227
duke@435 1228
// lshl: long left shift; shift count is the tos-cached int, shifted
// value is popped (O2, or O2:O3 on 32-bit).
duke@435 1229 void TemplateTable::lshl() {
duke@435 1230 transition(itos, ltos); // %%%% could optimize, fill delay slot or opt for ultra
duke@435 1231
duke@435 1232 __ pop_l(O2); // shift value in O2, O3
duke@435 1233 #ifdef _LP64
duke@435 1234 __ sllx(O2, Otos_i, Otos_l);
duke@435 1235 #else
duke@435 1236 __ lshl(O2, O3, Otos_i, Otos_l1, Otos_l2, O4);
duke@435 1237 #endif
duke@435 1238 }
duke@435 1239
duke@435 1240
// lshr: long arithmetic (sign-propagating) right shift.
duke@435 1241 void TemplateTable::lshr() {
duke@435 1242 transition(itos, ltos); // %%%% see lshl comment
duke@435 1243
duke@435 1244 __ pop_l(O2); // shift value in O2, O3
duke@435 1245 #ifdef _LP64
duke@435 1246 __ srax(O2, Otos_i, Otos_l);
duke@435 1247 #else
duke@435 1248 __ lshr(O2, O3, Otos_i, Otos_l1, Otos_l2, O4);
duke@435 1249 #endif
duke@435 1250 }
duke@435 1251
duke@435 1252
duke@435 1253
// lushr: long logical (zero-filling) right shift.
duke@435 1254 void TemplateTable::lushr() {
duke@435 1255 transition(itos, ltos); // %%%% see lshl comment
duke@435 1256
duke@435 1257 __ pop_l(O2); // shift value in O2, O3
duke@435 1258 #ifdef _LP64
duke@435 1259 __ srlx(O2, Otos_i, Otos_l);
duke@435 1260 #else
duke@435 1261 __ lushr(O2, O3, Otos_i, Otos_l1, Otos_l2, O4);
duke@435 1262 #endif
duke@435 1263 }
duke@435 1264
duke@435 1265
// fop2: binary float ops. add/sub/mul/div are single FP instructions
// with the popped operand in F4; frem goes through the
// SharedRuntime::frem runtime call, marshalling the two floats per the
// calling convention (F1/F3 on LP64, O0/O1 via the d_tmp slot on
// 32-bit).
duke@435 1266 void TemplateTable::fop2(Operation op) {
duke@435 1267 transition(ftos, ftos);
duke@435 1268 switch (op) {
duke@435 1269 case add: __ pop_f(F4); __ fadd(FloatRegisterImpl::S, F4, Ftos_f, Ftos_f); break;
duke@435 1270 case sub: __ pop_f(F4); __ fsub(FloatRegisterImpl::S, F4, Ftos_f, Ftos_f); break;
duke@435 1271 case mul: __ pop_f(F4); __ fmul(FloatRegisterImpl::S, F4, Ftos_f, Ftos_f); break;
duke@435 1272 case div: __ pop_f(F4); __ fdiv(FloatRegisterImpl::S, F4, Ftos_f, Ftos_f); break;
duke@435 1273 case rem:
duke@435 1274 assert(Ftos_f == F0, "just checking");
duke@435 1275 #ifdef _LP64
duke@435 1276 // LP64 calling conventions use F1, F3 for passing 2 floats
duke@435 1277 __ pop_f(F1);
duke@435 1278 __ fmov(FloatRegisterImpl::S, Ftos_f, F3);
duke@435 1279 #else
duke@435 1280 __ pop_i(O0);
// Move the float through the memory temp into an int register arg.
duke@435 1281 __ stf(FloatRegisterImpl::S, Ftos_f, __ d_tmp);
duke@435 1282 __ ld( __ d_tmp, O1 );
duke@435 1283 #endif
duke@435 1284 __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::frem));
// Result arrives in F0, which aliases Ftos_f.
duke@435 1285 assert( Ftos_f == F0, "fix this code" );
duke@435 1286 break;
duke@435 1287
duke@435 1288 default: ShouldNotReachHere();
duke@435 1289 }
duke@435 1290 }
duke@435 1291
duke@435 1292
// dop2: binary double ops, same shape as fop2. drem is a runtime call
// (SharedRuntime::drem) with doubles passed in D0/D2 on LP64 or the
// O0O1/O2O3 pairs via d_tmp on 32-bit.
duke@435 1293 void TemplateTable::dop2(Operation op) {
duke@435 1294 transition(dtos, dtos);
duke@435 1295 switch (op) {
duke@435 1296 case add: __ pop_d(F4); __ fadd(FloatRegisterImpl::D, F4, Ftos_d, Ftos_d); break;
duke@435 1297 case sub: __ pop_d(F4); __ fsub(FloatRegisterImpl::D, F4, Ftos_d, Ftos_d); break;
duke@435 1298 case mul: __ pop_d(F4); __ fmul(FloatRegisterImpl::D, F4, Ftos_d, Ftos_d); break;
duke@435 1299 case div: __ pop_d(F4); __ fdiv(FloatRegisterImpl::D, F4, Ftos_d, Ftos_d); break;
duke@435 1300 case rem:
duke@435 1301 #ifdef _LP64
duke@435 1302 // Pass arguments in D0, D2
duke@435 1303 __ fmov(FloatRegisterImpl::D, Ftos_f, F2 );
duke@435 1304 __ pop_d( F0 );
duke@435 1305 #else
duke@435 1306 // Pass arguments in O0O1, O2O3
duke@435 1307 __ stf(FloatRegisterImpl::D, Ftos_f, __ d_tmp);
duke@435 1308 __ ldd( __ d_tmp, O2 );
duke@435 1309 __ pop_d(Ftos_f);
duke@435 1310 __ stf(FloatRegisterImpl::D, Ftos_f, __ d_tmp);
duke@435 1311 __ ldd( __ d_tmp, O0 );
duke@435 1312 #endif
duke@435 1313 __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::drem));
duke@435 1314 assert( Ftos_d == F0, "fix this code" );
duke@435 1315 break;
duke@435 1316
duke@435 1317 default: ShouldNotReachHere();
duke@435 1318 }
duke@435 1319 }
duke@435 1320
duke@435 1321
// ineg: negate the tos-cached int in place.
duke@435 1322 void TemplateTable::ineg() {
duke@435 1323 transition(itos, itos);
duke@435 1324 __ neg(Otos_i);
duke@435 1325 }
duke@435 1326
duke@435 1327
// lneg: negate the tos-cached long (0 - x on LP64; paired lneg helper
// on 32-bit).
duke@435 1328 void TemplateTable::lneg() {
duke@435 1329 transition(ltos, ltos);
duke@435 1330 #ifdef _LP64
duke@435 1331 __ sub(G0, Otos_l, Otos_l);
duke@435 1332 #else
duke@435 1333 __ lneg(Otos_l1, Otos_l2);
duke@435 1334 #endif
duke@435 1335 }
duke@435 1336
duke@435 1337
// fneg: negate the tos-cached float (sign-bit flip).
duke@435 1338 void TemplateTable::fneg() {
duke@435 1339 transition(ftos, ftos);
duke@435 1340 __ fneg(FloatRegisterImpl::S, Ftos_f);
duke@435 1341 }
duke@435 1342
duke@435 1343
// dneg: negate the tos-cached double.
duke@435 1344 void TemplateTable::dneg() {
duke@435 1345 transition(dtos, dtos);
duke@435 1346 // v8 has fnegd if source and dest are the same
duke@435 1347 __ fneg(FloatRegisterImpl::D, Ftos_f);
duke@435 1348 }
duke@435 1349
duke@435 1350
// iinc: add the signed byte constant at bcp+2 to the indexed local,
// in memory (tos cache not involved: vtos -> vtos).
duke@435 1351 void TemplateTable::iinc() {
duke@435 1352 transition(vtos, vtos);
duke@435 1353 locals_index(G3_scratch);
duke@435 1354 __ ldsb(Lbcp, 2, O2); // load constant
duke@435 1355 __ access_local_int(G3_scratch, Otos_i);
duke@435 1356 __ add(Otos_i, O2, Otos_i);
twisti@1861 1357 __ st(Otos_i, G3_scratch, 0); // access_local_int puts E.A. in G3_scratch
duke@435 1358 }
duke@435 1359
duke@435 1360
// wide_iinc: wide form of iinc — 2-byte local index and a signed
// 2-byte increment at bcp+4.
duke@435 1361 void TemplateTable::wide_iinc() {
duke@435 1362 transition(vtos, vtos);
duke@435 1363 locals_index_wide(G3_scratch);
duke@435 1364 __ get_2_byte_integer_at_bcp( 4, O2, O3, InterpreterMacroAssembler::Signed);
duke@435 1365 __ access_local_int(G3_scratch, Otos_i);
duke@435 1366 __ add(Otos_i, O3, Otos_i);
twisti@1861 1367 __ st(Otos_i, G3_scratch, 0); // access_local_int puts E.A. in G3_scratch
duke@435 1368 }
duke@435 1369
duke@435 1370
// convert: one template shared by all primitive conversion bytecodes
// (i2l, i2f, ..., d2f); the actual bytecode() selects the case. The
// ASSERT block cross-checks the template's declared tos transition.
// Conversions that the hardware handles go through FP registers and the
// d_tmp memory slot; those it does not (f2l, d2i/d2l, and l2f/l2d or
// d2f on pre-V9) call the SharedRuntime conversion stubs.
duke@435 1371 void TemplateTable::convert() {
duke@435 1372 // %%%%% Factor this first part accross platforms
duke@435 1373 #ifdef ASSERT
duke@435 1374 TosState tos_in = ilgl;
duke@435 1375 TosState tos_out = ilgl;
duke@435 1376 switch (bytecode()) {
duke@435 1377 case Bytecodes::_i2l: // fall through
duke@435 1378 case Bytecodes::_i2f: // fall through
duke@435 1379 case Bytecodes::_i2d: // fall through
duke@435 1380 case Bytecodes::_i2b: // fall through
duke@435 1381 case Bytecodes::_i2c: // fall through
duke@435 1382 case Bytecodes::_i2s: tos_in = itos; break;
duke@435 1383 case Bytecodes::_l2i: // fall through
duke@435 1384 case Bytecodes::_l2f: // fall through
duke@435 1385 case Bytecodes::_l2d: tos_in = ltos; break;
duke@435 1386 case Bytecodes::_f2i: // fall through
duke@435 1387 case Bytecodes::_f2l: // fall through
duke@435 1388 case Bytecodes::_f2d: tos_in = ftos; break;
duke@435 1389 case Bytecodes::_d2i: // fall through
duke@435 1390 case Bytecodes::_d2l: // fall through
duke@435 1391 case Bytecodes::_d2f: tos_in = dtos; break;
duke@435 1392 default : ShouldNotReachHere();
duke@435 1393 }
duke@435 1394 switch (bytecode()) {
duke@435 1395 case Bytecodes::_l2i: // fall through
duke@435 1396 case Bytecodes::_f2i: // fall through
duke@435 1397 case Bytecodes::_d2i: // fall through
duke@435 1398 case Bytecodes::_i2b: // fall through
duke@435 1399 case Bytecodes::_i2c: // fall through
duke@435 1400 case Bytecodes::_i2s: tos_out = itos; break;
duke@435 1401 case Bytecodes::_i2l: // fall through
duke@435 1402 case Bytecodes::_f2l: // fall through
duke@435 1403 case Bytecodes::_d2l: tos_out = ltos; break;
duke@435 1404 case Bytecodes::_i2f: // fall through
duke@435 1405 case Bytecodes::_l2f: // fall through
duke@435 1406 case Bytecodes::_d2f: tos_out = ftos; break;
duke@435 1407 case Bytecodes::_i2d: // fall through
duke@435 1408 case Bytecodes::_l2d: // fall through
duke@435 1409 case Bytecodes::_f2d: tos_out = dtos; break;
duke@435 1410 default : ShouldNotReachHere();
duke@435 1411 }
duke@435 1412 transition(tos_in, tos_out);
duke@435 1413 #endif
duke@435 1414
duke@435 1415
duke@435 1416 // Conversion
duke@435 1417 Label done;
duke@435 1418 switch (bytecode()) {
duke@435 1419 case Bytecodes::_i2l:
duke@435 1420 #ifdef _LP64
duke@435 1421 // Sign extend the 32 bits
duke@435 1422 __ sra ( Otos_i, 0, Otos_l );
duke@435 1423 #else
// 32-bit: low word in place; high word (Otos_l1) is 0 or ~0 by sign.
duke@435 1424 __ addcc(Otos_i, 0, Otos_l2);
duke@435 1425 __ br(Assembler::greaterEqual, true, Assembler::pt, done);
duke@435 1426 __ delayed()->clr(Otos_l1);
duke@435 1427 __ set(~0, Otos_l1);
duke@435 1428 #endif
duke@435 1429 break;
duke@435 1430
// int -> float/double: move the int through d_tmp into an FP register,
// then fitof converts in place.
duke@435 1431 case Bytecodes::_i2f:
duke@435 1432 __ st(Otos_i, __ d_tmp );
duke@435 1433 __ ldf(FloatRegisterImpl::S, __ d_tmp, F0);
duke@435 1434 __ fitof(FloatRegisterImpl::S, F0, Ftos_f);
duke@435 1435 break;
duke@435 1436
duke@435 1437 case Bytecodes::_i2d:
duke@435 1438 __ st(Otos_i, __ d_tmp);
duke@435 1439 __ ldf(FloatRegisterImpl::S, __ d_tmp, F0);
duke@435 1440 __ fitof(FloatRegisterImpl::D, F0, Ftos_f);
duke@435 1441 break;
duke@435 1442
// i2b/i2c/i2s: truncate via shift-left then shift-right (arithmetic
// for the signed byte/short, logical for the unsigned char).
duke@435 1443 case Bytecodes::_i2b:
duke@435 1444 __ sll(Otos_i, 24, Otos_i);
duke@435 1445 __ sra(Otos_i, 24, Otos_i);
duke@435 1446 break;
duke@435 1447
duke@435 1448 case Bytecodes::_i2c:
duke@435 1449 __ sll(Otos_i, 16, Otos_i);
duke@435 1450 __ srl(Otos_i, 16, Otos_i);
duke@435 1451 break;
duke@435 1452
duke@435 1453 case Bytecodes::_i2s:
duke@435 1454 __ sll(Otos_i, 16, Otos_i);
duke@435 1455 __ sra(Otos_i, 16, Otos_i);
duke@435 1456 break;
duke@435 1457
duke@435 1458 case Bytecodes::_l2i:
duke@435 1459 #ifndef _LP64
duke@435 1460 __ mov(Otos_l2, Otos_i);
duke@435 1461 #else
duke@435 1462 // Sign-extend into the high 32 bits
duke@435 1463 __ sra(Otos_l, 0, Otos_i);
duke@435 1464 #endif
duke@435 1465 break;
duke@435 1466
// l2f/l2d: use fxtof if V9 instructions are available, otherwise call
// the SharedRuntime conversion stub.
duke@435 1467 case Bytecodes::_l2f:
duke@435 1468 case Bytecodes::_l2d:
duke@435 1469 __ st_long(Otos_l, __ d_tmp);
duke@435 1470 __ ldf(FloatRegisterImpl::D, __ d_tmp, Ftos_d);
duke@435 1471
duke@435 1472 if (VM_Version::v9_instructions_work()) {
duke@435 1473 if (bytecode() == Bytecodes::_l2f) {
duke@435 1474 __ fxtof(FloatRegisterImpl::S, Ftos_d, Ftos_f);
duke@435 1475 } else {
duke@435 1476 __ fxtof(FloatRegisterImpl::D, Ftos_d, Ftos_d);
duke@435 1477 }
duke@435 1478 } else {
duke@435 1479 __ call_VM_leaf(
duke@435 1480 Lscratch,
duke@435 1481 bytecode() == Bytecodes::_l2f
duke@435 1482 ? CAST_FROM_FN_PTR(address, SharedRuntime::l2f)
duke@435 1483 : CAST_FROM_FN_PTR(address, SharedRuntime::l2d)
duke@435 1484 );
duke@435 1485 }
duke@435 1486 break;
duke@435 1487
duke@435 1488 case Bytecodes::_f2i: {
duke@435 1489 Label isNaN;
duke@435 1490 // result must be 0 if value is NaN; test by comparing value to itself
duke@435 1491 __ fcmp(FloatRegisterImpl::S, Assembler::fcc0, Ftos_f, Ftos_f);
duke@435 1492 // According to the v8 manual, you have to have a non-fp instruction
duke@435 1493 // between fcmp and fb.
duke@435 1494 if (!VM_Version::v9_instructions_work()) {
duke@435 1495 __ nop();
duke@435 1496 }
duke@435 1497 __ fb(Assembler::f_unordered, true, Assembler::pn, isNaN);
duke@435 1498 __ delayed()->clr(Otos_i); // NaN
duke@435 1499 __ ftoi(FloatRegisterImpl::S, Ftos_f, F30);
duke@435 1500 __ stf(FloatRegisterImpl::S, F30, __ d_tmp);
duke@435 1501 __ ld(__ d_tmp, Otos_i);
duke@435 1502 __ bind(isNaN);
duke@435 1503 }
duke@435 1504 break;
duke@435 1505
// f2l: runtime call; tos must be flushed to the stack first so the
// argument can be picked up per the calling convention.
duke@435 1506 case Bytecodes::_f2l:
duke@435 1507 // must uncache tos
duke@435 1508 __ push_f();
duke@435 1509 #ifdef _LP64
duke@435 1510 __ pop_f(F1);
duke@435 1511 #else
duke@435 1512 __ pop_i(O0);
duke@435 1513 #endif
duke@435 1514 __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::f2l));
duke@435 1515 break;
duke@435 1516
duke@435 1517 case Bytecodes::_f2d:
duke@435 1518 __ ftof( FloatRegisterImpl::S, FloatRegisterImpl::D, Ftos_f, Ftos_f);
duke@435 1519 break;
duke@435 1520
duke@435 1521 case Bytecodes::_d2i:
duke@435 1522 case Bytecodes::_d2l:
duke@435 1523 // must uncache tos
duke@435 1524 __ push_d();
duke@435 1525 #ifdef _LP64
duke@435 1526 // LP64 calling conventions pass first double arg in D0
duke@435 1527 __ pop_d( Ftos_d );
duke@435 1528 #else
duke@435 1529 __ pop_i( O0 );
duke@435 1530 __ pop_i( O1 );
duke@435 1531 #endif
duke@435 1532 __ call_VM_leaf(Lscratch,
duke@435 1533 bytecode() == Bytecodes::_d2i
duke@435 1534 ? CAST_FROM_FN_PTR(address, SharedRuntime::d2i)
duke@435 1535 : CAST_FROM_FN_PTR(address, SharedRuntime::d2l));
duke@435 1536 break;
duke@435 1537
duke@435 1538 case Bytecodes::_d2f:
duke@435 1539 if (VM_Version::v9_instructions_work()) {
duke@435 1540 __ ftof( FloatRegisterImpl::D, FloatRegisterImpl::S, Ftos_d, Ftos_f);
duke@435 1541 }
duke@435 1542 else {
duke@435 1543 // must uncache tos
duke@435 1544 __ push_d();
duke@435 1545 __ pop_i(O0);
duke@435 1546 __ pop_i(O1);
duke@435 1547 __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::d2f));
duke@435 1548 }
duke@435 1549 break;
duke@435 1550
duke@435 1551 default: ShouldNotReachHere();
duke@435 1552 }
duke@435 1553 __ bind(done);
duke@435 1554 }
duke@435 1555
duke@435 1556
// lcmp: compare two longs, leaving -1/0/1 in Otos_i. The popped value
// is the first operand; the tos-cached long is the second.
duke@435 1557 void TemplateTable::lcmp() {
duke@435 1558 transition(ltos, itos);
duke@435 1559
duke@435 1560 #ifdef _LP64
duke@435 1561 __ pop_l(O1); // pop off value 1, value 2 is in O0
duke@435 1562 __ lcmp( O1, Otos_l, Otos_i );
duke@435 1563 #else
duke@435 1564 __ pop_l(O2); // cmp O2,3 to O0,1
duke@435 1565 __ lcmp( O2, O3, Otos_l1, Otos_l2, Otos_i );
duke@435 1566 #endif
duke@435 1567 }
duke@435 1568
duke@435 1569
// float_cmp: shared template for fcmpl/fcmpg/dcmpl/dcmpg.
// unordered_result (+1 or -1) is the value produced when either operand
// is NaN; result lands in Otos_i. Relies on Ftos_f/Ftos_d both being F0
// so the second operand is already in place.
duke@435 1570 void TemplateTable::float_cmp(bool is_float, int unordered_result) {
duke@435 1571
duke@435 1572 if (is_float) __ pop_f(F2);
duke@435 1573 else __ pop_d(F2);
duke@435 1574
duke@435 1575 assert(Ftos_f == F0 && Ftos_d == F0, "alias checking:");
duke@435 1576
duke@435 1577 __ float_cmp( is_float, unordered_result, F2, F0, Otos_i );
duke@435 1578 }
duke@435 1579
// Emit code for goto/goto_w and jsr/jsr_w.
//
// Fetches the signed branch displacement from the bytecode stream (2 or 4
// bytes depending on is_wide).  For a jsr, pushes the bci of the following
// bytecode as the returnAddress and transfers control.  For a plain branch,
// backward branches additionally bump the backedge counter and may request
// compilation / on-stack replacement (OSR) of the looping method.
void TemplateTable::branch(bool is_jsr, bool is_wide) {
  // Note: on SPARC, we use InterpreterMacroAssembler::if_cmp also.
  __ verify_oop(Lmethod);
  __ verify_thread();

  const Register O2_bumped_count = O2;
  __ profile_taken_branch(G3_scratch, O2_bumped_count);

  // get (wide) offset to O1_disp; set_CC also sets the condition codes so
  // the branch direction can be tested below without re-comparing.
  const Register O1_disp = O1;
  if (is_wide) __ get_4_byte_integer_at_bcp( 1, G4_scratch, O1_disp, InterpreterMacroAssembler::set_CC);
  else __ get_2_byte_integer_at_bcp( 1, G4_scratch, O1_disp, InterpreterMacroAssembler::Signed, InterpreterMacroAssembler::set_CC);

  // Handle all the JSR stuff here, then exit.
  // It's much shorter and cleaner than intermingling with the
  // non-JSR normal-branch stuff occurring below.
  if( is_jsr ) {
    // compute return address as bci in Otos_i
    // (bci = Lbcp - start of bytecodes; adding back 5 or 3 — the length of
    // jsr_w/jsr — makes the returnAddress point past this instruction)
    __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch);
    __ sub(Lbcp, G3_scratch, G3_scratch);
    __ sub(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()) - (is_wide ? 5 : 3), Otos_i);

    // Bump Lbcp to target of JSR
    __ add(Lbcp, O1_disp, Lbcp);
    // Push returnAddress for "ret" on stack
    __ push_ptr(Otos_i);
    // And away we go!
    __ dispatch_next(vtos);
    return;
  }

  // Normal (non-jsr) branch handling

  // Save the current Lbcp (needed by the OSR notification below)
  const Register O0_cur_bcp = O0;
  __ mov( Lbcp, O0_cur_bcp );


  bool increment_invocation_counter_for_backward_branches = UseCompiler && UseLoopCounter;
  if ( increment_invocation_counter_for_backward_branches ) {
    Label Lforward;
    // check branch direction: condition codes reflect the displacement
    // fetched above; a positive displacement is a forward branch and
    // skips all the counter/OSR work.
    __ br( Assembler::positive, false, Assembler::pn, Lforward );
    // Bump bytecode pointer by displacement (take the branch)
    __ delayed()->add( O1_disp, Lbcp, Lbcp ); // add to bc addr

    if (TieredCompilation) {
      Label Lno_mdo, Loverflow;
      int increment = InvocationCounter::count_increment;
      // mask selects the notification frequency for tier-0 backedges
      int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
      if (ProfileInterpreter) {
        // If no method data exists, go to profile_continue.
        __ ld_ptr(Lmethod, methodOopDesc::method_data_offset(), G4_scratch);
        __ br_null(G4_scratch, false, Assembler::pn, Lno_mdo);
        __ delayed()->nop();

        // Increment backedge counter in the MDO
        Address mdo_backedge_counter(G4_scratch, in_bytes(methodDataOopDesc::backedge_counter_offset()) +
                                                 in_bytes(InvocationCounter::counter_offset()));
        __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, G3_scratch, Lscratch,
                                   Assembler::notZero, &Lforward);
        __ ba(false, Loverflow);
        __ delayed()->nop();
      }

      // If there's no MDO, increment counter in methodOop
      __ bind(Lno_mdo);
      Address backedge_counter(Lmethod, in_bytes(methodOopDesc::backedge_counter_offset()) +
                                        in_bytes(InvocationCounter::counter_offset()));
      __ increment_mask_and_jump(backedge_counter, increment, mask, G3_scratch, Lscratch,
                                 Assembler::notZero, &Lforward);
      __ bind(Loverflow);

      // notify point for loop, pass branch bytecode
      __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), O0_cur_bcp);

      // Was an OSR adapter generated?
      // O0 = osr nmethod
      __ br_null(O0, false, Assembler::pn, Lforward);
      __ delayed()->nop();

      // Has the nmethod been invalidated already?
      __ ld(O0, nmethod::entry_bci_offset(), O2);
      __ cmp(O2, InvalidOSREntryBci);
      __ br(Assembler::equal, false, Assembler::pn, Lforward);
      __ delayed()->nop();

      // migrate the interpreter frame off of the stack

      // preserve the thread pointer across the leaf call in a local
      __ mov(G2_thread, L7);
      // save nmethod
      __ mov(O0, L6);
      __ set_last_Java_frame(SP, noreg);
      __ call_VM_leaf(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin), L7);
      __ reset_last_Java_frame();
      __ mov(L7, G2_thread);

      // move OSR nmethod to I1
      __ mov(L6, I1);

      // OSR buffer to I0
      __ mov(O0, I0);

      // remove the interpreter frame
      __ restore(I5_savedSP, 0, SP);

      // Jump to the osr code.
      // After the restore the caller's register window is current, so the
      // nmethod stashed in I1 is now visible as O1 (and the buffer as O0).
      __ ld_ptr(O1, nmethod::osr_entry_point_offset(), O2);
      __ jmp(O2, G0);
      __ delayed()->nop();

    } else {
      // Update Backedge branch separately from invocations
      const Register G4_invoke_ctr = G4;
      __ increment_backedge_counter(G4_invoke_ctr, G1_scratch);
      if (ProfileInterpreter) {
        __ test_invocation_counter_for_mdp(G4_invoke_ctr, G3_scratch, Lforward);
        if (UseOnStackReplacement) {
          __ test_backedge_count_for_osr(O2_bumped_count, O0_cur_bcp, G3_scratch);
        }
      } else {
        if (UseOnStackReplacement) {
          __ test_backedge_count_for_osr(G4_invoke_ctr, O0_cur_bcp, G3_scratch);
        }
      }
    }

    __ bind(Lforward);
  } else
    // Bump bytecode pointer by displacement (take the branch)
    __ add( O1_disp, Lbcp, Lbcp );// add to bc addr

  // continue with bytecode @ target
  // %%%%% Like Intel, could speed things up by moving bytecode fetch to code above,
  // %%%%% and changing dispatch_next to dispatch_only
  __ dispatch_next(vtos);
}
duke@435 1717
duke@435 1718
duke@435 1719 // Note Condition in argument is TemplateTable::Condition
duke@435 1720 // arg scope is within class scope
duke@435 1721
// ifeq/ifne/iflt/ifge/ifgt/ifle: compare int tos against zero and branch.
void TemplateTable::if_0cmp(Condition cc) {
  // no pointers, integer only!
  transition(itos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  __ cmp( Otos_i, 0);
  // if_cmp tests the negated condition so the fall-through path is not-taken
  __ if_cmp(ccNot(cc), false);
}
duke@435 1729
duke@435 1730
// if_icmpXX: pop value1, compare against value2 (int tos) and branch.
void TemplateTable::if_icmp(Condition cc) {
  transition(itos, vtos);
  __ pop_i(O1);
  __ cmp(O1, Otos_i);
  // if_cmp tests the negated condition so the fall-through path is not-taken
  __ if_cmp(ccNot(cc), false);
}
duke@435 1737
duke@435 1738
// ifnull/ifnonnull: test reference tos against null and branch.
void TemplateTable::if_nullcmp(Condition cc) {
  transition(atos, vtos);
  __ tst(Otos_i);
  // pointer compare (ptr_compare = true); negated condition as in if_0cmp
  __ if_cmp(ccNot(cc), true);
}
duke@435 1744
duke@435 1745
// if_acmpeq/if_acmpne: pop reference1, compare against reference2 (tos)
// by identity and branch.
void TemplateTable::if_acmp(Condition cc) {
  transition(atos, vtos);
  __ pop_ptr(O1);
  __ verify_oop(O1);
  __ verify_oop(Otos_i);
  __ cmp(O1, Otos_i);
  // pointer compare (ptr_compare = true); negated condition as in if_0cmp
  __ if_cmp(ccNot(cc), true);
}
duke@435 1754
duke@435 1755
duke@435 1756
// ret bytecode: return from a jsr subroutine.  Loads the returnAddress
// (really a bci) from the local variable named by the index byte and
// resumes dispatch at the corresponding bcp.
void TemplateTable::ret() {
  transition(vtos, vtos);
  locals_index(G3_scratch);
  __ access_local_returnAddress(G3_scratch, Otos_i);
  // Otos_i contains the bci, compute the bcp from that

#ifdef _LP64
#ifdef ASSERT
  // jsr result was labeled as an 'itos' not an 'atos' because we cannot GC
  // the result. The return address (really a BCI) was stored with an
  // 'astore' because JVM specs claim it's a pointer-sized thing. Hence in
  // the 64-bit build the 32-bit BCI is actually in the low bits of a 64-bit
  // loaded value.
  // Sanity check: a real BCI fits well below 65536, so a larger value
  // means the BCI ended up in the wrong half of the register.
  { Label zzz ;
     __ set (65536, G3_scratch) ;
     __ cmp (Otos_i, G3_scratch) ;
     __ bp( Assembler::lessEqualUnsigned, false, Assembler::xcc, Assembler::pn, zzz);
     __ delayed()->nop();
     __ stop("BCI is in the wrong register half?");
     __ bind (zzz) ;
  }
#endif
#endif

  __ profile_ret(vtos, Otos_i, G4_scratch);

  // Lbcp = start of the method's bytecodes + bci
  __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch);
  __ add(G3_scratch, Otos_i, G3_scratch);
  __ add(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()), Lbcp);
  __ dispatch_next(vtos);
}
duke@435 1788
duke@435 1789
// wide ret: identical to ret() except the local index is 2 bytes wide.
void TemplateTable::wide_ret() {
  transition(vtos, vtos);
  locals_index_wide(G3_scratch);
  __ access_local_returnAddress(G3_scratch, Otos_i);
  // Otos_i contains the bci, compute the bcp from that

  __ profile_ret(vtos, Otos_i, G4_scratch);

  // Lbcp = start of the method's bytecodes + bci
  __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch);
  __ add(G3_scratch, Otos_i, G3_scratch);
  __ add(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()), Lbcp);
  __ dispatch_next(vtos);
}
duke@435 1803
duke@435 1804
// tableswitch bytecode: range-check the int key against [lo, hi] and
// dispatch through the jump table, or fall back to the default offset.
void TemplateTable::tableswitch() {
  transition(itos, vtos);
  Label default_case, continue_execution;

  // align bcp: the table of 32-bit words (default, lo, hi, jump offsets)
  // starts at the first 4-byte boundary after the opcode
  __ add(Lbcp, BytesPerInt, O1);
  __ and3(O1, -BytesPerInt, O1);
  // load lo, hi (note: 4-byte bound words, not bytes)
  __ ld(O1, 1 * BytesPerInt, O2); // low bound
  __ ld(O1, 2 * BytesPerInt, O3); // high bound
#ifdef _LP64
  // Sign extend the 32 bits
  __ sra ( Otos_i, 0, Otos_i );
#endif /* _LP64 */

  // check against lo & hi
  __ cmp( Otos_i, O2);
  __ br( Assembler::less, false, Assembler::pn, default_case);
  __ delayed()->cmp( Otos_i, O3 );
  __ br( Assembler::greater, false, Assembler::pn, default_case);
  // lookup dispatch offset: table index = key - lo (computed in delay slot)
  __ delayed()->sub(Otos_i, O2, O2);
  __ profile_switch_case(O2, O3, G3_scratch, G4_scratch);
  __ sll(O2, LogBytesPerInt, O2);
  __ add(O2, 3 * BytesPerInt, O2); // skip the default, lo and hi words
  __ ba(false, continue_execution);
  __ delayed()->ld(O1, O2, O2);    // load jump offset in the delay slot
  // handle default
  __ bind(default_case);
  __ profile_switch_default(O3);
  __ ld(O1, 0, O2); // get default offset
  // continue execution
  __ bind(continue_execution);
  __ add(Lbcp, O2, Lbcp);
  __ dispatch_next(vtos);
}
duke@435 1841
duke@435 1842
// lookupswitch is always rewritten at link time into fast_linearswitch or
// fast_binaryswitch, so the plain bytecode must never reach execution.
void TemplateTable::lookupswitch() {
  transition(itos, itos);
  __ stop("lookupswitch bytecode should have been rewritten");
}
duke@435 1847
// Rewritten lookupswitch with few pairs: linear scan over the sorted
// (match, offset) pairs; takes the default offset when no match is found.
void TemplateTable::fast_linearswitch() {
  transition(itos, vtos);
  Label loop_entry, loop, found, continue_execution;
  // align bcp: the 32-bit words (default, npairs, pairs...) start at the
  // first 4-byte boundary after the opcode
  __ add(Lbcp, BytesPerInt, O1);
  __ and3(O1, -BytesPerInt, O1);
  // set counter: O2 = npairs scaled to bytes
  __ ld(O1, BytesPerInt, O2);
  __ sll(O2, LogBytesPerInt + 1, O2); // in word-pairs
  __ add(O1, 2 * BytesPerInt, O3); // set first pair addr
  __ ba(false, loop_entry);
  __ delayed()->add(O3, O2, O2); // counter now points past last pair

  // table search: O4 holds the current match value (loaded in the delay
  // slots below), O3 walks the pairs
  __ bind(loop);
  __ cmp(O4, Otos_i);
  __ br(Assembler::equal, true, Assembler::pn, found);
  __ delayed()->ld(O3, BytesPerInt, O4); // offset -> O4 (only if taken)
  __ inc(O3, 2 * BytesPerInt);

  __ bind(loop_entry);
  __ cmp(O2, O3);
  __ brx(Assembler::greaterUnsigned, true, Assembler::pt, loop);
  __ delayed()->ld(O3, 0, O4); // next match value -> O4 (only if taken)

  // default case
  __ ld(O1, 0, O4); // get default offset
  if (ProfileInterpreter) {
    __ profile_switch_default(O3);
    __ ba(false, continue_execution);
    __ delayed()->nop();
  }

  // entry found -> get offset
  __ bind(found);
  if (ProfileInterpreter) {
    // recover the case index from the pair address for profiling
    __ sub(O3, O1, O3);
    __ sub(O3, 2*BytesPerInt, O3);
    __ srl(O3, LogBytesPerInt + 1, O3); // in word-pairs
    __ profile_switch_case(O3, O1, O2, G3_scratch);

    __ bind(continue_execution);
  }
  // O4 holds either the matched offset or the default offset here
  __ add(Lbcp, O4, Lbcp);
  __ dispatch_next(vtos);
}
duke@435 1894
duke@435 1895
// Rewritten lookupswitch with many pairs: binary search over the sorted
// (match, offset) pairs.
void TemplateTable::fast_binaryswitch() {
  transition(itos, vtos);
  // Implementation using the following core algorithm: (copied from Intel)
  //
  // int binary_search(int key, LookupswitchPair* array, int n) {
  //   // Binary search according to "Methodik des Programmierens" by
  //   // Edsger W. Dijkstra and W.H.J. Feijen, Addison Wesley Germany 1985.
  //   int i = 0;
  //   int j = n;
  //   while (i+1 < j) {
  //     // invariant P: 0 <= i < j <= n and (a[i] <= key < a[j] or Q)
  //     // with Q: for all i: 0 <= i < n: key < a[i]
  //     // where a stands for the array and assuming that the (non-existing)
  //     // element a[n] is infinitely big.
  //     int h = (i + j) >> 1;
  //     // i < h < j
  //     if (key < array[h].fast_match()) {
  //       j = h;
  //     } else {
  //       i = h;
  //     }
  //   }
  //   // R: a[i] <= key < a[i+1] or Q
  //   // (i.e., if key is within array, i is the correct index)
  //   return i;
  // }

  // register allocation
  assert(Otos_i == O0, "alias checking");
  const Register Rkey = Otos_i; // already set (tosca)
  const Register Rarray = O1;
  const Register Ri = O2;
  const Register Rj = O3;
  const Register Rh = O4;
  const Register Rscratch = O5;

  const int log_entry_size = 3;  // each pair is 2 words = 8 bytes
  const int entry_size = 1 << log_entry_size;

  Label found;
  // Find Array start: Rarray = address of the first (match, offset) pair,
  // which follows the alignment padding, the default word and npairs word
  __ add(Lbcp, 3 * BytesPerInt, Rarray);
  __ and3(Rarray, -BytesPerInt, Rarray);
  // initialize i & j (in delay slot)
  __ clr( Ri );

  // and start
  Label entry;
  __ ba(false, entry);
  __ delayed()->ld( Rarray, -BytesPerInt, Rj); // j = npairs
  // (Rj is already in the native byte-ordering.)

  // binary search loop
  { Label loop;
    __ bind( loop );
    // int h = (i + j) >> 1;
    // (the sum i + j was formed in the loop branch's delay slot below)
    __ sra( Rh, 1, Rh );
    // if (key < array[h].fast_match()) {
    //   j = h;
    // } else {
    //   i = h;
    // }
    __ sll( Rh, log_entry_size, Rscratch );
    __ ld( Rarray, Rscratch, Rscratch );
    // (Rscratch is already in the native byte-ordering.)
    __ cmp( Rkey, Rscratch );
    if ( VM_Version::v9_instructions_work() ) {
      // branchless update using V9 conditional moves
      __ movcc( Assembler::less, false, Assembler::icc, Rh, Rj ); // j = h if (key < array[h].fast_match())
      __ movcc( Assembler::greaterEqual, false, Assembler::icc, Rh, Ri ); // i = h if (key >= array[h].fast_match())
    }
    else {
      Label end_of_if;
      __ br( Assembler::less, true, Assembler::pt, end_of_if );
      __ delayed()->mov( Rh, Rj ); // if (<) Rj = Rh
      __ mov( Rh, Ri ); // else i = h
      __ bind(end_of_if); // }
    }

    // while (i+1 < j)
    __ bind( entry );
    __ add( Ri, 1, Rscratch );
    __ cmp(Rscratch, Rj);
    __ br( Assembler::less, true, Assembler::pt, loop );
    __ delayed()->add( Ri, Rj, Rh ); // start h = i + j >> 1;
  }

  // end of binary search, result index is i (must check again!)
  Label default_case;
  Label continue_execution;
  if (ProfileInterpreter) {
    __ mov( Ri, Rh ); // Save index in i for profiling
  }
  __ sll( Ri, log_entry_size, Ri );
  __ ld( Rarray, Ri, Rscratch );
  // (Rscratch is already in the native byte-ordering.)
  __ cmp( Rkey, Rscratch );
  __ br( Assembler::notEqual, true, Assembler::pn, default_case );
  __ delayed()->ld( Rarray, -2 * BytesPerInt, Rj ); // load default offset -> j

  // entry found -> j = offset (second word of the pair)
  __ inc( Ri, BytesPerInt );
  __ profile_switch_case(Rh, Rj, Rscratch, Rkey);
  __ ld( Rarray, Ri, Rj );
  // (Rj is already in the native byte-ordering.)

  if (ProfileInterpreter) {
    __ ba(false, continue_execution);
    __ delayed()->nop();
  }

  __ bind(default_case); // fall through (if not profiling)
  __ profile_switch_default(Ri);

  __ bind(continue_execution);
  __ add( Lbcp, Rj, Lbcp );
  __ dispatch_next( vtos );
}
duke@435 2013
duke@435 2014
// Emit code for the return family of bytecodes.  For
// _return_register_finalizer (a rewritten return in Object.<init>),
// first registers the receiver with the VM if its class has a finalizer.
// Then removes the interpreter activation and returns to the caller.
void TemplateTable::_return(TosState state) {
  transition(state, state);
  assert(_desc->calls_vm(), "inconsistent calls_vm information");

  if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
    assert(state == vtos, "only valid state");
    // load local 0 (the receiver) and test its klass' HAS_FINALIZER flag
    __ mov(G0, G3_scratch);
    __ access_local_ptr(G3_scratch, Otos_i);
    __ load_klass(Otos_i, O2);
    __ set(JVM_ACC_HAS_FINALIZER, G3);
    __ ld(O2, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc), O2);
    __ andcc(G3, O2, G0);
    Label skip_register_finalizer;
    __ br(Assembler::zero, false, Assembler::pn, skip_register_finalizer);
    __ delayed()->nop();

    // Call out to do finalizer registration
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), Otos_i);

    __ bind(skip_register_finalizer);
  }

  __ remove_activation(state, /* throw_monitor_exception */ true);

  // The caller's SP was adjusted upon method entry to accommodate
  // the callee's non-argument locals. Undo that adjustment.
  __ ret(); // return to caller
  __ delayed()->restore(I5_savedSP, G0, SP);
}
duke@435 2044
duke@435 2045
duke@435 2046 // ----------------------------------------------------------------------------
duke@435 2047 // Volatile variables demand their effects be made known to all CPU's in
duke@435 2048 // order. Store buffers on most chips allow reads & writes to reorder; the
duke@435 2049 // JMM's ReadAfterWrite.java test fails in -Xint mode without some kind of
duke@435 2050 // memory barrier (i.e., it's not sufficient that the interpreter does not
duke@435 2051 // reorder volatile references, the hardware also must not reorder them).
duke@435 2052 //
duke@435 2053 // According to the new Java Memory Model (JMM):
duke@435 2054 // (1) All volatiles are serialized wrt to each other.
// ALSO reads & writes act as acquire & release, so:
duke@435 2056 // (2) A read cannot let unrelated NON-volatile memory refs that happen after
duke@435 2057 // the read float up to before the read. It's OK for non-volatile memory refs
duke@435 2058 // that happen before the volatile read to float down below it.
// (3) Similarly, a volatile write cannot let unrelated NON-volatile memory refs
duke@435 2060 // that happen BEFORE the write float down to after the write. It's OK for
duke@435 2061 // non-volatile memory refs that happen after the volatile write to float up
duke@435 2062 // before it.
duke@435 2063 //
duke@435 2064 // We only put in barriers around volatile refs (they are expensive), not
duke@435 2065 // _between_ memory refs (that would require us to track the flavor of the
duke@435 2066 // previous memory refs). Requirements (2) and (3) require some barriers
duke@435 2067 // before volatile stores and after volatile loads. These nearly cover
duke@435 2068 // requirement (1) but miss the volatile-store-volatile-load case. This final
duke@435 2069 // case is placed after volatile-stores although it could just as well go
duke@435 2070 // before volatile-loads.
duke@435 2071 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint) {
duke@435 2072 // Helper function to insert a is-volatile test and memory barrier
duke@435 2073 // All current sparc implementations run in TSO, needing only StoreLoad
duke@435 2074 if ((order_constraint & Assembler::StoreLoad) == 0) return;
duke@435 2075 __ membar( order_constraint );
duke@435 2076 }
duke@435 2077
duke@435 2078 // ----------------------------------------------------------------------------
// Resolve the constant pool cache entry referenced at the current bcp,
// calling into the runtime on the slow path if it is not yet resolved.
// On exit Rcache/index address the cache entry; for byte_no == f1_oop the
// resolved f1 oop is additionally loaded into 'result'.
void TemplateTable::resolve_cache_and_index(int byte_no,
                                            Register result,
                                            Register Rcache,
                                            Register index,
                                            size_t index_size) {
  // Depends on cpCacheOop layout!
  Label resolved;

  __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
  if (byte_no == f1_oop) {
    // We are resolved if the f1 field contains a non-null object (CallSite, etc.)
    // This kind of CP cache entry does not need to match the flags byte, because
    // there is a 1-1 relation between bytecode type and CP entry type.
    assert_different_registers(result, Rcache);
    __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
              ConstantPoolCacheEntry::f1_offset(), result);
    __ tst(result);
    __ br(Assembler::notEqual, false, Assembler::pt, resolved);
    __ delayed()->set((int)bytecode(), O1);
  } else {
    assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
    assert(result == noreg, ""); //else change code for setting result
    // The entry is resolved when the indices word records this bytecode
    // in the byte selected by byte_no.
    const int shift_count = (1 + byte_no)*BitsPerByte;

    __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
              ConstantPoolCacheEntry::indices_offset(), Lbyte_code);

    __ srl( Lbyte_code, shift_count, Lbyte_code );
    __ and3( Lbyte_code, 0xFF, Lbyte_code );
    __ cmp( Lbyte_code, (int)bytecode());
    __ br( Assembler::equal, false, Assembler::pt, resolved);
    __ delayed()->set((int)bytecode(), O1);
  }

  // select the runtime entry matching this bytecode
  address entry;
  switch (bytecode()) {
    case Bytecodes::_getstatic : // fall through
    case Bytecodes::_putstatic : // fall through
    case Bytecodes::_getfield : // fall through
    case Bytecodes::_putfield : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
    case Bytecodes::_invokevirtual : // fall through
    case Bytecodes::_invokespecial : // fall through
    case Bytecodes::_invokestatic : // fall through
    case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
    case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
    case Bytecodes::_fast_aldc : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
    case Bytecodes::_fast_aldc_w : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
    default : ShouldNotReachHere(); break;
  }
  // first time invocation - must resolve first
  // (O1 holds the bytecode; it was set in the delay slots above)
  __ call_VM(noreg, entry, O1);
  // Update registers with resolved info
  __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
  if (result != noreg)
    __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
              ConstantPoolCacheEntry::f1_offset(), result);
  __ bind(resolved);
}
duke@435 2137
// Load the resolved method (from f2 for invokevirtual, else f1), the
// optional itable index (f2) and the flags word from an invoke's constant
// pool cache entry, resolving the entry first when necessary.
void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
                                               Register Rmethod,
                                               Register Ritable_index,
                                               Register Rflags,
                                               bool is_invokevirtual,
                                               bool is_invokevfinal,
                                               bool is_invokedynamic) {
  // Uses both G3_scratch and G4_scratch
  Register Rcache = G3_scratch;
  Register Rscratch = G4_scratch;
  assert_different_registers(Rcache, Rmethod, Ritable_index);

  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

  // determine constant pool cache field offsets
  const int method_offset = in_bytes(
    cp_base_offset +
    (is_invokevirtual
     ? ConstantPoolCacheEntry::f2_offset()
     : ConstantPoolCacheEntry::f1_offset()
     )
    );
  const int flags_offset = in_bytes(cp_base_offset +
                                    ConstantPoolCacheEntry::flags_offset());
  // access constant pool cache fields
  const int index_offset = in_bytes(cp_base_offset +
                                    ConstantPoolCacheEntry::f2_offset());

  if (is_invokevfinal) {
    // invokevfinal is a rewrite of an already-resolved invokevirtual,
    // so the entry can be read without a resolution check
    __ get_cache_and_index_at_bcp(Rcache, Rscratch, 1);
    __ ld_ptr(Rcache, method_offset, Rmethod);
  } else if (byte_no == f1_oop) {
    // Resolved f1_oop goes directly into 'method' register.
    resolve_cache_and_index(byte_no, Rmethod, Rcache, Rscratch, sizeof(u4));
  } else {
    resolve_cache_and_index(byte_no, noreg, Rcache, Rscratch, sizeof(u2));
    __ ld_ptr(Rcache, method_offset, Rmethod);
  }

  if (Ritable_index != noreg) {
    __ ld_ptr(Rcache, index_offset, Ritable_index);
  }
  __ ld_ptr(Rcache, flags_offset, Rflags);
}
duke@435 2182
// The Rcache register must be set before call.
// Loads the field's flags word and offset (f2) from its constant pool
// cache entry; for static fields also loads f1 into Robj, which serves as
// the base object for the static field access.
void TemplateTable::load_field_cp_cache_entry(Register Robj,
                                              Register Rcache,
                                              Register index,
                                              Register Roffset,
                                              Register Rflags,
                                              bool is_static) {
  assert_different_registers(Rcache, Rflags, Roffset);

  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
  if (is_static) {
    __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f1_offset(), Robj);
  }
}
duke@435 2200
// The registers Rcache and index expected to be set before call.
// Correct values of the Rcache and index registers are preserved.
// If JVMTI field-access watches may be active, calls
// InterpreterRuntime::post_field_access before the actual field read.
void TemplateTable::jvmti_post_field_access(Register Rcache,
                                            Register index,
                                            bool is_static,
                                            bool has_tos) {
  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

  if (JvmtiExport::can_post_field_access()) {
    // Check to see if a field access watch has been set before we take
    // the time to call into the VM.
    Label Label1;
    assert_different_registers(Rcache, index, G1_scratch);
    AddressLiteral get_field_access_count_addr(JvmtiExport::get_field_access_count_addr());
    __ load_contents(get_field_access_count_addr, G1_scratch);
    __ tst(G1_scratch);
    __ br(Assembler::zero, false, Assembler::pt, Label1);
    __ delayed()->nop();

    // Rcache now points directly at the cache entry
    __ add(Rcache, in_bytes(cp_base_offset), Rcache);

    if (is_static) {
      __ clr(Otos_i);  // static access: pass a NULL object pointer
    } else {
      if (has_tos) {
        // save object pointer before call_VM() clobbers it
        __ push_ptr(Otos_i); // put object on tos where GC wants it.
      } else {
        // Load top of stack (do not pop the value off the stack);
        __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
      }
      __ verify_oop(Otos_i);
    }
    // Otos_i: object pointer or NULL if static
    // Rcache: cache entry pointer
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
               Otos_i, Rcache);
    if (!is_static && has_tos) {
      __ pop_ptr(Otos_i); // restore object pointer
      __ verify_oop(Otos_i);
    }
    // re-establish Rcache/index for the caller (call_VM clobbers scratch state)
    __ get_cache_and_index_at_bcp(Rcache, index, 1);
    __ bind(Label1);
  }
}
duke@435 2246
// Generate the template-interpreter code for getfield / getstatic.
// byte_no selects which operand byte of the bytecode holds the constant
// pool cache index; is_static distinguishes getstatic (no receiver on the
// stack) from getfield.  The field value is loaded onto the expression
// stack with the push matching its tos state, and for getfield the
// bytecode is rewritten to its _fast_xgetfield form so later executions
// skip resolution.
void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
  transition(vtos, vtos);

  Register Rcache = G3_scratch;
  Register index = G4_scratch;
  Register Rclass = Rcache;      // aliases Rcache: holder/receiver overwrites the cache ptr
  Register Roffset= G4_scratch;  // aliases index: field offset overwrites the cp index
  Register Rflags = G1_scratch;
  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

  resolve_cache_and_index(byte_no, noreg, Rcache, index, sizeof(u2));
  jvmti_post_field_access(Rcache, index, is_static, false);
  // Loads the holder klass (static) or leaves the receiver handling to us,
  // plus the field offset into Roffset and the flags word into Rflags.
  load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);

  if (!is_static) {
    // getfield: receiver oop is on the expression stack; pop + null check.
    pop_and_check_object(Rclass);
  } else {
    __ verify_oop(Rclass);
  }

  Label exit;

  Assembler::Membar_mask_bits membar_bits =
    Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);

  if (__ membar_has_effect(membar_bits)) {
    // Get volatile flag: isolate the volatileField bit of the flags word in
    // Lscratch; every type branch below re-tests it in its delay slot.
    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
    __ and3(Rflags, Lscratch, Lscratch);
  }

  Label checkVolatile;

  // compute field type: shift the tos-state bits of the flags word down.
  Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj;
  __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
  // Make sure we don't need to mask Rflags for tosBits after the above shift
  ConstantPoolCacheEntry::verify_tosBits();

  // Check atos before itos for getstatic, more likely (in Queens at least)
  __ cmp(Rflags, atos);
  __ br(Assembler::notEqual, false, Assembler::pt, notObj);
  __ delayed() ->cmp(Rflags, itos);   // delay slot pre-compares for the next branch

  // atos
  __ load_heap_oop(Rclass, Roffset, Otos_i);
  __ verify_oop(Otos_i);
  __ push(atos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);        // delay slot sets condition codes for checkVolatile

  __ bind(notObj);

  // cmp(Rflags, itos);               (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notInt);
  __ delayed() ->cmp(Rflags, ltos);

  // itos
  __ ld(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notInt);

  // cmp(Rflags, ltos);               (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notLong);
  __ delayed() ->cmp(Rflags, btos);

  // ltos
  // load must be atomic
  __ ld_long(Rclass, Roffset, Otos_l);
  __ push(ltos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_lgetfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notLong);

  // cmp(Rflags, btos);               (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notByte);
  __ delayed() ->cmp(Rflags, ctos);

  // btos: sign-extending byte load, pushed as int
  __ ldsb(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_bgetfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notByte);

  // cmp(Rflags, ctos);               (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notChar);
  __ delayed() ->cmp(Rflags, stos);

  // ctos: zero-extending half load (Java char), pushed as int
  __ lduh(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_cgetfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notChar);

  // cmp(Rflags, stos);               (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notShort);
  __ delayed() ->cmp(Rflags, ftos);

  // stos: sign-extending half load (Java short), pushed as int
  __ ldsh(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_sgetfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notShort);


  // cmp(Rflags, ftos);               (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notFloat);
  __ delayed() ->tst(Lscratch);       // falls through to checkVolatile with cc set

  // ftos
  __ ldf(FloatRegisterImpl::S, Rclass, Roffset, Ftos_f);
  __ push(ftos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_fgetfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notFloat);


  // dtos: only remaining possibility, no compare needed
  __ ldf(FloatRegisterImpl::D, Rclass, Roffset, Ftos_d);
  __ push(dtos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_dgetfield, G3_scratch, G4_scratch);
  }

  __ bind(checkVolatile);
  if (__ membar_has_effect(membar_bits)) {
    // __ tst(Lscratch); executed in delay slot
    __ br(Assembler::zero, false, Assembler::pt, exit);
    __ delayed()->nop();
    // Volatile read: order this load before subsequent loads and stores.
    volatile_barrier(membar_bits);
  }

  __ bind(exit);
}
duke@435 2413
duke@435 2414
// getfield bytecode: field read with a receiver on the stack.
void TemplateTable::getfield(int byte_no) {
  getfield_or_static(byte_no, false);
}
duke@435 2418
// getstatic bytecode: static field read, no receiver.
void TemplateTable::getstatic(int byte_no) {
  getfield_or_static(byte_no, true);
}
duke@435 2422
duke@435 2423
duke@435 2424 void TemplateTable::fast_accessfield(TosState state) {
duke@435 2425 transition(atos, state);
duke@435 2426 Register Rcache = G3_scratch;
duke@435 2427 Register index = G4_scratch;
duke@435 2428 Register Roffset = G4_scratch;
duke@435 2429 Register Rflags = Rcache;
duke@435 2430 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
duke@435 2431
duke@435 2432 __ get_cache_and_index_at_bcp(Rcache, index, 1);
duke@435 2433 jvmti_post_field_access(Rcache, index, /*is_static*/false, /*has_tos*/true);
duke@435 2434
twisti@1162 2435 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
duke@435 2436
duke@435 2437 __ null_check(Otos_i);
duke@435 2438 __ verify_oop(Otos_i);
duke@435 2439
duke@435 2440 Label exit;
duke@435 2441
duke@435 2442 Assembler::Membar_mask_bits membar_bits =
duke@435 2443 Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
duke@435 2444 if (__ membar_has_effect(membar_bits)) {
duke@435 2445 // Get volatile flag
twisti@1162 2446 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Rflags);
duke@435 2447 __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
duke@435 2448 }
duke@435 2449
duke@435 2450 switch (bytecode()) {
duke@435 2451 case Bytecodes::_fast_bgetfield:
duke@435 2452 __ ldsb(Otos_i, Roffset, Otos_i);
duke@435 2453 break;
duke@435 2454 case Bytecodes::_fast_cgetfield:
duke@435 2455 __ lduh(Otos_i, Roffset, Otos_i);
duke@435 2456 break;
duke@435 2457 case Bytecodes::_fast_sgetfield:
duke@435 2458 __ ldsh(Otos_i, Roffset, Otos_i);
duke@435 2459 break;
duke@435 2460 case Bytecodes::_fast_igetfield:
duke@435 2461 __ ld(Otos_i, Roffset, Otos_i);
duke@435 2462 break;
duke@435 2463 case Bytecodes::_fast_lgetfield:
duke@435 2464 __ ld_long(Otos_i, Roffset, Otos_l);
duke@435 2465 break;
duke@435 2466 case Bytecodes::_fast_fgetfield:
duke@435 2467 __ ldf(FloatRegisterImpl::S, Otos_i, Roffset, Ftos_f);
duke@435 2468 break;
duke@435 2469 case Bytecodes::_fast_dgetfield:
duke@435 2470 __ ldf(FloatRegisterImpl::D, Otos_i, Roffset, Ftos_d);
duke@435 2471 break;
duke@435 2472 case Bytecodes::_fast_agetfield:
coleenp@548 2473 __ load_heap_oop(Otos_i, Roffset, Otos_i);
duke@435 2474 break;
duke@435 2475 default:
duke@435 2476 ShouldNotReachHere();
duke@435 2477 }
duke@435 2478
duke@435 2479 if (__ membar_has_effect(membar_bits)) {
duke@435 2480 __ btst(Lscratch, Rflags);
duke@435 2481 __ br(Assembler::zero, false, Assembler::pt, exit);
duke@435 2482 __ delayed()->nop();
duke@435 2483 volatile_barrier(membar_bits);
duke@435 2484 __ bind(exit);
duke@435 2485 }
duke@435 2486
duke@435 2487 if (state == atos) {
duke@435 2488 __ verify_oop(Otos_i); // does not blow flags!
duke@435 2489 }
duke@435 2490 }
duke@435 2491
// Emit the JVMTI field-modification watch for the _fast_xputfield bytecodes.
// Skipped entirely at code-generation time unless a JVMTI agent can post
// field modification events; at run time the notification is only taken
// when a watch is actually armed (modification count != 0).  The value to
// be stored is in the tos register(s) and the receiver oop is on the
// expression stack.
void TemplateTable::jvmti_post_fast_field_mod() {
  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before we take
    // the time to call into the VM.
    Label done;
    AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
    __ load_contents(get_field_modification_count_addr, G4_scratch);
    __ tst(G4_scratch);
    __ br(Assembler::zero, false, Assembler::pt, done);
    __ delayed()->nop();
    __ pop_ptr(G4_scratch);      // copy the object pointer from tos
    __ verify_oop(G4_scratch);
    __ push_ptr(G4_scratch);     // put the object pointer back on tos
    __ get_cache_entry_pointer_at_bcp(G1_scratch, G3_scratch, 1);
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) {  // save tos values before call_VM() clobbers them
    case Bytecodes::_fast_aputfield: __ push_ptr(Otos_i); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(Otos_i); break;
    case Bytecodes::_fast_dputfield: __ push_d(Ftos_d); break;
    case Bytecodes::_fast_fputfield: __ push_f(Ftos_f); break;
    // get words in right order for use as jvalue object
    case Bytecodes::_fast_lputfield: __ push_l(Otos_l); break;
    }
    // setup pointer to jvalue object: the value just pushed, one word below Lesp
    __ mov(Lesp, G3_scratch);  __ inc(G3_scratch, wordSize);
    // G4_scratch:  object pointer
    // G1_scratch: cache entry pointer
    // G3_scratch: jvalue object on the stack
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), G4_scratch, G1_scratch, G3_scratch);
    switch (bytecode()) {  // restore tos values
    case Bytecodes::_fast_aputfield: __ pop_ptr(Otos_i); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(Otos_i); break;
    case Bytecodes::_fast_dputfield: __ pop_d(Ftos_d); break;
    case Bytecodes::_fast_fputfield: __ pop_f(Ftos_f); break;
    case Bytecodes::_fast_lputfield: __ pop_l(Otos_l); break;
    }
    __ bind(done);
  }
}
duke@435 2539
duke@435 2540 // The registers Rcache and index expected to be set before call.
duke@435 2541 // The function may destroy various registers, just not the Rcache and index registers.
// Emit the JVMTI field-modification watch for putfield / putstatic.
// The registers Rcache and index expected to be set before call.
// The function may destroy various registers, just not the Rcache and index registers.
void TemplateTable::jvmti_post_field_mod(Register Rcache, Register index, bool is_static) {
  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before we take
    // the time to call into the VM.
    Label Label1;
    assert_different_registers(Rcache, index, G1_scratch);
    AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
    __ load_contents(get_field_modification_count_addr, G1_scratch);
    __ tst(G1_scratch);
    __ br(Assembler::zero, false, Assembler::pt, Label1);
    __ delayed()->nop();

    // The Rcache and index registers have been already set.
    // This allows to eliminate this call but the Rcache and index
    // registers must be correspondingly used after this line.
    __ get_cache_and_index_at_bcp(G1_scratch, G4_scratch, 1);

    __ add(G1_scratch, in_bytes(cp_base_offset), G3_scratch);
    if (is_static) {
      // Life is simple.  Null out the object pointer.
      __ clr(G4_scratch);
    } else {
      Register Rflags = G1_scratch;
      // Life is harder.  The stack holds the value on top, followed by the
      // object.  We don't know the size of the value, though; it could be
      // one or two words depending on its type. As a result, we must find
      // the type to determine where the object is.

      Label two_word, valsizeknown;
      __ ld_ptr(G1_scratch, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
      __ mov(Lesp, G4_scratch);
      // Extract the tos-state bits to learn the value's category.
      __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
      // Make sure we don't need to mask Rflags for tosBits after the above shift
      ConstantPoolCacheEntry::verify_tosBits();
      __ cmp(Rflags, ltos);
      __ br(Assembler::equal, false, Assembler::pt, two_word);
      __ delayed()->cmp(Rflags, dtos);
      __ br(Assembler::equal, false, Assembler::pt, two_word);
      __ delayed()->nop();
      // One-word value: object is one expression-stack slot above it.
      __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(1));
      __ br(Assembler::always, false, Assembler::pt, valsizeknown);
      __ delayed()->nop();
      __ bind(two_word);

      // Two-word value (long/double): object is two slots above.
      __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(2));

      __ bind(valsizeknown);
      // setup object pointer
      __ ld_ptr(G4_scratch, 0, G4_scratch);
      __ verify_oop(G4_scratch);
    }
    // setup pointer to jvalue object: the value on top of the expression stack
    __ mov(Lesp, G1_scratch);  __ inc(G1_scratch, wordSize);
    // G4_scratch:  object pointer or NULL if static
    // G3_scratch: cache entry pointer
    // G1_scratch: jvalue object on the stack
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
               G4_scratch, G3_scratch, G1_scratch);
    // call_VM may have clobbered them; reload for the caller's use.
    __ get_cache_and_index_at_bcp(Rcache, index, 1);
    __ bind(Label1);
  }
}
duke@435 2606
// Pop the receiver oop from the expression stack into r, null-checking it
// (field access on a null receiver must throw NullPointerException).
void TemplateTable::pop_and_check_object(Register r) {
  __ pop_ptr(r);
  __ null_check(r);  // for field access must check obj.
  __ verify_oop(r);
}
duke@435 2612
// Generate the template-interpreter code for putfield / putstatic.
// byte_no selects the bytecode operand byte holding the cp-cache index;
// is_static distinguishes putstatic (no receiver) from putfield.  The
// value is popped from the expression stack, stored with the appropriate
// store instruction (oop stores go through do_oop_store for the GC
// barrier), and for putfield the bytecode is rewritten to its
// _fast_xputfield form.  Volatile fields get a LoadStore|StoreStore
// barrier before the store and a StoreLoad barrier after it.
void TemplateTable::putfield_or_static(int byte_no, bool is_static) {
  transition(vtos, vtos);
  Register Rcache = G3_scratch;
  Register index = G4_scratch;
  Register Rclass = Rcache;      // aliases Rcache: holder/receiver overwrites the cache ptr
  Register Roffset= G4_scratch;  // aliases index: field offset overwrites the cp index
  Register Rflags = G1_scratch;
  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

  resolve_cache_and_index(byte_no, noreg, Rcache, index, sizeof(u2));
  jvmti_post_field_mod(Rcache, index, is_static);
  load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);

  Assembler::Membar_mask_bits read_bits =
    Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
  Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;

  Label notVolatile, checkVolatile, exit;
  if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
    // Isolate the volatileField bit in Lscratch; the type branches below
    // re-test it in their delay slots before checkVolatile.
    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
    __ and3(Rflags, Lscratch, Lscratch);

    if (__ membar_has_effect(read_bits)) {
      // Volatile store: order prior loads and stores before it.
      __ tst(Lscratch);
      __ br(Assembler::zero, false, Assembler::pt, notVolatile);
      __ delayed()->nop();
      volatile_barrier(read_bits);
      __ bind(notVolatile);
    }
  }

  // Extract the tos-state bits to dispatch on the field type.
  __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
  // Make sure we don't need to mask Rflags for tosBits after the above shift
  ConstantPoolCacheEntry::verify_tosBits();

  // compute field type
  Label notInt, notShort, notChar, notObj, notByte, notLong, notFloat;

  if (is_static) {
    // putstatic with object type most likely, check that first
    __ cmp(Rflags, atos );
    __ br(Assembler::notEqual, false, Assembler::pt, notObj);
    __ delayed() ->cmp(Rflags, itos );   // delay slot pre-compares for the next branch

    // atos
    __ pop_ptr();
    __ verify_oop(Otos_i);

    // Oop store must go through the GC write barrier.
    do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);

    __ ba(false, checkVolatile);
    __ delayed()->tst(Lscratch);         // delay slot sets cc for checkVolatile

    __ bind(notObj);

    // cmp(Rflags, itos );               (done in the delay slot above)
    __ br(Assembler::notEqual, false, Assembler::pt, notInt);
    __ delayed() ->cmp(Rflags, btos );

    // itos
    __ pop_i();
    __ st(Otos_i, Rclass, Roffset);
    __ ba(false, checkVolatile);
    __ delayed()->tst(Lscratch);

    __ bind(notInt);

  } else {
    // putfield with int type most likely, check that first
    __ cmp(Rflags, itos );
    __ br(Assembler::notEqual, false, Assembler::pt, notInt);
    __ delayed() ->cmp(Rflags, atos );

    // itos
    __ pop_i();
    pop_and_check_object(Rclass);
    __ st(Otos_i, Rclass, Roffset);
    patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch);
    __ ba(false, checkVolatile);
    __ delayed()->tst(Lscratch);

    __ bind(notInt);
    // cmp(Rflags, atos );               (done in the delay slot above)
    __ br(Assembler::notEqual, false, Assembler::pt, notObj);
    __ delayed() ->cmp(Rflags, btos );

    // atos
    __ pop_ptr();
    pop_and_check_object(Rclass);
    __ verify_oop(Otos_i);

    // Oop store must go through the GC write barrier.
    do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);

    patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch);
    __ ba(false, checkVolatile);
    __ delayed()->tst(Lscratch);

    __ bind(notObj);
  }

  // Remaining types are handled identically for static and non-static.
  // cmp(Rflags, btos );                 (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notByte);
  __ delayed() ->cmp(Rflags, ltos );

  // btos
  __ pop_i();
  if (!is_static) pop_and_check_object(Rclass);
  __ stb(Otos_i, Rclass, Roffset);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notByte);

  // cmp(Rflags, ltos );                 (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notLong);
  __ delayed() ->cmp(Rflags, ctos );

  // ltos
  __ pop_l();
  if (!is_static) pop_and_check_object(Rclass);
  __ st_long(Otos_l, Rclass, Roffset);   // store must be atomic
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_lputfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notLong);

  // cmp(Rflags, ctos );                 (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notChar);
  __ delayed() ->cmp(Rflags, stos );

  // ctos (char)
  __ pop_i();
  if (!is_static) pop_and_check_object(Rclass);
  __ sth(Otos_i, Rclass, Roffset);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_cputfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notChar);
  // cmp(Rflags, stos );                 (done in the delay slot above)
  __ br(Assembler::notEqual, false, Assembler::pt, notShort);
  __ delayed() ->cmp(Rflags, ftos );

  // stos (short) -- same half-word store as char
  __ pop_i();
  if (!is_static) pop_and_check_object(Rclass);
  __ sth(Otos_i, Rclass, Roffset);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_sputfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notShort);
  // cmp(Rflags, ftos );                 (done in the delay slot above;
  // notZero is the same condition as notEqual on the integer cc)
  __ br(Assembler::notZero, false, Assembler::pt, notFloat);
  __ delayed()->nop();

  // ftos
  __ pop_f();
  if (!is_static) pop_and_check_object(Rclass);
  __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_fputfield, G3_scratch, G4_scratch);
  }
  __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);

  __ bind(notFloat);

  // dtos: only remaining possibility, no compare needed
  __ pop_d();
  if (!is_static) pop_and_check_object(Rclass);
  __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_dputfield, G3_scratch, G4_scratch);
  }

  __ bind(checkVolatile);
  __ tst(Lscratch);

  if (__ membar_has_effect(write_bits)) {
    // __ tst(Lscratch); in delay slot
    // Volatile store: order it before any subsequent volatile load.
    __ br(Assembler::zero, false, Assembler::pt, exit);
    __ delayed()->nop();
    volatile_barrier(Assembler::StoreLoad);
    __ bind(exit);
  }
}
duke@435 2810
// Generate code for the rewritten _fast_xputfield bytecodes.  The value is
// already in the tos register(s) (transition state -> vtos) and the
// receiver oop is on the expression stack; the cache entry is resolved.
void TemplateTable::fast_storefield(TosState state) {
  transition(state, vtos);
  Register Rcache = G3_scratch;
  Register Rclass = Rcache;      // aliases Rcache; receiver overwrites cache ptr last
  Register Roffset= G4_scratch;
  Register Rflags = G1_scratch;
  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

  jvmti_post_fast_field_mod();

  __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 1);

  Assembler::Membar_mask_bits read_bits =
    Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
  Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;

  Label notVolatile, checkVolatile, exit;
  if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
    // Isolate the volatileField bit of the flags word in Lscratch.
    __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
    __ and3(Rflags, Lscratch, Lscratch);
    if (__ membar_has_effect(read_bits)) {
      // Volatile store: order prior loads and stores before it.
      __ tst(Lscratch);
      __ br(Assembler::zero, false, Assembler::pt, notVolatile);
      __ delayed()->nop();
      volatile_barrier(read_bits);
      __ bind(notVolatile);
    }
  }

  // Field offset from the f2 word, then pop + null-check the receiver.
  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
  pop_and_check_object(Rclass);

  switch (bytecode()) {
    case Bytecodes::_fast_bputfield: __ stb(Otos_i, Rclass, Roffset); break;
    case Bytecodes::_fast_cputfield: /* fall through */
    case Bytecodes::_fast_sputfield: __ sth(Otos_i, Rclass, Roffset); break;
    case Bytecodes::_fast_iputfield: __ st(Otos_i, Rclass, Roffset); break;
    case Bytecodes::_fast_lputfield: __ st_long(Otos_l, Rclass, Roffset); break;
    case Bytecodes::_fast_fputfield:
      __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
      break;
    case Bytecodes::_fast_dputfield:
      __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
      break;
    case Bytecodes::_fast_aputfield:
      // Oop store must go through the GC write barrier.
      do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
      break;
    default:
      ShouldNotReachHere();
  }

  if (__ membar_has_effect(write_bits)) {
    // Volatile store: order it before any subsequent volatile load.
    __ tst(Lscratch);
    __ br(Assembler::zero, false, Assembler::pt, exit);
    __ delayed()->nop();
    volatile_barrier(Assembler::StoreLoad);
    __ bind(exit);
  }
}
duke@435 2871
duke@435 2872
// putfield bytecode: field write with a receiver on the stack.
void TemplateTable::putfield(int byte_no) {
  putfield_or_static(byte_no, false);
}
duke@435 2876
// putstatic bytecode: static field write, no receiver.
void TemplateTable::putstatic(int byte_no) {
  putfield_or_static(byte_no, true);
}
duke@435 2880
duke@435 2881
// Generate code for the fused _fast_xaccess bytecodes (aload_0 followed by
// a fast getfield, merged into one template).  The receiver is local 0;
// the field's cp-cache entry sits at bcp offset 2 and is already resolved.
void TemplateTable::fast_xaccess(TosState state) {
  transition(vtos, state);
  Register Rcache = G3_scratch;
  Register Roffset = G4_scratch;
  Register Rflags = G4_scratch;    // aliases Roffset; flags loaded after the field value
  Register Rreceiver = Lscratch;   // clobbered below once the receiver has been used

  // Receiver is local 0.
  __ ld_ptr(Llocals, 0, Rreceiver);

  // access constant pool cache  (is resolved)
  __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 2);
  __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset(), Roffset);
  __ add(Lbcp, 1, Lbcp);       // needed to report exception at the correct bcp

  __ verify_oop(Rreceiver);
  __ null_check(Rreceiver);
  // Only the int, float and object variants of this fused bytecode exist.
  if (state == atos) {
    __ load_heap_oop(Rreceiver, Roffset, Otos_i);
  } else if (state == itos) {
    __ ld (Rreceiver, Roffset, Otos_i) ;
  } else if (state == ftos) {
    __ ldf(FloatRegisterImpl::S, Rreceiver, Roffset, Ftos_f);
  } else {
    ShouldNotReachHere();
  }

  Assembler::Membar_mask_bits membar_bits =
    Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
  if (__ membar_has_effect(membar_bits)) {

    // Get is_volatile value in Rflags and check if membar is needed
    __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::flags_offset(), Rflags);

    // Test volatile
    Label notVolatile;
    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
    __ btst(Rflags, Lscratch);
    __ br(Assembler::zero, false, Assembler::pt, notVolatile);
    __ delayed()->nop();
    // Volatile read: order this load before subsequent loads and stores.
    volatile_barrier(membar_bits);
    __ bind(notVolatile);
  }

  __ interp_verify_oop(Otos_i, state, __FILE__, __LINE__);
  // Undo the bcp adjustment made above for exception reporting.
  __ sub(Lbcp, 1, Lbcp);
}
duke@435 2928
duke@435 2929 //----------------------------------------------------------------------------------------------------
duke@435 2930 // Calls
duke@435 2931
// Call counting is handled elsewhere on SPARC; this hook must never run.
void TemplateTable::count_calls(Register method, Register temp) {
  // implemented elsewhere
  ShouldNotReachHere();
}
duke@435 2936
// Dispatch an invokevirtual through the receiver's vtable.
// Rrecv holds the receiver klass, Rindex the vtable index, Rret the
// return address; loads the methodOop into G5_method and jumps to it.
void TemplateTable::generate_vtable_call(Register Rrecv, Register Rindex, Register Rret) {
  Register Rtemp = G4_scratch;
  Register Rcall = Rindex;
  assert_different_registers(Rcall, G5_method, Gargs, Rret);

  // get target methodOop & entry point
  const int base = instanceKlass::vtable_start_offset() * wordSize;
  if (vtableEntry::size() % 3 == 0) {
    // scale the vtable index by 12:
    // index*12 == index*4 + index*8, done with two shifts and an add
    // since 12 is not a power of two.
    int one_third = vtableEntry::size() / 3;
    __ sll(Rindex, exact_log2(one_third * 1 * wordSize), Rtemp);
    __ sll(Rindex, exact_log2(one_third * 2 * wordSize), Rindex);
    __ add(Rindex, Rtemp, Rindex);
  } else {
    // scale the vtable index by 8:
    __ sll(Rindex, exact_log2(vtableEntry::size() * wordSize), Rindex);
  }

  __ add(Rrecv, Rindex, Rrecv);
  __ ld_ptr(Rrecv, base + vtableEntry::method_offset_in_bytes(), G5_method);

  __ call_from_interpreter(Rcall, Gargs, Rret);
}
duke@435 2960
// invokevirtual bytecode: resolve the call site, then either take the
// vfinal fast path (final methods need no vtable lookup; the bytecode is
// rewritten to _fast_invokevfinal) or dispatch through the vtable.
void TemplateTable::invokevirtual(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f2_byte, "use this argument");

  Register Rscratch = G3_scratch;
  Register Rtemp = G4_scratch;
  Register Rret = Lscratch;       // holds the cache-entry flags word first,
                                  // later the return address
  Register Rrecv = G5_method;
  Label notFinal;

  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, true, false, false);
  __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore

  // Check for vfinal
  __ set((1 << ConstantPoolCacheEntry::vfinalMethod), G4_scratch);
  __ btst(Rret, G4_scratch);
  __ br(Assembler::zero, false, Assembler::pt, notFinal);
  __ delayed()->and3(Rret, 0xFF, G4_scratch);      // gets number of parameters

  // Final method: rewrite so the vtable lookup is skipped next time.
  patch_bytecode(Bytecodes::_fast_invokevfinal, Rscratch, Rtemp);

  invokevfinal_helper(Rscratch, Rret);

  __ bind(notFinal);

  __ mov(G5_method, Rscratch);  // better scratch register
  __ load_receiver(G4_scratch, O0);  // gets receiverOop
  // receiver is in O0
  __ verify_oop(O0);

  // get return address: index the dispatch table by return type
  AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
  __ set(table, Rtemp);
  __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);          // get return type
  // Make sure we don't need to mask Rret for tosBits after the above shift
  ConstantPoolCacheEntry::verify_tosBits();
  __ sll(Rret,  LogBytesPerWord, Rret);
  __ ld_ptr(Rtemp, Rret, Rret);         // get return address

  // get receiver klass
  __ null_check(O0, oopDesc::klass_offset_in_bytes());
  __ load_klass(O0, Rrecv);
  __ verify_oop(Rrecv);

  __ profile_virtual_call(Rrecv, O4);

  generate_vtable_call(Rrecv, Rscratch, Rret);
}
duke@435 3009
// Generate code for the rewritten _fast_invokevfinal bytecode (installed by
// invokevirtual's patch_bytecode above).  Skips the vfinal-flag test: the
// cache entry is known to hold the final methodOop directly, so this just
// loads it and shares the common tail in invokevfinal_helper.
duke@435 3010 void TemplateTable::fast_invokevfinal(int byte_no) {
duke@435 3011 transition(vtos, vtos);
jrose@1920 3012 assert(byte_no == f2_byte, "use this argument");
duke@435 3013
duke@435 3014 load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Lscratch, true,
jrose@1920 3015 /*is_invokevfinal*/true, false);
duke@435 3016 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
duke@435 3017 invokevfinal_helper(G3_scratch, Lscratch);
duke@435 3018 }
duke@435 3019
// Common tail for invokevirtual-of-a-final-method and _fast_invokevfinal.
// Expects the callee methodOop in G5_method and the cache-entry flags in
// Rret.  Loads and null-checks the receiver, profiles the (monomorphic)
// final call, resolves the typed return address, and emits the call.
duke@435 3020 void TemplateTable::invokevfinal_helper(Register Rscratch, Register Rret) {
duke@435 3021 Register Rtemp = G4_scratch;
duke@435 3022
duke@435 3023 __ verify_oop(G5_method);
duke@435 3024
// The receiver sits size_of_parameters slots down the expression stack.
duke@435 3025 // Load receiver from stack slot
twisti@1162 3026 __ lduh(G5_method, in_bytes(methodOopDesc::size_of_parameters_offset()), G4_scratch);
duke@435 3027 __ load_receiver(G4_scratch, O0);
duke@435 3028
duke@435 3029 // receiver NULL check
duke@435 3030 __ null_check(O0);
duke@435 3031
duke@435 3032 __ profile_final_call(O4);
duke@435 3033
// Index the per-result-type return-address table by the tosBits field of
// the flags word (same pattern as the other invoke bytecodes).
duke@435 3034 // get return address
twisti@1162 3035 AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
twisti@1162 3036 __ set(table, Rtemp);
duke@435 3037 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
duke@435 3038 // Make sure we don't need to mask Rret for tosBits after the above shift
duke@435 3039 ConstantPoolCacheEntry::verify_tosBits();
duke@435 3040 __ sll(Rret, LogBytesPerWord, Rret);
duke@435 3041 __ ld_ptr(Rtemp, Rret, Rret); // get return address
duke@435 3042
duke@435 3043
duke@435 3044 // do the call
duke@435 3045 __ call_from_interpreter(Rscratch, Gargs, Rret);
duke@435 3046 }
duke@435 3047
// Generate code for the invokespecial bytecode (constructors, private
// methods, super calls).  The target method is statically bound (cache
// entry f1), so no vtable dispatch is needed: null-check the receiver,
// profile, compute the typed return address, and call directly.
duke@435 3048 void TemplateTable::invokespecial(int byte_no) {
duke@435 3049 transition(vtos, vtos);
jrose@1920 3050 assert(byte_no == f1_byte, "use this argument");
duke@435 3051
duke@435 3052 Register Rscratch = G3_scratch;
duke@435 3053 Register Rtemp = G4_scratch;
duke@435 3054 Register Rret = Lscratch;
duke@435 3055
jrose@1920 3056 load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, /*virtual*/ false, false, false);
duke@435 3057 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
duke@435 3058
duke@435 3059 __ verify_oop(G5_method);
duke@435 3060
// Locate the receiver: it lies size_of_parameters slots down the stack.
twisti@1162 3061 __ lduh(G5_method, in_bytes(methodOopDesc::size_of_parameters_offset()), G4_scratch);
duke@435 3062 __ load_receiver(G4_scratch, O0);
duke@435 3063
duke@435 3064 // receiver NULL check
duke@435 3065 __ null_check(O0);
duke@435 3066
duke@435 3067 __ profile_call(O4);
duke@435 3068
duke@435 3069 // get return address
twisti@1162 3070 AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
twisti@1162 3071 __ set(table, Rtemp);
duke@435 3072 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
duke@435 3073 // Make sure we don't need to mask Rret for tosBits after the above shift
duke@435 3074 ConstantPoolCacheEntry::verify_tosBits();
duke@435 3075 __ sll(Rret, LogBytesPerWord, Rret);
duke@435 3076 __ ld_ptr(Rtemp, Rret, Rret); // get return address
duke@435 3077
duke@435 3078 // do the call
duke@435 3079 __ call_from_interpreter(Rscratch, Gargs, Rret);
duke@435 3080 }
duke@435 3081
// Generate code for the invokestatic bytecode.  Identical shape to
// invokespecial except there is no receiver, hence no receiver load or
// null check before the direct call.
duke@435 3082 void TemplateTable::invokestatic(int byte_no) {
duke@435 3083 transition(vtos, vtos);
jrose@1920 3084 assert(byte_no == f1_byte, "use this argument");
duke@435 3085
duke@435 3086 Register Rscratch = G3_scratch;
duke@435 3087 Register Rtemp = G4_scratch;
duke@435 3088 Register Rret = Lscratch;
duke@435 3089
jrose@1920 3090 load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, /*virtual*/ false, false, false);
duke@435 3091 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
duke@435 3092
duke@435 3093 __ verify_oop(G5_method);
duke@435 3094
duke@435 3095 __ profile_call(O4);
duke@435 3096
duke@435 3097 // get return address
twisti@1162 3098 AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
twisti@1162 3099 __ set(table, Rtemp);
duke@435 3100 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
duke@435 3101 // Make sure we don't need to mask Rret for tosBits after the above shift
duke@435 3102 ConstantPoolCacheEntry::verify_tosBits();
duke@435 3103 __ sll(Rret, LogBytesPerWord, Rret);
duke@435 3104 __ ld_ptr(Rtemp, Rret, Rret); // get return address
duke@435 3105
duke@435 3106 // do the call
duke@435 3107 __ call_from_interpreter(Rscratch, Gargs, Rret);
duke@435 3108 }
duke@435 3109
duke@435 3110
// Helper for invokeinterface when the resolved method is actually a
// java.lang.Object virtual method (the "methodInterface" special case).
// Tests the vfinal flag: a final Object method is called directly from the
// cache entry's f2 (which then holds the methodOop), otherwise a normal
// vtable dispatch is emitted.  On the final path control leaves via
// call_from_interpreter, so notFinal code is only reached when not final.
duke@435 3111 void TemplateTable::invokeinterface_object_method(Register RklassOop,
duke@435 3112 Register Rcall,
duke@435 3113 Register Rret,
duke@435 3114 Register Rflags) {
duke@435 3115 Register Rscratch = G4_scratch;
duke@435 3116 Register Rindex = Lscratch;
duke@435 3117
duke@435 3118 assert_different_registers(Rscratch, Rindex, Rret);
duke@435 3119
duke@435 3120 Label notFinal;
duke@435 3121
duke@435 3122 // Check for vfinal
duke@435 3123 __ set((1 << ConstantPoolCacheEntry::vfinalMethod), Rscratch);
duke@435 3124 __ btst(Rflags, Rscratch);
duke@435 3125 __ br(Assembler::zero, false, Assembler::pt, notFinal);
duke@435 3126 __ delayed()->nop();
duke@435 3127
duke@435 3128 __ profile_final_call(O4);
duke@435 3129
duke@435 3130 // do the call - the index (f2) contains the methodOop
duke@435 3131 assert_different_registers(G5_method, Gargs, Rcall);
duke@435 3132 __ mov(Rindex, G5_method);
duke@435 3133 __ call_from_interpreter(Rcall, Gargs, Rret);
duke@435 3134 __ bind(notFinal);
duke@435 3135
duke@435 3136 __ profile_virtual_call(RklassOop, O4);
duke@435 3137 generate_vtable_call(RklassOop, Rindex, Rret);
duke@435 3138 }
duke@435 3139
duke@435 3140
// Generate code for the invokeinterface bytecode.  After resolving the
// cache entry (interface klass in f1/Rinterface, method index in
// f2/Rindex), it: loads and klass-checks the receiver; handles the
// special case of an Object method invoked via invokeinterface; then
// linearly searches the receiver klass's itable for the target interface,
// throwing IncompatibleClassChangeError if the interface is not found and
// AbstractMethodError if the method slot is null; finally calls through
// the located itable method entry.
duke@435 3141 void TemplateTable::invokeinterface(int byte_no) {
duke@435 3142 transition(vtos, vtos);
jrose@1920 3143 assert(byte_no == f1_byte, "use this argument");
duke@435 3144
duke@435 3145 Register Rscratch = G4_scratch;
duke@435 3146 Register Rret = G3_scratch;
duke@435 3147 Register Rindex = Lscratch;
duke@435 3148 Register Rinterface = G1_scratch;
duke@435 3149 Register RklassOop = G5_method;
duke@435 3150 Register Rflags = O1;
duke@435 3151 assert_different_registers(Rscratch, G5_method);
duke@435 3152
jrose@1920 3153 load_invoke_cp_cache_entry(byte_no, Rinterface, Rindex, Rflags, /*virtual*/ false, false, false);
duke@435 3154 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
duke@435 3155
duke@435 3156 // get receiver
duke@435 3157 __ and3(Rflags, 0xFF, Rscratch); // gets number of parameters
duke@435 3158 __ load_receiver(Rscratch, O0);
duke@435 3159 __ verify_oop(O0);
duke@435 3160
duke@435 3161 __ mov(Rflags, Rret);
duke@435 3162
// Compute the typed return address before Rflags is reused as Rtemp below.
duke@435 3163 // get return address
twisti@1162 3164 AddressLiteral table(Interpreter::return_5_addrs_by_index_table());
twisti@1162 3165 __ set(table, Rscratch);
duke@435 3166 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
duke@435 3167 // Make sure we don't need to mask Rret for tosBits after the above shift
duke@435 3168 ConstantPoolCacheEntry::verify_tosBits();
duke@435 3169 __ sll(Rret, LogBytesPerWord, Rret);
duke@435 3170 __ ld_ptr(Rscratch, Rret, Rret); // get return address
duke@435 3171
duke@435 3172 // get receiver klass
duke@435 3173 __ null_check(O0, oopDesc::klass_offset_in_bytes());
coleenp@548 3174 __ load_klass(O0, RklassOop);
duke@435 3175 __ verify_oop(RklassOop);
duke@435 3176
duke@435 3177 // Special case of invokeinterface called for virtual method of
duke@435 3178 // java.lang.Object. See cpCacheOop.cpp for details.
duke@435 3179 // This code isn't produced by javac, but could be produced by
duke@435 3180 // another compliant java compiler.
duke@435 3181 Label notMethod;
duke@435 3182 __ set((1 << ConstantPoolCacheEntry::methodInterface), Rscratch);
duke@435 3183 __ btst(Rflags, Rscratch);
duke@435 3184 __ br(Assembler::zero, false, Assembler::pt, notMethod);
duke@435 3185 __ delayed()->nop();
duke@435 3186
// Object-method path dispatches (or vtable-calls) and does not fall through.
duke@435 3187 invokeinterface_object_method(RklassOop, Rinterface, Rret, Rflags);
duke@435 3188
duke@435 3189 __ bind(notMethod);
duke@435 3190
duke@435 3191 __ profile_virtual_call(RklassOop, O4);
duke@435 3192
duke@435 3193 //
duke@435 3194 // find entry point to call
duke@435 3195 //
duke@435 3196
duke@435 3197 // compute start of first itableOffsetEntry (which is at end of vtable)
duke@435 3198 const int base = instanceKlass::vtable_start_offset() * wordSize;
duke@435 3199 Label search;
duke@435 3200 Register Rtemp = Rflags;
duke@435 3201
// Rtemp = vtable length (in entries); the itable begins after the vtable,
// aligned per align_object_offset.
twisti@1162 3202 __ ld(RklassOop, instanceKlass::vtable_length_offset() * wordSize, Rtemp);
duke@435 3203 if (align_object_offset(1) > 1) {
duke@435 3204 __ round_to(Rtemp, align_object_offset(1));
duke@435 3205 }
duke@435 3206 __ sll(Rtemp, LogBytesPerWord, Rtemp); // Rscratch *= 4;
duke@435 3207 if (Assembler::is_simm13(base)) {
duke@435 3208 __ add(Rtemp, base, Rtemp);
duke@435 3209 } else {
duke@435 3210 __ set(base, Rscratch);
duke@435 3211 __ add(Rscratch, Rtemp, Rtemp);
duke@435 3212 }
// Rscratch now points at the first itableOffsetEntry of the receiver klass.
duke@435 3213 __ add(RklassOop, Rtemp, Rscratch);
duke@435 3214
duke@435 3215 __ bind(search);
duke@435 3216
duke@435 3217 __ ld_ptr(Rscratch, itableOffsetEntry::interface_offset_in_bytes(), Rtemp);
duke@435 3218 {
duke@435 3219 Label ok;
duke@435 3220
duke@435 3221 // Check that entry is non-null. Null entries are probably a bytecode
twisti@1040 3222 // problem. If the interface isn't implemented by the receiver class,
duke@435 3223 // the VM should throw IncompatibleClassChangeError. linkResolver checks
duke@435 3224 // this too but that's only if the entry isn't already resolved, so we
duke@435 3225 // need to check again.
duke@435 3226 __ br_notnull( Rtemp, false, Assembler::pt, ok);
duke@435 3227 __ delayed()->nop();
duke@435 3228 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
duke@435 3229 __ should_not_reach_here();
duke@435 3230 __ bind(ok);
duke@435 3231 __ verify_oop(Rtemp);
duke@435 3232 }
duke@435 3233
duke@435 3234 __ verify_oop(Rinterface);
duke@435 3235
duke@435 3236 __ cmp(Rinterface, Rtemp);
// Annul bit set: the delay-slot increment to the next itableOffsetEntry is
// executed only when the branch is taken (i.e. when we keep searching).
duke@435 3237 __ brx(Assembler::notEqual, true, Assembler::pn, search);
duke@435 3238 __ delayed()->add(Rscratch, itableOffsetEntry::size() * wordSize, Rscratch);
duke@435 3239
duke@435 3240 // entry found and Rscratch points to it
duke@435 3241 __ ld(Rscratch, itableOffsetEntry::offset_offset_in_bytes(), Rscratch);
duke@435 3242
duke@435 3243 assert(itableMethodEntry::method_offset_in_bytes() == 0, "adjust instruction below");
duke@435 3244 __ sll(Rindex, exact_log2(itableMethodEntry::size() * wordSize), Rindex); // Rindex *= 8;
duke@435 3245 __ add(Rscratch, Rindex, Rscratch);
duke@435 3246 __ ld_ptr(RklassOop, Rscratch, G5_method);
duke@435 3247
// A null slot means the class doesn't implement this method (e.g. the class
// is abstract or was recompiled): throw AbstractMethodError.
duke@435 3248 // Check for abstract method error.
duke@435 3249 {
duke@435 3250 Label ok;
duke@435 3251 __ tst(G5_method);
duke@435 3252 __ brx(Assembler::notZero, false, Assembler::pt, ok);
duke@435 3253 __ delayed()->nop();
duke@435 3254 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
duke@435 3255 __ should_not_reach_here();
duke@435 3256 __ bind(ok);
duke@435 3257 }
duke@435 3258
duke@435 3259 Register Rcall = Rinterface;
duke@435 3260 assert_different_registers(Rcall, G5_method, Gargs, Rret);
duke@435 3261
duke@435 3262 __ verify_oop(G5_method);
duke@435 3263 __ call_from_interpreter(Rcall, Gargs, Rret);
duke@435 3264
duke@435 3265 }
duke@435 3266
duke@435 3267
// Generate code for the invokedynamic bytecode (JSR 292).  If invokedynamic
// support is disabled the bytecode traps with IncompatibleClassChangeError.
// Otherwise the CallSite object is loaded from the cache entry (f1), its
// target method handle is fetched and null-checked, and control jumps to
// the method-handle entry rather than doing a normal interpreter call.
jrose@1161 3268 void TemplateTable::invokedynamic(int byte_no) {
jrose@1161 3269 transition(vtos, vtos);
jrose@1920 3270 assert(byte_no == f1_oop, "use this argument");
jrose@1161 3271
jrose@1161 3272 if (!EnableInvokeDynamic) {
jrose@1161 3273 // We should not encounter this bytecode if !EnableInvokeDynamic.
jrose@1161 3274 // The verifier will stop it. However, if we get past the verifier,
jrose@1161 3275 // this will stop the thread in a reasonable way, without crashing the JVM.
jrose@1161 3276 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
jrose@1161 3277 InterpreterRuntime::throw_IncompatibleClassChangeError));
jrose@1161 3278 // the call_VM checks for exception, so we should never return here.
jrose@1161 3279 __ should_not_reach_here();
jrose@1161 3280 return;
jrose@1161 3281 }
jrose@1161 3282
twisti@1858 3283 // G5: CallSite object (f1)
twisti@1858 3284 // XX: unused (f2)
twisti@1858 3285 // XX: flags (unused)
twisti@1858 3286
twisti@1858 3287 Register G5_callsite = G5_method;
twisti@1858 3288 Register Rscratch = G3_scratch;
twisti@1858 3289 Register Rtemp = G1_scratch;
twisti@1858 3290 Register Rret = Lscratch;
twisti@1858 3291
jrose@1920 3292 load_invoke_cp_cache_entry(byte_no, G5_callsite, noreg, Rret,
jrose@1920 3293 /*virtual*/ false, /*vfinal*/ false, /*indy*/ true);
twisti@1858 3294 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
twisti@1858 3295
twisti@1858 3296 __ verify_oop(G5_callsite);
twisti@1858 3297
twisti@1858 3298 // profile this call
twisti@1858 3299 __ profile_call(O4);
twisti@1858 3300
twisti@1858 3301 // get return address
twisti@1858 3302 AddressLiteral table(Interpreter::return_5_addrs_by_index_table());
twisti@1858 3303 __ set(table, Rtemp);
twisti@1858 3304 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
twisti@1858 3305 // Make sure we don't need to mask Rret for tosBits after the above shift
twisti@1858 3306 ConstantPoolCacheEntry::verify_tosBits();
twisti@1858 3307 __ sll(Rret, LogBytesPerWord, Rret);
twisti@1858 3308 __ ld_ptr(Rtemp, Rret, Rret); // get return address
twisti@1858 3309
// Load CallSite.target; delayed_value resolves the field offset lazily at
// code-patch time since java.lang.invoke classes may not be loaded yet.
jrose@2639 3310 __ load_heap_oop(G5_callsite, __ delayed_value(java_lang_invoke_CallSite::target_offset_in_bytes, Rscratch), G3_method_handle);
twisti@1858 3311 __ null_check(G3_method_handle);
twisti@1858 3312
twisti@1858 3313 // Adjust Rret first so Llast_SP can be same as Rret
twisti@1858 3314 __ add(Rret, -frame::pc_return_offset, O7);
twisti@1858 3315 __ add(Lesp, BytesPerWord, Gargs); // setup parameter pointer
twisti@1858 3316 __ jump_to_method_handle_entry(G3_method_handle, Rtemp, /* emit_delayed_nop */ false);
twisti@1858 3317 // Record SP so we can remove any stack space allocated by adapter transition
twisti@1858 3318 __ delayed()->mov(SP, Llast_SP);
jrose@1161 3319 }
jrose@1161 3320
jrose@1161 3321
duke@435 3322 //----------------------------------------------------------------------------------------------------
duke@435 3323 // Allocation
duke@435 3324
// Generate code for the new bytecode.  Fast path: if the klass is resolved,
// fully initialized, and has no slow-path bits in its layout helper, the
// instance is allocated inline — first in the thread's TLAB, else (when the
// heap supports it) in the shared eden via a CAS loop — then its fields are
// zeroed and the header (mark word + klass) is installed.  Any failure
// falls through to slow_case, which calls InterpreterRuntime::_new.
duke@435 3325 void TemplateTable::_new() {
duke@435 3326 transition(vtos, atos);
duke@435 3327
duke@435 3328 Label slow_case;
duke@435 3329 Label done;
duke@435 3330 Label initialize_header;
duke@435 3331 Label initialize_object; // including clearing the fields
duke@435 3332
duke@435 3333 Register RallocatedObject = Otos_i;
duke@435 3334 Register RinstanceKlass = O1;
duke@435 3335 Register Roffset = O3;
duke@435 3336 Register Rscratch = O4;
duke@435 3337
duke@435 3338 __ get_2_byte_integer_at_bcp(1, Rscratch, Roffset, InterpreterMacroAssembler::Unsigned);
duke@435 3339 __ get_cpool_and_tags(Rscratch, G3_scratch);
duke@435 3340 // make sure the class we're about to instantiate has been resolved
bobv@2036 3341 // This is done before loading instanceKlass to be consistent with the order
bobv@2036 3342 // how Constant Pool is updated (see constantPoolOopDesc::klass_at_put)
duke@435 3343 __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
duke@435 3344 __ ldub(G3_scratch, Roffset, G3_scratch);
duke@435 3345 __ cmp(G3_scratch, JVM_CONSTANT_Class);
duke@435 3346 __ br(Assembler::notEqual, false, Assembler::pn, slow_case);
duke@435 3347 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
bobv@2036 3348 // get instanceKlass
duke@435 3349 //__ sll(Roffset, LogBytesPerWord, Roffset); // executed in delay slot
duke@435 3350 __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
duke@435 3351 __ ld_ptr(Rscratch, Roffset, RinstanceKlass);
duke@435 3352
// An under-initialized klass must go through the runtime so <clinit>
// ordering is respected.  The delay slot preloads the layout helper
// (instance size + slow-path bits) into Roffset for the fast path.
duke@435 3353 // make sure klass is fully initialized:
duke@435 3354 __ ld(RinstanceKlass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc), G3_scratch);
duke@435 3355 __ cmp(G3_scratch, instanceKlass::fully_initialized);
duke@435 3356 __ br(Assembler::notEqual, false, Assembler::pn, slow_case);
duke@435 3357 __ delayed()->ld(RinstanceKlass, Klass::layout_helper_offset_in_bytes() + sizeof(oopDesc), Roffset);
duke@435 3358
duke@435 3359 // get instance_size in instanceKlass (already aligned)
duke@435 3360 //__ ld(RinstanceKlass, Klass::layout_helper_offset_in_bytes() + sizeof(oopDesc), Roffset);
duke@435 3361
duke@435 3362 // make sure klass does not have has_finalizer, or is abstract, or interface or java/lang/Class
duke@435 3363 __ btst(Klass::_lh_instance_slow_path_bit, Roffset);
duke@435 3364 __ br(Assembler::notZero, false, Assembler::pn, slow_case);
duke@435 3365 __ delayed()->nop();
duke@435 3366
duke@435 3367 // allocate the instance
duke@435 3368 // 1) Try to allocate in the TLAB
duke@435 3369 // 2) if fail, and the TLAB is not full enough to discard, allocate in the shared Eden
duke@435 3370 // 3) if the above fails (or is not applicable), go to a slow case
duke@435 3371 // (creates a new TLAB, etc.)
duke@435 3372
duke@435 3373 const bool allow_shared_alloc =
duke@435 3374 Universe::heap()->supports_inline_contig_alloc() && !CMSIncrementalMode;
duke@435 3375
duke@435 3376 if(UseTLAB) {
duke@435 3377 Register RoldTopValue = RallocatedObject;
duke@435 3378 Register RtopAddr = G3_scratch, RtlabWasteLimitValue = G3_scratch;
duke@435 3379 Register RnewTopValue = G1_scratch;
duke@435 3380 Register RendValue = Rscratch;
duke@435 3381 Register RfreeValue = RnewTopValue;
duke@435 3382
// TLAB bump-pointer allocation: no CAS needed since the TLAB is
// thread-private.
duke@435 3383 // check if we can allocate in the TLAB
duke@435 3384 __ ld_ptr(G2_thread, in_bytes(JavaThread::tlab_top_offset()), RoldTopValue); // sets up RalocatedObject
duke@435 3385 __ ld_ptr(G2_thread, in_bytes(JavaThread::tlab_end_offset()), RendValue);
duke@435 3386 __ add(RoldTopValue, Roffset, RnewTopValue);
duke@435 3387
duke@435 3388 // if there is enough space, we do not CAS and do not clear
duke@435 3389 __ cmp(RnewTopValue, RendValue);
duke@435 3390 if(ZeroTLAB) {
duke@435 3391 // the fields have already been cleared
duke@435 3392 __ brx(Assembler::lessEqualUnsigned, true, Assembler::pt, initialize_header);
duke@435 3393 } else {
duke@435 3394 // initialize both the header and fields
duke@435 3395 __ brx(Assembler::lessEqualUnsigned, true, Assembler::pt, initialize_object);
duke@435 3396 }
// Annulled delay slot: the TLAB top is only bumped when the branch is
// taken, i.e. when the allocation actually fits.
duke@435 3397 __ delayed()->st_ptr(RnewTopValue, G2_thread, in_bytes(JavaThread::tlab_top_offset()));
duke@435 3398
duke@435 3399 if (allow_shared_alloc) {
phh@2423 3400 // Check if tlab should be discarded (refill_waste_limit >= free)
phh@2423 3401 __ ld_ptr(G2_thread, in_bytes(JavaThread::tlab_refill_waste_limit_offset()), RtlabWasteLimitValue);
phh@2423 3402 __ sub(RendValue, RoldTopValue, RfreeValue);
duke@435 3403 #ifdef _LP64
phh@2423 3404 __ srlx(RfreeValue, LogHeapWordSize, RfreeValue);
duke@435 3405 #else
phh@2423 3406 __ srl(RfreeValue, LogHeapWordSize, RfreeValue);
duke@435 3407 #endif
phh@2423 3408 __ cmp(RtlabWasteLimitValue, RfreeValue);
phh@2423 3409 __ brx(Assembler::greaterEqualUnsigned, false, Assembler::pt, slow_case); // tlab waste is small
phh@2423 3410 __ delayed()->nop();
phh@2423 3411
phh@2423 3412 // increment waste limit to prevent getting stuck on this slow path
phh@2423 3413 __ add(RtlabWasteLimitValue, ThreadLocalAllocBuffer::refill_waste_limit_increment(), RtlabWasteLimitValue);
phh@2423 3414 __ st_ptr(RtlabWasteLimitValue, G2_thread, in_bytes(JavaThread::tlab_refill_waste_limit_offset()));
duke@435 3415 } else {
duke@435 3416 // No allocation in the shared eden.
duke@435 3417 __ br(Assembler::always, false, Assembler::pt, slow_case);
duke@435 3418 __ delayed()->nop();
duke@435 3419 }
duke@435 3420 }
duke@435 3421
// Shared-eden allocation: bump the global heap top with a CAS, retrying if
// another thread won the race.
duke@435 3422 // Allocation in the shared Eden
duke@435 3423 if (allow_shared_alloc) {
duke@435 3424 Register RoldTopValue = G1_scratch;
duke@435 3425 Register RtopAddr = G3_scratch;
duke@435 3426 Register RnewTopValue = RallocatedObject;
duke@435 3427 Register RendValue = Rscratch;
duke@435 3428
duke@435 3429 __ set((intptr_t)Universe::heap()->top_addr(), RtopAddr);
duke@435 3430
duke@435 3431 Label retry;
duke@435 3432 __ bind(retry);
duke@435 3433 __ set((intptr_t)Universe::heap()->end_addr(), RendValue);
duke@435 3434 __ ld_ptr(RendValue, 0, RendValue);
duke@435 3435 __ ld_ptr(RtopAddr, 0, RoldTopValue);
duke@435 3436 __ add(RoldTopValue, Roffset, RnewTopValue);
duke@435 3437
duke@435 3438 // RnewTopValue contains the top address after the new object
duke@435 3439 // has been allocated.
duke@435 3440 __ cmp(RnewTopValue, RendValue);
duke@435 3441 __ brx(Assembler::greaterUnsigned, false, Assembler::pn, slow_case);
duke@435 3442 __ delayed()->nop();
duke@435 3443
duke@435 3444 __ casx_under_lock(RtopAddr, RoldTopValue, RnewTopValue,
duke@435 3445 VM_Version::v9_instructions_work() ? NULL :
duke@435 3446 (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
duke@435 3447
duke@435 3448 // if someone beat us on the allocation, try again, otherwise continue
duke@435 3449 __ cmp(RoldTopValue, RnewTopValue);
duke@435 3450 __ brx(Assembler::notEqual, false, Assembler::pn, retry);
duke@435 3451 __ delayed()->nop();
phh@2423 3452
phh@2423 3453 // bump total bytes allocated by this thread
phh@2447 3454 // RoldTopValue and RtopAddr are dead, so can use G1 and G3
phh@2447 3455 __ incr_allocated_bytes(Roffset, G1_scratch, G3_scratch);
duke@435 3456 }
duke@435 3457
duke@435 3458 if (UseTLAB || Universe::heap()->supports_inline_contig_alloc()) {
// Zero the instance fields word-by-word, from the end of the header to the
// end of the object (Roffset counts down the remaining bytes).
duke@435 3459 // clear object fields
duke@435 3460 __ bind(initialize_object);
duke@435 3461 __ deccc(Roffset, sizeof(oopDesc));
duke@435 3462 __ br(Assembler::zero, false, Assembler::pt, initialize_header);
duke@435 3463 __ delayed()->add(RallocatedObject, sizeof(oopDesc), G3_scratch);
duke@435 3464
duke@435 3465 // initialize remaining object fields
duke@435 3466 { Label loop;
duke@435 3467 __ subcc(Roffset, wordSize, Roffset);
duke@435 3468 __ bind(loop);
duke@435 3469 //__ subcc(Roffset, wordSize, Roffset); // executed above loop or in delay slot
duke@435 3470 __ st_ptr(G0, G3_scratch, Roffset);
duke@435 3471 __ br(Assembler::notEqual, false, Assembler::pt, loop);
duke@435 3472 __ delayed()->subcc(Roffset, wordSize, Roffset);
duke@435 3473 }
duke@435 3474 __ br(Assembler::always, false, Assembler::pt, initialize_header);
duke@435 3475 __ delayed()->nop();
duke@435 3476 }
duke@435 3477
// Slow path: hand the cp index and constant pool to the runtime, which
// performs resolution, initialization, and allocation.
duke@435 3478 // slow case
duke@435 3479 __ bind(slow_case);
duke@435 3480 __ get_2_byte_integer_at_bcp(1, G3_scratch, O2, InterpreterMacroAssembler::Unsigned);
duke@435 3481 __ get_constant_pool(O1);
duke@435 3482
duke@435 3483 call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), O1, O2);
duke@435 3484
duke@435 3485 __ ba(false, done);
duke@435 3486 __ delayed()->nop();
duke@435 3487
duke@435 3488 // Initialize the header: mark, klass
duke@435 3489 __ bind(initialize_header);
duke@435 3490
duke@435 3491 if (UseBiasedLocking) {
duke@435 3492 __ ld_ptr(RinstanceKlass, Klass::prototype_header_offset_in_bytes() + sizeof(oopDesc), G4_scratch);
duke@435 3493 } else {
duke@435 3494 __ set((intptr_t)markOopDesc::prototype(), G4_scratch);
duke@435 3495 }
duke@435 3496 __ st_ptr(G4_scratch, RallocatedObject, oopDesc::mark_offset_in_bytes()); // mark
coleenp@602 3497 __ store_klass_gap(G0, RallocatedObject); // klass gap if compressed
coleenp@602 3498 __ store_klass(RinstanceKlass, RallocatedObject); // klass (last for cms)
duke@435 3499
duke@435 3500 {
duke@435 3501 SkipIfEqual skip_if(
duke@435 3502 _masm, G4_scratch, &DTraceAllocProbes, Assembler::zero);
duke@435 3503 // Trigger dtrace event
duke@435 3504 __ push(atos);
duke@435 3505 __ call_VM_leaf(noreg,
duke@435 3506 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), O0);
duke@435 3507 __ pop(atos);
duke@435 3508 }
duke@435 3509
duke@435 3510 // continue
duke@435 3511 __ bind(done);
duke@435 3512 }
duke@435 3513
duke@435 3514
duke@435 3515
// Generate code for the newarray bytecode (primitive arrays).  The element
// type code is the unsigned byte after the opcode; allocation is always
// delegated to the runtime.
duke@435 3516 void TemplateTable::newarray() {
duke@435 3517 transition(itos, atos);
duke@435 3518 __ ldub(Lbcp, 1, O1);
duke@435 3519 call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray), O1, Otos_i);
duke@435 3520 }
duke@435 3521
duke@435 3522
// Generate code for the anewarray bytecode (object arrays).  Passes the
// constant pool, the 2-byte cp index, and the length (Otos_i) to the
// runtime, which resolves the element klass and allocates.
duke@435 3523 void TemplateTable::anewarray() {
duke@435 3524 transition(itos, atos);
duke@435 3525 __ get_constant_pool(O1);
duke@435 3526 __ get_2_byte_integer_at_bcp(1, G4_scratch, O2, InterpreterMacroAssembler::Unsigned);
duke@435 3527 call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray), O1, O2, Otos_i);
duke@435 3528 }
duke@435 3529
duke@435 3530
// Generate code for the arraylength bytecode.  Null-checks the array on
// top of stack (throwing NullPointerException) and loads its length field;
// the length load sits in the branch delay slot of the non-null test.
duke@435 3531 void TemplateTable::arraylength() {
duke@435 3532 transition(atos, itos);
duke@435 3533 Label ok;
duke@435 3534 __ verify_oop(Otos_i);
duke@435 3535 __ tst(Otos_i);
duke@435 3536 __ throw_if_not_1_x( Assembler::notZero, ok );
duke@435 3537 __ delayed()->ld(Otos_i, arrayOopDesc::length_offset_in_bytes(), Otos_i);
duke@435 3538 __ throw_if_not_2( Interpreter::_throw_NullPointerException_entry, G3_scratch, ok);
duke@435 3539 }
duke@435 3540
duke@435 3541
// Generate code for the checkcast bytecode.  Null casts always succeed
// (and are profiled as null-seen).  If the cp entry is already a resolved
// Class ("quickened"), the target klass is loaded directly; otherwise the
// runtime quickens it first.  A fast inline subtype check follows, with
// ClassCastException thrown on failure.  The value stays on the stack.
duke@435 3542 void TemplateTable::checkcast() {
duke@435 3543 transition(atos, atos);
duke@435 3544 Label done, is_null, quicked, cast_ok, resolved;
duke@435 3545 Register Roffset = G1_scratch;
duke@435 3546 Register RobjKlass = O5;
duke@435 3547 Register RspecifiedKlass = O4;
duke@435 3548
duke@435 3549 // Check for casting a NULL
duke@435 3550 __ br_null(Otos_i, false, Assembler::pn, is_null);
duke@435 3551 __ delayed()->nop();
duke@435 3552
duke@435 3553 // Get value klass in RobjKlass
coleenp@548 3554 __ load_klass(Otos_i, RobjKlass); // get value klass
duke@435 3555
duke@435 3556 // Get constant pool tag
duke@435 3557 __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);
duke@435 3558
duke@435 3559 // See if the checkcast has been quickened
duke@435 3560 __ get_cpool_and_tags(Lscratch, G3_scratch);
duke@435 3561 __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
duke@435 3562 __ ldub(G3_scratch, Roffset, G3_scratch);
duke@435 3563 __ cmp(G3_scratch, JVM_CONSTANT_Class);
duke@435 3564 __ br(Assembler::equal, true, Assembler::pt, quicked);
duke@435 3565 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
duke@435 3566
// Not quickened: the value is saved across the VM call because the runtime
// may GC while resolving the class.
duke@435 3567 __ push_ptr(); // save receiver for result, and for GC
duke@435 3568 call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
duke@435 3569 __ pop_ptr(Otos_i, G3_scratch); // restore receiver
duke@435 3570
duke@435 3571 __ br(Assembler::always, false, Assembler::pt, resolved);
coleenp@548 3572 __ delayed()->nop();
duke@435 3573
duke@435 3574 // Extract target class from constant pool
duke@435 3575 __ bind(quicked);
duke@435 3576 __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
duke@435 3577 __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
duke@435 3578 __ bind(resolved);
// Reload the value klass: the VM call above may have moved the object.
coleenp@548 3579 __ load_klass(Otos_i, RobjKlass); // get value klass
duke@435 3580
duke@435 3581 // Generate a fast subtype check. Branch to cast_ok if no
duke@435 3582 // failure. Throw exception if failure.
duke@435 3583 __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, cast_ok );
duke@435 3584
duke@435 3585 // Not a subtype; so must throw exception
duke@435 3586 __ throw_if_not_x( Assembler::never, Interpreter::_throw_ClassCastException_entry, G3_scratch );
duke@435 3587
duke@435 3588 __ bind(cast_ok);
duke@435 3589
duke@435 3590 if (ProfileInterpreter) {
duke@435 3591 __ ba(false, done);
duke@435 3592 __ delayed()->nop();
duke@435 3593 }
duke@435 3594 __ bind(is_null);
duke@435 3595 __ profile_null_seen(G3_scratch);
duke@435 3596 __ bind(done);
duke@435 3597 }
duke@435 3598
duke@435 3599
// Generate code for the instanceof bytecode.  Same resolve/quicken shape
// as checkcast, but instead of throwing on failure it pushes 1 (subtype)
// or 0 (not a subtype / null) as the int result.
duke@435 3600 void TemplateTable::instanceof() {
duke@435 3601 Label done, is_null, quicked, resolved;
duke@435 3602 transition(atos, itos);
duke@435 3603 Register Roffset = G1_scratch;
duke@435 3604 Register RobjKlass = O5;
duke@435 3605 Register RspecifiedKlass = O4;
duke@435 3606
// instanceof on null is 0; profiled via profile_null_seen at is_null.
duke@435 3607 // Check for casting a NULL
duke@435 3608 __ br_null(Otos_i, false, Assembler::pt, is_null);
duke@435 3609 __ delayed()->nop();
duke@435 3610
duke@435 3611 // Get value klass in RobjKlass
coleenp@548 3612 __ load_klass(Otos_i, RobjKlass); // get value klass
duke@435 3613
duke@435 3614 // Get constant pool tag
duke@435 3615 __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);
duke@435 3616
// See if the instanceof's cp entry has been quickened (comment mirrors
// checkcast, from which this sequence is shared).
duke@435 3617 // See if the checkcast has been quickened
duke@435 3618 __ get_cpool_and_tags(Lscratch, G3_scratch);
duke@435 3619 __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
duke@435 3620 __ ldub(G3_scratch, Roffset, G3_scratch);
duke@435 3621 __ cmp(G3_scratch, JVM_CONSTANT_Class);
duke@435 3622 __ br(Assembler::equal, true, Assembler::pt, quicked);
duke@435 3623 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
duke@435 3624
duke@435 3625 __ push_ptr(); // save receiver for result, and for GC
duke@435 3626 call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
duke@435 3627 __ pop_ptr(Otos_i, G3_scratch); // restore receiver
duke@435 3628
duke@435 3629 __ br(Assembler::always, false, Assembler::pt, resolved);
coleenp@548 3630 __ delayed()->nop();
duke@435 3631
duke@435 3632
duke@435 3633 // Extract target class from constant pool
duke@435 3634 __ bind(quicked);
duke@435 3635 __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
duke@435 3636 __ get_constant_pool(Lscratch);
duke@435 3637 __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
duke@435 3638 __ bind(resolved);
// Reload the value klass after the possible VM call (object may have moved).
coleenp@548 3639 __ load_klass(Otos_i, RobjKlass); // get value klass
duke@435 3640
duke@435 3641 // Generate a fast subtype check. Branch to cast_ok if no
duke@435 3642 // failure. Return 0 if failure.
duke@435 3643 __ or3(G0, 1, Otos_i); // set result assuming quick tests succeed
duke@435 3644 __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, done );
duke@435 3645 // Not a subtype; return 0;
duke@435 3646 __ clr( Otos_i );
duke@435 3647
duke@435 3648 if (ProfileInterpreter) {
duke@435 3649 __ ba(false, done);
duke@435 3650 __ delayed()->nop();
duke@435 3651 }
duke@435 3652 __ bind(is_null);
duke@435 3653 __ profile_null_seen(G3_scratch);
duke@435 3654 __ bind(done);
duke@435 3655 }
duke@435 3656
// Generate code for the internal _breakpoint pseudo-bytecode (installed by
// the debugger).  Asks the runtime for the original bytecode, posts the
// JVMTI breakpoint event, then dispatches the original bytecode normally.
duke@435 3657 void TemplateTable::_breakpoint() {
duke@435 3658
duke@435 3659 // Note: We get here even if we are single stepping..
duke@435 3660 // jbug insists on setting breakpoints at every bytecode
duke@435 3661 // even if we are in single step mode.
duke@435 3662
duke@435 3663 transition(vtos, vtos);
duke@435 3664 // get the unpatched byte code
duke@435 3665 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::get_original_bytecode_at), Lmethod, Lbcp);
duke@435 3666 __ mov(O0, Lbyte_code);
duke@435 3667
duke@435 3668 // post the breakpoint event
duke@435 3669 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint), Lmethod, Lbcp);
duke@435 3670
duke@435 3671 // complete the execution of original bytecode
duke@435 3672 __ dispatch_normal(vtos);
duke@435 3673 }
duke@435 3674
duke@435 3675
duke@435 3676 //----------------------------------------------------------------------------------------------------
duke@435 3677 // Exceptions
duke@435 3678
// Generate code for the athrow bytecode.  The exception oop is already in
// Otos_i (== O0 == Oexception, as the throw entry expects); null-check it
// (null -> NullPointerException) and jump unconditionally to the
// interpreter's exception-dispatch entry.
duke@435 3679 void TemplateTable::athrow() {
duke@435 3680 transition(atos, vtos);
duke@435 3681
duke@435 3682 // This works because exception is cached in Otos_i which is same as O0,
duke@435 3683 // which is same as what throw_exception_entry_expects
duke@435 3684 assert(Otos_i == Oexception, "see explanation above");
duke@435 3685
duke@435 3686 __ verify_oop(Otos_i);
duke@435 3687 __ null_check(Otos_i);
duke@435 3688 __ throw_if_not_x(Assembler::never, Interpreter::throw_exception_entry(), G3_scratch);
duke@435 3689 }
duke@435 3690
duke@435 3691
duke@435 3692 //----------------------------------------------------------------------------------------------------
duke@435 3693 // Synchronization
duke@435 3694
duke@435 3695
duke@435 3696 // See frame_sparc.hpp for monitor block layout.
duke@435 3697 // Monitor elements are dynamically allocated by growing stack as needed.
duke@435 3698
// Generate code for the monitorenter bytecode: null-check the object in
// Otos_i, scan the frame's monitor block for a reusable free slot (or grow
// the block by one entry), record the object in the slot, and lock it.
void TemplateTable::monitorenter() {
  transition(atos, vtos);
  __ verify_oop(Otos_i);
  // Try to acquire a lock on the object
  // Repeat until succeeded (i.e., until
  // monitorenter returns true).

  // Throw NullPointerException if the object to lock is null.
  { Label ok;
    __ tst(Otos_i);
    __ throw_if_not_1_x( Assembler::notZero, ok);
    __ delayed()->mov(Otos_i, Lscratch); // save obj (delay slot; executed either way)
    __ throw_if_not_2( Interpreter::_throw_NullPointerException_entry, G3_scratch, ok);
  }

  assert(O0 == Otos_i, "Be sure where the object to lock is");

  // find a free slot in the monitor block


  // initialize entry pointer
  __ clr(O1); // points to free slot or NULL

  // Walk the monitor block from Lmonitors (most recent) up to the
  // top-most monitor (O2). O3 is the cursor; O4 holds the obj field of
  // the entry being examined (loaded in the loop-branch delay slot below).
  {
    Label entry, loop, exit;
    __ add( __ top_most_monitor(), O2 ); // last one to check
    __ ba( false, entry );
    __ delayed()->mov( Lmonitors, O3 ); // first one to check (delay slot)


    __ bind( loop );

    __ verify_oop(O4); // verify each monitor's oop
    __ tst(O4); // is this entry unused?
    // Record a free slot in O1: conditional move on V9, otherwise an
    // annulled branch whose delay slot does the move only when taken.
    if (VM_Version::v9_instructions_work())
      __ movcc( Assembler::zero, false, Assembler::ptr_cc, O3, O1);
    else {
      Label L;
      __ br( Assembler::zero, true, Assembler::pn, L );
      __ delayed()->mov(O3, O1); // remember this one if match
      __ bind(L);
    }

    __ cmp(O4, O0); // check if current entry is for same object
    __ brx( Assembler::equal, false, Assembler::pn, exit );
    __ delayed()->inc( O3, frame::interpreter_frame_monitor_size() * wordSize ); // check next one (delay slot)

    __ bind( entry );

    __ cmp( O3, O2 );
    __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, loop );
    // Delay slot (annulled if fall-through): preload obj field for the next iteration.
    __ delayed()->ld_ptr(O3, BasicObjectLock::obj_offset_in_bytes(), O4);

    __ bind( exit );
  }

  // If no free slot was found (O1 still NULL), grow the monitor block by
  // one entry; the new entry becomes the slot to use.
  { Label allocated;

    // found free slot?
    __ br_notnull(O1, false, Assembler::pn, allocated);
    __ delayed()->nop();

    __ add_monitor_to_stack( false, O2, O3 );
    __ mov(Lmonitors, O1);

    __ bind(allocated);
  }

  // Increment bcp to point to the next bytecode, so exception handling for async. exceptions work correctly.
  // The object has already been popped from the stack, so the expression stack looks correct.
  __ inc(Lbcp);

  __ st_ptr(O0, O1, BasicObjectLock::obj_offset_in_bytes()); // store object
  __ lock_object(O1, O0);

  // check if there's enough space on the stack for the monitors after locking
  __ generate_stack_overflow_check(0);

  // The bcp has already been incremented. Just need to dispatch to next instruction.
  __ dispatch_next(vtos);
}
duke@435 3779
duke@435 3780
// Generate code for the monitorexit bytecode: null-check the object in
// Otos_i, find the monitor-block entry that holds it, and unlock it.
// If no matching entry exists, throw IllegalMonitorStateException.
void TemplateTable::monitorexit() {
  transition(atos, vtos);
  __ verify_oop(Otos_i);
  __ tst(Otos_i);
  // Throw NullPointerException if the object to unlock is null.
  __ throw_if_not_x( Assembler::notZero, Interpreter::_throw_NullPointerException_entry, G3_scratch );

  assert(O0 == Otos_i, "just checking");

  // Scan the monitor block from Lmonitors (most recent) up to the
  // top-most monitor (O2), comparing each entry's obj field (loaded into
  // O4 in the loop-branch delay slot) against the object in O0.
  { Label entry, loop, found;
    __ add( __ top_most_monitor(), O2 ); // last one to check
    __ ba(false, entry );
    // use Lscratch to hold monitor elem to check, start with most recent monitor,
    // By using a local it survives the call to the C routine.
    __ delayed()->mov( Lmonitors, Lscratch );

    __ bind( loop );

    __ verify_oop(O4); // verify each monitor's oop
    __ cmp(O4, O0); // check if current entry is for desired object
    __ brx( Assembler::equal, true, Assembler::pt, found );
    // Delay slot (annulled if not taken): pass the matching entry in O1.
    __ delayed()->mov(Lscratch, O1); // pass found entry as argument to monitorexit

    __ inc( Lscratch, frame::interpreter_frame_monitor_size() * wordSize ); // advance to next

    __ bind( entry );

    __ cmp( Lscratch, O2 );
    __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, loop );
    // Delay slot (annulled if fall-through): preload obj field for the next iteration.
    __ delayed()->ld_ptr(Lscratch, BasicObjectLock::obj_offset_in_bytes(), O4);

    // Fell off the end of the monitor block without a match: the object
    // is not locked by this frame.
    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_illegal_monitor_state_exception));
    __ should_not_reach_here();

    __ bind(found);
  }
  __ unlock_object(O1);
}
duke@435 3818
duke@435 3819
duke@435 3820 //----------------------------------------------------------------------------------------------------
duke@435 3821 // Wide instructions
duke@435 3822
// Generate code for the wide prefix bytecode: load the following bytecode,
// look up its entry in the interpreter's wide-entry-point table
// (Interpreter::_wentry_point), and jump to it.
void TemplateTable::wide() {
  transition(vtos, vtos);
  __ ldub(Lbcp, 1, G3_scratch);// get next bc
  __ sll(G3_scratch, LogBytesPerWord, G3_scratch); // scale bc to a table offset
  AddressLiteral ep(Interpreter::_wentry_point);
  __ set(ep, G4_scratch);
  __ ld_ptr(G4_scratch, G3_scratch, G3_scratch); // load wide entry point
  __ jmp(G3_scratch, G0);
  __ delayed()->nop();
  // Note: the Lbcp increment step is part of the individual wide bytecode implementations
}
duke@435 3834
duke@435 3835
duke@435 3836 //----------------------------------------------------------------------------------------------------
duke@435 3837 // Multi arrays
duke@435 3838
// Generate code for the multianewarray bytecode: read the dimension count
// from the bytecode stream, hand the address of the first dimension word on
// the expression stack to the VM (which allocates the array), then pop all
// dimension words. The result oop comes back in Otos_i.
void TemplateTable::multianewarray() {
  transition(vtos, atos);
  // put ndims * wordSize into Lscratch (ndims is the unsigned byte at bcp+3)
  __ ldub( Lbcp, 3, Lscratch);
  __ sll( Lscratch, Interpreter::logStackElementSize, Lscratch);
  // Lesp points past last_dim, so set to O1 to first_dim address
  __ add( Lesp, Lscratch, O1);
  call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray), O1);
  __ add( Lesp, Lscratch, Lesp); // pop all dimensions off the stack
}
duke@435 3849 #endif /* !CC_INTERP */

mercurial