/*
 * Copyright 1997-2008 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
duke@435: * duke@435: */ duke@435: duke@435: #include "incls/_precompiled.incl" duke@435: #include "incls/_vtableStubs_sparc.cpp.incl" duke@435: duke@435: // machine-dependent part of VtableStubs: create vtableStub of correct size and duke@435: // initialize its code duke@435: duke@435: #define __ masm-> duke@435: duke@435: duke@435: #ifndef PRODUCT duke@435: extern "C" void bad_compiled_vtable_index(JavaThread* thread, oopDesc* receiver, int index); duke@435: #endif duke@435: duke@435: duke@435: // Used by compiler only; may use only caller saved, non-argument registers duke@435: // NOTE: %%%% if any change is made to this stub make sure that the function duke@435: // pd_code_size_limit is changed to ensure the correct size for VtableStub duke@435: VtableStub* VtableStubs::create_vtable_stub(int vtable_index) { duke@435: const int sparc_code_length = VtableStub::pd_code_size_limit(true); duke@435: VtableStub* s = new(sparc_code_length) VtableStub(true, vtable_index); duke@435: ResourceMark rm; duke@435: CodeBuffer cb(s->entry_point(), sparc_code_length); duke@435: MacroAssembler* masm = new MacroAssembler(&cb); duke@435: duke@435: #ifndef PRODUCT duke@435: if (CountCompiledCalls) { duke@435: Address ctr(G5, SharedRuntime::nof_megamorphic_calls_addr()); duke@435: __ sethi(ctr); duke@435: __ ld(ctr, G3_scratch); duke@435: __ inc(G3_scratch); duke@435: __ st(G3_scratch, ctr); duke@435: } duke@435: #endif /* PRODUCT */ duke@435: duke@435: assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0"); duke@435: duke@435: // get receiver klass duke@435: address npe_addr = __ pc(); coleenp@548: __ load_klass(O0, G3_scratch); duke@435: duke@435: // set methodOop (in case of interpreted method), and destination address duke@435: int entry_offset = instanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size(); duke@435: #ifndef PRODUCT duke@435: if (DebugVtables) { duke@435: Label L; duke@435: // check offset vs vtable length duke@435: __ 
ld(G3_scratch, instanceKlass::vtable_length_offset()*wordSize, G5); duke@435: __ cmp(G5, vtable_index*vtableEntry::size()); duke@435: __ br(Assembler::greaterUnsigned, false, Assembler::pt, L); duke@435: __ delayed()->nop(); duke@435: __ set(vtable_index, O2); duke@435: __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), O0, O2); duke@435: __ bind(L); duke@435: } duke@435: #endif duke@435: int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes(); duke@435: if( __ is_simm13(v_off) ) { duke@435: __ ld_ptr(G3, v_off, G5_method); duke@435: } else { duke@435: __ set(v_off,G5); duke@435: __ ld_ptr(G3, G5, G5_method); duke@435: } duke@435: duke@435: #ifndef PRODUCT duke@435: if (DebugVtables) { duke@435: Label L; duke@435: __ br_notnull(G5_method, false, Assembler::pt, L); duke@435: __ delayed()->nop(); duke@435: __ stop("Vtable entry is ZERO"); duke@435: __ bind(L); duke@435: } duke@435: #endif duke@435: duke@435: address ame_addr = __ pc(); // if the vtable entry is null, the method is abstract duke@435: // NOTE: for vtable dispatches, the vtable entry will never be null. 
duke@435: duke@435: __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch); duke@435: duke@435: // jump to target (either compiled code or c2iadapter) duke@435: __ JMP(G3_scratch, 0); duke@435: // load methodOop (in case we call c2iadapter) duke@435: __ delayed()->nop(); duke@435: duke@435: masm->flush(); duke@435: s->set_exception_points(npe_addr, ame_addr); duke@435: return s; duke@435: } duke@435: duke@435: duke@435: // NOTE: %%%% if any change is made to this stub make sure that the function duke@435: // pd_code_size_limit is changed to ensure the correct size for VtableStub duke@435: VtableStub* VtableStubs::create_itable_stub(int vtable_index) { duke@435: const int sparc_code_length = VtableStub::pd_code_size_limit(false); duke@435: VtableStub* s = new(sparc_code_length) VtableStub(false, vtable_index); duke@435: ResourceMark rm; duke@435: CodeBuffer cb(s->entry_point(), sparc_code_length); duke@435: MacroAssembler* masm = new MacroAssembler(&cb); duke@435: duke@435: Register G3_klassOop = G3_scratch; duke@435: Register G5_interface = G5; // Passed in as an argument duke@435: Label search; duke@435: duke@435: // Entry arguments: duke@435: // G5_interface: Interface duke@435: // O0: Receiver duke@435: assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0"); duke@435: duke@435: // get receiver klass (also an implicit null-check) duke@435: address npe_addr = __ pc(); coleenp@548: __ load_klass(O0, G3_klassOop); duke@435: __ verify_oop(G3_klassOop); duke@435: duke@435: // Push a new window to get some temp registers. This chops the head of all duke@435: // my 64-bit %o registers in the LION build, but this is OK because no longs duke@435: // are passed in the %o registers. Instead, longs are passed in G1 and G4 duke@435: // and so those registers are not available here. 
duke@435: __ save(SP,-frame::register_save_words*wordSize,SP); duke@435: Register I0_receiver = I0; // Location of receiver after save duke@435: duke@435: #ifndef PRODUCT duke@435: if (CountCompiledCalls) { duke@435: Address ctr(L0, SharedRuntime::nof_megamorphic_calls_addr()); duke@435: __ sethi(ctr); duke@435: __ ld(ctr, L1); duke@435: __ inc(L1); duke@435: __ st(L1, ctr); duke@435: } duke@435: #endif /* PRODUCT */ duke@435: duke@435: // load start of itable entries into L0 register duke@435: const int base = instanceKlass::vtable_start_offset() * wordSize; duke@435: __ ld(Address(G3_klassOop, 0, instanceKlass::vtable_length_offset() * wordSize), L0); duke@435: duke@435: // %%% Could store the aligned, prescaled offset in the klassoop. duke@435: __ sll(L0, exact_log2(vtableEntry::size() * wordSize), L0); duke@435: // see code for instanceKlass::start_of_itable! duke@435: const int vtable_alignment = align_object_offset(1); duke@435: assert(vtable_alignment == 1 || vtable_alignment == 2, ""); duke@435: const int odd_bit = vtableEntry::size() * wordSize; duke@435: if (vtable_alignment == 2) { duke@435: __ and3(L0, odd_bit, L1); // isolate the odd bit duke@435: } duke@435: __ add(G3_klassOop, L0, L0); duke@435: if (vtable_alignment == 2) { duke@435: __ add(L0, L1, L0); // double the odd bit, to align up duke@435: } duke@435: duke@435: // Loop over all itable entries until desired interfaceOop (G5_interface) found duke@435: __ bind(search); duke@435: duke@435: // %%%% Could load both offset and interface in one ldx, if they were duke@435: // in the opposite order. This would save a load. 
duke@435: __ ld_ptr(L0, base + itableOffsetEntry::interface_offset_in_bytes(), L1); duke@435: dcubed@451: // If the entry is NULL then we've reached the end of the table dcubed@451: // without finding the expected interface, so throw an exception dcubed@451: Label throw_icce; dcubed@451: __ bpr(Assembler::rc_z, false, Assembler::pn, L1, throw_icce); dcubed@451: __ delayed()->cmp(G5_interface, L1); duke@435: __ brx(Assembler::notEqual, true, Assembler::pn, search); duke@435: __ delayed()->add(L0, itableOffsetEntry::size() * wordSize, L0); duke@435: duke@435: // entry found and L0 points to it, move offset of vtable for interface into L0 duke@435: __ ld(L0, base + itableOffsetEntry::offset_offset_in_bytes(), L0); duke@435: duke@435: // Compute itableMethodEntry and get methodOop(G5_method) and entrypoint(L0) for compiler duke@435: const int method_offset = (itableMethodEntry::size() * wordSize * vtable_index) + itableMethodEntry::method_offset_in_bytes(); duke@435: __ add(G3_klassOop, L0, L1); duke@435: __ ld_ptr(L1, method_offset, G5_method); duke@435: duke@435: #ifndef PRODUCT duke@435: if (DebugVtables) { duke@435: Label L01; duke@435: __ ld_ptr(L1, method_offset, G5_method); duke@435: __ bpr(Assembler::rc_nz, false, Assembler::pt, G5_method, L01); duke@435: __ delayed()->nop(); duke@435: __ stop("methodOop is null"); duke@435: __ bind(L01); duke@435: __ verify_oop(G5_method); duke@435: } duke@435: #endif duke@435: duke@435: // If the following load is through a NULL pointer, we'll take an OS duke@435: // exception that should translate into an AbstractMethodError. We need the duke@435: // window count to be correct at that time. 
duke@435: __ restore(); // Restore registers BEFORE the AME point duke@435: duke@435: address ame_addr = __ pc(); // if the vtable entry is null, the method is abstract duke@435: __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch); duke@435: duke@435: // G5_method: methodOop duke@435: // O0: Receiver duke@435: // G3_scratch: entry point duke@435: __ JMP(G3_scratch, 0); duke@435: __ delayed()->nop(); duke@435: dcubed@451: __ bind(throw_icce); dcubed@451: Address icce(G3_scratch, StubRoutines::throw_IncompatibleClassChangeError_entry()); dcubed@451: __ jump_to(icce, 0); dcubed@451: __ delayed()->restore(); dcubed@451: duke@435: masm->flush(); dcubed@451: dcubed@451: guarantee(__ pc() <= s->code_end(), "overflowed buffer"); dcubed@451: duke@435: s->set_exception_points(npe_addr, ame_addr); duke@435: return s; duke@435: } duke@435: duke@435: duke@435: int VtableStub::pd_code_size_limit(bool is_vtable_stub) { dcubed@451: if (TraceJumps || DebugVtables || CountCompiledCalls || VerifyOops) return 1000; duke@435: else { duke@435: const int slop = 2*BytesPerInstWord; // sethi;add (needed for long offsets) duke@435: if (is_vtable_stub) { coleenp@548: // ld;ld;ld,jmp,nop coleenp@548: const int basic = 5*BytesPerInstWord + coleenp@548: // shift;add for load_klass coleenp@548: (UseCompressedOops ? 2*BytesPerInstWord : 0); duke@435: return basic + slop; duke@435: } else { dcubed@451: // save, ld, ld, sll, and, add, add, ld, cmp, br, add, ld, add, ld, ld, jmp, restore, sethi, jmpl, restore coleenp@548: const int basic = (20 LP64_ONLY(+ 6)) * BytesPerInstWord + coleenp@548: // shift;add for load_klass coleenp@548: (UseCompressedOops ? 2*BytesPerInstWord : 0); duke@435: return (basic + slop); duke@435: } duke@435: } duke@435: } duke@435: duke@435: duke@435: int VtableStub::pd_code_alignment() { duke@435: // UltraSPARC cache line size is 8 instructions: duke@435: const unsigned int icache_line_size = 32; duke@435: return icache_line_size; duke@435: }