/*
 * Copyright 1999-2006 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

#include "incls/_precompiled.incl"
#include "incls/_locknode.cpp.incl"

//=============================================================================
const RegMask &BoxLockNode::in_RegMask(uint i) const {
  return _inmask;
}

const RegMask &BoxLockNode::out_RegMask() const {
  return *Matcher::idealreg2regmask[Op_RegP];
}

uint BoxLockNode::size_of() const { return sizeof(*this); }

BoxLockNode::BoxLockNode( int slot ) : Node( Compile::current()->root() ), _slot(slot) {
  init_class_id(Class_BoxLock);
  init_flags(Flag_rematerialize);
  // Restrict this node to the stack slot reserved for the lock.
  OptoReg::Name reg = OptoReg::stack2reg(_slot);
  _inmask.Insert(reg);
}

//------------------------------cmp--------------------------------------------
uint BoxLockNode::cmp( const Node &n ) const {
  const BoxLockNode &bn = (const BoxLockNode &)n;
  return bn._slot == _slot;
}

//------------------------------stack_slot-------------------------------------
OptoReg::Name BoxLockNode::stack_slot(Node* box_node) {
  // Chase down the BoxNode
  while (!box_node->is_BoxLock()) {
    // if (box_node->is_SpillCopy()) {
    //   Node *m = box_node->in(1);
    //   if (m->is_Mach() && m->as_Mach()->ideal_Opcode() == Op_StoreP) {
    //     box_node = m->in(m->as_Mach()->operand_index(2));
    //     continue;
    //   }
    // }
    assert(box_node->is_SpillCopy() || box_node->is_Phi(), "Bad spill of Lock.");
    box_node = box_node->in(1);
  }
  return box_node->in_RegMask(0).find_first_elem();
}

//=============================================================================
//-----------------------------hash--------------------------------------------
uint FastLockNode::hash() const { return NO_HASH; }

//------------------------------cmp--------------------------------------------
uint FastLockNode::cmp( const Node &n ) const {
  return (&n == this);                // Always fail except on self
}

//=============================================================================
//-----------------------------hash--------------------------------------------
uint FastUnlockNode::hash() const { return NO_HASH; }

//------------------------------cmp--------------------------------------------
uint FastUnlockNode::cmp( const Node &n ) const {
  return (&n == this);                // Always fail except on self
}

//
// Create a counter which counts the number of times this lock is acquired
//
void FastLockNode::create_lock_counter(JVMState* state) {
  BiasedLockingNamedCounter* blnc = (BiasedLockingNamedCounter*)
    OptoRuntime::new_named_counter(state, NamedCounter::BiasedLockingCounter);
  _counters = blnc->counters();
}

//=============================================================================
//------------------------------do_monitor_enter-------------------------------
void Parse::do_monitor_enter() {
  kill_dead_locals();

  // Null check; get casted pointer.
  Node *obj = do_null_check(peek(), T_OBJECT);
  // Check for locking null object
  if (stopped()) return;

  // the monitor object is not part of debug info expression stack
  pop();

  // Insert a FastLockNode which takes as arguments the current thread pointer,
  // the obj pointer & the address of the stack slot pair used for the lock.
  shared_lock(obj);
}

//------------------------------do_monitor_exit--------------------------------
void Parse::do_monitor_exit() {
  kill_dead_locals();

  pop();                        // Pop oop to unlock
  // Because monitors are guaranteed paired (else we bail out), we know
  // the matching Lock for this Unlock.  Hence we know there is no need
  // for a null check on Unlock.
  shared_unlock(map()->peek_monitor_box(), map()->peek_monitor_obj());
}