Mon, 26 Sep 2011 10:24:05 -0700
7081933: Use zeroing elimination optimization for large array
Summary: Don't zero new typeArray during runtime call if the allocation is followed by arraycopy into it.
Reviewed-by: twisti
1 /*
2 * Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "opto/locknode.hpp"
27 #include "opto/parse.hpp"
28 #include "opto/rootnode.hpp"
29 #include "opto/runtime.hpp"
//=============================================================================
const RegMask &BoxLockNode::in_RegMask(uint i) const {
  // Every input uses the same mask: the single stack slot this box was
  // pinned to in the constructor.
  return _inmask;
}
const RegMask &BoxLockNode::out_RegMask() const {
  // The box produces a pointer value, so any pointer register is acceptable.
  return *Matcher::idealreg2regmask[Op_RegP];
}
// Byte size of this node; needed so the compiler can clone it correctly.
uint BoxLockNode::size_of() const { return sizeof(*this); }
// A BoxLockNode represents the stack location (BasicLock) of a monitor.
// 'slot' is the fixed frame slot holding the lock record; the node is marked
// rematerializable so it is recreated where needed rather than spilled.
BoxLockNode::BoxLockNode( int slot ) : Node( Compile::current()->root() ),
                                       _slot(slot), _is_eliminated(false) {
  init_class_id(Class_BoxLock);
  init_flags(Flag_rematerialize);
  // Restrict this node's input mask to exactly its assigned stack slot.
  OptoReg::Name reg = OptoReg::stack2reg(_slot);
  _inmask.Insert(reg);
}
//-----------------------------hash--------------------------------------------
uint BoxLockNode::hash() const {
  // Fold in the slot so boxes for different monitors hash apart; offset
  // eliminated boxes by fixed_slots() so an eliminated box never hashes
  // together with a live box for the same slot (matches cmp() below).
  return Node::hash() + _slot + (_is_eliminated ? Compile::current()->fixed_slots() : 0);
}
55 //------------------------------cmp--------------------------------------------
56 uint BoxLockNode::cmp( const Node &n ) const {
57 const BoxLockNode &bn = (const BoxLockNode &)n;
58 return bn._slot == _slot && bn._is_eliminated == _is_eliminated;
59 }
61 OptoReg::Name BoxLockNode::stack_slot(Node* box_node) {
62 // Chase down the BoxNode
63 while (!box_node->is_BoxLock()) {
64 // if (box_node->is_SpillCopy()) {
65 // Node *m = box_node->in(1);
66 // if (m->is_Mach() && m->as_Mach()->ideal_Opcode() == Op_StoreP) {
67 // box_node = m->in(m->as_Mach()->operand_index(2));
68 // continue;
69 // }
70 // }
71 assert(box_node->is_SpillCopy() || box_node->is_Phi(), "Bad spill of Lock.");
72 box_node = box_node->in(1);
73 }
74 return box_node->in_RegMask(0).find_first_elem();
75 }
//=============================================================================
//-----------------------------hash--------------------------------------------
// NO_HASH keeps lock nodes out of the GVN hash table: lock sites are never
// commoned with each other.
uint FastLockNode::hash() const { return NO_HASH; }
81 //------------------------------cmp--------------------------------------------
82 uint FastLockNode::cmp( const Node &n ) const {
83 return (&n == this); // Always fail except on self
84 }
//=============================================================================
//-----------------------------hash--------------------------------------------
// As with FastLock, unlock sites are never value-numbered together.
uint FastUnlockNode::hash() const { return NO_HASH; }
90 //------------------------------cmp--------------------------------------------
91 uint FastUnlockNode::cmp( const Node &n ) const {
92 return (&n == this); // Always fail except on self
93 }
//
// Create a counter which counts the number of times this lock is acquired
//
void FastLockNode::create_lock_counter(JVMState* state) {
  // Allocate a named biased-locking counter keyed to this lock's bytecode
  // location (taken from the JVMState) and cache its counters struct.
  BiasedLockingNamedCounter* blnc = (BiasedLockingNamedCounter*)
          OptoRuntime::new_named_counter(state, NamedCounter::BiasedLockingCounter);
  _counters = blnc->counters();
}
//=============================================================================
//------------------------------do_monitor_enter-------------------------------
// Parse a monitorenter bytecode: null-check the object on top of the
// expression stack, then emit fast-path locking for it.
void Parse::do_monitor_enter() {
  kill_dead_locals();

  // Null check; get casted pointer.
  // Peek (not pop) so the object is still on the stack for debug info if the
  // null check deoptimizes.
  Node *obj = do_null_check(peek(), T_OBJECT);
  // Check for locking null object
  if (stopped()) return;

  // the monitor object is not part of debug info expression stack
  pop();

  // Insert a FastLockNode which takes as arguments the current thread pointer,
  // the obj pointer & the address of the stack slot pair used for the lock.
  shared_lock(obj);
}
//------------------------------do_monitor_exit--------------------------------
// Parse a monitorexit bytecode: pop the oop and emit unlocking code using the
// box/object of the innermost monitor recorded in the current map.
void Parse::do_monitor_exit() {
  kill_dead_locals();

  pop(); // Pop oop to unlock
  // Because monitors are guaranteed paired (else we bail out), we know
  // the matching Lock for this Unlock. Hence we know there is no need
  // for a null check on Unlock.
  shared_unlock(map()->peek_monitor_box(), map()->peek_monitor_obj());
}