--- a/src/share/vm/opto/callnode.hpp	Mon May 06 19:49:23 2013 -0700
+++ b/src/share/vm/opto/callnode.hpp	Wed May 08 15:08:01 2013 -0700
@@ -49,6 +49,7 @@
 class CallLeafNoFPNode;
 class AllocateNode;
 class AllocateArrayNode;
+class BoxLockNode;
 class LockNode;
 class UnlockNode;
 class JVMState;
@@ -235,7 +236,6 @@
 
   int            loc_size() const { return stkoff() - locoff(); }
   int            stk_size() const { return monoff() - stkoff(); }
-  int            arg_size() const { return monoff() - argoff(); }
   int            mon_size() const { return scloff() - monoff(); }
   int            scl_size() const { return endoff() - scloff(); }
 
@@ -298,6 +298,7 @@
   // Miscellaneous utility functions
   JVMState* clone_deep(Compile* C) const; // recursively clones caller chain
   JVMState* clone_shallow(Compile* C) const; // retains uncloned caller
+  void      set_map_deep(SafePointNode *map);// reset map for all callers
 
 #ifndef PRODUCT
   void      format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const;
@@ -439,7 +440,7 @@
   static  bool           needs_polling_address_input();
 
 #ifndef PRODUCT
-  virtual void              dump_spec(outputStream *st) const;
+  virtual void           dump_spec(outputStream *st) const;
 #endif
 };
 
@@ -554,10 +555,10 @@
   virtual bool        guaranteed_safepoint()  { return true; }
   // For macro nodes, the JVMState gets modified during expansion, so when cloning
   // the node the JVMState must be cloned.
-  virtual void        clone_jvms() { } // default is not to clone
+  virtual void        clone_jvms(Compile* C) { } // default is not to clone
 
   // Returns true if the call may modify n
-  virtual bool        may_modify(const TypePtr *addr_t, PhaseTransform *phase);
+  virtual bool        may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase);
   // Does this node have a use of n other than in debug information?
   bool                has_non_debug_use(Node *n);
   // Returns the unique CheckCastPP of a call
@@ -630,9 +631,15 @@
   virtual uint cmp( const Node &n ) const;
   virtual uint size_of() const; // Size is bigger
 public:
-  CallStaticJavaNode(const TypeFunc* tf, address addr, ciMethod* method, int bci)
+  CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
     : CallJavaNode(tf, addr, method, bci), _name(NULL) {
     init_class_id(Class_CallStaticJava);
+    if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
+      init_flags(Flag_is_macro);
+      C->add_macro_node(this);
+    }
+    _is_scalar_replaceable = false;
+    _is_non_escaping = false;
   }
   CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
                      const TypePtr* adr_type)
@@ -640,13 +647,31 @@
     init_class_id(Class_CallStaticJava);
     // This node calls a runtime stub, which often has narrow memory effects.
     _adr_type = adr_type;
+    _is_scalar_replaceable = false;
+    _is_non_escaping = false;
   }
-  const char *_name;      // Runtime wrapper name
+  const char *_name;            // Runtime wrapper name
+
+  // Result of Escape Analysis
+  bool _is_scalar_replaceable;
+  bool _is_non_escaping;
 
   // If this is an uncommon trap, return the request code, else zero.
   int uncommon_trap_request() const;
   static int extract_uncommon_trap_request(const Node* call);
 
+  bool is_boxing_method() const {
+    return is_macro() && (method() != NULL) && method()->is_boxing_method();
+  }
+  // Later inlining modifies the JVMState, so we need to clone it
+  // when the call node is cloned (because it is macro node).
+  virtual void clone_jvms(Compile* C) {
+    if ((jvms() != NULL) && is_boxing_method()) {
+      set_jvms(jvms()->clone_deep(C));
+      jvms()->set_map_deep(this);
+    }
+  }
+
   virtual int Opcode() const;
 #ifndef PRODUCT
   virtual void  dump_spec(outputStream *st) const;
@@ -748,12 +773,12 @@
     ParmLimit
   };
 
-  static const TypeFunc* alloc_type() {
+  static const TypeFunc* alloc_type(const Type* t) {
     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
     fields[AllocSize]   = TypeInt::POS;
     fields[KlassNode]   = TypeInstPtr::NOTNULL;
     fields[InitialTest] = TypeInt::BOOL;
-    fields[ALength]     = TypeInt::INT;  // length (can be a bad length)
+    fields[ALength]     = t;  // length (can be a bad length)
 
     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
 
@@ -766,21 +791,26 @@
     return TypeFunc::make(domain, range);
   }
 
-  bool _is_scalar_replaceable;  // Result of Escape Analysis
+  // Result of Escape Analysis
+  bool _is_scalar_replaceable;
+  bool _is_non_escaping;
 
   virtual uint size_of() const; // Size is bigger
   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
                Node *size, Node *klass_node, Node *initial_test);
   // Expansion modifies the JVMState, so we need to clone it
-  virtual void  clone_jvms() {
-    set_jvms(jvms()->clone_deep(Compile::current()));
+  virtual void  clone_jvms(Compile* C) {
+    if (jvms() != NULL) {
+      set_jvms(jvms()->clone_deep(C));
+      jvms()->set_map_deep(this);
+    }
   }
   virtual int Opcode() const;
   virtual uint ideal_reg() const { return Op_RegP; }
   virtual bool        guaranteed_safepoint()  { return false; }
 
   // allocations do not modify their arguments
-  virtual bool        may_modify(const TypePtr *addr_t, PhaseTransform *phase) { return false;}
+  virtual bool        may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
 
   // Pattern-match a possible usage of AllocateNode.
   // Return null if no allocation is recognized.
@@ -815,10 +845,6 @@
   // are defined in graphKit.cpp, which sets up the bidirectional relation.)
   InitializeNode* initialization();
 
-  // Return the corresponding storestore barrier (or null if none).
-  // Walks out edges to find it...
-  MemBarStoreStoreNode* storestore();
-
   // Convenience for initialization->maybe_set_complete(phase)
   bool maybe_set_complete(PhaseGVN* phase);
 };
@@ -840,7 +866,6 @@
     set_req(AllocateNode::ALength,        count_val);
   }
   virtual int Opcode() const;
-  virtual uint size_of() const; // Size is bigger
   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 
   // Dig the length operand out of a array allocation site.
@@ -918,7 +943,7 @@
   void set_nested()      { _kind = Nested; set_eliminated_lock_counter(); }
 
   // locking does not modify its arguments
-  virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase){ return false;}
+  virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase){ return false;}
 
 #ifndef PRODUCT
   void create_lock_counter(JVMState* s);
@@ -965,8 +990,11 @@
 
   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
   // Expansion modifies the JVMState, so we need to clone it
-  virtual void  clone_jvms() {
-    set_jvms(jvms()->clone_deep(Compile::current()));
+  virtual void  clone_jvms(Compile* C) {
+    if (jvms() != NULL) {
+      set_jvms(jvms()->clone_deep(C));
+      jvms()->set_map_deep(this);
+    }
   }
 
   bool is_nested_lock_region(); // Is this Lock nested?