--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/share/vm/opto/graphKit.hpp	Sat Dec 01 00:00:00 2007 +0000
@@ -0,0 +1,720 @@
/*
 * Copyright 2001-2007 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

class FastLockNode;
class FastUnlockNode;
class Parse;
class RootNode;

//-----------------------------------------------------------------------------
//----------------------------GraphKit-----------------------------------------
// Toolkit for building the common sorts of subgraphs.
// Does not know about bytecode parsing or type-flow results.
// It is able to create graphs implementing the semantics of most
// or all bytecodes, so that it can expand intrinsics and calls.
// It may depend on JVMState structure, but it must not depend
// on specific bytecode streams.
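//
// A GraphKit is usually constructed over an incoming JVMState and then used
// to append nodes to its map.  A minimal sketch, for illustration only
// ('jvms' is a hypothetical incoming state):
//
//   GraphKit kit(jvms);
//   Node* one = kit.intcon(1);             // find or create the constant 1
//   kit.push(one);                         // push it on the JVM expression stack
//   JVMState* out_jvms = kit.sync_jvms();  // write back current bci and sp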
class GraphKit : public Phase {
  friend class PreserveJVMState;

 protected:
  ciEnv*            _env;        // Compilation environment
  PhaseGVN         &_gvn;        // Some optimizations while parsing
  SafePointNode*    _map;        // Parser map from JVM to Nodes
  SafePointNode*    _exceptions; // Parser map(s) for exception state(s)
  int               _sp;         // JVM Expression Stack Pointer
  int               _bci;        // JVM Bytecode Pointer
  ciMethod*         _method;     // JVM Current Method

 private:
  SafePointNode* map_not_null() const {
    assert(_map != NULL, "must call stopped() to test for reset compiler map");
    return _map;
  }

 public:
  GraphKit();                   // empty constructor
  GraphKit(JVMState* jvms);     // the JVM state on which to operate

#ifdef ASSERT
  ~GraphKit() {
    assert(!has_exceptions(), "user must call transfer_exceptions_into_jvms");
  }
#endif

  virtual Parse* is_Parse() const { return NULL; }

  ciEnv*        env()  const { return _env; }
  PhaseGVN&     gvn()  const { return _gvn; }

  void record_for_igvn(Node* n) const { C->record_for_igvn(n); }  // delegate to Compile

  // Handy well-known nodes:
  Node*         null() const { return zerocon(T_OBJECT); }
  Node*         top()  const { return C->top(); }
  RootNode*     root() const { return C->root(); }

  // Create or find a constant node
  Node* intcon(jint con)       const { return _gvn.intcon(con); }
  Node* longcon(jlong con)     const { return _gvn.longcon(con); }
  Node* makecon(const Type *t) const { return _gvn.makecon(t); }
  Node* zerocon(BasicType bt)  const { return _gvn.zerocon(bt); }
  // (See also macro MakeConX in type.hpp, which uses intcon or longcon.)

  jint  find_int_con(Node* n, jint value_if_unknown) {
    return _gvn.find_int_con(n, value_if_unknown);
  }
  jlong find_long_con(Node* n, jlong value_if_unknown) {
    return _gvn.find_long_con(n, value_if_unknown);
  }
  // (See also macro find_intptr_t_con in type.hpp, which uses one of these.)

  // JVM State accessors:
  // Parser mapping from JVM indices into Nodes.
  // Low slots are accessed by the StartNode::enum.
  // Then come the locals at StartNode::Parms to StartNode::Parms+max_locals();
  // Then come JVM stack slots.
  // Finally come the monitors, if any.
  // See layout accessors in class JVMState.

  SafePointNode*  map()     const { return _map; }
  bool            has_exceptions() const { return _exceptions != NULL; }
  JVMState*       jvms()    const { return map_not_null()->_jvms; }
  int             sp()      const { return _sp; }
  int             bci()     const { return _bci; }
  Bytecodes::Code java_bc() const;
  ciMethod*       method()  const { return _method; }

  void set_jvms(JVMState* jvms) { set_map(jvms->map());
                                  assert(jvms == this->jvms(), "sanity");
                                  _sp = jvms->sp();
                                  _bci = jvms->bci();
                                  _method = jvms->has_method() ? jvms->method() : NULL; }
  void set_map(SafePointNode* m) { _map = m; debug_only(verify_map()); }
  void set_sp(int i)             { assert(i >= 0, "must be non-negative"); _sp = i; }
  void clean_stack(int from_sp); // clear garbage beyond from_sp to top

  void inc_sp(int i)             { set_sp(sp() + i); }
  void set_bci(int bci)          { _bci = bci; }

  // Make sure jvms has current bci & sp.
  JVMState* sync_jvms() const;
#ifdef ASSERT
  // Make sure JVMS has an updated copy of bci and sp.
  // Also sanity-check method, depth, and monitor depth.
  bool jvms_in_sync() const;

  // Make sure the map looks OK.
  void verify_map() const;

  // Make sure a proposed exception state looks OK.
  static void verify_exception_state(SafePointNode* ex_map);
#endif

  // Clone the existing map state.  (Implements PreserveJVMState.)
  SafePointNode* clone_map();

  // Set the map to a clone of the given one.
  void set_map_clone(SafePointNode* m);

  // Tell if the compilation is failing.
  bool failing() const { return C->failing(); }

  // Set _map to NULL, signalling a stop to further bytecode execution.
  // Preserve the map intact for future use, and return it back to the caller.
  SafePointNode* stop() { SafePointNode* m = map(); set_map(NULL); return m; }

  // Stop, but first smash the map's inputs to NULL, to mark it dead.
  void stop_and_kill_map();

  // Tell if _map is NULL, or control is top.
  bool stopped();

  // Tell if this method or any caller method has exception handlers.
  bool has_ex_handler();

  // Save an exception without blowing stack contents or other JVM state.
  // (The extra pointer is stuck with add_req on the map, beyond the JVMS.)
  static void set_saved_ex_oop(SafePointNode* ex_map, Node* ex_oop);

  // Recover a saved exception from its map.
  static Node* saved_ex_oop(SafePointNode* ex_map);

  // Recover a saved exception from its map, and remove it from the map.
  static Node* clear_saved_ex_oop(SafePointNode* ex_map);

#ifdef ASSERT
  // Tell whether there is a saved exception in the map.
  static bool has_saved_ex_oop(SafePointNode* ex_map);
#endif

  // Push an exception in the canonical position for handlers (stack(0)).
  void push_ex_oop(Node* ex_oop) {
    ensure_stack(1);  // ensure room to push the exception
    set_stack(0, ex_oop);
    set_sp(1);
    clean_stack(1);
  }

  // Detach and return an exception state.
  SafePointNode* pop_exception_state() {
    SafePointNode* ex_map = _exceptions;
    if (ex_map != NULL) {
      _exceptions = ex_map->next_exception();
      ex_map->set_next_exception(NULL);
      debug_only(verify_exception_state(ex_map));
    }
    return ex_map;
  }

  // Add an exception, using the given JVM state, without commoning.
  void push_exception_state(SafePointNode* ex_map) {
    debug_only(verify_exception_state(ex_map));
    ex_map->set_next_exception(_exceptions);
    _exceptions = ex_map;
  }

  // Turn the current JVM state into an exception state, appending the ex_oop.
  SafePointNode* make_exception_state(Node* ex_oop);

  // Add an exception, using the given JVM state.
  // Combine all exceptions with a common exception type into a single state.
  // (This is done via combine_exception_states.)
  void add_exception_state(SafePointNode* ex_map);

  // Combine all exceptions of any sort whatever into a single master state.
  SafePointNode* combine_and_pop_all_exception_states() {
    if (_exceptions == NULL)  return NULL;
    SafePointNode* phi_map = pop_exception_state();
    SafePointNode* ex_map;
    while ((ex_map = pop_exception_state()) != NULL) {
      combine_exception_states(ex_map, phi_map);
    }
    return phi_map;
  }

  // Combine the two exception states, building phis as necessary.
  // The second argument is updated to include contributions from the first.
  void combine_exception_states(SafePointNode* ex_map, SafePointNode* phi_map);

  // Reset the map to the given state.  If there are any half-finished phis
  // in it (created by combine_exception_states), transform them now.
  // Returns the exception oop.  (Caller must call push_ex_oop if required.)
  Node* use_exception_state(SafePointNode* ex_map);

  // Collect exceptions from a given JVM state into my exception list.
  void add_exception_states_from(JVMState* jvms);

  // Collect all raised exceptions into the current JVM state.
  // Clear the current exception list and map, returns the combined states.
  JVMState* transfer_exceptions_into_jvms();
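
  // Illustrative sketch of draining pending exceptions into one handler
  // state (not a prescribed recipe; 'kit' is a hypothetical GraphKit with
  // queued exception states):
  //
  //   SafePointNode* ex_map = kit.combine_and_pop_all_exception_states();
  //   if (ex_map != NULL) {
  //     Node* ex_oop = kit.use_exception_state(ex_map); // ex_map becomes the map
  //     kit.push_ex_oop(ex_oop);                        // exception goes in stack(0)
  //     // ... emit handler or rethrow code here
  //   }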

  // Helper to throw a built-in exception.
  // Range checks take the offending index.
  // Cast and array store checks take the offending class.
  // Others do not take the optional argument.
  // The JVMS must allow the bytecode to be re-executed
  // via an uncommon trap.
  void builtin_throw(Deoptimization::DeoptReason reason, Node* arg = NULL);

  // Helper Functions for adding debug information
  void kill_dead_locals();
#ifdef ASSERT
  bool dead_locals_are_killed();
#endif
  // The call may deoptimize.  Supply required JVM state as debug info.
  // If must_throw is true, the call is guaranteed not to return normally.
  void add_safepoint_edges(SafePointNode* call,
                           bool must_throw = false);

  // How many stack inputs does the current BC consume?
  // And, how does the stack change after the bytecode?
  // Returns false if unknown.
  bool compute_stack_effects(int& inputs, int& depth);

  // Add a fixed offset to a pointer
  Node* basic_plus_adr(Node* base, Node* ptr, intptr_t offset) {
    return basic_plus_adr(base, ptr, MakeConX(offset));
  }
  Node* basic_plus_adr(Node* base, intptr_t offset) {
    return basic_plus_adr(base, base, MakeConX(offset));
  }
  // Add a variable offset to a pointer
  Node* basic_plus_adr(Node* base, Node* offset) {
    return basic_plus_adr(base, base, offset);
  }
  Node* basic_plus_adr(Node* base, Node* ptr, Node* offset);

  // Convert between int and long, and size_t.
  // (See also macros ConvI2X, etc., in type.hpp.)
  Node* ConvI2L(Node* offset);
  Node* ConvL2I(Node* offset);
  // Find out the klass of an object.
  Node* load_object_klass(Node* object);
  // Find out the length of an array.
  Node* load_array_length(Node* array);
  // Helper function to do a NULL pointer check or ZERO check based on type.
  Node* null_check_common(Node* value, BasicType type,
                          bool assert_null, Node* *null_control);
  // Throw an exception if a given value is null.
  // Return the value cast to not-null.
  // Be clever about equivalent dominating null checks.
  Node* do_null_check(Node* value, BasicType type) {
    return null_check_common(value, type, false, NULL);
  }
  // Throw an uncommon trap if a given value is __not__ null.
  // Return the value cast to null, and be clever about dominating checks.
  Node* do_null_assert(Node* value, BasicType type) {
    return null_check_common(value, type, true, NULL);
  }
  // Null check oop.  Return null-path control into (*null_control).
  // Return a cast-not-null node which depends on the not-null control.
  // If never_see_null, use an uncommon trap (*null_control sees a top).
  // The cast is not valid along the null path; keep a copy of the original.
  Node* null_check_oop(Node* value, Node* *null_control,
                       bool never_see_null = false);

  // Cast obj to not-null on this path
  Node* cast_not_null(Node* obj, bool do_replace_in_map = true);
  // Replace all occurrences of one node by another.
  void replace_in_map(Node* old, Node* neww);
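
  // Illustrative sketch of the null-check helpers (hypothetical 'kit' and
  // 'obj'; not part of the API proper):
  //
  //   obj = kit.do_null_check(obj, T_OBJECT);  // emits the NPE path
  //   if (kit.stopped())  return;              // obj was known to be null
  //   // along the surviving path, obj is now cast to a not-null type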

  void push(Node* n)      { map_not_null(); _map->set_stack(_map->_jvms, _sp++, n); }
  Node* pop()             { map_not_null(); return _map->stack(_map->_jvms, --_sp); }
  Node* peek(int off = 0) { map_not_null(); return _map->stack(_map->_jvms, _sp - off - 1); }

  void push_pair(Node* ldval) {
    push(ldval);
    push(top());  // the halfword is merely a placeholder
  }
  void push_pair_local(int i) {
    // longs are stored in locals in "push" order
    push(  local(i+0) );  // the real value
    assert(local(i+1) == top(), "");
    push(top());  // halfword placeholder
  }
  Node* pop_pair() {
    // the second half is pushed last & popped first; it contains exactly nothing
    Node* halfword = pop();
    assert(halfword == top(), "");
    // the long bits are pushed first & popped last:
    return pop();
  }
  void set_pair_local(int i, Node* lval) {
    // longs are stored in locals as a value/half pair (like doubles)
    set_local(i+0, lval);
    set_local(i+1, top());
  }

  // Push the node, which may be zero, one, or two words.
  void push_node(BasicType n_type, Node* n) {
    int n_size = type2size[n_type];
    if      (n_size == 1)  push(      n );  // T_INT, ...
    else if (n_size == 2)  push_pair( n );  // T_DOUBLE, T_LONG
    else                   { assert(n_size == 0, "must be T_VOID"); }
  }

  Node* pop_node(BasicType n_type) {
    int n_size = type2size[n_type];
    if      (n_size == 1)  return pop();
    else if (n_size == 2)  return pop_pair();
    else                   return NULL;
  }
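
  // Two-word values (T_LONG, T_DOUBLE) occupy two stack slots: the value
  // itself plus a top() placeholder.  A minimal sketch (hypothetical 'kit'
  // and 'lval'):
  //
  //   kit.push_pair(lval);       // sp grows by 2
  //   ...
  //   Node* l = kit.pop_pair();  // discards the placeholder, returns lval
  //
  // push_node/pop_node consult type2size[] to pick the right variant.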

  Node* control()             const { return map_not_null()->control(); }
  Node* i_o()                 const { return map_not_null()->i_o(); }
  Node* returnadr()           const { return map_not_null()->returnadr(); }
  Node* frameptr()            const { return map_not_null()->frameptr(); }
  Node* local(uint idx)       const { map_not_null(); return _map->local(      _map->_jvms, idx); }
  Node* stack(uint idx)       const { map_not_null(); return _map->stack(      _map->_jvms, idx); }
  Node* argument(uint idx)    const { map_not_null(); return _map->argument(   _map->_jvms, idx); }
  Node* monitor_box(uint idx) const { map_not_null(); return _map->monitor_box(_map->_jvms, idx); }
  Node* monitor_obj(uint idx) const { map_not_null(); return _map->monitor_obj(_map->_jvms, idx); }

  void set_control  (Node* c)          { map_not_null()->set_control(c); }
  void set_i_o      (Node* c)          { map_not_null()->set_i_o(c); }
  void set_local(uint idx, Node* c)    { map_not_null(); _map->set_local(   _map->_jvms, idx, c); }
  void set_stack(uint idx, Node* c)    { map_not_null(); _map->set_stack(   _map->_jvms, idx, c); }
  void set_argument(uint idx, Node* c) { map_not_null(); _map->set_argument(_map->_jvms, idx, c); }
  void ensure_stack(uint stk_size)     { map_not_null(); _map->ensure_stack(_map->_jvms, stk_size); }

  // Access unaliased memory
  Node* memory(uint alias_idx);
  Node* memory(const TypePtr *tp) { return memory(C->get_alias_index(tp)); }
  Node* memory(Node* adr)         { return memory(_gvn.type(adr)->is_ptr()); }

  // Access immutable memory
  Node* immutable_memory() { return C->immutable_memory(); }

  // Set unaliased memory
  void set_memory(Node* c, uint alias_idx)    { merged_memory()->set_memory_at(alias_idx, c); }
  void set_memory(Node* c, const TypePtr *tp) { set_memory(c, C->get_alias_index(tp)); }
  void set_memory(Node* c, Node* adr)         { set_memory(c, _gvn.type(adr)->is_ptr()); }

  // Get the entire memory state (probably a MergeMemNode), and reset it
  // (The resetting prevents somebody from using the dangling Node pointer.)
  Node* reset_memory();

  // Get the entire memory state, asserted to be a MergeMemNode.
  MergeMemNode* merged_memory() {
    Node* mem = map_not_null()->memory();
    assert(mem->is_MergeMem(), "parse memory is always pre-split");
    return mem->as_MergeMem();
  }

  // Set the entire memory state; produce a new MergeMemNode.
  void set_all_memory(Node* newmem);

  // Create a memory projection from the call, then set_all_memory.
  void set_all_memory_call(Node* call);

  // Create a LoadNode, reading from the parser's memory state.
  // (Note: require_atomic_access is useful only with T_LONG.)
  Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt,
                  bool require_atomic_access = false) {
    // This version computes alias_index from bottom_type
    return make_load(ctl, adr, t, bt, adr->bottom_type()->is_ptr(),
                     require_atomic_access);
  }
  Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, const TypePtr* adr_type,
                  bool require_atomic_access = false) {
    // This version computes alias_index from an address type
    assert(adr_type != NULL, "use other make_load factory");
    return make_load(ctl, adr, t, bt, C->get_alias_index(adr_type),
                     require_atomic_access);
  }
  // This is the base version which is given an alias index.
  Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, int adr_idx,
                  bool require_atomic_access = false);

  // Create & transform a StoreNode and store the effect into the
  // parser's memory state.
  Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                        const TypePtr* adr_type,
                        bool require_atomic_access = false) {
    // This version computes alias_index from an address type
    assert(adr_type != NULL, "use other store_to_memory factory");
    return store_to_memory(ctl, adr, val, bt,
                           C->get_alias_index(adr_type),
                           require_atomic_access);
  }
  // This is the base version which is given an alias index.
  // Return the new StoreXNode
  Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                        int adr_idx,
                        bool require_atomic_access = false);


  // All-in-one pre-barrier, store, post-barrier.
  // Insert a write-barrier'd store.  This is to let generational GC
  // work; we have to flag all oop-stores before the next GC point.
  //
  // It comes in 3 flavors of store to an object, array, or unknown.
  // We use precise card marks for arrays to avoid scanning the entire
  // array.  We use imprecise for object.  We use precise for unknown
  // since we don't know if we have an array or an object or even
  // where the object starts.
  //
  // If val==NULL, it is taken to be a completely unknown value.  QQQ

  Node* store_oop_to_object(Node* ctl,
                            Node* obj,   // containing obj
                            Node* adr,   // actual address to store val at
                            const TypePtr* adr_type,
                            Node* val,
                            const Type* val_type,
                            BasicType bt);

  Node* store_oop_to_array(Node* ctl,
                           Node* obj,   // containing obj
                           Node* adr,   // actual address to store val at
                           const TypePtr* adr_type,
                           Node* val,
                           const Type* val_type,
                           BasicType bt);

  // Could be an array or object we don't know at compile time (unsafe ref.)
  Node* store_oop_to_unknown(Node* ctl,
                             Node* obj,   // containing obj
                             Node* adr,   // actual address to store val at
                             const TypePtr* adr_type,
                             Node* val,
                             const Type* val_type,
                             BasicType bt);

  // For the few cases where the barriers need special help
  void pre_barrier(Node* ctl, Node* obj, Node* adr, uint adr_idx,
                   Node* val, const Type* val_type, BasicType bt);

  void post_barrier(Node* ctl, Node* store, Node* obj, Node* adr, uint adr_idx,
                    Node* val, BasicType bt, bool use_precise);
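
  // Illustrative sketch of going through the parser's memory state
  // (hypothetical 'kit', 'obj' and 'offset'; the offset is assumed to
  // address an int field):
  //
  //   Node* adr = kit.basic_plus_adr(obj, offset);
  //   Node* val = kit.make_load(kit.control(), adr, TypeInt::INT, T_INT);
  //   kit.store_to_memory(kit.control(), adr, kit.intcon(0), T_INT,
  //                       adr->bottom_type()->is_ptr());
  //
  // Oop-valued fields should instead go through store_oop_to_object (et al.)
  // so that the GC pre/post barriers are emitted.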

  // Return addressing for an array element.
  Node* array_element_address(Node* ary, Node* idx, BasicType elembt,
                              // Optional constraint on the array size:
                              const TypeInt* sizetype = NULL);

  // Return a load of array element at idx.
  Node* load_array_element(Node* ctl, Node* ary, Node* idx, const TypeAryPtr* arytype);

  // CMS card-marks have an input from the corresponding oop_store
  void  cms_card_mark(Node* ctl, Node* adr, Node* val, Node* oop_store);

  //---------------- Dtrace support --------------------
  void make_dtrace_method_entry_exit(ciMethod* method, bool is_entry);
  void make_dtrace_method_entry(ciMethod* method) {
    make_dtrace_method_entry_exit(method, true);
  }
  void make_dtrace_method_exit(ciMethod* method) {
    make_dtrace_method_entry_exit(method, false);
  }

  //--------------- stub generation -------------------
 public:
  void gen_stub(address C_function,
                const char *name,
                int is_fancy_jump,
                bool pass_tls,
                bool return_pc);

  //---------- help for generating calls --------------

  // Do a null check on the receiver, which is in argument(0).
  Node* null_check_receiver(ciMethod* callee) {
    assert(!callee->is_static(), "must be a virtual method");
    int nargs = 1 + callee->signature()->size();
    // Null check on self without removing any arguments.  The argument
    // null check technically happens in the wrong place, which can lead to
    // invalid stack traces when the primitive is inlined into a method
    // which handles NullPointerExceptions.
    Node* receiver = argument(0);
    _sp += nargs;
    receiver = do_null_check(receiver, T_OBJECT);
    _sp -= nargs;
    return receiver;
  }

  // Fill in argument edges for the call from argument(0), argument(1), ...
  // (The next step is to call set_edges_for_java_call.)
  void set_arguments_for_java_call(CallJavaNode* call);

  // Fill in non-argument edges for the call.
  // Transform the call, and update the basics: control, i_o, memory.
  // (The next step is usually to call set_results_for_java_call.)
  void set_edges_for_java_call(CallJavaNode* call,
                               bool must_throw = false);

  // Finish up a java call that was started by set_edges_for_java_call.
  // Call add_exception on any throw arising from the call.
  // Return the call result (transformed).
  Node* set_results_for_java_call(CallJavaNode* call);

  // Similar to set_edges_for_java_call, but simplified for runtime calls.
  void  set_predefined_output_for_runtime_call(Node* call) {
    set_predefined_output_for_runtime_call(call, NULL, NULL);
  }
  void  set_predefined_output_for_runtime_call(Node* call,
                                               Node* keep_mem,
                                               const TypePtr* hook_mem);
  Node* set_predefined_input_for_runtime_call(SafePointNode* call);

  // helper functions for statistics
  void increment_counter(address counter_addr);   // increment a debug counter
  void increment_counter(Node*   counter_addr);   // increment a debug counter
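
  // The usual order of the call helpers above, as a sketch (hypothetical
  // 'kit' and 'call', where 'call' is a freshly allocated CallJavaNode):
  //
  //   kit.set_arguments_for_java_call(call);  // argument edges from the stack
  //   kit.set_edges_for_java_call(call);      // control, i_o, memory in and out
  //   Node* result = kit.set_results_for_java_call(call);
  //   // exceptions raised by the call are now on the kit's exception list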

  // Bail out to the interpreter right now.
  // The optional klass is the one causing the trap.
  // The optional reason is debug information written to the compile log.
  // Optional must_throw is the same as with add_safepoint_edges.
  void uncommon_trap(int trap_request,
                     ciKlass* klass = NULL, const char* reason_string = NULL,
                     bool must_throw = false, bool keep_exact_action = false);

  // Shorthand, to avoid saying "Deoptimization::" so many times.
  void uncommon_trap(Deoptimization::DeoptReason reason,
                     Deoptimization::DeoptAction action,
                     ciKlass* klass = NULL, const char* reason_string = NULL,
                     bool must_throw = false, bool keep_exact_action = false) {
    uncommon_trap(Deoptimization::make_trap_request(reason, action),
                  klass, reason_string, must_throw, keep_exact_action);
  }

  // Report if there were too many traps at the current method and bci.
  // Report if a trap was recorded, and/or PerMethodTrapLimit was exceeded.
  // If there is no MDO at all, report no trap unless told to assume it.
  bool too_many_traps(Deoptimization::DeoptReason reason) {
    return C->too_many_traps(method(), bci(), reason);
  }

  // Report if there were too many recompiles at the current method and bci.
  bool too_many_recompiles(Deoptimization::DeoptReason reason) {
    return C->too_many_recompiles(method(), bci(), reason);
  }
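
  // A common pattern, sketched for illustration (the particular reason and
  // action below are hypothetical choices):
  //
  //   if (too_many_traps(Deoptimization::Reason_null_check)) {
  //     // give up on the speculative optimization rather than trap again
  //   } else {
  //     uncommon_trap(Deoptimization::Reason_null_check,
  //                   Deoptimization::Action_maybe_recompile);
  //   }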

  // vanilla/CMS post barrier
  void write_barrier_post(Node *store, Node* obj, Node* adr, Node* val, bool use_precise);

  // Returns the object (if any) which was created the moment before.
  Node* just_allocated_object(Node* current_control);

  static bool use_ReduceInitialCardMarks() {
    return (ReduceInitialCardMarks
            && Universe::heap()->can_elide_tlab_store_barriers());
  }

  // Helper function to round double arguments before a call
  void round_double_arguments(ciMethod* dest_method);
  void round_double_result(ciMethod* dest_method);

  // rounding for strict float precision conformance
  Node* precision_rounding(Node* n);

  // rounding for strict double precision conformance
  Node* dprecision_rounding(Node* n);

  // rounding for non-strict double stores
  Node* dstore_rounding(Node* n);

  // Helper functions for fast/slow path codes
  Node* opt_iff(Node* region, Node* iff);
  Node* make_runtime_call(int flags,
                          const TypeFunc* call_type, address call_addr,
                          const char* call_name,
                          const TypePtr* adr_type, // NULL if no memory effects
                          Node* parm0 = NULL, Node* parm1 = NULL,
                          Node* parm2 = NULL, Node* parm3 = NULL,
                          Node* parm4 = NULL, Node* parm5 = NULL,
                          Node* parm6 = NULL, Node* parm7 = NULL);
  enum {  // flag values for make_runtime_call
    RC_NO_FP      = 1,   // CallLeafNoFPNode
    RC_NO_IO      = 2,   // do not hook IO edges
    RC_NO_LEAF    = 4,   // CallStaticJavaNode
    RC_MUST_THROW = 8,   // flag passed to add_safepoint_edges
    RC_NARROW_MEM = 16,  // input memory is same as output
    RC_UNCOMMON   = 32,  // freq. expected to be like uncommon trap
    RC_LEAF       = 0    // null value: no flags set
  };

  // merge in all memory slices from new_mem, along the given path
  void merge_memory(Node* new_mem, Node* region, int new_path);
  void make_slow_call_ex(Node* call, ciInstanceKlass* ex_klass, bool separate_io_proj);

  // Helper functions to build synchronizations
  int next_monitor();
  Node* insert_mem_bar(int opcode, Node* precedent = NULL);
  Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = NULL);
  // Optional 'precedent' is appended as an extra edge, to force ordering.
  FastLockNode* shared_lock(Node* obj);
  void shared_unlock(Node* box, Node* obj);

  // helper functions for the fast path/slow path idioms
  Node* fast_and_slow(Node* in, const Type *result_type, Node* null_result,
                      IfNode* fast_test, Node* fast_result,
                      address slow_call, const TypeFunc *slow_call_type,
                      Node* slow_arg, klassOop ex_klass, Node* slow_result);

  // Generate an instance-of idiom.  Used by both the instance-of bytecode
  // and the reflective instance-of call.
  Node* gen_instanceof( Node *subobj, Node* superkls );

  // Generate a check-cast idiom.  Used by both the check-cast bytecode
  // and the array-store bytecode
  Node* gen_checkcast( Node *subobj, Node* superkls,
                       Node* *failure_control = NULL );

  // Generate a subtyping check.  Takes as input the subtype and supertype.
  // Returns 2 values: sets the default control() to the true path and
  // returns the false path.  Only reads from constant memory taken from the
  // default memory; does not write anything.  It also doesn't take in an
  // Object; if you wish to check an Object you need to load the Object's
  // class prior to coming here.
  Node* gen_subtype_check(Node* subklass, Node* superklass);

  // Static parse-time type checking logic for gen_subtype_check:
  enum { SSC_always_false, SSC_always_true, SSC_easy_test, SSC_full_test };
  int static_subtype_check(ciKlass* superk, ciKlass* subk);
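
  // Illustrative sketch of the type-check idioms (hypothetical 'kit', 'obj'
  // and 'superkls', the latter a klass-typed Node):
  //
  //   Node* is_inst = kit.gen_instanceof(obj, superkls);  // int 0/1 result
  //   Node* casted  = kit.gen_checkcast(obj, superkls);   // throws on failure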

  // Exact type check used for predicted calls and casts.
  // Rewrites (*casted_receiver) to be casted to the stronger type.
  // (Caller is responsible for doing replace_in_map.)
  Node* type_check_receiver(Node* receiver, ciKlass* klass, float prob,
                            Node* *casted_receiver);

  // implementation of object creation
  Node* set_output_for_allocation(AllocateNode* alloc,
                                  const TypeOopPtr* oop_type,
                                  bool raw_mem_only);
  Node* get_layout_helper(Node* klass_node, jint& constant_value);
  Node* new_instance(Node* klass_node,
                     Node* slow_test = NULL,
                     bool raw_mem_only = false,
                     Node* *return_size_val = NULL);
  Node* new_array(Node* klass_node, Node* count_val,
                  bool raw_mem_only = false, Node* *return_size_val = NULL);

  // Handy for making control flow
  IfNode* create_and_map_if(Node* ctrl, Node* tst, float prob, float cnt) {
    IfNode* iff = new (C, 2) IfNode(ctrl, tst, prob, cnt);  // New IfNode's
    _gvn.set_type(iff, iff->Value(&_gvn));  // Value may be known at parse-time
    // Place 'if' on worklist if it will be in graph
    if (!tst->is_Con())  record_for_igvn(iff);  // Range-check and Null-check removal is later
    return iff;
  }

  IfNode* create_and_xform_if(Node* ctrl, Node* tst, float prob, float cnt) {
    IfNode* iff = new (C, 2) IfNode(ctrl, tst, prob, cnt);  // New IfNode's
    _gvn.transform(iff);                    // Value may be known at parse-time
    // Place 'if' on worklist if it will be in graph
    if (!tst->is_Con())  record_for_igvn(iff);  // Range-check and Null-check removal is later
    return iff;
  }
};

// Helper class to support building of control flow branches.  Upon
// creation the map and sp at bci are cloned and restored upon
// destruction.  Typical use:
//
// { PreserveJVMState pjvms(this);
//   // code of new branch
// }
// // here the JVM state at bci is established

class PreserveJVMState: public StackObj {
 protected:
  GraphKit*      _kit;
#ifdef ASSERT
  int            _block;  // PO of current block, if a Parse
  int            _bci;
#endif
  SafePointNode* _map;
  uint           _sp;

 public:
  PreserveJVMState(GraphKit* kit, bool clone_map = true);
  ~PreserveJVMState();
};

// Helper class to build cutouts of the form if (p) ; else {x...}.
// The code {x...} must not fall through.
// The kit's main flow of control is set to the "then" continuation of if(p).
class BuildCutout: public PreserveJVMState {
 public:
  BuildCutout(GraphKit* kit, Node* p, float prob, float cnt = COUNT_UNKNOWN);
  ~BuildCutout();
};
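
// Typical use, as a sketch (the probability constant, the trap reason, and
// the test node 'bol' are illustrative choices only):
//
// { BuildCutout unless(this, bol, PROB_MAX);
//   // code for the unlikely arm; must not fall through, e.g.:
//   uncommon_trap(Deoptimization::Reason_unreached,
//                 Deoptimization::Action_reinterpret);
// }
// // control continues here along the "then" arm of if (bol)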