Tue, 29 Dec 2009 19:08:54 +0100
6986046: C1 valuestack cleanup
Summary: fixes an historical oddity in C1 with inlining where all of the expression stacks are kept in the topmost ValueStack instead of being in their respective ValueStacks.
Reviewed-by: never
Contributed-by: Christian Wimmer <cwimmer@uci.edu>
1 /*
2 * Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
// Predefined classes
// Forward declarations for types defined elsewhere in the compiler;
// only pointers/references to them appear in this header.
class ciField;
class ValueStack;
class InstructionPrinter;
class IRScope;
class LIR_OprDesc;
typedef LIR_OprDesc* LIR_Opr;
// Instruction class hierarchy
//
// All leaf classes in the class hierarchy are concrete classes
// (i.e., are instantiated). All other classes are abstract and
// serve factoring.
//
// Forward declarations of every node in the hierarchy; the concrete
// (leaf) classes correspond one-to-one with the do_* methods of
// InstructionVisitor below.

class Instruction;
class   Phi;
class   Local;
class   Constant;
class   AccessField;
class     LoadField;
class     StoreField;
class   AccessArray;
class     ArrayLength;
class     AccessIndexed;
class       LoadIndexed;
class       StoreIndexed;
class   NegateOp;
class   Op2;
class     ArithmeticOp;
class     ShiftOp;
class     LogicOp;
class     CompareOp;
class     IfOp;
class   Convert;
class   NullCheck;
class   OsrEntry;
class   ExceptionObject;
class   StateSplit;
class     Invoke;
class     NewInstance;
class     NewArray;
class       NewTypeArray;
class       NewObjectArray;
class       NewMultiArray;
class     TypeCheck;
class       CheckCast;
class       InstanceOf;
class     AccessMonitor;
class       MonitorEnter;
class       MonitorExit;
class     Intrinsic;
class     BlockBegin;
class     BlockEnd;
class       Goto;
class       If;
class       IfInstanceOf;
class       Switch;
class         TableSwitch;
class         LookupSwitch;
class       Return;
class       Throw;
class       Base;
class   RoundFP;
class   UnsafeOp;
class     UnsafeRawOp;
class       UnsafeGetRaw;
class       UnsafePutRaw;
class     UnsafeObjectOp;
class       UnsafeGetObject;
class       UnsafePutObject;
class         UnsafePrefetch;
class           UnsafePrefetchRead;
class           UnsafePrefetchWrite;
class   ProfileCall;
class   ProfileInvoke;
// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
// Growable array and stack types for Values and ValueStacks
// (define_array/define_stack are project macros generating the types).
define_array(ValueArray, Value)
define_stack(Values, ValueArray)

define_array(ValueStackArray, ValueStack*)
define_stack(ValueStackStack, ValueStackArray)
// BlockClosure is the base class for block traversal/iteration.
// Subclasses implement block_do, which is invoked once per block
// by the various iteration routines (e.g. BlockList below).
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};
// A simple closure class for visiting the values of an Instruction.
// Receives a Value* (pointer to the reference) so the visitor may
// substitute the referenced instruction in place.
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};
// Some array and list classes
define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// A growable list of basic blocks with iteration helpers.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure first-to-last
  void iterate_backward(BlockClosure* closure);  // apply closure last-to-first
  void blocks_do(void f(BlockBegin*));           // apply a plain function to each block
  void values_do(ValueVisitor* f);               // visit all values of all blocks
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};
// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Dispatch happens via Instruction::visit (see the LEAF macro below).
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
};
// Hashing support
//
// Note: This hash functions affect the performance
//       of ValueMap - make changes carefully!
//
// HASHn combines n values into one intx by shift-and-xor;
// each argument is cast via HASH1 so pointers and enums mix freely.

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1    ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))
// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.
//
// Each fN argument is evaluated both on this and on the candidate _v, so
// the arguments must be side-effect free expressions (e.g. x()->subst()).

#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \

268 // The mother of all instructions...
270 class Instruction: public CompilationResourceObj {
271 private:
272 int _id; // the unique instruction id
273 #ifndef PRODUCT
274 int _printable_bci; // the bci of the instruction for printing
275 #endif
276 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1
277 int _pin_state; // set of PinReason describing the reason for pinning
278 ValueType* _type; // the instruction value type
279 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions)
280 Instruction* _subst; // the substitution instruction if any
281 LIR_Opr _operand; // LIR specific information
282 unsigned int _flags; // Flag bits
284 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
285 ValueStack* _exception_state; // Copy of state for exception handling
286 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
288 friend class UseCountComputer;
289 friend class BlockBegin;
291 void update_exception_state(ValueStack* state);
293 bool has_printable_bci() const { return NOT_PRODUCT(_printable_bci != -99) PRODUCT_ONLY(false); }
295 protected:
296 void set_type(ValueType* type) {
297 assert(type != NULL, "type must exist");
298 _type = type;
299 }
301 public:
302 void* operator new(size_t size) {
303 Compilation* c = Compilation::current();
304 void* res = c->arena()->Amalloc(size);
305 ((Instruction*)res)->_id = c->get_next_id();
306 return res;
307 }
309 enum InstructionFlag {
310 NeedsNullCheckFlag = 0,
311 CanTrapFlag,
312 DirectCompareFlag,
313 IsEliminatedFlag,
314 IsInitializedFlag,
315 IsLoadedFlag,
316 IsSafepointFlag,
317 IsStaticFlag,
318 IsStrictfpFlag,
319 NeedsStoreCheckFlag,
320 NeedsWriteBarrierFlag,
321 PreservesStateFlag,
322 TargetIsFinalFlag,
323 TargetIsLoadedFlag,
324 TargetIsStrictfpFlag,
325 UnorderedIsTrueFlag,
326 NeedsPatchingFlag,
327 ThrowIncompatibleClassChangeErrorFlag,
328 ProfileMDOFlag,
329 IsLinkedInBlockFlag,
330 InstructionLastFlag
331 };
333 public:
334 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; }
335 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); };
337 // 'globally' used condition values
338 enum Condition {
339 eql, neq, lss, leq, gtr, geq
340 };
342 // Instructions may be pinned for many reasons and under certain conditions
343 // with enough knowledge it's possible to safely unpin them.
344 enum PinReason {
345 PinUnknown = 1 << 0
346 , PinExplicitNullCheck = 1 << 3
347 , PinStackForStateSplit= 1 << 12
348 , PinStateSplitConstructor= 1 << 13
349 , PinGlobalValueNumbering= 1 << 14
350 };
352 static Condition mirror(Condition cond);
353 static Condition negate(Condition cond);
355 // initialization
356 static int number_of_instructions() {
357 return Compilation::current()->number_of_instructions();
358 }
360 // creation
361 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false, bool create_hi = true)
362 : _use_count(0)
363 #ifndef PRODUCT
364 , _printable_bci(-99)
365 #endif
366 , _pin_state(0)
367 , _type(type)
368 , _next(NULL)
369 , _subst(NULL)
370 , _flags(0)
371 , _operand(LIR_OprFact::illegalOpr)
372 , _state_before(state_before)
373 , _exception_handlers(NULL)
374 {
375 check_state(state_before);
376 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist");
377 update_exception_state(_state_before);
378 }
380 // accessors
381 int id() const { return _id; }
382 #ifndef PRODUCT
383 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
384 void set_printable_bci(int bci) { NOT_PRODUCT(_printable_bci = bci;) }
385 #endif
386 int use_count() const { return _use_count; }
387 int pin_state() const { return _pin_state; }
388 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
389 ValueType* type() const { return _type; }
390 Instruction* prev(BlockBegin* block); // use carefully, expensive operation
391 Instruction* next() const { return _next; }
392 bool has_subst() const { return _subst != NULL; }
393 Instruction* subst() { return _subst == NULL ? this : _subst->subst(); }
394 LIR_Opr operand() const { return _operand; }
396 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
397 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
398 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
399 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; }
401 bool has_uses() const { return use_count() > 0; }
402 ValueStack* state_before() const { return _state_before; }
403 ValueStack* exception_state() const { return _exception_state; }
404 virtual bool needs_exception_state() const { return true; }
405 XHandlers* exception_handlers() const { return _exception_handlers; }
407 // manipulation
408 void pin(PinReason reason) { _pin_state |= reason; }
409 void pin() { _pin_state |= PinUnknown; }
410 // DANGEROUS: only used by EliminateStores
411 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }
413 Instruction* set_next(Instruction* next) {
414 assert(next->has_printable_bci(), "_printable_bci should have been set");
415 assert(next != NULL, "must not be NULL");
416 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
417 assert(next->can_be_linked(), "shouldn't link these instructions into list");
419 next->set_flag(Instruction::IsLinkedInBlockFlag, true);
420 _next = next;
421 return next;
422 }
424 Instruction* set_next(Instruction* next, int bci) {
425 #ifndef PRODUCT
426 next->set_printable_bci(bci);
427 #endif
428 return set_next(next);
429 }
431 void set_subst(Instruction* subst) {
432 assert(subst == NULL ||
433 type()->base() == subst->type()->base() ||
434 subst->type()->base() == illegalType, "type can't change");
435 _subst = subst;
436 }
437 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; }
438 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
440 // machine-specifics
441 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
442 void clear_operand() { _operand = LIR_OprFact::illegalOpr; }
444 // generic
445 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
446 virtual Phi* as_Phi() { return NULL; }
447 virtual Local* as_Local() { return NULL; }
448 virtual Constant* as_Constant() { return NULL; }
449 virtual AccessField* as_AccessField() { return NULL; }
450 virtual LoadField* as_LoadField() { return NULL; }
451 virtual StoreField* as_StoreField() { return NULL; }
452 virtual AccessArray* as_AccessArray() { return NULL; }
453 virtual ArrayLength* as_ArrayLength() { return NULL; }
454 virtual AccessIndexed* as_AccessIndexed() { return NULL; }
455 virtual LoadIndexed* as_LoadIndexed() { return NULL; }
456 virtual StoreIndexed* as_StoreIndexed() { return NULL; }
457 virtual NegateOp* as_NegateOp() { return NULL; }
458 virtual Op2* as_Op2() { return NULL; }
459 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; }
460 virtual ShiftOp* as_ShiftOp() { return NULL; }
461 virtual LogicOp* as_LogicOp() { return NULL; }
462 virtual CompareOp* as_CompareOp() { return NULL; }
463 virtual IfOp* as_IfOp() { return NULL; }
464 virtual Convert* as_Convert() { return NULL; }
465 virtual NullCheck* as_NullCheck() { return NULL; }
466 virtual OsrEntry* as_OsrEntry() { return NULL; }
467 virtual StateSplit* as_StateSplit() { return NULL; }
468 virtual Invoke* as_Invoke() { return NULL; }
469 virtual NewInstance* as_NewInstance() { return NULL; }
470 virtual NewArray* as_NewArray() { return NULL; }
471 virtual NewTypeArray* as_NewTypeArray() { return NULL; }
472 virtual NewObjectArray* as_NewObjectArray() { return NULL; }
473 virtual NewMultiArray* as_NewMultiArray() { return NULL; }
474 virtual TypeCheck* as_TypeCheck() { return NULL; }
475 virtual CheckCast* as_CheckCast() { return NULL; }
476 virtual InstanceOf* as_InstanceOf() { return NULL; }
477 virtual AccessMonitor* as_AccessMonitor() { return NULL; }
478 virtual MonitorEnter* as_MonitorEnter() { return NULL; }
479 virtual MonitorExit* as_MonitorExit() { return NULL; }
480 virtual Intrinsic* as_Intrinsic() { return NULL; }
481 virtual BlockBegin* as_BlockBegin() { return NULL; }
482 virtual BlockEnd* as_BlockEnd() { return NULL; }
483 virtual Goto* as_Goto() { return NULL; }
484 virtual If* as_If() { return NULL; }
485 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; }
486 virtual TableSwitch* as_TableSwitch() { return NULL; }
487 virtual LookupSwitch* as_LookupSwitch() { return NULL; }
488 virtual Return* as_Return() { return NULL; }
489 virtual Throw* as_Throw() { return NULL; }
490 virtual Base* as_Base() { return NULL; }
491 virtual RoundFP* as_RoundFP() { return NULL; }
492 virtual ExceptionObject* as_ExceptionObject() { return NULL; }
493 virtual UnsafeOp* as_UnsafeOp() { return NULL; }
495 virtual void visit(InstructionVisitor* v) = 0;
497 virtual bool can_trap() const { return false; }
499 virtual void input_values_do(ValueVisitor* f) = 0;
500 virtual void state_values_do(ValueVisitor* f);
501 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
502 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
504 virtual ciType* exact_type() const { return NULL; }
505 virtual ciType* declared_type() const { return NULL; }
507 // hashing
508 virtual const char* name() const = 0;
509 HASHING1(Instruction, false, id()) // hashing disabled by default
511 // debugging
512 static void check_state(ValueStack* state) PRODUCT_RETURN;
513 void print() PRODUCT_RETURN;
514 void print_line() PRODUCT_RETURN;
515 void print(InstructionPrinter& ip) PRODUCT_RETURN;
516 };
// The following macros are used to define base (i.e., non-leaf)
// and leaf instruction classes. They define class-name related
// generic functionality in one place.
//
// Note: both macros deliberately leave the class definition *open*
// (no closing brace); the members of the class follow the macro
// invocation and the user code supplies the terminating "};".

#define BASE(class_name, super_class_name)       \
  class class_name: public super_class_name {    \
   public:                                       \
    virtual class_name* as_##class_name()        { return this; }              \


#define LEAF(class_name, super_class_name)       \
  BASE(class_name, super_class_name)             \
   public:                                       \
    virtual const char* name() const             { return #class_name; }       \
    virtual void visit(InstructionVisitor* v)    { v->do_##class_name(this); } \

// Debugging support

// Visitor that asserts every visited value reference is non-NULL;
// ASSERT_VALUES runs it over all values of the current instruction
// (intended for use inside Instruction subclass constructors).
#ifdef ASSERT
  class AssertValues: public ValueVisitor {
    void visit(Value* x)             { assert((*x) != NULL, "value must exist"); }
  };
  #define ASSERT_VALUES                          { AssertValues assert_value; values_do(&assert_value); }
#else
  #define ASSERT_VALUES
#endif // ASSERT
549 // A Phi is a phi function in the sense of SSA form. It stands for
550 // the value of a local variable at the beginning of a join block.
551 // A Phi consists of n operands, one for every incoming branch.
553 LEAF(Phi, Instruction)
554 private:
555 BlockBegin* _block; // the block to which the phi function belongs
556 int _pf_flags; // the flags of the phi function
557 int _index; // to value on operand stack (index < 0) or to local
558 public:
559 // creation
560 Phi(ValueType* type, BlockBegin* b, int index)
561 : Instruction(type->base())
562 , _pf_flags(0)
563 , _block(b)
564 , _index(index)
565 {
566 if (type->is_illegal()) {
567 make_illegal();
568 }
569 }
571 // flags
572 enum Flag {
573 no_flag = 0,
574 visited = 1 << 0,
575 cannot_simplify = 1 << 1
576 };
578 // accessors
579 bool is_local() const { return _index >= 0; }
580 bool is_on_stack() const { return !is_local(); }
581 int local_index() const { assert(is_local(), ""); return _index; }
582 int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); }
584 Value operand_at(int i) const;
585 int operand_count() const;
587 BlockBegin* block() const { return _block; }
589 void set(Flag f) { _pf_flags |= f; }
590 void clear(Flag f) { _pf_flags &= ~f; }
591 bool is_set(Flag f) const { return (_pf_flags & f) != 0; }
593 // Invalidates phis corresponding to merges of locals of two different types
594 // (these should never be referenced, otherwise the bytecodes are illegal)
595 void make_illegal() {
596 set(cannot_simplify);
597 set_type(illegalType);
598 }
600 bool is_illegal() const {
601 return type()->is_illegal();
602 }
604 // generic
605 virtual void input_values_do(ValueVisitor* f) {
606 }
607 };
// A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int _java_index; // the local index within the method to which the local belongs
 public:
  // creation
  Local(ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
  {}

  // accessors
  int java_index() const { return _java_index; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }
};
// A compile-time constant value; may require runtime patching (and thus
// carry a state_before) when the constant's class is not yet loaded.
LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, NULL, true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  // Constructor for constants which need patching; state_before is required.
  Constant(ValueType* type, ValueStack* state_before):
    Instruction(type, state_before, true)
  {
    assert(state_before != NULL, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    // since it's patching it needs to be pinned
    pin();
  }

  virtual bool can_trap() const                  { return state_before() != NULL; }
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual BlockBegin* compare(Instruction::Condition condition, Value right,
                              BlockBegin* true_sux, BlockBegin* false_sux);
};
// Common base for field loads and stores; holds the receiver object,
// the field offset, and patching/null-check bookkeeping.
BASE(AccessField, Instruction)
 private:
  Value       _obj;
  int         _offset;
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool is_loaded, bool is_initialized)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    set_needs_null_check(!is_static);
    set_flag(IsLoadedFlag, is_loaded);
    set_flag(IsInitializedFlag, is_initialized);
    set_flag(IsStaticFlag, is_static);
    ASSERT_VALUES
    if (!is_loaded || (PatchALot && !field->is_volatile())) {
      // need to patch if the holder wasn't loaded or we're testing
      // using PatchALot. Don't allow PatchALot for fields which are
      // known to be volatile, because they aren't patchable.
      set_flag(NeedsPatchingFlag, true);
    }
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  bool is_loaded() const                         { return check_flag(IsLoadedFlag); }
  bool is_initialized() const                    { return check_flag(IsInitializedFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};
// Loads the value of an object or static field.
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool is_loaded, bool is_initialized)
  : AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized)
  {}

  ciType* declared_type() const;
  ciType* exact_type() const;

  // generic
  HASHING2(LoadField, is_loaded() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if not yet loaded or if volatile
};
// Stores a value into an object or static field; never value-numbered.
LEAF(StoreField, AccessField)
 private:
  Value _value;

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool is_loaded, bool is_initialized)
  : AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized)
  , _value(value)
  {
    // object stores need a GC write barrier
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};
// Common base for all array access instructions; holds the array value
// and forces a null check (array accesses can throw).
BASE(AccessArray, Instruction)
 private:
  Value _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};
// Reads the length of an array; always of int type.
LEAF(ArrayLength, AccessArray)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See AccessField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};
// Common base for indexed (element) array accesses; adds the index,
// the (optional) known array length, and the element type.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;                             // may be NULL if the length is not known
  BasicType _elt_type;

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};
// Loads an element from an array.
LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See AccessField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};
// Stores a value into an array element; may need a write barrier and/or
// a store (subtype) check for object arrays. Never value-numbered.
LEAF(StoreIndexed, AccessIndexed)
 private:
  Value       _value;

  ciMethod*   _profiled_method;                  // method/bci for methodDataOop profiling
  int         _profiled_bci;
 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0)
  {
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  // Helpers for methodDataOop profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};
// Arithmetic negation of a single value; result type follows the operand.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};
// Common base for all binary operations; holds the bytecode and the
// two operand values.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  // only legal for commutative operations (see assert)
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
// ArithmeticOp represents add/sub/mul/div/rem; the result type is the
// meet of the operand types. Division and remainder can trap (division
// by zero), in which case the instruction is pinned.
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool        is_strictfp() const                { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// ShiftOp represents the shift bytecodes (shl/shr/ushr); note that the
// result type is that of the value being shifted, not of the shift count.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// LogicOp represents the bitwise and/or/xor bytecodes; the result type
// is the meet of the operand types.
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// CompareOp represents the three-way compare bytecodes (lcmp, fcmpl,
// fcmpg, dcmpl, dcmpg); the result is always an int (-1, 0, or 1).
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// IfOp is a conditional move: it selects tval or fval depending on the
// outcome of comparing x and y. The condition code is smuggled through
// the Op2 _op field, hence the cond()/op() accessor pair below.
LEAF(IfOp, Op2)
 private:
  Value _tval;  // value produced when the condition holds
  Value _fval;  // value produced when the condition does not hold

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  // op() is deliberately hidden: the Op2 slot holds a Condition, not a
  // bytecode, so callers must use cond() instead
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};
// Convert represents the primitive conversion bytecodes (i2l, f2d, i2b,
// etc.); _op records which conversion, to_type its result type.
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;     // the conversion bytecode
  Value           _value;  // the value being converted

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};
// NullCheck represents an explicit null check on an object; it produces
// the (known non-null) object as its result. Null-check elimination may
// later clear the CanTrapFlag when the check is proven redundant.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;  // the object being null-checked

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};
// StateSplit is the abstract base class for all instructions that need a
// complete ValueStack snapshot after execution (e.g. for deoptimization).
// Such instructions are always pinned; _state is filled in exactly once
// via set_state() after construction.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;  // the state after this instruction; NULL until set_state() is called

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  // _state may be assigned only once; overwriting would lose debug info
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};
// Invoke represents a method call (invokestatic/special/virtual/
// interface/dynamic). The receiver, if any, is kept separately from the
// remaining arguments.
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;          // which invoke bytecode produced this call
  Value           _recv;          // the receiver, or NULL for static/dynamic calls
  Values*         _args;          // the arguments (excluding the receiver)
  BasicTypeList*  _signature;     // basic types of the call's signature
  int             _vtable_index;  // vtable index for virtual dispatch
  ciMethod*       _target;        // the resolved target method

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded (flag is only set for loaded targets)
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};
// NewInstance represents the `new` bytecode: allocation of a plain
// (non-array) object instance of the given klass.
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;  // the klass being instantiated

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
  : StateSplit(instanceType, state_before)
  , _klass(klass)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
};
// NewArray is the abstract base class for array allocations; it holds
// the requested length (NULL for NewMultiArray, which keeps all of its
// dimensions in its own Values list instead).
BASE(NewArray, StateSplit)
 private:
  Value _length;  // requested array length; NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};
// NewTypeArray represents the `newarray` bytecode: allocation of an
// array of a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;  // primitive element type of the array

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};
// NewObjectArray represents the `anewarray` bytecode: allocation of a
// one-dimensional array of object references.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;  // element klass of the array

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};
// NewMultiArray represents the `multianewarray` bytecode. Unlike the
// other array allocations it has no single length value; all dimension
// sizes are kept in _dims (NewArray's length is NULL).
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;  // the (multi-dimensional) array klass
  Values*  _dims;   // the dimension sizes, one Value per rank

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};
// TypeCheck is the abstract base class for checkcast and instanceof.
// It carries the klass to test against, the object being tested, and a
// profiling slot (method/bci) for methodDataOop type profiling.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*    _klass;           // the klass tested against (NULL if not loaded)
  Value       _obj;             // the object whose type is checked

  ciMethod*   _profiled_method; // method whose MDO receives profile data (NULL if not profiled)
  int         _profiled_bci;    // bci within _profiled_method for the profile slot

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  // direct compare: the check can be a single klass-pointer comparison
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for methodDataOop profiling
  void set_should_profile(bool value)                { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)         { _profiled_method = method;   }
  void set_profiled_bci(int bci)                     { _profiled_bci = bci;         }
  bool      should_profile() const                   { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const                  { return _profiled_method;     }
  int       profiled_bci() const                     { return _profiled_bci;        }
};
// CheckCast represents the `checkcast` bytecode; it may also be flagged
// to throw IncompatibleClassChangeError instead of ClassCastException
// (used when the check stands in for an interface-call receiver check).
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
  ciType* exact_type() const;
};
// InstanceOf represents the `instanceof` bytecode; the result is an int
// (0 or 1), and unlike CheckCast it never throws.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};
// AccessMonitor is the abstract base class for monitorenter/monitorexit.
// _monitor_no is the index of the monitor slot in the frame's monitor
// area for this lock.
BASE(AccessMonitor, StateSplit)
 private:
  Value       _obj;         // the object being locked/unlocked
  int         _monitor_no;  // index of the monitor within the frame

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};
// MonitorEnter represents the `monitorenter` bytecode (lock acquisition).
LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const                  { return true; }
};
// MonitorExit represents the `monitorexit` bytecode (lock release).
// No state_before is recorded (constructed with NULL).
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};
// Intrinsic represents a call to a method that the compiler implements
// directly (identified by a vmIntrinsics::ID) instead of emitting a
// regular Invoke. When a receiver exists it is argument 0 of _args.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;    // which intrinsic this is
  Values*          _args;  // all arguments, including the receiver (if any) at index 0
  Value            _recv;  // cached copy of argument 0 when there is a receiver, else NULL

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag,        cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap()) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                   { return check_flag(PreservesStateFlag); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};
1432 class LIR_List;
1434 LEAF(BlockBegin, StateSplit)
1435 private:
1436 int _block_id; // the unique block id
1437 int _bci; // start-bci of block
1438 int _depth_first_number; // number of this block in a depth-first ordering
1439 int _linear_scan_number; // number of this block in linear-scan ordering
1440 int _loop_depth; // the loop nesting level of this block
1441 int _loop_index; // number of the innermost loop of this block
1442 int _flags; // the flags associated with this block
1444 // fields used by BlockListBuilder
1445 int _total_preds; // number of predecessors found by BlockListBuilder
1446 BitMap _stores_to_locals; // bit is set when a local variable is stored in the block
1448 // SSA specific fields: (factor out later)
1449 BlockList _successors; // the successors of this block
1450 BlockList _predecessors; // the predecessors of this block
1451 BlockBegin* _dominator; // the dominator of this block
1452 // SSA specific ends
1453 BlockEnd* _end; // the last instruction of this block
1454 BlockList _exception_handlers; // the exception handlers potentially invoked by this block
1455 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler
1456 int _exception_handler_pco; // if this block is the start of an exception handler,
1457 // this records the PC offset in the assembly code of the
1458 // first instruction in this block
1459 Label _label; // the label associated with this block
1460 LIR_List* _lir; // the low level intermediate representation for this block
1462 BitMap _live_in; // set of live LIR_Opr registers at entry to this block
1463 BitMap _live_out; // set of live LIR_Opr registers at exit from this block
1464 BitMap _live_gen; // set of registers used before any redefinition in this block
1465 BitMap _live_kill; // set of registers defined in this block
1467 BitMap _fpu_register_usage;
1468 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan
1469 int _first_lir_instruction_id; // ID of first LIR instruction in this block
1470 int _last_lir_instruction_id; // ID of last LIR instruction in this block
1472 void iterate_preorder (boolArray& mark, BlockClosure* closure);
1473 void iterate_postorder(boolArray& mark, BlockClosure* closure);
1475 friend class SuxAndWeightAdjuster;
1477 public:
1478 void* operator new(size_t size) {
1479 Compilation* c = Compilation::current();
1480 void* res = c->arena()->Amalloc(size);
1481 ((BlockBegin*)res)->_id = c->get_next_id();
1482 ((BlockBegin*)res)->_block_id = c->get_next_block_id();
1483 return res;
1484 }
1486 // initialization/counting
1487 static int number_of_blocks() {
1488 return Compilation::current()->number_of_blocks();
1489 }
1491 // creation
1492 BlockBegin(int bci)
1493 : StateSplit(illegalType)
1494 , _bci(bci)
1495 , _depth_first_number(-1)
1496 , _linear_scan_number(-1)
1497 , _loop_depth(0)
1498 , _flags(0)
1499 , _dominator(NULL)
1500 , _end(NULL)
1501 , _predecessors(2)
1502 , _successors(2)
1503 , _exception_handlers(1)
1504 , _exception_states(NULL)
1505 , _exception_handler_pco(-1)
1506 , _lir(NULL)
1507 , _loop_index(-1)
1508 , _live_in()
1509 , _live_out()
1510 , _live_gen()
1511 , _live_kill()
1512 , _fpu_register_usage()
1513 , _fpu_stack_state(NULL)
1514 , _first_lir_instruction_id(-1)
1515 , _last_lir_instruction_id(-1)
1516 , _total_preds(0)
1517 , _stores_to_locals()
1518 {
1519 #ifndef PRODUCT
1520 set_printable_bci(bci);
1521 #endif
1522 }
1524 // accessors
1525 int block_id() const { return _block_id; }
1526 int bci() const { return _bci; }
1527 BlockList* successors() { return &_successors; }
1528 BlockBegin* dominator() const { return _dominator; }
1529 int loop_depth() const { return _loop_depth; }
1530 int depth_first_number() const { return _depth_first_number; }
1531 int linear_scan_number() const { return _linear_scan_number; }
1532 BlockEnd* end() const { return _end; }
1533 Label* label() { return &_label; }
1534 LIR_List* lir() const { return _lir; }
1535 int exception_handler_pco() const { return _exception_handler_pco; }
1536 BitMap& live_in() { return _live_in; }
1537 BitMap& live_out() { return _live_out; }
1538 BitMap& live_gen() { return _live_gen; }
1539 BitMap& live_kill() { return _live_kill; }
1540 BitMap& fpu_register_usage() { return _fpu_register_usage; }
1541 intArray* fpu_stack_state() const { return _fpu_stack_state; }
1542 int first_lir_instruction_id() const { return _first_lir_instruction_id; }
1543 int last_lir_instruction_id() const { return _last_lir_instruction_id; }
1544 int total_preds() const { return _total_preds; }
1545 BitMap& stores_to_locals() { return _stores_to_locals; }
1547 // manipulation
1548 void set_dominator(BlockBegin* dom) { _dominator = dom; }
1549 void set_loop_depth(int d) { _loop_depth = d; }
1550 void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
1551 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; }
1552 void set_end(BlockEnd* end);
1553 void disconnect_from_graph();
1554 static void disconnect_edge(BlockBegin* from, BlockBegin* to);
1555 BlockBegin* insert_block_between(BlockBegin* sux);
1556 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
1557 void set_lir(LIR_List* lir) { _lir = lir; }
1558 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
1559 void set_live_in (BitMap map) { _live_in = map; }
1560 void set_live_out (BitMap map) { _live_out = map; }
1561 void set_live_gen (BitMap map) { _live_gen = map; }
1562 void set_live_kill (BitMap map) { _live_kill = map; }
1563 void set_fpu_register_usage(BitMap map) { _fpu_register_usage = map; }
1564 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; }
1565 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; }
1566 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; }
1567 void increment_total_preds(int n = 1) { _total_preds += n; }
1568 void init_stores_to_locals(int locals_count) { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); }
1570 // generic
1571 virtual void state_values_do(ValueVisitor* f);
1573 // successors and predecessors
1574 int number_of_sux() const;
1575 BlockBegin* sux_at(int i) const;
1576 void add_successor(BlockBegin* sux);
1577 void remove_successor(BlockBegin* pred);
1578 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); }
1580 void add_predecessor(BlockBegin* pred);
1581 void remove_predecessor(BlockBegin* pred);
1582 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
1583 int number_of_preds() const { return _predecessors.length(); }
1584 BlockBegin* pred_at(int i) const { return _predecessors[i]; }
1586 // exception handlers potentially invoked by this block
1587 void add_exception_handler(BlockBegin* b);
1588 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
1589 int number_of_exception_handlers() const { return _exception_handlers.length(); }
1590 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); }
1592 // states of the instructions that have an edge to this exception handler
1593 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
1594 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
1595 int add_exception_state(ValueStack* state);
1597 // flags
1598 enum Flag {
1599 no_flag = 0,
1600 std_entry_flag = 1 << 0,
1601 osr_entry_flag = 1 << 1,
1602 exception_entry_flag = 1 << 2,
1603 subroutine_entry_flag = 1 << 3,
1604 backward_branch_target_flag = 1 << 4,
1605 is_on_work_list_flag = 1 << 5,
1606 was_visited_flag = 1 << 6,
1607 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand
1608 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split
1609 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan
1610 linear_scan_loop_end_flag = 1 << 10 // set during loop-detection for LinearScan
1611 };
1613 void set(Flag f) { _flags |= f; }
1614 void clear(Flag f) { _flags &= ~f; }
1615 bool is_set(Flag f) const { return (_flags & f) != 0; }
1616 bool is_entry_block() const {
1617 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
1618 return (_flags & entry_mask) != 0;
1619 }
1621 // iteration
1622 void iterate_preorder (BlockClosure* closure);
1623 void iterate_postorder (BlockClosure* closure);
1625 void block_values_do(ValueVisitor* f);
1627 // loops
1628 void set_loop_index(int ix) { _loop_index = ix; }
1629 int loop_index() const { return _loop_index; }
1631 // merging
1632 bool try_merge(ValueStack* state); // try to merge states at block begin
1633 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); }
1635 // debugging
1636 void print_block() PRODUCT_RETURN;
1637 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
1638 };
// BlockEnd is the abstract base class for the last instruction of every
// basic block (goto, if, switch, return, throw, ...). It owns the list
// of successor blocks; by convention the default successor is last.
BASE(BlockEnd, StateSplit)
 private:
  BlockBegin* _begin;  // the block this instruction terminates
  BlockList*  _sux;    // successor blocks; default successor is the last entry

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _begin(NULL)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  BlockBegin* begin() const                      { return _begin; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const                { return _sux->at(i); }
  BlockBegin* default_sux() const                { return sux_at(number_of_sux() - 1); }
  BlockBegin** addr_sux_at(int i) const          { return _sux->adr_at(i); }
  int sux_index(BlockBegin* sux) const           { return _sux->find(sux); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};
1684 LEAF(Goto, BlockEnd)
1685 public:
1686 enum Direction {
1687 none, // Just a regular goto
1688 taken, not_taken // Goto produced from If
1689 };
1690 private:
1691 ciMethod* _profiled_method;
1692 int _profiled_bci;
1693 Direction _direction;
1694 public:
1695 // creation
1696 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
1697 : BlockEnd(illegalType, state_before, is_safepoint)
1698 , _direction(none)
1699 , _profiled_method(NULL)
1700 , _profiled_bci(0) {
1701 BlockList* s = new BlockList(1);
1702 s->append(sux);
1703 set_sux(s);
1704 }
1706 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint)
1707 , _direction(none)
1708 , _profiled_method(NULL)
1709 , _profiled_bci(0) {
1710 BlockList* s = new BlockList(1);
1711 s->append(sux);
1712 set_sux(s);
1713 }
1715 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1716 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
1717 int profiled_bci() const { return _profiled_bci; }
1718 Direction direction() const { return _direction; }
1720 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
1721 void set_profiled_method(ciMethod* method) { _profiled_method = method; }
1722 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1723 void set_direction(Direction d) { _direction = d; }
1724 };
// If represents a two-way conditional branch. Successor 0 is the true
// successor, successor 1 the false successor (see sux_for). The
// Canonicalizer may swap operands/successors; _swapped records whether
// the order differs from the original bytecode.
LEAF(If, BlockEnd)
 private:
  Value       _x;                // left comparison operand
  Condition   _cond;             // the comparison condition
  Value       _y;                // right comparison operand
  ciMethod*   _profiled_method;  // method whose MDO receives profile data (NULL if not profiled)
  int         _profiled_bci;     // Canonicalizer may alter bci of If node
  bool        _swapped;          // Is the order reversed with respect to the original If in the
                                 // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }
  BlockBegin* usux() const                       { return sux_for(unordered_is_true()); }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                        { return _swapped; }

  // manipulation
  // swap x and y; the condition is mirrored so the branch is unchanged
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // swap true/false successors; the condition is negated so the branch is unchanged
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value)             { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)      { _profiled_method = method; }
  void set_profiled_bci(int bci)                  { _profiled_bci = bci;       }
  void set_swapped(bool value)                    { _swapped = value;         }
  // generic
  virtual void input_values_do(ValueVisitor* f)   { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};
// IfInstanceOf is a fused InstanceOf + If: a two-way branch on whether
// obj is (or is not) an instance of klass. See Notes 1 and 2 below for
// the test polarity and bci bookkeeping.
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;             // the klass tested against
  Value    _obj;               // the object whose type is tested
  bool     _test_is_instance;  // jump if instance
  int      _instanceof_bci;    // bci of the original InstanceOf instruction

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* an instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  int instanceof_bci() const                     { return _instanceof_bci; }
  bool test_is_instance() const                  { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }

  // manipulation
  // swap true/false successors; the test polarity is flipped so the branch is unchanged
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_obj); }
};
// Switch is the abstract base class for tableswitch/lookupswitch. The
// default successor is, by BlockEnd convention, the last entry in the
// successor list, so length() (number of cases) is number_of_sux() - 1.
BASE(Switch, BlockEnd)
 private:
  Value _tag;  // the value being switched on

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                              { return _tag; }
  int length() const                             { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_tag); }
};
// TableSwitch represents the `tableswitch` bytecode: a dense key range
// [lo_key, hi_key] with one successor per key plus the default.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;  // smallest key in the table

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + length() - 1; }
};
// LookupSwitch represents the `lookupswitch` bytecode: a sparse set of
// keys, one per non-default successor.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;  // the case keys; parallel to the non-default successors

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};
// Method return. The result is NULL for a void return; the instruction's
// type is derived from the result value.
LEAF(Return, BlockEnd)
 private:
  Value _result;     // the value returned, or NULL for a void return

 public:
  // creation
  // a return is always a safepoint (is_safepoint == true)
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    // the result is optional, so it is visited only when present
    if (has_result()) f->visit(&_result);
  }
};
// Explicit throw of an exception object; always a safepoint.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;  // the exception object being thrown

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  // a throw always traps
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};
// End of the method's entry block. Its successors are the OSR entry
// (optional, at index 0 if present) and the standard entry, which is
// always the default (last) successor.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  // the OSR entry block, or NULL if this compilation has no OSR entry
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? NULL : sux_at(0); }
};
// Models the incoming value at an on-stack-replacement (OSR) entry point;
// machine-word sized: longType on LP64 platforms, intType otherwise.
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType, false) { pin(); }
#else
  OsrEntry() : Instruction(intType, false) { pin(); }
#endif

  // generic
  // no input values
  virtual void input_values_do(ValueVisitor* f)   { }
};
// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  // pinned so it stays at its location in the handler
  ExceptionObject() : Instruction(objectType, false) {
    pin();
  }

  // generic
  // no input values
  virtual void input_values_do(ValueVisitor* f)   { }
};
// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;      // floating-point value to be rounded

 public:
  // the result type mirrors the input type
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};
// Common base class of all Unsafe* operations. A put produces no result
// (voidType); a get produces a value of the given basic type.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  // no input values at this level; subclasses add their operands
  virtual void input_values_do(ValueVisitor* f)   { }
};
// Base class of unsafe operations addressing raw memory via either a
// complete address or a (base, index, log2_scale) triple.
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;       // Base address (a Java long)
  Value _index;      // Index if computed by optimizer; initialized to NULL
  int   _log2_scale; // Scale factor: 0, 1, 2, or 3.
                     // Indicates log2 of number of bytes (1, 2, 4, or 8)
                     // to scale index by.

 protected:
  // addr form: addr is the complete raw address, no separate index
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  // base+index form: effective address is base plus the scaled index
  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool has_index()                               { return (_index != NULL); }
  int log2_scale()                               { return _log2_scale; }

  // setters (the optimizer may fold address computations into base/index)
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  // _index is optional, so it is visited only when present
  virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
                                                  f->visit(&_base);
                                                  if (has_index()) f->visit(&_index); }
};
2087 LEAF(UnsafeGetRaw, UnsafeRawOp)
2088 private:
2089 bool _may_be_unaligned; // For OSREntry
2091 public:
2092 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned)
2093 : UnsafeRawOp(basic_type, addr, false) {
2094 _may_be_unaligned = may_be_unaligned;
2095 }
2097 UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned)
2098 : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
2099 _may_be_unaligned = may_be_unaligned;
2100 }
2102 bool may_be_unaligned() { return _may_be_unaligned; }
2103 };
// Unsafe store of a value into raw memory.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;      // Value to be stored

 public:
  // addr form: addr is the complete raw address
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // base+index form: effective address is base plus the scaled index
  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f);
                                                  f->visit(&_value); }
};
// Base class of unsafe operations addressing a location within an object
// via an (object, offset) pair.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;     // Object to be fetched from or mutated
  Value _offset;     // Offset within object
  bool  _is_volatile; // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
                                                  f->visit(&_object);
                                                  f->visit(&_offset); }
};
// Unsafe read of a value of the given basic type from (object, offset).
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
    : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};
// Unsafe store of a value into (object, offset).
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;      // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
    : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
    , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
                                                  f->visit(&_value); }
};
// Base class of prefetch hints for (object, offset); produces no result
// (T_VOID) and is neither a put nor volatile.
BASE(UnsafePrefetch, UnsafeObjectOp)
 public:
  UnsafePrefetch(Value object, Value offset)
    : UnsafeObjectOp(T_VOID, object, offset, false, false)
  {
  }
};
// Prefetch hint for an upcoming read of (object, offset).
LEAF(UnsafePrefetchRead, UnsafePrefetch)
 public:
  UnsafePrefetchRead(Value object, Value offset)
    : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Prefetch hint for an upcoming write of (object, offset).
LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
    : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Updates profiling information for a call site (see also ProfileInvoke,
// which trips the invocation counter of an inlined method).
LEAF(ProfileCall, Instruction)
 private:
  ciMethod* _method;        // the method containing the call
  int       _bci_of_invoke; // bci of the invoke within that method
  Value     _recv;          // the receiver, or NULL if none is profiled
  ciKlass*  _known_holder;

 public:
  ProfileCall(ciMethod* method, int bci, Value recv, ciKlass* known_holder)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _recv(recv)
    , _known_holder(known_holder)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method()             { return _method; }
  int bci_of_invoke()            { return _bci_of_invoke; }
  Value recv()                   { return _recv; }
  ciKlass* known_holder()        { return _known_holder; }

  // _recv is optional, so it is visited only when present
  virtual void input_values_do(ValueVisitor* f)   { if (_recv != NULL) f->visit(&_recv); }
};
2243 // Use to trip invocation counter of an inlined method
2245 LEAF(ProfileInvoke, Instruction)
2246 private:
2247 ciMethod* _inlinee;
2248 ValueStack* _state;
2249 int _bci_of_invoke;
2251 public:
2252 ProfileInvoke(ciMethod* inlinee, ValueStack* state, int bci)
2253 : Instruction(voidType)
2254 , _inlinee(inlinee)
2255 , _bci_of_invoke(bci)
2256 , _state(state)
2257 {
2258 // The ProfileInvoke has side-effects and must occur precisely where located QQQ???
2259 pin();
2260 }
2262 ciMethod* inlinee() { return _inlinee; }
2263 ValueStack* state() { return _state; }
2264 int bci_of_invoke() { return _bci_of_invoke; }
2265 virtual void input_values_do(ValueVisitor*) {}
2266 virtual void state_values_do(ValueVisitor*);
2267 };
2269 class BlockPair: public CompilationResourceObj {
2270 private:
2271 BlockBegin* _from;
2272 BlockBegin* _to;
2273 public:
2274 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
2275 BlockBegin* from() const { return _from; }
2276 BlockBegin* to() const { return _to; }
2277 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
2278 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); }
2279 void set_to(BlockBegin* b) { _to = b; }
2280 void set_from(BlockBegin* b) { _from = b; }
2281 };
2284 define_array(BlockPairArray, BlockPair*)
2285 define_stack(BlockPairList, BlockPairArray)
// The successor list of a BlockBegin must stay consistent with the successor
// list of its BlockEnd once an end is set; these accessors assert that
// invariant, and add_successor is only legal before the end exists.
inline int         BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const              { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); }
2292 #undef ASSERT_VALUES