// (Removed VCS merge artifact that leaked into the file: "Thu, 24 May 2018 17:06:56 +0800" / "Merge")
1 /*
2 * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP
26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_LIR.hpp"
30 #include "c1/c1_ValueType.hpp"
31 #include "ci/ciField.hpp"
33 // Predefined classes
34 class ciField;
35 class ValueStack;
36 class InstructionPrinter;
37 class IRScope;
38 class LIR_OprDesc;
39 typedef LIR_OprDesc* LIR_Opr;
42 // Instruction class hierarchy
43 //
44 // All leaf classes in the class hierarchy are concrete classes
45 // (i.e., are instantiated). All other classes are abstract and
46 // serve factoring.
48 class Instruction;
49 class Phi;
50 class Local;
51 class Constant;
52 class AccessField;
53 class LoadField;
54 class StoreField;
55 class AccessArray;
56 class ArrayLength;
57 class AccessIndexed;
58 class LoadIndexed;
59 class StoreIndexed;
60 class NegateOp;
61 class Op2;
62 class ArithmeticOp;
63 class ShiftOp;
64 class LogicOp;
65 class CompareOp;
66 class IfOp;
67 class Convert;
68 class NullCheck;
69 class TypeCast;
70 class OsrEntry;
71 class ExceptionObject;
72 class StateSplit;
73 class Invoke;
74 class NewInstance;
75 class NewArray;
76 class NewTypeArray;
77 class NewObjectArray;
78 class NewMultiArray;
79 class TypeCheck;
80 class CheckCast;
81 class InstanceOf;
82 class AccessMonitor;
83 class MonitorEnter;
84 class MonitorExit;
85 class Intrinsic;
86 class BlockBegin;
87 class BlockEnd;
88 class Goto;
89 class If;
90 class IfInstanceOf;
91 class Switch;
92 class TableSwitch;
93 class LookupSwitch;
94 class Return;
95 class Throw;
96 class Base;
97 class RoundFP;
98 class UnsafeOp;
99 class UnsafeRawOp;
100 class UnsafeGetRaw;
101 class UnsafePutRaw;
102 class UnsafeObjectOp;
103 class UnsafeGetObject;
104 class UnsafePutObject;
105 class UnsafeGetAndSetObject;
106 class UnsafePrefetch;
107 class UnsafePrefetchRead;
108 class UnsafePrefetchWrite;
109 class ProfileCall;
110 class ProfileReturnType;
111 class ProfileInvoke;
112 class RuntimeCall;
113 class MemBar;
114 class RangeCheckPredicate;
115 #ifdef ASSERT
116 class Assert;
117 #endif
119 // A Value is a reference to the instruction creating the value
120 typedef Instruction* Value;
121 define_array(ValueArray, Value)
122 define_stack(Values, ValueArray)
124 define_array(ValueStackArray, ValueStack*)
125 define_stack(ValueStackStack, ValueStackArray)
127 // BlockClosure is the base class for block traversal/iteration.
class BlockClosure: public CompilationResourceObj {
 public:
  // Callback applied to each basic block by a traversal driver
  // (e.g. BlockList::iterate_forward/iterate_backward).
  virtual void block_do(BlockBegin* block) = 0;
};
135 // A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // Receives the address of each Value slot; taking Value* (not Value)
  // lets a visitor substitute the referenced instruction in place.
  virtual void visit(Value* v) = 0;
};
142 // Some array and list classes
143 define_array(BlockBeginArray, BlockBegin*)
144 define_stack(_BlockList, BlockBeginArray)
// Growable list of basic blocks with traversal helpers.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  // Apply closure to every block, in list order / reverse list order.
  void iterate_forward(BlockClosure* closure);
  void iterate_backward(BlockClosure* closure);
  // Apply a plain function pointer to every block.
  void blocks_do(void f(BlockBegin*));
  // Visit the Values referenced by all blocks in the list.
  void values_do(ValueVisitor* f);
  // Debug printing; compiled out in PRODUCT builds (PRODUCT_RETURN).
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};
160 // InstructionVisitors provide type-based dispatch for instructions.
161 // For each concrete Instruction class X, a virtual function do_X is
162 // provided. Functionality that needs to be implemented for all classes
163 // (e.g., printing, code generation) is factored out into a specialised
164 // visitor instead of added to the Instruction classes itself.
class InstructionVisitor: public StackObj {
 public:
  // One pure-virtual hook per concrete Instruction subclass; subclasses of
  // Instruction dispatch here via their generated visit() (see LEAF macro).
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};
226 // Hashing support
227 //
228 // Note: This hash functions affect the performance
229 // of ValueMap - make changes carefully!
// Shift-and-xor combinators: fold 1..4 scalar keys into a single intx hash.
// Each additional key is xor'ed in after shifting the accumulated hash by 7.
#define HASH1(x1                 )                    ((intx)(x1))
#define HASH2(x1, x2             )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3         )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4     )                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))
237 // The following macros are used to implement instruction-specific hashing.
238 // By default, each instruction implements hash() and is_equal(Value), used
239 // for value numbering/common subexpression elimination. The default imple-
240 // mentation disables value numbering. Each instruction which can be value-
241 // numbered, should define corresponding hash() and is_equal(Value) functions
242 // via the macros below. The f arguments specify all the values/op codes, etc.
243 // that need to be identical for two instructions to be identical.
244 //
245 // Note: The default implementation of hash() returns 0 in order to indicate
246 // that the instruction should not be considered for value numbering.
247 // The currently used hash functions do not guarantee that never a 0
248 // is produced. While this is still correct, it may be a performance
249 // bug (no value numbering for that node). However, this situation is
250 // so unlikely, that we are not going to handle it specially.
// HASHING1/2/3 inject hash() and is_equal() into an instruction class body.
// 'enabled' guards value numbering (false => hash() == 0 => never numbered);
// f1..f3 are the member expressions that must agree for two instructions to
// be considered equal. name() is mixed in so different opcodes never collide.
#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \

294 // The mother of all instructions...
296 class Instruction: public CompilationResourceObj {
297 private:
298 int _id; // the unique instruction id
299 #ifndef PRODUCT
300 int _printable_bci; // the bci of the instruction for printing
301 #endif
302 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1
303 int _pin_state; // set of PinReason describing the reason for pinning
304 ValueType* _type; // the instruction value type
305 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions)
306 Instruction* _subst; // the substitution instruction if any
307 LIR_Opr _operand; // LIR specific information
308 unsigned int _flags; // Flag bits
310 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
311 ValueStack* _exception_state; // Copy of state for exception handling
312 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
314 friend class UseCountComputer;
315 friend class BlockBegin;
317 void update_exception_state(ValueStack* state);
319 protected:
320 BlockBegin* _block; // Block that contains this instruction
322 void set_type(ValueType* type) {
323 assert(type != NULL, "type must exist");
324 _type = type;
325 }
327 // Helper class to keep track of which arguments need a null check
328 class ArgsNonNullState {
329 private:
330 int _nonnull_state; // mask identifying which args are nonnull
331 public:
332 ArgsNonNullState()
333 : _nonnull_state(AllBits) {}
335 // Does argument number i needs a null check?
336 bool arg_needs_null_check(int i) const {
337 // No data is kept for arguments starting at position 33 so
338 // conservatively assume that they need a null check.
339 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
340 return is_set_nth_bit(_nonnull_state, i);
341 }
342 return true;
343 }
345 // Set whether argument number i needs a null check or not
346 void set_arg_needs_null_check(int i, bool check) {
347 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
348 if (check) {
349 _nonnull_state |= nth_bit(i);
350 } else {
351 _nonnull_state &= ~(nth_bit(i));
352 }
353 }
354 }
355 };
357 public:
358 void* operator new(size_t size) throw() {
359 Compilation* c = Compilation::current();
360 void* res = c->arena()->Amalloc(size);
361 ((Instruction*)res)->_id = c->get_next_id();
362 return res;
363 }
365 static const int no_bci = -99;
367 enum InstructionFlag {
368 NeedsNullCheckFlag = 0,
369 CanTrapFlag,
370 DirectCompareFlag,
371 IsEliminatedFlag,
372 IsSafepointFlag,
373 IsStaticFlag,
374 IsStrictfpFlag,
375 NeedsStoreCheckFlag,
376 NeedsWriteBarrierFlag,
377 PreservesStateFlag,
378 TargetIsFinalFlag,
379 TargetIsLoadedFlag,
380 TargetIsStrictfpFlag,
381 UnorderedIsTrueFlag,
382 NeedsPatchingFlag,
383 ThrowIncompatibleClassChangeErrorFlag,
384 ProfileMDOFlag,
385 IsLinkedInBlockFlag,
386 NeedsRangeCheckFlag,
387 InWorkListFlag,
388 DeoptimizeOnException,
389 InstructionLastFlag
390 };
392 public:
393 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; }
394 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); };
396 // 'globally' used condition values
397 enum Condition {
398 eql, neq, lss, leq, gtr, geq, aeq, beq
399 };
401 // Instructions may be pinned for many reasons and under certain conditions
402 // with enough knowledge it's possible to safely unpin them.
403 enum PinReason {
404 PinUnknown = 1 << 0
405 , PinExplicitNullCheck = 1 << 3
406 , PinStackForStateSplit= 1 << 12
407 , PinStateSplitConstructor= 1 << 13
408 , PinGlobalValueNumbering= 1 << 14
409 };
411 static Condition mirror(Condition cond);
412 static Condition negate(Condition cond);
414 // initialization
415 static int number_of_instructions() {
416 return Compilation::current()->number_of_instructions();
417 }
419 // creation
420 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false)
421 : _use_count(0)
422 #ifndef PRODUCT
423 , _printable_bci(-99)
424 #endif
425 , _pin_state(0)
426 , _type(type)
427 , _next(NULL)
428 , _block(NULL)
429 , _subst(NULL)
430 , _flags(0)
431 , _operand(LIR_OprFact::illegalOpr)
432 , _state_before(state_before)
433 , _exception_handlers(NULL)
434 {
435 check_state(state_before);
436 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist");
437 update_exception_state(_state_before);
438 }
440 // accessors
441 int id() const { return _id; }
442 #ifndef PRODUCT
443 bool has_printable_bci() const { return _printable_bci != -99; }
444 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
445 void set_printable_bci(int bci) { _printable_bci = bci; }
446 #endif
447 int dominator_depth();
448 int use_count() const { return _use_count; }
449 int pin_state() const { return _pin_state; }
450 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
451 ValueType* type() const { return _type; }
452 BlockBegin *block() const { return _block; }
453 Instruction* prev(); // use carefully, expensive operation
454 Instruction* next() const { return _next; }
455 bool has_subst() const { return _subst != NULL; }
456 Instruction* subst() { return _subst == NULL ? this : _subst->subst(); }
457 LIR_Opr operand() const { return _operand; }
459 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
460 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
461 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
462 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; }
464 bool has_uses() const { return use_count() > 0; }
465 ValueStack* state_before() const { return _state_before; }
466 ValueStack* exception_state() const { return _exception_state; }
467 virtual bool needs_exception_state() const { return true; }
468 XHandlers* exception_handlers() const { return _exception_handlers; }
470 // manipulation
471 void pin(PinReason reason) { _pin_state |= reason; }
472 void pin() { _pin_state |= PinUnknown; }
473 // DANGEROUS: only used by EliminateStores
474 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }
476 Instruction* set_next(Instruction* next) {
477 assert(next->has_printable_bci(), "_printable_bci should have been set");
478 assert(next != NULL, "must not be NULL");
479 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
480 assert(next->can_be_linked(), "shouldn't link these instructions into list");
482 BlockBegin *block = this->block();
483 next->_block = block;
485 next->set_flag(Instruction::IsLinkedInBlockFlag, true);
486 _next = next;
487 return next;
488 }
490 Instruction* set_next(Instruction* next, int bci) {
491 #ifndef PRODUCT
492 next->set_printable_bci(bci);
493 #endif
494 return set_next(next);
495 }
497 // when blocks are merged
498 void fixup_block_pointers() {
499 Instruction *cur = next()->next(); // next()'s block is set in set_next
500 while (cur && cur->_block != block()) {
501 cur->_block = block();
502 cur = cur->next();
503 }
504 }
506 Instruction *insert_after(Instruction *i) {
507 Instruction* n = _next;
508 set_next(i);
509 i->set_next(n);
510 return _next;
511 }
513 Instruction *insert_after_same_bci(Instruction *i) {
514 #ifndef PRODUCT
515 i->set_printable_bci(printable_bci());
516 #endif
517 return insert_after(i);
518 }
520 void set_subst(Instruction* subst) {
521 assert(subst == NULL ||
522 type()->base() == subst->type()->base() ||
523 subst->type()->base() == illegalType, "type can't change");
524 _subst = subst;
525 }
526 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; }
527 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
528 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; }
530 // machine-specifics
531 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
532 void clear_operand() { _operand = LIR_OprFact::illegalOpr; }
534 // generic
535 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
536 virtual Phi* as_Phi() { return NULL; }
537 virtual Local* as_Local() { return NULL; }
538 virtual Constant* as_Constant() { return NULL; }
539 virtual AccessField* as_AccessField() { return NULL; }
540 virtual LoadField* as_LoadField() { return NULL; }
541 virtual StoreField* as_StoreField() { return NULL; }
542 virtual AccessArray* as_AccessArray() { return NULL; }
543 virtual ArrayLength* as_ArrayLength() { return NULL; }
544 virtual AccessIndexed* as_AccessIndexed() { return NULL; }
545 virtual LoadIndexed* as_LoadIndexed() { return NULL; }
546 virtual StoreIndexed* as_StoreIndexed() { return NULL; }
547 virtual NegateOp* as_NegateOp() { return NULL; }
548 virtual Op2* as_Op2() { return NULL; }
549 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; }
550 virtual ShiftOp* as_ShiftOp() { return NULL; }
551 virtual LogicOp* as_LogicOp() { return NULL; }
552 virtual CompareOp* as_CompareOp() { return NULL; }
553 virtual IfOp* as_IfOp() { return NULL; }
554 virtual Convert* as_Convert() { return NULL; }
555 virtual NullCheck* as_NullCheck() { return NULL; }
556 virtual OsrEntry* as_OsrEntry() { return NULL; }
557 virtual StateSplit* as_StateSplit() { return NULL; }
558 virtual Invoke* as_Invoke() { return NULL; }
559 virtual NewInstance* as_NewInstance() { return NULL; }
560 virtual NewArray* as_NewArray() { return NULL; }
561 virtual NewTypeArray* as_NewTypeArray() { return NULL; }
562 virtual NewObjectArray* as_NewObjectArray() { return NULL; }
563 virtual NewMultiArray* as_NewMultiArray() { return NULL; }
564 virtual TypeCheck* as_TypeCheck() { return NULL; }
565 virtual CheckCast* as_CheckCast() { return NULL; }
566 virtual InstanceOf* as_InstanceOf() { return NULL; }
567 virtual TypeCast* as_TypeCast() { return NULL; }
568 virtual AccessMonitor* as_AccessMonitor() { return NULL; }
569 virtual MonitorEnter* as_MonitorEnter() { return NULL; }
570 virtual MonitorExit* as_MonitorExit() { return NULL; }
571 virtual Intrinsic* as_Intrinsic() { return NULL; }
572 virtual BlockBegin* as_BlockBegin() { return NULL; }
573 virtual BlockEnd* as_BlockEnd() { return NULL; }
574 virtual Goto* as_Goto() { return NULL; }
575 virtual If* as_If() { return NULL; }
576 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; }
577 virtual TableSwitch* as_TableSwitch() { return NULL; }
578 virtual LookupSwitch* as_LookupSwitch() { return NULL; }
579 virtual Return* as_Return() { return NULL; }
580 virtual Throw* as_Throw() { return NULL; }
581 virtual Base* as_Base() { return NULL; }
582 virtual RoundFP* as_RoundFP() { return NULL; }
583 virtual ExceptionObject* as_ExceptionObject() { return NULL; }
584 virtual UnsafeOp* as_UnsafeOp() { return NULL; }
585 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; }
586 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; }
588 #ifdef ASSERT
589 virtual Assert* as_Assert() { return NULL; }
590 #endif
592 virtual void visit(InstructionVisitor* v) = 0;
594 virtual bool can_trap() const { return false; }
596 virtual void input_values_do(ValueVisitor* f) = 0;
597 virtual void state_values_do(ValueVisitor* f);
598 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
599 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
601 virtual ciType* exact_type() const;
602 virtual ciType* declared_type() const { return NULL; }
604 // hashing
605 virtual const char* name() const = 0;
606 HASHING1(Instruction, false, id()) // hashing disabled by default
608 // debugging
609 static void check_state(ValueStack* state) PRODUCT_RETURN;
610 void print() PRODUCT_RETURN;
611 void print_line() PRODUCT_RETURN;
612 void print(InstructionPrinter& ip) PRODUCT_RETURN;
613 };
616 // The following macros are used to define base (i.e., non-leaf)
617 // and leaf instruction classes. They define class-name related
618 // generic functionality in one place.
// BASE: opens a class and supplies the as_X() downcast override.
// Note: the macro intentionally leaves the class body open; the user
// writes the members and the closing "};".
#define BASE(class_name, super_class_name)       \
  class class_name: public super_class_name {    \
   public:                                       \
    virtual class_name* as_##class_name()        { return this; }              \


// LEAF: like BASE, but additionally supplies name() and the visitor
// dispatch, making the class concrete w.r.t. InstructionVisitor.
#define LEAF(class_name, super_class_name)       \
  BASE(class_name, super_class_name)             \
   public:                                       \
    virtual const char* name() const             { return #class_name; }       \
    virtual void visit(InstructionVisitor* v)    { v->do_##class_name(this); } \

633 // Debugging support
#ifdef ASSERT
// Visitor that checks every referenced Value slot is non-NULL.
class AssertValues: public ValueVisitor {
  void visit(Value* x)             { assert((*x) != NULL, "value must exist"); }
};
// Verify all values of the current instruction in debug builds; no-op in product.
#define ASSERT_VALUES                          { AssertValues assert_value; values_do(&assert_value); }
#else
#define ASSERT_VALUES
#endif // ASSERT
646 // A Phi is a phi function in the sense of SSA form. It stands for
647 // the value of a local variable at the beginning of a join block.
648 // A Phi consists of n operands, one for every incoming branch.
LEAF(Phi, Instruction)
 private:
  int         _pf_flags;  // the flags of the phi function
  int         _index;     // to value on operand stack (index < 0) or to local
 public:
  // creation
  // The phi's type is widened to its base type; b is the join block that
  // owns the phi, and index encodes local slot (>= 0) vs stack slot (< 0).
  Phi(ValueType* type, BlockBegin* b, int index)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  {
    _block = b;
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  // Stack slots are encoded as -(slot + 1); decode back to the slot number.
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;
  int   operand_count() const;

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // Intentionally empty: phi operands are not stored inline here; they are
  // reached via operand_at()/operand_count() (defined elsewhere).
  virtual void input_values_do(ValueVisitor* f) {
  }
};
705 // A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  ciType*  _declared_type;                       // the statically declared type of the argument
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    // -1: synthetic bci for printing; locals have no bytecode of their own.
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }

  virtual ciType* declared_type() const          { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};
730 LEAF(Constant, Instruction)
731 public:
732 // creation
733 Constant(ValueType* type):
734 Instruction(type, NULL, /*type_is_constant*/ true)
735 {
736 assert(type->is_constant(), "must be a constant");
737 }
739 Constant(ValueType* type, ValueStack* state_before):
740 Instruction(type, state_before, /*type_is_constant*/ true)
741 {
742 assert(state_before != NULL, "only used for constants which need patching");
743 assert(type->is_constant(), "must be a constant");
744 // since it's patching it needs to be pinned
745 pin();
746 }
748 // generic
749 virtual bool can_trap() const { return state_before() != NULL; }
750 virtual void input_values_do(ValueVisitor* f) { /* no values */ }
752 virtual intx hash() const;
753 virtual bool is_equal(Value v) const;
755 virtual ciType* exact_type() const;
757 enum CompareResult { not_comparable = -1, cond_false, cond_true };
759 virtual CompareResult compare(Instruction::Condition condition, Value right) const;
760 BlockBegin* compare(Instruction::Condition cond, Value right,
761 BlockBegin* true_sux, BlockBegin* false_sux) const {
762 switch (compare(cond, right)) {
763 case not_comparable:
764 return NULL;
765 case cond_false:
766 return false_sux;
767 case cond_true:
768 return true_sux;
769 default:
770 ShouldNotReachHere();
771 return NULL;
772 }
773 }
774 };
BASE(AccessField, Instruction)
 private:
  Value       _obj;                  // the object whose field is accessed
  int         _offset;               // byte offset of the field within the object
  ciField*    _field;                // compiler-interface handle for the field
  NullCheck*  _explicit_null_check;  // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    // Only instance accesses can trap on a null receiver.
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};
// Read of an instance or static field.
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic
  // Value-numbered on (receiver, offset); cannot be eliminated if needs
  // patching or if volatile.
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())
};
// Write of an instance or static field.
LEAF(StoreField, AccessField)
 private:
  Value _value;                                  // the value being stored

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    // Object-typed stores require a GC write barrier.
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};
// Common base for all instructions that access an array.
BASE(AccessArray, Instruction)
 private:
  Value _array;                                  // the array being accessed

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};
// Read of an array's length field; always an int-typed value.
LEAF(ArrayLength, AccessArray)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  // Always eligible for value numbering, keyed on the (substituted) array.
  HASHING1(ArrayLength, true, array()->subst())
};
// Common base for indexed array accesses (loads and stores).
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;                              // the element index
  Value     _length;                             // the array length (or NULL once the range check is proven unnecessary)
  BasicType _elt_type;                           // basic type of the array elements

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    // Conservatively assume a range check is required until proven otherwise.
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  void clear_length()                            { _length = NULL; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};
// LoadIndexed loads an element from an array: array[index].
LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  // value numbering: loads of the same array/index pair are equal
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};
// StoreIndexed stores a value into an array element: array[index] = value.
// Object stores may need a write barrier and a store (subtype) check.
LEAF(StoreIndexed, AccessIndexed)
 private:
  Value       _value;                            // the value being stored

  ciMethod*   _profiled_method;                  // method to which profiling data is attributed
  int         _profiled_bci;                     // bci within _profiled_method
  bool        _check_boolean;                    // whether the stored value needs boolean masking
                                                 // (semantics determined by callers — set via ctor flag)

 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, bool check_boolean)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean)
  {
    // object element stores require GC write barrier and array store check
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin(); // side effect: memory is modified (and exceptions may be thrown)
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  bool check_boolean() const                     { return _check_boolean; }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool      should_profile() const               { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int       profiled_bci() const                 { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};
1010 LEAF(NegateOp, Instruction)
1011 private:
1012 Value _x;
1014 public:
1015 // creation
1016 NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
1017 ASSERT_VALUES
1018 }
1020 // accessors
1021 Value x() const { return _x; }
1023 // generic
1024 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); }
1025 };
// Op2 is the common base class of all binary operations.  It holds the
// bytecode that selects the operation and the two operand values.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;                           // the bytecode selecting the operation
  Value           _x;                            // the first operand
  Value           _y;                            // the second operand

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  // exchange the operands; only legal for commutative operations
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
// ArithmeticOp represents add/sub/mul/div/rem operations.
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    // a trapping operation (e.g. division by zero) must stay in place
    if (can_trap()) pin();
  }

  // accessors
  bool        is_strictfp() const                { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  // value numbering uses the Op2 shape: bytecode plus both operands
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// ShiftOp represents shift operations (shl/shr/ushr); the result type is
// the base type of the value being shifted.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  // value numbering uses the Op2 shape: bytecode plus both operands
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// LogicOp represents bitwise and/or/xor operations.
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  // value numbering uses the Op2 shape: bytecode plus both operands
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// CompareOp represents three-way comparisons (lcmp, fcmpl/fcmpg,
// dcmpl/dcmpg); the result is always an int.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  // value numbering uses the Op2 shape: bytecode plus both operands
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// IfOp is a conditional move: it yields tval if (x cond y) holds and fval
// otherwise.  The condition is stored in the Op2 bytecode slot.
LEAF(IfOp, Op2)
 private:
  Value _tval;                                   // the result if the condition holds
  Value _fval;                                   // the result otherwise

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  // op() is hidden because the bytecode slot holds a Condition, not a bytecode
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};
1143 LEAF(Convert, Instruction)
1144 private:
1145 Bytecodes::Code _op;
1146 Value _value;
1148 public:
1149 // creation
1150 Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
1151 ASSERT_VALUES
1152 }
1154 // accessors
1155 Bytecodes::Code op() const { return _op; }
1156 Value value() const { return _value; }
1158 // generic
1159 virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); }
1160 HASHING2(Convert, true, op(), value()->subst())
1161 };
// NullCheck throws a NullPointerException if its object operand is null;
// otherwise it produces the operand unchanged.
LEAF(NullCheck, Instruction)
 private:
  Value       _obj;                              // the object being checked

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  // value numbering: null checks of the same (substituted) object are equal
  HASHING1(NullCheck, true, obj()->subst())
};
1193 // This node is supposed to cast the type of another node to a more precise
1194 // declared type.
1195 LEAF(TypeCast, Instruction)
1196 private:
1197 ciType* _declared_type;
1198 Value _obj;
1200 public:
1201 // The type of this node is the same type as the object type (and it might be constant).
1202 TypeCast(ciType* type, Value obj, ValueStack* state_before)
1203 : Instruction(obj->type(), state_before, obj->type()->is_constant()),
1204 _declared_type(type),
1205 _obj(obj) {}
1207 // accessors
1208 ciType* declared_type() const { return _declared_type; }
1209 Value obj() const { return _obj; }
1211 // generic
1212 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
1213 };
// StateSplit is the common base class of all instructions that carry a
// full value stack snapshot (the state after the instruction executes).
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;                            // the state after this instruction

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};
// Invoke represents a method call (invokestatic/special/virtual/interface/
// dynamic) with optional receiver and an argument list.
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;                         // the invoke bytecode
  Value           _recv;                         // the receiver, or NULL for static calls
  Values*         _args;                         // the arguments (excluding receiver)
  BasicTypeList*  _signature;                    // basic types of the arguments
  int             _vtable_index;                 // vtable index for virtual dispatch
  ciMethod*       _target;                       // the resolved target method

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};
// NewInstance allocates a new instance of the given klass.
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;                       // the klass being instantiated
  bool _is_unresolved;                           // true if the klass was not resolved at parse time

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }
  bool is_unresolved() const                     { return _is_unresolved; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};
// NewArray is the common base class of all array allocations; it holds the
// requested length (NULL for NewMultiArray, which uses a dims list).
BASE(NewArray, StateSplit)
 private:
  Value       _length;                           // the array length, or NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* exact_type() const                     { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};
// NewTypeArray allocates an array of a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;                           // the primitive element type

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};
// NewObjectArray allocates a one-dimensional array of object references.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;                               // the element klass

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};
// NewMultiArray allocates a multi-dimensional array; the dimensions are
// kept in the _dims list and the inherited length is NULL.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;                               // the array klass
  Values*  _dims;                                // the dimension sizes, outermost first

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};
// TypeCheck is the common base class of CheckCast and InstanceOf; it holds
// the klass to test against and the object being tested.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*    _klass;                            // the klass to check against (NULL if not loaded)
  Value       _obj;                              // the object being checked

  ciMethod*   _profiled_method;                  // method to which profiling data is attributed
  int         _profiled_bci;                     // bci within _profiled_method

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool      should_profile() const               { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int       profiled_bci() const                 { return _profiled_bci;        }
};
// CheckCast implements the checkcast bytecode; it throws if the object is
// not an instance of the given klass.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  // when set, a failing check throws IncompatibleClassChangeError instead
  // of ClassCastException
  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
};
// InstanceOf implements the instanceof bytecode; the result is an int.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};
// AccessMonitor is the common base class of MonitorEnter and MonitorExit;
// it holds the locked object and its monitor slot number.
BASE(AccessMonitor, StateSplit)
 private:
  Value       _obj;                              // the object being locked/unlocked
  int         _monitor_no;                       // the monitor slot number

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};
// MonitorEnter acquires the monitor of an object (monitorenter bytecode).
LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const                  { return true; }
};
// MonitorExit releases the monitor of an object (monitorexit bytecode).
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};
// Intrinsic represents a call to a known method that the compiler expands
// inline instead of emitting a regular call.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;                          // which intrinsic this is
  Values*          _args;                        // the arguments (receiver at index 0 if present)
  Value            _recv;                        // the receiver, or NULL if none
  ArgsNonNullState _nonnull_state;               // per-argument null-check requirements

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag,        cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap()) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                   { return check_flag(PreservesStateFlag); }

  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};
1591 class LIR_List;
1593 LEAF(BlockBegin, StateSplit)
1594 private:
1595 int _block_id; // the unique block id
1596 int _bci; // start-bci of block
1597 int _depth_first_number; // number of this block in a depth-first ordering
1598 int _linear_scan_number; // number of this block in linear-scan ordering
1599 int _dominator_depth;
1600 int _loop_depth; // the loop nesting level of this block
1601 int _loop_index; // number of the innermost loop of this block
1602 int _flags; // the flags associated with this block
1604 // fields used by BlockListBuilder
1605 int _total_preds; // number of predecessors found by BlockListBuilder
1606 BitMap _stores_to_locals; // bit is set when a local variable is stored in the block
1608 // SSA specific fields: (factor out later)
1609 BlockList _successors; // the successors of this block
1610 BlockList _predecessors; // the predecessors of this block
1611 BlockList _dominates; // list of blocks that are dominated by this block
1612 BlockBegin* _dominator; // the dominator of this block
1613 // SSA specific ends
1614 BlockEnd* _end; // the last instruction of this block
1615 BlockList _exception_handlers; // the exception handlers potentially invoked by this block
1616 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler
1617 int _exception_handler_pco; // if this block is the start of an exception handler,
1618 // this records the PC offset in the assembly code of the
1619 // first instruction in this block
1620 Label _label; // the label associated with this block
1621 LIR_List* _lir; // the low level intermediate representation for this block
1623 BitMap _live_in; // set of live LIR_Opr registers at entry to this block
1624 BitMap _live_out; // set of live LIR_Opr registers at exit from this block
1625 BitMap _live_gen; // set of registers used before any redefinition in this block
1626 BitMap _live_kill; // set of registers defined in this block
1628 BitMap _fpu_register_usage;
1629 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan
1630 int _first_lir_instruction_id; // ID of first LIR instruction in this block
1631 int _last_lir_instruction_id; // ID of last LIR instruction in this block
1633 void iterate_preorder (boolArray& mark, BlockClosure* closure);
1634 void iterate_postorder(boolArray& mark, BlockClosure* closure);
1636 friend class SuxAndWeightAdjuster;
1638 public:
1639 void* operator new(size_t size) throw() {
1640 Compilation* c = Compilation::current();
1641 void* res = c->arena()->Amalloc(size);
1642 ((BlockBegin*)res)->_id = c->get_next_id();
1643 ((BlockBegin*)res)->_block_id = c->get_next_block_id();
1644 return res;
1645 }
1647 // initialization/counting
1648 static int number_of_blocks() {
1649 return Compilation::current()->number_of_blocks();
1650 }
1652 // creation
1653 BlockBegin(int bci)
1654 : StateSplit(illegalType)
1655 , _bci(bci)
1656 , _depth_first_number(-1)
1657 , _linear_scan_number(-1)
1658 , _loop_depth(0)
1659 , _flags(0)
1660 , _dominator_depth(-1)
1661 , _dominator(NULL)
1662 , _end(NULL)
1663 , _predecessors(2)
1664 , _successors(2)
1665 , _dominates(2)
1666 , _exception_handlers(1)
1667 , _exception_states(NULL)
1668 , _exception_handler_pco(-1)
1669 , _lir(NULL)
1670 , _loop_index(-1)
1671 , _live_in()
1672 , _live_out()
1673 , _live_gen()
1674 , _live_kill()
1675 , _fpu_register_usage()
1676 , _fpu_stack_state(NULL)
1677 , _first_lir_instruction_id(-1)
1678 , _last_lir_instruction_id(-1)
1679 , _total_preds(0)
1680 , _stores_to_locals()
1681 {
1682 _block = this;
1683 #ifndef PRODUCT
1684 set_printable_bci(bci);
1685 #endif
1686 }
1688 // accessors
1689 int block_id() const { return _block_id; }
1690 int bci() const { return _bci; }
1691 BlockList* successors() { return &_successors; }
1692 BlockList* dominates() { return &_dominates; }
1693 BlockBegin* dominator() const { return _dominator; }
1694 int loop_depth() const { return _loop_depth; }
1695 int dominator_depth() const { return _dominator_depth; }
1696 int depth_first_number() const { return _depth_first_number; }
1697 int linear_scan_number() const { return _linear_scan_number; }
1698 BlockEnd* end() const { return _end; }
1699 Label* label() { return &_label; }
1700 LIR_List* lir() const { return _lir; }
1701 int exception_handler_pco() const { return _exception_handler_pco; }
1702 BitMap& live_in() { return _live_in; }
1703 BitMap& live_out() { return _live_out; }
1704 BitMap& live_gen() { return _live_gen; }
1705 BitMap& live_kill() { return _live_kill; }
1706 BitMap& fpu_register_usage() { return _fpu_register_usage; }
1707 intArray* fpu_stack_state() const { return _fpu_stack_state; }
1708 int first_lir_instruction_id() const { return _first_lir_instruction_id; }
1709 int last_lir_instruction_id() const { return _last_lir_instruction_id; }
1710 int total_preds() const { return _total_preds; }
1711 BitMap& stores_to_locals() { return _stores_to_locals; }
1713 // manipulation
1714 void set_dominator(BlockBegin* dom) { _dominator = dom; }
1715 void set_loop_depth(int d) { _loop_depth = d; }
1716 void set_dominator_depth(int d) { _dominator_depth = d; }
1717 void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
1718 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; }
1719 void set_end(BlockEnd* end);
1720 void clear_end();
1721 void disconnect_from_graph();
1722 static void disconnect_edge(BlockBegin* from, BlockBegin* to);
1723 BlockBegin* insert_block_between(BlockBegin* sux);
1724 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
1725 void set_lir(LIR_List* lir) { _lir = lir; }
1726 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
1727 void set_live_in (BitMap map) { _live_in = map; }
1728 void set_live_out (BitMap map) { _live_out = map; }
1729 void set_live_gen (BitMap map) { _live_gen = map; }
1730 void set_live_kill (BitMap map) { _live_kill = map; }
1731 void set_fpu_register_usage(BitMap map) { _fpu_register_usage = map; }
1732 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; }
1733 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; }
1734 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; }
1735 void increment_total_preds(int n = 1) { _total_preds += n; }
1736 void init_stores_to_locals(int locals_count) { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); }
1738 // generic
1739 virtual void state_values_do(ValueVisitor* f);
1741 // successors and predecessors
1742 int number_of_sux() const;
1743 BlockBegin* sux_at(int i) const;
1744 void add_successor(BlockBegin* sux);
1745 void remove_successor(BlockBegin* pred);
1746 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); }
1748 void add_predecessor(BlockBegin* pred);
1749 void remove_predecessor(BlockBegin* pred);
1750 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
1751 int number_of_preds() const { return _predecessors.length(); }
1752 BlockBegin* pred_at(int i) const { return _predecessors[i]; }
1754 // exception handlers potentially invoked by this block
1755 void add_exception_handler(BlockBegin* b);
1756 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
1757 int number_of_exception_handlers() const { return _exception_handlers.length(); }
1758 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); }
1760 // states of the instructions that have an edge to this exception handler
1761 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
1762 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
1763 int add_exception_state(ValueStack* state);
1765 // flags
1766 enum Flag {
1767 no_flag = 0,
1768 std_entry_flag = 1 << 0,
1769 osr_entry_flag = 1 << 1,
1770 exception_entry_flag = 1 << 2,
1771 subroutine_entry_flag = 1 << 3,
1772 backward_branch_target_flag = 1 << 4,
1773 is_on_work_list_flag = 1 << 5,
1774 was_visited_flag = 1 << 6,
1775 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand
1776 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split
1777 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan
1778 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan
1779 donot_eliminate_range_checks = 1 << 11 // Should be try to eliminate range checks in this block
1780 };
1782 void set(Flag f) { _flags |= f; }
1783 void clear(Flag f) { _flags &= ~f; }
1784 bool is_set(Flag f) const { return (_flags & f) != 0; }
1785 bool is_entry_block() const {
1786 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
1787 return (_flags & entry_mask) != 0;
1788 }
1790 // iteration
1791 void iterate_preorder (BlockClosure* closure);
1792 void iterate_postorder (BlockClosure* closure);
1794 void block_values_do(ValueVisitor* f);
1796 // loops
1797 void set_loop_index(int ix) { _loop_index = ix; }
1798 int loop_index() const { return _loop_index; }
1800 // merging
1801 bool try_merge(ValueStack* state); // try to merge states at block begin
1802 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); }
1804 // debugging
1805 void print_block() PRODUCT_RETURN;
1806 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
1807 };
// BlockEnd is the common base class of all block-terminating instructions;
// it owns the list of successor blocks.
BASE(BlockEnd, StateSplit)
 private:
  BlockList*  _sux;                              // the successor blocks (never NULL once set)

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const                { return _sux->at(i); }
  // the default (fall-through) successor is always the last one in the list
  BlockBegin* default_sux() const                { return sux_at(number_of_sux() - 1); }
  BlockBegin** addr_sux_at(int i) const          { return _sux->adr_at(i); }
  int sux_index(BlockBegin* sux) const           { return _sux->find(sux); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};
1852 LEAF(Goto, BlockEnd)
1853 public:
1854 enum Direction {
1855 none, // Just a regular goto
1856 taken, not_taken // Goto produced from If
1857 };
1858 private:
1859 ciMethod* _profiled_method;
1860 int _profiled_bci;
1861 Direction _direction;
1862 public:
1863 // creation
1864 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
1865 : BlockEnd(illegalType, state_before, is_safepoint)
1866 , _direction(none)
1867 , _profiled_method(NULL)
1868 , _profiled_bci(0) {
1869 BlockList* s = new BlockList(1);
1870 s->append(sux);
1871 set_sux(s);
1872 }
1874 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint)
1875 , _direction(none)
1876 , _profiled_method(NULL)
1877 , _profiled_bci(0) {
1878 BlockList* s = new BlockList(1);
1879 s->append(sux);
1880 set_sux(s);
1881 }
1883 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1884 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
1885 int profiled_bci() const { return _profiled_bci; }
1886 Direction direction() const { return _direction; }
1888 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
1889 void set_profiled_method(ciMethod* method) { _profiled_method = method; }
1890 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1891 void set_direction(Direction d) { _direction = d; }
1892 };
#ifdef ASSERT
// Assert checks a condition between two values in debug builds; the
// message text is built at construction time.
LEAF(Assert, Instruction)
 private:
  Value       _x;                                // the first operand
  Condition   _cond;                             // the condition being asserted
  Value       _y;                                // the second operand
  char        *_message;                         // human-readable description of the assertion

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  Assert(Value x, Condition cond, bool unordered_is_true, Value y);

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  const char *message() const                    { return _message; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
#endif
1919 LEAF(RangeCheckPredicate, StateSplit)
1920 private:
1921 Value _x;
1922 Condition _cond;
1923 Value _y;
1925 void check_state();
1927 public:
1928 // creation
1929 // unordered_is_true is valid for float/double compares only
1930 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType)
1931 , _x(x)
1932 , _cond(cond)
1933 , _y(y)
1934 {
1935 ASSERT_VALUES
1936 set_flag(UnorderedIsTrueFlag, unordered_is_true);
1937 assert(x->type()->tag() == y->type()->tag(), "types must match");
1938 this->set_state(state);
1939 check_state();
1940 }
1942 // Always deoptimize
1943 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
1944 {
1945 this->set_state(state);
1946 _x = _y = NULL;
1947 check_state();
1948 }
1950 // accessors
1951 Value x() const { return _x; }
1952 Condition cond() const { return _cond; }
1953 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
1954 Value y() const { return _y; }
1956 void always_fail() { _x = _y = NULL; }
1958 // generic
1959 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
1960 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
1961 };
// Conditional two-way branch: jumps to tsux() if (x cond y) is true, else
// to fsux().  Successor 0 is the true target, successor 1 the false target.
LEAF(If, BlockEnd)
 private:
  Value       _x;                // left operand of the comparison
  Condition   _cond;             // comparison relation
  Value       _y;                // right operand of the comparison
  ciMethod*   _profiled_method;  // method holding the MDO to update (profiled branches only)
  int         _profiled_bci;     // Canonicalizer may alter bci of If node
  bool        _swapped;          // Is the order reversed with respect to the original If in the
                                 // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);  // successor 0 = branch taken
    s->append(fsux);  // successor 1 = fall through
    set_sux(s);
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }
  // target taken when a float/double compare is unordered (NaN involved)
  BlockBegin* usux() const                       { return sux_for(unordered_is_true()); }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                        { return _swapped; }

  // manipulation
  // exchange operands and mirror the condition; the branch semantics are unchanged
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // exchange the two successors and negate the condition (also flips the
  // unordered target) so overall behavior is preserved
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value)             { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)      { _profiled_method = method; }
  void set_profiled_bci(int bci)                  { _profiled_bci = bci;  }
  void set_swapped(bool value)                    { _swapped = value;         }
  // generic
  virtual void input_values_do(ValueVisitor* f)   { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};
// Fused InstanceOf + If: branches on whether obj is (or is not) an
// instance of klass.  Successor 0 is the true target, 1 the false target.
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;            // class to test against
  Value    _obj;              // object whose class is tested
  bool     _test_is_instance; // jump if instance
  int      _instanceof_bci;   // bci of the original InstanceOf bytecode

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* an instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  int instanceof_bci() const                     { return _instanceof_bci; }
  bool test_is_instance() const                  { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }

  // manipulation
  // exchange the successors and invert the sense of the test so overall
  // behavior is preserved
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_obj); }
};
// Abstract base for multi-way branches on an integer tag value.
// The last successor in the sux list is the default target, so
// length() (number of explicit cases) is number_of_sux() - 1.
BASE(Switch, BlockEnd)
 private:
  Value       _tag;  // the value being switched on

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                              { return _tag; }
  int length() const                             { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_tag); }
};
// tableswitch: cases cover the dense key range [lo_key, hi_key].
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;  // key value of the first (lowest) case

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                             { return _lo_key; }
  // highest key follows from the number of explicit cases
  int hi_key() const                             { return _lo_key + length() - 1; }
};
// lookupswitch: sparse case keys, one per explicit successor.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;  // case keys; _keys->at(i) pairs with sux_at(i)

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};
// Method return; a NULL result models a void return.
LEAF(Return, BlockEnd)
 private:
  Value _result;  // returned value, or NULL for void

 public:
  // creation
  // value type is derived from the result (voidType when there is none);
  // returns are always safepoints (last ctor argument)
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};
// Explicit athrow of an exception object; always a safepoint and can trap.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;  // the exception object being thrown

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};
// Artificial start block of the IR graph; branches to the standard method
// entry and, when compiling for on-stack replacement, the OSR entry.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  // OSR entry was appended first, so it sits at index 0 when present
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? NULL : sux_at(0); }
};
// On-stack-replacement entry point; produces a pointer-sized value
// (long on 64-bit, int on 32-bit).  Pinned so it stays at the entry.
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};
// Models the incoming exception at a catch site.  Pinned: it must stay at
// the start of the exception handler where the exception oop arrives.
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};
// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;             // floating-point value to be rounded

 public:
  // result type mirrors the input's type
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { f->visit(&_input); }
};
// Abstract base for all sun.misc.Unsafe memory operations.  A put produces
// no value (voidType); a get produces a value of the given basic type.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    //Note:  Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};
// Abstract base for Unsafe operations on a raw (long) address, optionally
// addressed as base + (index << log2_scale).
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;                                   // Base address (a Java long)
  Value _index;                                  // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;                             // Scale factor: 0, 1, 2, or 3.
                                                 // Indicates log2 of number of bytes (1, 2, 4, or 8)
                                                 // to scale index by.

 protected:
  // simple absolute address, no index
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  // scaled base + index addressing (produced by the optimizer)
  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool  has_index()                              { return (_index != NULL); }
  int   log2_scale()                             { return _log2_scale; }

  // setters
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeOp::input_values_do(f);
                                                   f->visit(&_base);
                                                   if (has_index()) f->visit(&_index); }
};
2323 LEAF(UnsafeGetRaw, UnsafeRawOp)
2324 private:
2325 bool _may_be_unaligned, _is_wide; // For OSREntry
2327 public:
2328 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
2329 : UnsafeRawOp(basic_type, addr, false) {
2330 _may_be_unaligned = may_be_unaligned;
2331 _is_wide = is_wide;
2332 }
2334 UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
2335 : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
2336 _may_be_unaligned = may_be_unaligned;
2337 _is_wide = is_wide;
2338 }
2340 bool may_be_unaligned() { return _may_be_unaligned; }
2341 bool is_wide() { return _is_wide; }
2342 };
// Raw-address Unsafe store.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;                                  // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeRawOp::input_values_do(f);
                                                   f->visit(&_value); }
};
// Abstract base for Unsafe operations addressed as (object, offset).
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;                                 // Object to be fetched from or mutated
  Value _offset;                                 // Offset within object
  bool  _is_volatile;                            // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeOp::input_values_do(f);
                                                    f->visit(&_object);
                                                    f->visit(&_offset); }
};
// Unsafe field/element load from (object, offset).
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};
// Unsafe field/element store to (object, offset).
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeObjectOp::input_values_do(f);
                                                    f->visit(&_value); }
};
// Atomic Unsafe get-and-set / get-and-add on (object, offset); returns the
// old value (constructed as a get, is_put == false).
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
  bool  _is_add;                                 // true: get-and-add; false: get-and-set
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                            { return _is_add; }
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeObjectOp::input_values_do(f);
                                                    f->visit(&_value); }
};
// Abstract base for Unsafe prefetch hints; produces no value (T_VOID).
BASE(UnsafePrefetch, UnsafeObjectOp)
 public:
  UnsafePrefetch(Value object, Value offset)
  : UnsafeObjectOp(T_VOID, object, offset, false, false)
  {
  }
};
// Prefetch hint for an upcoming read of (object, offset).
LEAF(UnsafePrefetchRead, UnsafePrefetch)
 public:
  UnsafePrefetchRead(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Prefetch hint for an upcoming write of (object, offset).
LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Records receiver-type and argument-type profile data for a call site
// into the method's MDO.  Produces no value.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;         // method containing the call site
  int              _bci_of_invoke;  // bci of the invoke bytecode
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;           // receiver; may be NULL (e.g. static calls)
  ciKlass*         _known_holder;   // statically-known receiver klass, if any
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  ciMethod* callee()             const { return _callee; }
  Value recv()                   const { return _recv; }
  ciKlass* known_holder()        const { return _known_holder; }
  int nb_profiled_args()         const { return _obj_args == NULL ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i)   const { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined()                 const { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  virtual void input_values_do(ValueVisitor* f)   {
    // receiver is optional; profiled args are visited in place
    if (_recv != NULL) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};
// Records return-type profile data for a call's result into the MDO.
// Produces no value.
LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod*        _method;         // method containing the call site
  ciMethod*        _callee;         // the method whose return is profiled
  int              _bci_of_invoke;  // bci of the invoke bytecode
  Value            _ret;            // returned value; may be NULL

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileType has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  ciMethod* callee()             const { return _callee; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  Value ret()                    const { return _ret; }

  virtual void input_values_do(ValueVisitor* f)   {
    if (_ret != NULL) {
      f->visit(&_ret);
    }
  }
};
2560 // Call some C runtime function that doesn't safepoint,
2561 // optionally passing the current thread as the first argument.
2562 LEAF(RuntimeCall, Instruction)
2563 private:
2564 const char* _entry_name;
2565 address _entry;
2566 Values* _args;
2567 bool _pass_thread; // Pass the JavaThread* as an implicit first argument
2569 public:
2570 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
2571 : Instruction(type)
2572 , _entry(entry)
2573 , _args(args)
2574 , _entry_name(entry_name)
2575 , _pass_thread(pass_thread) {
2576 ASSERT_VALUES
2577 pin();
2578 }
2580 const char* entry_name() const { return _entry_name; }
2581 address entry() const { return _entry; }
2582 int number_of_arguments() const { return _args->length(); }
2583 Value argument_at(int i) const { return _args->at(i); }
2584 bool pass_thread() const { return _pass_thread; }
2586 virtual void input_values_do(ValueVisitor* f) {
2587 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
2588 }
2589 };
// Use to trip invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;  // the inlined method whose counter is tripped
  ValueStack* _state;    // state at the inlined invocation site

 public:
  ProfileInvoke(ciMethod* inlinee,  ValueStack* state)
    : Instruction(voidType)
    , _inlinee(inlinee)
    , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located QQQ???
    pin();
  }

  ciMethod* inlinee()      { return _inlinee; }
  ValueStack* state()      { return _state; }
  virtual void input_values_do(ValueVisitor*)   {}
  // defined out of line; walks the values in _state
  virtual void state_values_do(ValueVisitor*);
};
// Explicit memory barrier; the LIR_Code selects the barrier kind.
// Pinned so it cannot be moved or eliminated.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;  // which barrier to emit

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  LIR_Code code()           { return _code; }

  virtual void input_values_do(ValueVisitor*)   {}
};
2631 class BlockPair: public CompilationResourceObj {
2632 private:
2633 BlockBegin* _from;
2634 BlockBegin* _to;
2635 public:
2636 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
2637 BlockBegin* from() const { return _from; }
2638 BlockBegin* to() const { return _to; }
2639 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
2640 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); }
2641 void set_to(BlockBegin* b) { _to = b; }
2642 void set_from(BlockBegin* b) { _from = b; }
2643 };
2646 define_array(BlockPairArray, BlockPair*)
2647 define_stack(BlockPairList, BlockPairArray)
// Out-of-line BlockBegin successor accessors.  In debug builds they verify
// that the block's successor list agrees with its BlockEnd's (once _end is
// set); add_successor is only legal before the BlockEnd is attached.
inline int         BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const              { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch");          return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd");         _successors.append(sux); }
2654 #undef ASSERT_VALUES
2656 #endif // SHARE_VM_C1_C1_INSTRUCTION_HPP