Wed, 09 Oct 2013 16:32:21 +0200
8023657: New type profiling points: arguments to call
Summary: x86 interpreter and c1 type profiling for arguments at calls
Reviewed-by: kvn, twisti
1 /*
2 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP
26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_LIR.hpp"
30 #include "c1/c1_ValueType.hpp"
31 #include "ci/ciField.hpp"
33 // Predefined classes
34 class ciField;
35 class ValueStack;
36 class InstructionPrinter;
37 class IRScope;
38 class LIR_OprDesc;
39 typedef LIR_OprDesc* LIR_Opr;
42 // Instruction class hierarchy
43 //
44 // All leaf classes in the class hierarchy are concrete classes
45 // (i.e., are instantiated). All other classes are abstract and
46 // serve factoring.
48 class Instruction;
49 class Phi;
50 class Local;
51 class Constant;
52 class AccessField;
53 class LoadField;
54 class StoreField;
55 class AccessArray;
56 class ArrayLength;
57 class AccessIndexed;
58 class LoadIndexed;
59 class StoreIndexed;
60 class NegateOp;
61 class Op2;
62 class ArithmeticOp;
63 class ShiftOp;
64 class LogicOp;
65 class CompareOp;
66 class IfOp;
67 class Convert;
68 class NullCheck;
69 class TypeCast;
70 class OsrEntry;
71 class ExceptionObject;
72 class StateSplit;
73 class Invoke;
74 class NewInstance;
75 class NewArray;
76 class NewTypeArray;
77 class NewObjectArray;
78 class NewMultiArray;
79 class TypeCheck;
80 class CheckCast;
81 class InstanceOf;
82 class AccessMonitor;
83 class MonitorEnter;
84 class MonitorExit;
85 class Intrinsic;
86 class BlockBegin;
87 class BlockEnd;
88 class Goto;
89 class If;
90 class IfInstanceOf;
91 class Switch;
92 class TableSwitch;
93 class LookupSwitch;
94 class Return;
95 class Throw;
96 class Base;
97 class RoundFP;
98 class UnsafeOp;
99 class UnsafeRawOp;
100 class UnsafeGetRaw;
101 class UnsafePutRaw;
102 class UnsafeObjectOp;
103 class UnsafeGetObject;
104 class UnsafePutObject;
105 class UnsafeGetAndSetObject;
106 class UnsafePrefetch;
107 class UnsafePrefetchRead;
108 class UnsafePrefetchWrite;
109 class ProfileCall;
110 class ProfileInvoke;
111 class RuntimeCall;
112 class MemBar;
113 class RangeCheckPredicate;
114 #ifdef ASSERT
115 class Assert;
116 #endif
118 // A Value is a reference to the instruction creating the value
119 typedef Instruction* Value;
120 define_array(ValueArray, Value)
121 define_stack(Values, ValueArray)
123 define_array(ValueStackArray, ValueStack*)
124 define_stack(ValueStackStack, ValueStackArray)
// BlockClosure is the base class for block traversal/iteration.

class BlockClosure: public CompilationResourceObj {
 public:
  // Invoked once per basic block by an iteration driver (e.g. BlockList::iterate_forward).
  virtual void block_do(BlockBegin* block) = 0;
};
// A simple closure class for visiting the values of an Instruction

class ValueVisitor: public StackObj {
 public:
  // Receives a pointer to the Value slot (not the Value itself), so an
  // implementation can observe or update the stored reference in place.
  virtual void visit(Value* v) = 0;
};
// Some array and list classes

define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// Growable list of basic blocks with traversal helpers.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  // Traversal helpers (defined out of line in c1_Instruction.cpp).
  void iterate_forward(BlockClosure* closure);
  void iterate_backward(BlockClosure* closure);
  void blocks_do(void f(BlockBegin*));   // plain function-pointer variant
  void values_do(ValueVisitor* f);
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};
// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
//
// Note: when a new concrete Instruction subclass is introduced, a
// matching do_X member must be added here and implemented in every
// concrete visitor.

class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};
// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.
//
// Note: these macros expand inside a class body; the f arguments are member
//       calls evaluated on both 'this' and the candidate value '_v'.

#define HASHING1(class_name, enabled, f1)        \
  virtual intx hash() const {                    \
    return (enabled) ? HASH2(name(), f1) : 0;    \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    return true;                                 \
  }                                              \


#define HASHING2(class_name, enabled, f1, f2)    \
  virtual intx hash() const {                    \
    return (enabled) ? HASH3(name(), f1, f2) : 0; \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    if (f2 != _v->f2) return false;              \
    return true;                                 \
  }                                              \


#define HASHING3(class_name, enabled, f1, f2, f3) \
  virtual intx hash() const {                     \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                               \
  virtual bool is_equal(Value v) const {          \
    if (!(enabled)  ) return false;               \
    class_name* _v = v->as_##class_name();        \
    if (_v == NULL  ) return false;               \
    if (f1 != _v->f1) return false;               \
    if (f2 != _v->f2) return false;               \
    if (f3 != _v->f3) return false;               \
    return true;                                  \
  }                                               \
292 // The mother of all instructions...
294 class Instruction: public CompilationResourceObj {
295 private:
296 int _id; // the unique instruction id
297 #ifndef PRODUCT
298 int _printable_bci; // the bci of the instruction for printing
299 #endif
300 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1
301 int _pin_state; // set of PinReason describing the reason for pinning
302 ValueType* _type; // the instruction value type
303 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions)
304 Instruction* _subst; // the substitution instruction if any
305 LIR_Opr _operand; // LIR specific information
306 unsigned int _flags; // Flag bits
308 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
309 ValueStack* _exception_state; // Copy of state for exception handling
310 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
312 friend class UseCountComputer;
313 friend class BlockBegin;
315 void update_exception_state(ValueStack* state);
317 protected:
318 BlockBegin* _block; // Block that contains this instruction
320 void set_type(ValueType* type) {
321 assert(type != NULL, "type must exist");
322 _type = type;
323 }
325 // Helper class to keep track of which arguments need a null check
326 class ArgsNonNullState {
327 private:
328 int _nonnull_state; // mask identifying which args are nonnull
329 public:
330 ArgsNonNullState()
331 : _nonnull_state(AllBits) {}
333 // Does argument number i needs a null check?
334 bool arg_needs_null_check(int i) const {
335 // No data is kept for arguments starting at position 33 so
336 // conservatively assume that they need a null check.
337 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
338 return is_set_nth_bit(_nonnull_state, i);
339 }
340 return true;
341 }
343 // Set whether argument number i needs a null check or not
344 void set_arg_needs_null_check(int i, bool check) {
345 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
346 if (check) {
347 _nonnull_state |= nth_bit(i);
348 } else {
349 _nonnull_state &= ~(nth_bit(i));
350 }
351 }
352 }
353 };
355 public:
356 void* operator new(size_t size) throw() {
357 Compilation* c = Compilation::current();
358 void* res = c->arena()->Amalloc(size);
359 ((Instruction*)res)->_id = c->get_next_id();
360 return res;
361 }
363 static const int no_bci = -99;
365 enum InstructionFlag {
366 NeedsNullCheckFlag = 0,
367 CanTrapFlag,
368 DirectCompareFlag,
369 IsEliminatedFlag,
370 IsSafepointFlag,
371 IsStaticFlag,
372 IsStrictfpFlag,
373 NeedsStoreCheckFlag,
374 NeedsWriteBarrierFlag,
375 PreservesStateFlag,
376 TargetIsFinalFlag,
377 TargetIsLoadedFlag,
378 TargetIsStrictfpFlag,
379 UnorderedIsTrueFlag,
380 NeedsPatchingFlag,
381 ThrowIncompatibleClassChangeErrorFlag,
382 ProfileMDOFlag,
383 IsLinkedInBlockFlag,
384 NeedsRangeCheckFlag,
385 InWorkListFlag,
386 DeoptimizeOnException,
387 InstructionLastFlag
388 };
390 public:
391 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; }
392 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); };
394 // 'globally' used condition values
395 enum Condition {
396 eql, neq, lss, leq, gtr, geq, aeq, beq
397 };
399 // Instructions may be pinned for many reasons and under certain conditions
400 // with enough knowledge it's possible to safely unpin them.
401 enum PinReason {
402 PinUnknown = 1 << 0
403 , PinExplicitNullCheck = 1 << 3
404 , PinStackForStateSplit= 1 << 12
405 , PinStateSplitConstructor= 1 << 13
406 , PinGlobalValueNumbering= 1 << 14
407 };
409 static Condition mirror(Condition cond);
410 static Condition negate(Condition cond);
412 // initialization
413 static int number_of_instructions() {
414 return Compilation::current()->number_of_instructions();
415 }
417 // creation
418 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false)
419 : _use_count(0)
420 #ifndef PRODUCT
421 , _printable_bci(-99)
422 #endif
423 , _pin_state(0)
424 , _type(type)
425 , _next(NULL)
426 , _block(NULL)
427 , _subst(NULL)
428 , _flags(0)
429 , _operand(LIR_OprFact::illegalOpr)
430 , _state_before(state_before)
431 , _exception_handlers(NULL)
432 {
433 check_state(state_before);
434 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist");
435 update_exception_state(_state_before);
436 }
438 // accessors
439 int id() const { return _id; }
440 #ifndef PRODUCT
441 bool has_printable_bci() const { return _printable_bci != -99; }
442 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
443 void set_printable_bci(int bci) { _printable_bci = bci; }
444 #endif
445 int dominator_depth();
446 int use_count() const { return _use_count; }
447 int pin_state() const { return _pin_state; }
448 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
449 ValueType* type() const { return _type; }
450 BlockBegin *block() const { return _block; }
451 Instruction* prev(); // use carefully, expensive operation
452 Instruction* next() const { return _next; }
453 bool has_subst() const { return _subst != NULL; }
454 Instruction* subst() { return _subst == NULL ? this : _subst->subst(); }
455 LIR_Opr operand() const { return _operand; }
457 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
458 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
459 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
460 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; }
462 bool has_uses() const { return use_count() > 0; }
463 ValueStack* state_before() const { return _state_before; }
464 ValueStack* exception_state() const { return _exception_state; }
465 virtual bool needs_exception_state() const { return true; }
466 XHandlers* exception_handlers() const { return _exception_handlers; }
468 // manipulation
469 void pin(PinReason reason) { _pin_state |= reason; }
470 void pin() { _pin_state |= PinUnknown; }
471 // DANGEROUS: only used by EliminateStores
472 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }
474 Instruction* set_next(Instruction* next) {
475 assert(next->has_printable_bci(), "_printable_bci should have been set");
476 assert(next != NULL, "must not be NULL");
477 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
478 assert(next->can_be_linked(), "shouldn't link these instructions into list");
480 BlockBegin *block = this->block();
481 next->_block = block;
483 next->set_flag(Instruction::IsLinkedInBlockFlag, true);
484 _next = next;
485 return next;
486 }
488 Instruction* set_next(Instruction* next, int bci) {
489 #ifndef PRODUCT
490 next->set_printable_bci(bci);
491 #endif
492 return set_next(next);
493 }
495 // when blocks are merged
496 void fixup_block_pointers() {
497 Instruction *cur = next()->next(); // next()'s block is set in set_next
498 while (cur && cur->_block != block()) {
499 cur->_block = block();
500 cur = cur->next();
501 }
502 }
504 Instruction *insert_after(Instruction *i) {
505 Instruction* n = _next;
506 set_next(i);
507 i->set_next(n);
508 return _next;
509 }
511 Instruction *insert_after_same_bci(Instruction *i) {
512 #ifndef PRODUCT
513 i->set_printable_bci(printable_bci());
514 #endif
515 return insert_after(i);
516 }
518 void set_subst(Instruction* subst) {
519 assert(subst == NULL ||
520 type()->base() == subst->type()->base() ||
521 subst->type()->base() == illegalType, "type can't change");
522 _subst = subst;
523 }
524 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; }
525 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
526 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; }
528 // machine-specifics
529 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
530 void clear_operand() { _operand = LIR_OprFact::illegalOpr; }
532 // generic
533 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
534 virtual Phi* as_Phi() { return NULL; }
535 virtual Local* as_Local() { return NULL; }
536 virtual Constant* as_Constant() { return NULL; }
537 virtual AccessField* as_AccessField() { return NULL; }
538 virtual LoadField* as_LoadField() { return NULL; }
539 virtual StoreField* as_StoreField() { return NULL; }
540 virtual AccessArray* as_AccessArray() { return NULL; }
541 virtual ArrayLength* as_ArrayLength() { return NULL; }
542 virtual AccessIndexed* as_AccessIndexed() { return NULL; }
543 virtual LoadIndexed* as_LoadIndexed() { return NULL; }
544 virtual StoreIndexed* as_StoreIndexed() { return NULL; }
545 virtual NegateOp* as_NegateOp() { return NULL; }
546 virtual Op2* as_Op2() { return NULL; }
547 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; }
548 virtual ShiftOp* as_ShiftOp() { return NULL; }
549 virtual LogicOp* as_LogicOp() { return NULL; }
550 virtual CompareOp* as_CompareOp() { return NULL; }
551 virtual IfOp* as_IfOp() { return NULL; }
552 virtual Convert* as_Convert() { return NULL; }
553 virtual NullCheck* as_NullCheck() { return NULL; }
554 virtual OsrEntry* as_OsrEntry() { return NULL; }
555 virtual StateSplit* as_StateSplit() { return NULL; }
556 virtual Invoke* as_Invoke() { return NULL; }
557 virtual NewInstance* as_NewInstance() { return NULL; }
558 virtual NewArray* as_NewArray() { return NULL; }
559 virtual NewTypeArray* as_NewTypeArray() { return NULL; }
560 virtual NewObjectArray* as_NewObjectArray() { return NULL; }
561 virtual NewMultiArray* as_NewMultiArray() { return NULL; }
562 virtual TypeCheck* as_TypeCheck() { return NULL; }
563 virtual CheckCast* as_CheckCast() { return NULL; }
564 virtual InstanceOf* as_InstanceOf() { return NULL; }
565 virtual TypeCast* as_TypeCast() { return NULL; }
566 virtual AccessMonitor* as_AccessMonitor() { return NULL; }
567 virtual MonitorEnter* as_MonitorEnter() { return NULL; }
568 virtual MonitorExit* as_MonitorExit() { return NULL; }
569 virtual Intrinsic* as_Intrinsic() { return NULL; }
570 virtual BlockBegin* as_BlockBegin() { return NULL; }
571 virtual BlockEnd* as_BlockEnd() { return NULL; }
572 virtual Goto* as_Goto() { return NULL; }
573 virtual If* as_If() { return NULL; }
574 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; }
575 virtual TableSwitch* as_TableSwitch() { return NULL; }
576 virtual LookupSwitch* as_LookupSwitch() { return NULL; }
577 virtual Return* as_Return() { return NULL; }
578 virtual Throw* as_Throw() { return NULL; }
579 virtual Base* as_Base() { return NULL; }
580 virtual RoundFP* as_RoundFP() { return NULL; }
581 virtual ExceptionObject* as_ExceptionObject() { return NULL; }
582 virtual UnsafeOp* as_UnsafeOp() { return NULL; }
583 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; }
584 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; }
586 #ifdef ASSERT
587 virtual Assert* as_Assert() { return NULL; }
588 #endif
590 virtual void visit(InstructionVisitor* v) = 0;
592 virtual bool can_trap() const { return false; }
594 virtual void input_values_do(ValueVisitor* f) = 0;
595 virtual void state_values_do(ValueVisitor* f);
596 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
597 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
599 virtual ciType* exact_type() const;
600 virtual ciType* declared_type() const { return NULL; }
602 // hashing
603 virtual const char* name() const = 0;
604 HASHING1(Instruction, false, id()) // hashing disabled by default
606 // debugging
607 static void check_state(ValueStack* state) PRODUCT_RETURN;
608 void print() PRODUCT_RETURN;
609 void print_line() PRODUCT_RETURN;
610 void print(InstructionPrinter& ip) PRODUCT_RETURN;
611 };
// The following macros are used to define base (i.e., non-leaf)
// and leaf instruction classes. They define class-name related
// generic functionality in one place.
//
// Note: BASE deliberately leaves the class body open; the invocation
// site supplies the remaining members and the closing brace.

#define BASE(class_name, super_class_name)       \
  class class_name: public super_class_name {    \
   public:                                       \
    virtual class_name* as_##class_name()        { return this; }              \


#define LEAF(class_name, super_class_name)       \
  BASE(class_name, super_class_name)             \
  public:                                        \
    virtual const char* name() const             { return #class_name; }       \
    virtual void visit(InstructionVisitor* v)    { v->do_##class_name(this); } \
// Debugging support

#ifdef ASSERT
// Visitor used by ASSERT_VALUES to verify that no value slot is NULL.
class AssertValues: public ValueVisitor {
  void visit(Value* x)             { assert((*x) != NULL, "value must exist"); }
};
  // Checks all input/state/other values of the enclosing instruction.
  #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); }
#else
  #define ASSERT_VALUES
#endif // ASSERT
// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int         _pf_flags; // the flags of the phi function
  int         _index;    // to value on operand stack (index < 0) or to local
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  {
    _block = b;
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  // defined out of line; operands are one per incoming branch
  Value operand_at(int i) const;
  int   operand_count() const;

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // no input values stored directly in the Phi; use operand_at/operand_count
  }
};
// A local is a placeholder for an incoming argument to a function call.

LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  ciType*  _declared_type;                       // static (declared) type of the argument
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    // Locals carry no bytecode position of their own; -1 marks that for printing.
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }

  virtual ciType* declared_type() const          { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};
728 LEAF(Constant, Instruction)
729 public:
730 // creation
731 Constant(ValueType* type):
732 Instruction(type, NULL, /*type_is_constant*/ true)
733 {
734 assert(type->is_constant(), "must be a constant");
735 }
737 Constant(ValueType* type, ValueStack* state_before):
738 Instruction(type, state_before, /*type_is_constant*/ true)
739 {
740 assert(state_before != NULL, "only used for constants which need patching");
741 assert(type->is_constant(), "must be a constant");
742 // since it's patching it needs to be pinned
743 pin();
744 }
746 // generic
747 virtual bool can_trap() const { return state_before() != NULL; }
748 virtual void input_values_do(ValueVisitor* f) { /* no values */ }
750 virtual intx hash() const;
751 virtual bool is_equal(Value v) const;
753 virtual ciType* exact_type() const;
755 enum CompareResult { not_comparable = -1, cond_false, cond_true };
757 virtual CompareResult compare(Instruction::Condition condition, Value right) const;
758 BlockBegin* compare(Instruction::Condition cond, Value right,
759 BlockBegin* true_sux, BlockBegin* false_sux) const {
760 switch (compare(cond, right)) {
761 case not_comparable:
762 return NULL;
763 case cond_false:
764 return false_sux;
765 case cond_true:
766 return true_sux;
767 default:
768 ShouldNotReachHere();
769 return NULL;
770 }
771 }
772 };
// Common base of field loads and stores (LoadField/StoreField).
BASE(AccessField, Instruction)
 private:
  Value       _obj;                              // receiver (or holder mirror for statics)
  int         _offset;                           // field offset in bytes
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};
// Read of an instance or static field.
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};
// Write of an instance or static field.
LEAF(StoreField, AccessField)
 private:
  Value _value;                                  // the value being stored

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    // object stores need a GC write barrier
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};
// Common base of all array-touching instructions.
BASE(AccessArray, Instruction)
 private:
  Value       _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};
// Reads the length of an array; always int-typed.
LEAF(ArrayLength, AccessArray)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};
// Common base of indexed array loads and stores.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;                              // element index
  Value     _length;                             // array length for range checking (may be cleared)
  BasicType _elt_type;                           // element basic type

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  void clear_length()                            { _length = NULL; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};
// Load of an array element; participates in value numbering
// (HASHING2 on array and index).
LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // Type information derived from the array's type (defined out of line).
  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};
// Store of a value into an array element. For object element types the
// store requires a GC write barrier and a dynamic store (subtype) check.
// Carries MethodData* profiling info for the store-check profile.
LEAF(StoreIndexed, AccessIndexed)
 private:
  Value       _value;            // the value being stored

  ciMethod* _profiled_method;    // method containing the store, for profiling
  int       _profiled_bci;       // bci of the store within the profiled method
 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0)
  {
    // only object stores need a write barrier and a store check
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)                { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)         { _profiled_method = method;   }
  void set_profiled_bci(int bci)                     { _profiled_bci = bci;         }
  bool      should_profile() const                   { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const                  { return _profiled_method;     }
  int       profiled_bci() const                     { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};
// Unary negation of its operand; the result type is the base type of x.
LEAF(NegateOp, Instruction)
 private:
  Value _x;      // the operand to negate

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};
// Abstract base class for all binary operations: stores the bytecode
// that selects the operation plus the two operands.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;   // the bytecode selecting the operation
  Value           _x;    // the first operand
  Value           _y;    // the second operand

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  // Exchange the operands; only legal for commutative operations.
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
// Arithmetic binary operation (add, sub, mul, div, rem, ...). The result
// type is the meet of the operand types. Division/remainder may trap, in
// which case the instruction is pinned.
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  // is_strictfp - whether FP semantics must follow strictfp rules
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool        is_strictfp() const                { return check_flag(IsStrictfpFlag); }

  // generic (op-dependent; defined out of line)
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// Shift operation; the result has the base type of the value being
// shifted (x), not of the shift amount (s).
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// Bitwise logic operation (and/or/xor); result type is the meet of the
// operand types.
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic (op-dependent; defined out of line)
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// Comparison bytecode producing an int-typed result (e.g. the lcmp/fcmp
// family of bytecodes).
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// Conditional expression (select): yields tval if "x cond y" holds, else
// fval. The condition is smuggled through Op2's bytecode slot, so op()
// is disabled and cond() reinterprets the stored value.
LEAF(IfOp, Op2)
 private:
  Value _tval;   // value produced when the condition is true
  Value _fval;   // value produced when the condition is false

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  // op() must not be used; the Op2 bytecode slot holds the condition.
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};
// Primitive type conversion (the i2l/i2f/d2i... family): converts the
// operand to the given target type under the conversion bytecode _op.
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;     // the conversion bytecode
  Value           _value;  // the value being converted

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};
// Explicit null check on an object value; the result is the (non-null)
// object itself. Pinned as an explicit null check; null-check
// elimination may later clear the can-trap flag.
LEAF(NullCheck, Instruction)
 private:
  Value       _obj;      // the object to null-check

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};
// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;  // the more precise declared type asserted for _obj
  Value   _obj;            // the value being type-cast

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                  { return _declared_type; }
  Value   obj() const                            { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};
// Abstract base class for instructions that carry a ValueStack snapshot
// of the JVM state (after the instruction). StateSplit instructions are
// pinned by default since they have observable effects on the state.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;    // JVM state after this instruction; set exactly once

 protected:
  // Replace old_block with new_block in the given block list (helper for
  // successor substitution in subclasses).
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};
// Method invocation (invokestatic/special/virtual/interface/dynamic).
// Holds the receiver (NULL for static/dynamic calls), the argument list,
// the argument signature, and the resolved target method.
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;        // the invoke bytecode
  Value           _recv;        // receiver; NULL if there is none
  Values*         _args;        // the call arguments (excluding receiver)
  BasicTypeList*  _signature;   // basic types of the arguments
  int             _vtable_index; // vtable index for virtual dispatch
  ciMethod*       _target;      // the resolved target method

 public:
  // creation (defined out of line)
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};
// Allocation of a new instance of the given class (the "new" bytecode).
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;   // the class being instantiated

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
  : StateSplit(instanceType, state_before)
  , _klass(klass)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};
// Abstract base class for array allocations; holds the length operand
// (NULL for NewMultiArray, which keeps its dimensions separately).
BASE(NewArray, StateSplit)
 private:
  Value       _length;   // the array length; NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* exact_type() const                     { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};
// Allocation of a one-dimensional array of a primitive element type
// (the newarray bytecode).
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;   // primitive element type of the array

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};
// Allocation of a one-dimensional array of object references
// (the anewarray bytecode).
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;   // element class of the array

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};
// Allocation of a multi-dimensional array (the multianewarray bytecode).
// The per-dimension lengths live in _dims; NewArray's length is NULL.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;   // the (multi-dimensional) array class
  Values*  _dims;    // one length value per dimension

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};
// Abstract base class for type checks (CheckCast and InstanceOf): holds
// the klass to check against, the object to check, and MethodData*
// profiling information for the check.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*    _klass;    // the class to check against; NULL if not loaded
  Value       _obj;      // the object being checked

  ciMethod* _profiled_method;  // method containing the check, for profiling
  int       _profiled_bci;     // bci of the check within the profiled method

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)                { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)         { _profiled_method = method;   }
  void set_profiled_bci(int bci)                     { _profiled_bci = bci;         }
  bool      should_profile() const                   { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const                  { return _profiled_method;     }
  int       profiled_bci() const                     { return _profiled_bci;        }
};
// The checkcast bytecode: result is the checked object (objectType).
// May be flagged to throw IncompatibleClassChangeError instead of the
// usual cast failure.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
};
// The instanceof bytecode: int-typed result (0 or 1).
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};
// Abstract base class for monitorenter/monitorexit: holds the object
// being locked and the compiler-assigned monitor slot number. Implies a
// null check on the object.
BASE(AccessMonitor, StateSplit)
 private:
  Value       _obj;        // the object whose monitor is accessed
  int         _monitor_no; // monitor slot number within the frame

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};
// The monitorenter bytecode: acquires the monitor of the given object.
LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const                  { return true; }
};
// The monitorexit bytecode: releases the monitor of the given object.
// No state_before is recorded (passes NULL to AccessMonitor).
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};
// A call to a known intrinsic method, compiled inline instead of as a
// regular Invoke. Holds the intrinsic id, the argument list (which
// includes the receiver, if any, at index 0), and per-argument
// null-check state.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;            // which intrinsic this is
  Values*          _args;          // all arguments; receiver (if any) is _args->at(0)
  Value            _recv;          // cached receiver, or NULL
  ArgsNonNullState _nonnull_state; // per-argument "needs null check" tracking

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag,        cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap()) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                   { return check_flag(PreservesStateFlag); }

  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};
1584 class LIR_List;
1586 LEAF(BlockBegin, StateSplit)
1587 private:
1588 int _block_id; // the unique block id
1589 int _bci; // start-bci of block
1590 int _depth_first_number; // number of this block in a depth-first ordering
1591 int _linear_scan_number; // number of this block in linear-scan ordering
1592 int _dominator_depth;
1593 int _loop_depth; // the loop nesting level of this block
1594 int _loop_index; // number of the innermost loop of this block
1595 int _flags; // the flags associated with this block
1597 // fields used by BlockListBuilder
1598 int _total_preds; // number of predecessors found by BlockListBuilder
1599 BitMap _stores_to_locals; // bit is set when a local variable is stored in the block
1601 // SSA specific fields: (factor out later)
1602 BlockList _successors; // the successors of this block
1603 BlockList _predecessors; // the predecessors of this block
1604 BlockList _dominates; // list of blocks that are dominated by this block
1605 BlockBegin* _dominator; // the dominator of this block
1606 // SSA specific ends
1607 BlockEnd* _end; // the last instruction of this block
1608 BlockList _exception_handlers; // the exception handlers potentially invoked by this block
1609 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler
1610 int _exception_handler_pco; // if this block is the start of an exception handler,
1611 // this records the PC offset in the assembly code of the
1612 // first instruction in this block
1613 Label _label; // the label associated with this block
1614 LIR_List* _lir; // the low level intermediate representation for this block
1616 BitMap _live_in; // set of live LIR_Opr registers at entry to this block
1617 BitMap _live_out; // set of live LIR_Opr registers at exit from this block
1618 BitMap _live_gen; // set of registers used before any redefinition in this block
1619 BitMap _live_kill; // set of registers defined in this block
1621 BitMap _fpu_register_usage;
1622 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan
1623 int _first_lir_instruction_id; // ID of first LIR instruction in this block
1624 int _last_lir_instruction_id; // ID of last LIR instruction in this block
1626 void iterate_preorder (boolArray& mark, BlockClosure* closure);
1627 void iterate_postorder(boolArray& mark, BlockClosure* closure);
1629 friend class SuxAndWeightAdjuster;
1631 public:
1632 void* operator new(size_t size) throw() {
1633 Compilation* c = Compilation::current();
1634 void* res = c->arena()->Amalloc(size);
1635 ((BlockBegin*)res)->_id = c->get_next_id();
1636 ((BlockBegin*)res)->_block_id = c->get_next_block_id();
1637 return res;
1638 }
1640 // initialization/counting
1641 static int number_of_blocks() {
1642 return Compilation::current()->number_of_blocks();
1643 }
1645 // creation
1646 BlockBegin(int bci)
1647 : StateSplit(illegalType)
1648 , _bci(bci)
1649 , _depth_first_number(-1)
1650 , _linear_scan_number(-1)
1651 , _loop_depth(0)
1652 , _flags(0)
1653 , _dominator_depth(-1)
1654 , _dominator(NULL)
1655 , _end(NULL)
1656 , _predecessors(2)
1657 , _successors(2)
1658 , _dominates(2)
1659 , _exception_handlers(1)
1660 , _exception_states(NULL)
1661 , _exception_handler_pco(-1)
1662 , _lir(NULL)
1663 , _loop_index(-1)
1664 , _live_in()
1665 , _live_out()
1666 , _live_gen()
1667 , _live_kill()
1668 , _fpu_register_usage()
1669 , _fpu_stack_state(NULL)
1670 , _first_lir_instruction_id(-1)
1671 , _last_lir_instruction_id(-1)
1672 , _total_preds(0)
1673 , _stores_to_locals()
1674 {
1675 _block = this;
1676 #ifndef PRODUCT
1677 set_printable_bci(bci);
1678 #endif
1679 }
1681 // accessors
1682 int block_id() const { return _block_id; }
1683 int bci() const { return _bci; }
1684 BlockList* successors() { return &_successors; }
1685 BlockList* dominates() { return &_dominates; }
1686 BlockBegin* dominator() const { return _dominator; }
1687 int loop_depth() const { return _loop_depth; }
1688 int dominator_depth() const { return _dominator_depth; }
1689 int depth_first_number() const { return _depth_first_number; }
1690 int linear_scan_number() const { return _linear_scan_number; }
1691 BlockEnd* end() const { return _end; }
1692 Label* label() { return &_label; }
1693 LIR_List* lir() const { return _lir; }
1694 int exception_handler_pco() const { return _exception_handler_pco; }
1695 BitMap& live_in() { return _live_in; }
1696 BitMap& live_out() { return _live_out; }
1697 BitMap& live_gen() { return _live_gen; }
1698 BitMap& live_kill() { return _live_kill; }
1699 BitMap& fpu_register_usage() { return _fpu_register_usage; }
1700 intArray* fpu_stack_state() const { return _fpu_stack_state; }
1701 int first_lir_instruction_id() const { return _first_lir_instruction_id; }
1702 int last_lir_instruction_id() const { return _last_lir_instruction_id; }
1703 int total_preds() const { return _total_preds; }
1704 BitMap& stores_to_locals() { return _stores_to_locals; }
1706 // manipulation
1707 void set_dominator(BlockBegin* dom) { _dominator = dom; }
1708 void set_loop_depth(int d) { _loop_depth = d; }
1709 void set_dominator_depth(int d) { _dominator_depth = d; }
1710 void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
1711 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; }
1712 void set_end(BlockEnd* end);
1713 void clear_end();
1714 void disconnect_from_graph();
1715 static void disconnect_edge(BlockBegin* from, BlockBegin* to);
1716 BlockBegin* insert_block_between(BlockBegin* sux);
1717 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
1718 void set_lir(LIR_List* lir) { _lir = lir; }
1719 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
1720 void set_live_in (BitMap map) { _live_in = map; }
1721 void set_live_out (BitMap map) { _live_out = map; }
1722 void set_live_gen (BitMap map) { _live_gen = map; }
1723 void set_live_kill (BitMap map) { _live_kill = map; }
1724 void set_fpu_register_usage(BitMap map) { _fpu_register_usage = map; }
1725 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; }
1726 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; }
1727 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; }
1728 void increment_total_preds(int n = 1) { _total_preds += n; }
1729 void init_stores_to_locals(int locals_count) { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); }
1731 // generic
1732 virtual void state_values_do(ValueVisitor* f);
1734 // successors and predecessors
1735 int number_of_sux() const;
1736 BlockBegin* sux_at(int i) const;
1737 void add_successor(BlockBegin* sux);
1738 void remove_successor(BlockBegin* pred);
1739 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); }
1741 void add_predecessor(BlockBegin* pred);
1742 void remove_predecessor(BlockBegin* pred);
1743 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
1744 int number_of_preds() const { return _predecessors.length(); }
1745 BlockBegin* pred_at(int i) const { return _predecessors[i]; }
1747 // exception handlers potentially invoked by this block
1748 void add_exception_handler(BlockBegin* b);
1749 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
1750 int number_of_exception_handlers() const { return _exception_handlers.length(); }
1751 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); }
1753 // states of the instructions that have an edge to this exception handler
1754 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
1755 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
1756 int add_exception_state(ValueStack* state);
1758 // flags
1759 enum Flag {
1760 no_flag = 0,
1761 std_entry_flag = 1 << 0,
1762 osr_entry_flag = 1 << 1,
1763 exception_entry_flag = 1 << 2,
1764 subroutine_entry_flag = 1 << 3,
1765 backward_branch_target_flag = 1 << 4,
1766 is_on_work_list_flag = 1 << 5,
1767 was_visited_flag = 1 << 6,
1768 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand
1769 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split
1770 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan
1771 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan
1772 donot_eliminate_range_checks = 1 << 11 // Should be try to eliminate range checks in this block
1773 };
1775 void set(Flag f) { _flags |= f; }
1776 void clear(Flag f) { _flags &= ~f; }
1777 bool is_set(Flag f) const { return (_flags & f) != 0; }
1778 bool is_entry_block() const {
1779 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
1780 return (_flags & entry_mask) != 0;
1781 }
1783 // iteration
1784 void iterate_preorder (BlockClosure* closure);
1785 void iterate_postorder (BlockClosure* closure);
1787 void block_values_do(ValueVisitor* f);
1789 // loops
1790 void set_loop_index(int ix) { _loop_index = ix; }
1791 int loop_index() const { return _loop_index; }
1793 // merging
1794 bool try_merge(ValueStack* state); // try to merge states at block begin
1795 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); }
1797 // debugging
1798 void print_block() PRODUCT_RETURN;
1799 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
1800 };
// Abstract base class for the last instruction of a basic block
// (branches, returns, throws, ...). Owns the list of successor blocks.
BASE(BlockEnd, StateSplit)
 private:
  BlockList*  _sux;    // successor blocks; set once via set_sux

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const                { return _sux->at(i); }
  // by convention the default (fall-through) successor is the last one
  BlockBegin* default_sux() const                { return sux_at(number_of_sux() - 1); }
  BlockBegin** addr_sux_at(int i) const          { return _sux->adr_at(i); }
  int sux_index(BlockBegin* sux) const           { return _sux->find(sux); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};
1845 LEAF(Goto, BlockEnd)
1846 public:
1847 enum Direction {
1848 none, // Just a regular goto
1849 taken, not_taken // Goto produced from If
1850 };
1851 private:
1852 ciMethod* _profiled_method;
1853 int _profiled_bci;
1854 Direction _direction;
1855 public:
1856 // creation
1857 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
1858 : BlockEnd(illegalType, state_before, is_safepoint)
1859 , _direction(none)
1860 , _profiled_method(NULL)
1861 , _profiled_bci(0) {
1862 BlockList* s = new BlockList(1);
1863 s->append(sux);
1864 set_sux(s);
1865 }
1867 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint)
1868 , _direction(none)
1869 , _profiled_method(NULL)
1870 , _profiled_bci(0) {
1871 BlockList* s = new BlockList(1);
1872 s->append(sux);
1873 set_sux(s);
1874 }
1876 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1877 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
1878 int profiled_bci() const { return _profiled_bci; }
1879 Direction direction() const { return _direction; }
1881 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
1882 void set_profiled_method(ciMethod* method) { _profiled_method = method; }
1883 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1884 void set_direction(Direction d) { _direction = d; }
1885 };
#ifdef ASSERT
// Debug-build-only assertion node: checks that "x cond y" holds;
// _message is the text reported on failure. Construction is defined
// out of line.
LEAF(Assert, Instruction)
 private:
  Value       _x;        // first operand of the comparison
  Condition   _cond;     // the condition to assert
  Value       _y;        // second operand of the comparison
  char        *_message; // failure message text

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  Assert(Value x, Condition cond, bool unordered_is_true, Value y);

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  const char *message() const                    { return _message; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
#endif
1912 LEAF(RangeCheckPredicate, StateSplit)
1913 private:
1914 Value _x;
1915 Condition _cond;
1916 Value _y;
1918 void check_state();
1920 public:
1921 // creation
1922 // unordered_is_true is valid for float/double compares only
1923 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType)
1924 , _x(x)
1925 , _cond(cond)
1926 , _y(y)
1927 {
1928 ASSERT_VALUES
1929 set_flag(UnorderedIsTrueFlag, unordered_is_true);
1930 assert(x->type()->tag() == y->type()->tag(), "types must match");
1931 this->set_state(state);
1932 check_state();
1933 }
1935 // Always deoptimize
1936 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
1937 {
1938 this->set_state(state);
1939 _x = _y = NULL;
1940 check_state();
1941 }
1943 // accessors
1944 Value x() const { return _x; }
1945 Condition cond() const { return _cond; }
1946 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
1947 Value y() const { return _y; }
1949 void always_fail() { _x = _y = NULL; }
1951 // generic
1952 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
1953 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
1954 };
// A two-way conditional branch: compares _x and _y with _cond and transfers
// control to tsux() when the condition holds, fsux() otherwise.
LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci;   // Canonicalizer may alter bci of If node
  bool        _swapped;        // Is the order reversed with respect to the original If in the
                               // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    // successor 0 is the "true" target, successor 1 the "false" target
    // (see sux_for() below)
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                             { return _x; }
  Condition cond() const                      { return _cond; }
  bool unordered_is_true() const              { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                             { return _y; }
  BlockBegin* sux_for(bool is_true) const     { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                    { return sux_for(true); }
  BlockBegin* fsux() const                    { return sux_for(false); }
  BlockBegin* usux() const                    { return sux_for(unordered_is_true()); }
  bool should_profile() const                 { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const           { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                    { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                     { return _swapped; }

  // manipulation
  // Exchange the operands and mirror the condition; the branch semantics
  // are unchanged (x < y  <=>  y > x).
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // Exchange the true/false successors, negate the condition and flip the
  // unordered-is-true flag; the branch semantics are unchanged.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value)         { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)  { _profiled_method = method; }
  void set_profiled_bci(int bci)              { _profiled_bci = bci;       }
  void set_swapped(bool value)                { _swapped = value;          }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};
// Fused InstanceOf + If: branches on whether _obj is (or is not) an
// instance of _klass. Produced by the optimizer, not directly by bytecodes.
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;
  Value    _obj;
  bool     _test_is_instance;                  // jump if instance
  int      _instanceof_bci;

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* an instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                      { return _klass; }
  Value obj() const                           { return _obj; }
  int instanceof_bci() const                  { return _instanceof_bci; }
  bool test_is_instance() const               { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const     { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                    { return sux_for(true); }
  BlockBegin* fsux() const                    { return sux_for(false); }

  // manipulation
  // Exchange the true/false successors and invert the test; the overall
  // branch semantics are unchanged.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); }
};
// Base class for multi-way branches dispatching on an integer tag value.
BASE(Switch, BlockEnd)
 private:
  Value _tag;  // the value being switched on

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                           { return _tag; }
  // number of non-default cases (the last successor is the default)
  int length() const                          { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const  { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
};
// Switch over a dense key range [lo_key(), hi_key()]; one successor per key
// plus the default successor inherited from Switch.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;  // smallest case key; hi_key is derived from length()

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                          { return _lo_key; }
  int hi_key() const                          { return _lo_key + length() - 1; }
};
// Switch over a sparse set of keys: _keys holds exactly one key per
// non-default successor (enforced by the length assertion below).
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;  // case keys, parallel to the non-default successors

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                     { return _keys->at(i); }
};
// Method return. _result is NULL for a void return; otherwise the
// instruction's type is the base type of the returned value.
LEAF(Return, BlockEnd)
 private:
  Value _result;  // returned value, or NULL for void

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                        { return _result; }
  bool has_result() const                     { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};
// Throws _exception; ends the block and can trap (can_trap() is true).
LEAF(Throw, BlockEnd)
 private:
  Value _exception;  // the exception object being thrown

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                     { return _exception; }

  // generic
  virtual bool can_trap() const               { return true; }
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
};
// The synthetic start block's end: branches to the standard entry and,
// when compiling for OSR, also to the OSR entry. The std entry is always
// appended last so it is the default successor.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const               { return default_sux(); }
  // the OSR entry, if present, is successor 0; NULL otherwise
  BlockBegin* osr_entry() const               { return number_of_sux() < 2 ? NULL : sux_at(0); }
};
// Models the on-stack-replacement entry value; typed as a machine word
// (long on LP64, int on 32-bit). Always pinned.
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};
// Models the incoming exception at a catch site (always pinned; produces
// an objectType value).
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) { }
};
// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;  // floating-point value to be rounded

 public:
  // creation
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                         { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); }
};
// Base class for all sun.misc.Unsafe-style operations. A put produces no
// value (voidType); a get produces a value of the given basic type.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;  // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                      { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { }
};
// Base class for unsafe operations addressed by a raw (Java long) base
// address plus an optional index scaled by a power of two.
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;        // Base address (a Java long)
  Value _index;       // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;  // Scale factor: 0, 1, 2, or 3.
                      // Indicates log2 of number of bytes (1, 2, 4, or 8)
                      // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                { return _base; }
  Value index()                               { return _index; }
  bool  has_index()                           { return (_index != NULL); }
  int   log2_scale()                          { return _log2_scale; }

  // setters (used by the optimizer when it recognizes base+index patterns)
  void set_base (Value base)                  { _base = base; }
  void set_index(Value index)                 { _index = index; }
  void set_log2_scale(int log2_scale)         { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
                                                  f->visit(&_base);
                                                  if (has_index()) f->visit(&_index); }
};
2316 LEAF(UnsafeGetRaw, UnsafeRawOp)
2317 private:
2318 bool _may_be_unaligned, _is_wide; // For OSREntry
2320 public:
2321 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
2322 : UnsafeRawOp(basic_type, addr, false) {
2323 _may_be_unaligned = may_be_unaligned;
2324 _is_wide = is_wide;
2325 }
2327 UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
2328 : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
2329 _may_be_unaligned = may_be_unaligned;
2330 _is_wide = is_wide;
2331 }
2333 bool may_be_unaligned() { return _may_be_unaligned; }
2334 bool is_wide() { return _is_wide; }
2335 };
// Unsafe store of _value to a raw address (optionally base + scaled index).
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;  // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                               { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f);
                                                  f->visit(&_value); }
};
// Base class for unsafe operations addressed by an object plus an offset
// within that object.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;       // Object to be fetched from or mutated
  Value _offset;       // Offset within object
  bool  _is_volatile;  // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                              { return _object; }
  Value offset()                              { return _offset; }
  bool  is_volatile()                         { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
                                                  f->visit(&_object);
                                                  f->visit(&_offset); }
};
// Unsafe load of the field at (object, offset).
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};
// Unsafe store of _value to the field at (object, offset).
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;  // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                               { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
                                                  f->visit(&_value); }
};
// Unsafe atomic read-modify-write of the field at (object, offset):
// get-and-set, or get-and-add when _is_add is true. Produces a value
// (constructed with is_put == false).
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;   // Value to be stored
  bool  _is_add;  // true: add _value; false: set _value
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool  is_add() const                        { return _is_add; }
  Value value()                               { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
                                                  f->visit(&_value); }
};
// Base class for prefetch hints at (object, offset); produces no value
// (T_VOID) and never represents a volatile access.
BASE(UnsafePrefetch, UnsafeObjectOp)
 public:
  UnsafePrefetch(Value object, Value offset)
  : UnsafeObjectOp(T_VOID, object, offset, false, false)
  {
  }
};
// Prefetch hint for a subsequent read at (object, offset).
LEAF(UnsafePrefetchRead, UnsafePrefetch)
 public:
  UnsafePrefetchRead(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Prefetch hint for a subsequent write at (object, offset).
LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Records profile information (receiver type and, optionally, object
// argument types) for the call site at _bci_of_invoke in _method.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;         // method containing the call site
  int              _bci_of_invoke;  // bci of the invoke in _method
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;           // receiver, or NULL if none is profiled
  ciKlass*         _known_holder;
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
  : Instruction(voidType)
  , _method(method)
  , _bci_of_invoke(bci)
  , _callee(callee)
  , _recv(recv)
  , _known_holder(known_holder)
  , _obj_args(obj_args)
  , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method() const                    { return _method; }
  int bci_of_invoke() const                   { return _bci_of_invoke; }
  ciMethod* callee() const                    { return _callee; }
  Value recv() const                          { return _recv; }
  ciKlass* known_holder() const               { return _known_holder; }
  // number of profiled object arguments (0 when none are recorded)
  int nb_profiled_args() const                { return _obj_args == NULL ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i) const          { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined() const                        { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != NULL) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};
2521 // Call some C runtime function that doesn't safepoint,
2522 // optionally passing the current thread as the first argument.
2523 LEAF(RuntimeCall, Instruction)
2524 private:
2525 const char* _entry_name;
2526 address _entry;
2527 Values* _args;
2528 bool _pass_thread; // Pass the JavaThread* as an implicit first argument
2530 public:
2531 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
2532 : Instruction(type)
2533 , _entry(entry)
2534 , _args(args)
2535 , _entry_name(entry_name)
2536 , _pass_thread(pass_thread) {
2537 ASSERT_VALUES
2538 pin();
2539 }
2541 const char* entry_name() const { return _entry_name; }
2542 address entry() const { return _entry; }
2543 int number_of_arguments() const { return _args->length(); }
2544 Value argument_at(int i) const { return _args->at(i); }
2545 bool pass_thread() const { return _pass_thread; }
2547 virtual void input_values_do(ValueVisitor* f) {
2548 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
2549 }
2550 };
// Use to trip invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;  // the inlined method whose counter is updated
  ValueStack* _state;    // state at the inlined invocation

 public:
  ProfileInvoke(ciMethod* inlinee, ValueStack* state)
  : Instruction(voidType)
  , _inlinee(inlinee)
  , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located QQQ???
    pin();
  }

  // accessors
  ciMethod* inlinee()                         { return _inlinee; }
  ValueStack* state()                         { return _state; }
  // generic
  virtual void input_values_do(ValueVisitor*)   {}
  virtual void state_values_do(ValueVisitor*);  // defined out of line
};
// Explicit memory barrier; _code selects the LIR barrier variant to emit.
// Pinned so it stays exactly where it was placed.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;  // which barrier to emit

 public:
  MemBar(LIR_Code code)
  : Instruction(voidType)
  , _code(code)
  {
    pin();
  }

  // accessors
  LIR_Code code()                             { return _code; }

  // generic
  virtual void input_values_do(ValueVisitor*)   {}
};
2592 class BlockPair: public CompilationResourceObj {
2593 private:
2594 BlockBegin* _from;
2595 BlockBegin* _to;
2596 public:
2597 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
2598 BlockBegin* from() const { return _from; }
2599 BlockBegin* to() const { return _to; }
2600 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
2601 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); }
2602 void set_to(BlockBegin* b) { _to = b; }
2603 void set_from(BlockBegin* b) { _from = b; }
2604 };
// Collection types holding BlockPair* elements, generated by the
// define_array / define_stack macros.
define_array(BlockPairArray, BlockPair*)
define_stack(BlockPairList, BlockPairArray)
// Inline BlockBegin successor accessors. Each one asserts that, once the
// block's BlockEnd is set, its successor list agrees with _successors,
// keeping the two representations consistent.
inline int         BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const              { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); }
2615 #undef ASSERT_VALUES
2617 #endif // SHARE_VM_C1_C1_INSTRUCTION_HPP