Sat, 01 Sep 2012 13:25:18 -0400
6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
1 /*
2 * Copyright (c) 1999, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP
26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_LIR.hpp"
30 #include "c1/c1_ValueType.hpp"
31 #include "ci/ciField.hpp"
33 // Predefined classes
34 class ciField;
35 class ValueStack;
36 class InstructionPrinter;
37 class IRScope;
38 class LIR_OprDesc;
39 typedef LIR_OprDesc* LIR_Opr;
42 // Instruction class hierarchy
43 //
44 // All leaf classes in the class hierarchy are concrete classes
45 // (i.e., are instantiated). All other classes are abstract and
// serve to factor out functionality shared by their subclasses.
48 class Instruction;
49 class Phi;
50 class Local;
51 class Constant;
52 class AccessField;
53 class LoadField;
54 class StoreField;
55 class AccessArray;
56 class ArrayLength;
57 class AccessIndexed;
58 class LoadIndexed;
59 class StoreIndexed;
60 class NegateOp;
61 class Op2;
62 class ArithmeticOp;
63 class ShiftOp;
64 class LogicOp;
65 class CompareOp;
66 class IfOp;
67 class Convert;
68 class NullCheck;
69 class TypeCast;
70 class OsrEntry;
71 class ExceptionObject;
72 class StateSplit;
73 class Invoke;
74 class NewInstance;
75 class NewArray;
76 class NewTypeArray;
77 class NewObjectArray;
78 class NewMultiArray;
79 class TypeCheck;
80 class CheckCast;
81 class InstanceOf;
82 class AccessMonitor;
83 class MonitorEnter;
84 class MonitorExit;
85 class Intrinsic;
86 class BlockBegin;
87 class BlockEnd;
88 class Goto;
89 class If;
90 class IfInstanceOf;
91 class Switch;
92 class TableSwitch;
93 class LookupSwitch;
94 class Return;
95 class Throw;
96 class Base;
97 class RoundFP;
98 class UnsafeOp;
99 class UnsafeRawOp;
100 class UnsafeGetRaw;
101 class UnsafePutRaw;
102 class UnsafeObjectOp;
103 class UnsafeGetObject;
104 class UnsafePutObject;
105 class UnsafePrefetch;
106 class UnsafePrefetchRead;
107 class UnsafePrefetchWrite;
108 class ProfileCall;
109 class ProfileInvoke;
110 class RuntimeCall;
111 class MemBar;
113 // A Value is a reference to the instruction creating the value
114 typedef Instruction* Value;
115 define_array(ValueArray, Value)
116 define_stack(Values, ValueArray)
118 define_array(ValueStackArray, ValueStack*)
119 define_stack(ValueStackStack, ValueStackArray)
// BlockClosure is the base class for block traversal/iteration.
class BlockClosure: public CompilationResourceObj {
 public:
  // Invoked once per basic block by the traversal driver (see BlockList).
  virtual void block_do(BlockBegin* block) = 0;
};
// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // Receives a pointer to the Value slot so the visitor may replace it.
  virtual void visit(Value* v) = 0;
};
// Some array and list classes
define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// A growable list of basic blocks with traversal helpers used by the
// CFG passes; iteration order is list order (or its reverse).
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  void iterate_forward(BlockClosure* closure);
  void iterate_backward(BlockClosure* closure);
  void blocks_do(void f(BlockBegin*));
  void values_do(ValueVisitor* f);
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};
// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Note: abstract base classes (AccessField, Op2, ...) have no do_X
// entry; dispatch happens only on concrete (LEAF) classes.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
};
// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

// Combine up to four values into one hash; each additional value is
// mixed in by shifting the accumulated hash and xor-ing.
#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.
// Note: the f arguments are evaluated via member access on both "this" and
//       the candidate, so they must be accessor expressions (e.g. "obj()").

#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \

282 // The mother of all instructions...
284 class Instruction: public CompilationResourceObj {
285 private:
286 int _id; // the unique instruction id
287 #ifndef PRODUCT
288 int _printable_bci; // the bci of the instruction for printing
289 #endif
290 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1
291 int _pin_state; // set of PinReason describing the reason for pinning
292 ValueType* _type; // the instruction value type
293 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions)
294 Instruction* _subst; // the substitution instruction if any
295 LIR_Opr _operand; // LIR specific information
296 unsigned int _flags; // Flag bits
298 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
299 ValueStack* _exception_state; // Copy of state for exception handling
300 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
302 friend class UseCountComputer;
303 friend class BlockBegin;
305 void update_exception_state(ValueStack* state);
307 //protected:
308 public:
309 void set_type(ValueType* type) {
310 assert(type != NULL, "type must exist");
311 _type = type;
312 }
314 public:
315 void* operator new(size_t size) {
316 Compilation* c = Compilation::current();
317 void* res = c->arena()->Amalloc(size);
318 ((Instruction*)res)->_id = c->get_next_id();
319 return res;
320 }
322 static const int no_bci = -99;
324 enum InstructionFlag {
325 NeedsNullCheckFlag = 0,
326 CanTrapFlag,
327 DirectCompareFlag,
328 IsEliminatedFlag,
329 IsSafepointFlag,
330 IsStaticFlag,
331 IsStrictfpFlag,
332 NeedsStoreCheckFlag,
333 NeedsWriteBarrierFlag,
334 PreservesStateFlag,
335 TargetIsFinalFlag,
336 TargetIsLoadedFlag,
337 TargetIsStrictfpFlag,
338 UnorderedIsTrueFlag,
339 NeedsPatchingFlag,
340 ThrowIncompatibleClassChangeErrorFlag,
341 ProfileMDOFlag,
342 IsLinkedInBlockFlag,
343 InstructionLastFlag
344 };
346 public:
347 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; }
348 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); };
350 // 'globally' used condition values
351 enum Condition {
352 eql, neq, lss, leq, gtr, geq
353 };
355 // Instructions may be pinned for many reasons and under certain conditions
356 // with enough knowledge it's possible to safely unpin them.
357 enum PinReason {
358 PinUnknown = 1 << 0
359 , PinExplicitNullCheck = 1 << 3
360 , PinStackForStateSplit= 1 << 12
361 , PinStateSplitConstructor= 1 << 13
362 , PinGlobalValueNumbering= 1 << 14
363 };
365 static Condition mirror(Condition cond);
366 static Condition negate(Condition cond);
368 // initialization
369 static int number_of_instructions() {
370 return Compilation::current()->number_of_instructions();
371 }
373 // creation
374 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false)
375 : _use_count(0)
376 #ifndef PRODUCT
377 , _printable_bci(-99)
378 #endif
379 , _pin_state(0)
380 , _type(type)
381 , _next(NULL)
382 , _subst(NULL)
383 , _flags(0)
384 , _operand(LIR_OprFact::illegalOpr)
385 , _state_before(state_before)
386 , _exception_handlers(NULL)
387 {
388 check_state(state_before);
389 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist");
390 update_exception_state(_state_before);
391 }
393 // accessors
394 int id() const { return _id; }
395 #ifndef PRODUCT
396 bool has_printable_bci() const { return _printable_bci != -99; }
397 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
398 void set_printable_bci(int bci) { _printable_bci = bci; }
399 #endif
400 int use_count() const { return _use_count; }
401 int pin_state() const { return _pin_state; }
402 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
403 ValueType* type() const { return _type; }
404 Instruction* prev(BlockBegin* block); // use carefully, expensive operation
405 Instruction* next() const { return _next; }
406 bool has_subst() const { return _subst != NULL; }
407 Instruction* subst() { return _subst == NULL ? this : _subst->subst(); }
408 LIR_Opr operand() const { return _operand; }
410 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
411 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
412 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
413 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; }
415 bool has_uses() const { return use_count() > 0; }
416 ValueStack* state_before() const { return _state_before; }
417 ValueStack* exception_state() const { return _exception_state; }
418 virtual bool needs_exception_state() const { return true; }
419 XHandlers* exception_handlers() const { return _exception_handlers; }
421 // manipulation
422 void pin(PinReason reason) { _pin_state |= reason; }
423 void pin() { _pin_state |= PinUnknown; }
424 // DANGEROUS: only used by EliminateStores
425 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }
427 Instruction* set_next(Instruction* next) {
428 assert(next->has_printable_bci(), "_printable_bci should have been set");
429 assert(next != NULL, "must not be NULL");
430 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
431 assert(next->can_be_linked(), "shouldn't link these instructions into list");
433 next->set_flag(Instruction::IsLinkedInBlockFlag, true);
434 _next = next;
435 return next;
436 }
438 Instruction* set_next(Instruction* next, int bci) {
439 #ifndef PRODUCT
440 next->set_printable_bci(bci);
441 #endif
442 return set_next(next);
443 }
445 void set_subst(Instruction* subst) {
446 assert(subst == NULL ||
447 type()->base() == subst->type()->base() ||
448 subst->type()->base() == illegalType, "type can't change");
449 _subst = subst;
450 }
451 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; }
452 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
454 // machine-specifics
455 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
456 void clear_operand() { _operand = LIR_OprFact::illegalOpr; }
458 // generic
459 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
460 virtual Phi* as_Phi() { return NULL; }
461 virtual Local* as_Local() { return NULL; }
462 virtual Constant* as_Constant() { return NULL; }
463 virtual AccessField* as_AccessField() { return NULL; }
464 virtual LoadField* as_LoadField() { return NULL; }
465 virtual StoreField* as_StoreField() { return NULL; }
466 virtual AccessArray* as_AccessArray() { return NULL; }
467 virtual ArrayLength* as_ArrayLength() { return NULL; }
468 virtual AccessIndexed* as_AccessIndexed() { return NULL; }
469 virtual LoadIndexed* as_LoadIndexed() { return NULL; }
470 virtual StoreIndexed* as_StoreIndexed() { return NULL; }
471 virtual NegateOp* as_NegateOp() { return NULL; }
472 virtual Op2* as_Op2() { return NULL; }
473 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; }
474 virtual ShiftOp* as_ShiftOp() { return NULL; }
475 virtual LogicOp* as_LogicOp() { return NULL; }
476 virtual CompareOp* as_CompareOp() { return NULL; }
477 virtual IfOp* as_IfOp() { return NULL; }
478 virtual Convert* as_Convert() { return NULL; }
479 virtual NullCheck* as_NullCheck() { return NULL; }
480 virtual OsrEntry* as_OsrEntry() { return NULL; }
481 virtual StateSplit* as_StateSplit() { return NULL; }
482 virtual Invoke* as_Invoke() { return NULL; }
483 virtual NewInstance* as_NewInstance() { return NULL; }
484 virtual NewArray* as_NewArray() { return NULL; }
485 virtual NewTypeArray* as_NewTypeArray() { return NULL; }
486 virtual NewObjectArray* as_NewObjectArray() { return NULL; }
487 virtual NewMultiArray* as_NewMultiArray() { return NULL; }
488 virtual TypeCheck* as_TypeCheck() { return NULL; }
489 virtual CheckCast* as_CheckCast() { return NULL; }
490 virtual InstanceOf* as_InstanceOf() { return NULL; }
491 virtual TypeCast* as_TypeCast() { return NULL; }
492 virtual AccessMonitor* as_AccessMonitor() { return NULL; }
493 virtual MonitorEnter* as_MonitorEnter() { return NULL; }
494 virtual MonitorExit* as_MonitorExit() { return NULL; }
495 virtual Intrinsic* as_Intrinsic() { return NULL; }
496 virtual BlockBegin* as_BlockBegin() { return NULL; }
497 virtual BlockEnd* as_BlockEnd() { return NULL; }
498 virtual Goto* as_Goto() { return NULL; }
499 virtual If* as_If() { return NULL; }
500 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; }
501 virtual TableSwitch* as_TableSwitch() { return NULL; }
502 virtual LookupSwitch* as_LookupSwitch() { return NULL; }
503 virtual Return* as_Return() { return NULL; }
504 virtual Throw* as_Throw() { return NULL; }
505 virtual Base* as_Base() { return NULL; }
506 virtual RoundFP* as_RoundFP() { return NULL; }
507 virtual ExceptionObject* as_ExceptionObject() { return NULL; }
508 virtual UnsafeOp* as_UnsafeOp() { return NULL; }
509 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; }
511 virtual void visit(InstructionVisitor* v) = 0;
513 virtual bool can_trap() const { return false; }
515 virtual void input_values_do(ValueVisitor* f) = 0;
516 virtual void state_values_do(ValueVisitor* f);
517 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
518 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
520 virtual ciType* exact_type() const { return NULL; }
521 virtual ciType* declared_type() const { return NULL; }
523 // hashing
524 virtual const char* name() const = 0;
525 HASHING1(Instruction, false, id()) // hashing disabled by default
527 // debugging
528 static void check_state(ValueStack* state) PRODUCT_RETURN;
529 void print() PRODUCT_RETURN;
530 void print_line() PRODUCT_RETURN;
531 void print(InstructionPrinter& ip) PRODUCT_RETURN;
532 };
// The following macros are used to define base (i.e., non-leaf)
// and leaf instruction classes. They define class-name related
// generic functionality in one place.

// BASE opens the class and supplies the as_X() downcast; the class
// body that follows must be closed with "};" by the user.
#define BASE(class_name, super_class_name)       \
  class class_name: public super_class_name {    \
   public:                                       \
    virtual class_name* as_##class_name()        { return this; }              \


// LEAF additionally wires up name() and visitor dispatch for
// concrete (instantiated) instruction classes.
#define LEAF(class_name, super_class_name)       \
  BASE(class_name, super_class_name)             \
   public:                                       \
    virtual const char* name() const             { return #class_name; }       \
    virtual void visit(InstructionVisitor* v)    { v->do_##class_name(this); } \


// Debugging support

#ifdef ASSERT
// Asserts that every value slot visited is non-NULL.
class AssertValues: public ValueVisitor {
  void visit(Value* x)             { assert((*x) != NULL, "value must exist"); }
};
  #define ASSERT_VALUES                          { AssertValues assert_value; values_do(&assert_value); }
#else
  #define ASSERT_VALUES
#endif // ASSERT
565 // A Phi is a phi function in the sense of SSA form. It stands for
566 // the value of a local variable at the beginning of a join block.
567 // A Phi consists of n operands, one for every incoming branch.
569 LEAF(Phi, Instruction)
570 private:
571 BlockBegin* _block; // the block to which the phi function belongs
572 int _pf_flags; // the flags of the phi function
573 int _index; // to value on operand stack (index < 0) or to local
574 public:
575 // creation
576 Phi(ValueType* type, BlockBegin* b, int index)
577 : Instruction(type->base())
578 , _pf_flags(0)
579 , _block(b)
580 , _index(index)
581 {
582 NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
583 if (type->is_illegal()) {
584 make_illegal();
585 }
586 }
588 // flags
589 enum Flag {
590 no_flag = 0,
591 visited = 1 << 0,
592 cannot_simplify = 1 << 1
593 };
595 // accessors
596 bool is_local() const { return _index >= 0; }
597 bool is_on_stack() const { return !is_local(); }
598 int local_index() const { assert(is_local(), ""); return _index; }
599 int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); }
601 Value operand_at(int i) const;
602 int operand_count() const;
604 BlockBegin* block() const { return _block; }
606 void set(Flag f) { _pf_flags |= f; }
607 void clear(Flag f) { _pf_flags &= ~f; }
608 bool is_set(Flag f) const { return (_pf_flags & f) != 0; }
610 // Invalidates phis corresponding to merges of locals of two different types
611 // (these should never be referenced, otherwise the bytecodes are illegal)
612 void make_illegal() {
613 set(cannot_simplify);
614 set_type(illegalType);
615 }
617 bool is_illegal() const {
618 return type()->is_illegal();
619 }
621 // generic
622 virtual void input_values_do(ValueVisitor* f) {
623 }
624 };
// A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  ciType*  _declared_type;
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    // locals have no real bytecode of their own
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }

  virtual ciType* declared_type() const          { return _declared_type; }
  virtual ciType* exact_type() const;

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};
653 LEAF(Constant, Instruction)
654 public:
655 // creation
656 Constant(ValueType* type):
657 Instruction(type, NULL, /*type_is_constant*/ true)
658 {
659 assert(type->is_constant(), "must be a constant");
660 }
662 Constant(ValueType* type, ValueStack* state_before):
663 Instruction(type, state_before, /*type_is_constant*/ true)
664 {
665 assert(state_before != NULL, "only used for constants which need patching");
666 assert(type->is_constant(), "must be a constant");
667 // since it's patching it needs to be pinned
668 pin();
669 }
671 virtual bool can_trap() const { return state_before() != NULL; }
672 virtual void input_values_do(ValueVisitor* f) { /* no values */ }
674 virtual intx hash() const;
675 virtual bool is_equal(Value v) const;
677 virtual ciType* exact_type() const;
679 enum CompareResult { not_comparable = -1, cond_false, cond_true };
681 virtual CompareResult compare(Instruction::Condition condition, Value right) const;
682 BlockBegin* compare(Instruction::Condition cond, Value right,
683 BlockBegin* true_sux, BlockBegin* false_sux) const {
684 switch (compare(cond, right)) {
685 case not_comparable:
686 return NULL;
687 case cond_false:
688 return false_sux;
689 case cond_true:
690 return true_sux;
691 default:
692 ShouldNotReachHere();
693 return NULL;
694 }
695 }
696 };
// Abstract base of LoadField/StoreField: holds the object reference,
// the field offset and the ciField meta-data.
BASE(AccessField, Instruction)
 private:
  Value       _obj;                              // the object whose field is accessed
  int         _offset;
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    // instance accesses need a null check on the receiver; static ones don't
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};
// Reads a field; value-numberable unless it needs patching or the
// field is volatile.
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;
  ciType* exact_type() const;

  // generic
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};
// Writes a field; object stores additionally need a GC write barrier.
LEAF(StoreField, AccessField)
 private:
  Value _value;

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};
// Abstract base of all array accesses; holds the array reference and
// always requires a null check on it.
BASE(AccessArray, Instruction)
 private:
  Value       _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};
// Reads the length of an array; result type is int.
LEAF(ArrayLength, AccessArray)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};
// Abstract base of LoadIndexed/StoreIndexed: array access at a given
// index, with an optional length value used for range checking.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;                             // may be NULL (then no range check is emitted here)
  BasicType _elt_type;

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};
// Reads an array element; value-numberable on (array, index).
LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};
// Writes an array element; object stores need a write barrier and an
// array store check. Carries optional MDO profiling info.
LEAF(StoreIndexed, AccessIndexed)
 private:
  Value       _value;

  ciMethod* _profiled_method;
  int       _profiled_bci;
 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0)
  {
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};
// Arithmetic negation of a single value (-x); result type equals x's base type.
LEAF(NegateOp, Instruction)
 private:
  Value _x;                                      // the operand being negated

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};
// Base class for all two-operand instructions (arithmetic, shift,
// logical, compare and conditional-move operations).
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;                           // the bytecode of the operation
  Value           _x;                            // the first operand
  Value           _y;                            // the second operand

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  // Exchanges the two operands; legal only for commutative operations.
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
// Binary arithmetic operation (add, sub, mul, div, rem, ...).
// Result type is the meet of the operand types; pinned if it can trap
// (e.g. integer division/remainder may throw ArithmeticException).
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool        is_strictfp() const                { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  // Value-numbering over opcode and both operand substitutes.
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// Shift operation (shl, shr, ushr); result type is the base type of the
// shifted value x, s is the shift amount.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// Bitwise logical operation (and, or, xor); result type is the meet of
// the operand types. Commutativity is decided per opcode (see .cpp).
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// Three-way comparison (lcmp, fcmpl/fcmpg, dcmpl/dcmpg); always produces
// an int result (-1, 0, or 1).
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};
// Conditional move: result = (x cond y) ? tval : fval. The condition is
// smuggled through Op2's bytecode slot as a Condition value, hence the
// cond()/op() accessor pair below.
LEAF(IfOp, Op2)
 private:
  Value _tval;                                   // value if the condition is true
  Value _fval;                                   // value if the condition is false

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  // Hides (non-virtually) Op2::op(): the op slot holds a Condition here,
  // so callers must use cond() instead.
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};
// Primitive type conversion (i2l, f2d, i2b, ...); result has the target type.
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;                           // the conversion bytecode
  Value           _value;                        // the value being converted

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};
// Explicit null check of an object reference; throws NullPointerException
// if the object is NULL. Null-check elimination may later clear the
// CanTrapFlag when the check is proven redundant.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;                                    // the object reference being checked

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};
// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;                        // the more precise declared type
  Value   _obj;                                  // the value whose type is being refined

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                  { return _declared_type; }
  Value   obj() const                            { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};
// Base class for all instructions that carry a full ValueStack state
// (deoptimization/debug info point). StateSplit instructions are pinned
// by construction; the state is attached exactly once via set_state().
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;                            // the state after this instruction (set once)

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  // The state may only be set once; overwriting is a bug.
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};
// Method invocation (invokevirtual, invokespecial, invokestatic,
// invokeinterface, invokedynamic). Carries the receiver (if any), the
// argument list, the resolved target and the JVMS state for deopt.
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;                         // the invoke bytecode
  Value           _recv;                         // the receiver, NULL for static calls
  Values*         _args;                         // the argument values (excluding receiver)
  BasicTypeList*  _signature;                    // basic types of the arguments
  int             _vtable_index;                 // vtable index for virtual dispatch
  ciMethod*       _target;                       // the resolved target method

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};
// Allocation of a new (non-array) instance of the given klass; can trap
// (e.g. OutOfMemoryError, class initialization).
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;                       // the klass being instantiated

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
  : StateSplit(instanceType, state_before)
  , _klass(klass)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};
// Base class for all array allocations; holds the length value
// (NULL for NewMultiArray, whose dimensions live in its own list).
BASE(NewArray, StateSplit)
 private:
  Value _length;                                 // the array length; NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};
// Allocation of a one-dimensional array of a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;                           // the primitive element type

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};
// Allocation of a one-dimensional array of object references.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;                               // the element klass

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};
// Allocation of a multi-dimensional array (multianewarray); the per-
// dimension lengths are kept in _dims and NewArray's length stays NULL.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;                               // the array klass
  Values*  _dims;                                // the length value for each dimension

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};
// Base class for dynamic type tests (CheckCast and InstanceOf); holds the
// klass to test against, the tested object, and MethodData* profiling info.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*    _klass;                            // the klass to test against; NULL if not loaded
  Value       _obj;                              // the object being tested

  ciMethod*   _profiled_method;                  // method for MethodData* profiling (may stay NULL)
  int         _profiled_bci;                     // bci for MethodData* profiling

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  // direct_compare: the check can be done with a single klass-pointer compare.
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)                { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)         { _profiled_method = method;   }
  void set_profiled_bci(int bci)                     { _profiled_bci = bci;         }
  bool      should_profile() const                   { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const                  { return _profiled_method;     }
  int       profiled_bci() const                     { return _profiled_bci;        }
};
// checkcast bytecode: throws ClassCastException if obj is not an instance
// of klass. May be repurposed to throw IncompatibleClassChangeError instead
// (see set_incompatible_class_change_check).
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
  ciType* exact_type() const;
};
// instanceof bytecode: produces an int (0 or 1); never throws, hence
// no exception state is needed.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};
// Base class for monitorenter/monitorexit; holds the locked object and the
// monitor slot number in the frame.
BASE(AccessMonitor, StateSplit)
 private:
  Value       _obj;                              // the object being locked/unlocked
  int         _monitor_no;                       // the monitor slot number

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};
// monitorenter bytecode: acquires the monitor; can trap (null receiver).
LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const                  { return true; }
};
// monitorexit bytecode: releases the monitor; no state_before is recorded.
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};
// Compiler intrinsic: an inline expansion of a well-known method,
// identified by its vmIntrinsics::ID.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;                          // which intrinsic this is
  Values*          _args;                        // all arguments (receiver included, if any)
  Value            _recv;                        // the receiver (== argument 0), or NULL
  int              _nonnull_state;               // mask identifying which args are nonnull

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  , _nonnull_state(AllBits)                      // initially every argument still needs a null check
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag,        cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap()) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                   { return check_flag(PreservesStateFlag); }

  // Returns whether argument i still needs a null check; bit i set in
  // _nonnull_state means the check is required. Out-of-range indices
  // conservatively report true.
  bool arg_needs_null_check(int i) {
    if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
      return is_set_nth_bit(_nonnull_state, i);
    }
    return true;
  }

  void set_arg_needs_null_check(int i, bool check) {
    if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
      if (check) {
        _nonnull_state |= nth_bit(i);
      } else {
        _nonnull_state &= ~(nth_bit(i));
      }
    }
  }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};
1517 class LIR_List;
1519 LEAF(BlockBegin, StateSplit)
1520 private:
1521 int _block_id; // the unique block id
1522 int _bci; // start-bci of block
1523 int _depth_first_number; // number of this block in a depth-first ordering
1524 int _linear_scan_number; // number of this block in linear-scan ordering
1525 int _loop_depth; // the loop nesting level of this block
1526 int _loop_index; // number of the innermost loop of this block
1527 int _flags; // the flags associated with this block
1529 // fields used by BlockListBuilder
1530 int _total_preds; // number of predecessors found by BlockListBuilder
1531 BitMap _stores_to_locals; // bit is set when a local variable is stored in the block
1533 // SSA specific fields: (factor out later)
1534 BlockList _successors; // the successors of this block
1535 BlockList _predecessors; // the predecessors of this block
1536 BlockBegin* _dominator; // the dominator of this block
1537 // SSA specific ends
1538 BlockEnd* _end; // the last instruction of this block
1539 BlockList _exception_handlers; // the exception handlers potentially invoked by this block
1540 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler
1541 int _exception_handler_pco; // if this block is the start of an exception handler,
1542 // this records the PC offset in the assembly code of the
1543 // first instruction in this block
1544 Label _label; // the label associated with this block
1545 LIR_List* _lir; // the low level intermediate representation for this block
1547 BitMap _live_in; // set of live LIR_Opr registers at entry to this block
1548 BitMap _live_out; // set of live LIR_Opr registers at exit from this block
1549 BitMap _live_gen; // set of registers used before any redefinition in this block
1550 BitMap _live_kill; // set of registers defined in this block
1552 BitMap _fpu_register_usage;
1553 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan
1554 int _first_lir_instruction_id; // ID of first LIR instruction in this block
1555 int _last_lir_instruction_id; // ID of last LIR instruction in this block
1557 void iterate_preorder (boolArray& mark, BlockClosure* closure);
1558 void iterate_postorder(boolArray& mark, BlockClosure* closure);
1560 friend class SuxAndWeightAdjuster;
1562 public:
1563 void* operator new(size_t size) {
1564 Compilation* c = Compilation::current();
1565 void* res = c->arena()->Amalloc(size);
1566 ((BlockBegin*)res)->_id = c->get_next_id();
1567 ((BlockBegin*)res)->_block_id = c->get_next_block_id();
1568 return res;
1569 }
1571 // initialization/counting
1572 static int number_of_blocks() {
1573 return Compilation::current()->number_of_blocks();
1574 }
1576 // creation
1577 BlockBegin(int bci)
1578 : StateSplit(illegalType)
1579 , _bci(bci)
1580 , _depth_first_number(-1)
1581 , _linear_scan_number(-1)
1582 , _loop_depth(0)
1583 , _flags(0)
1584 , _dominator(NULL)
1585 , _end(NULL)
1586 , _predecessors(2)
1587 , _successors(2)
1588 , _exception_handlers(1)
1589 , _exception_states(NULL)
1590 , _exception_handler_pco(-1)
1591 , _lir(NULL)
1592 , _loop_index(-1)
1593 , _live_in()
1594 , _live_out()
1595 , _live_gen()
1596 , _live_kill()
1597 , _fpu_register_usage()
1598 , _fpu_stack_state(NULL)
1599 , _first_lir_instruction_id(-1)
1600 , _last_lir_instruction_id(-1)
1601 , _total_preds(0)
1602 , _stores_to_locals()
1603 {
1604 #ifndef PRODUCT
1605 set_printable_bci(bci);
1606 #endif
1607 }
1609 // accessors
1610 int block_id() const { return _block_id; }
1611 int bci() const { return _bci; }
1612 BlockList* successors() { return &_successors; }
1613 BlockBegin* dominator() const { return _dominator; }
1614 int loop_depth() const { return _loop_depth; }
1615 int depth_first_number() const { return _depth_first_number; }
1616 int linear_scan_number() const { return _linear_scan_number; }
1617 BlockEnd* end() const { return _end; }
1618 Label* label() { return &_label; }
1619 LIR_List* lir() const { return _lir; }
1620 int exception_handler_pco() const { return _exception_handler_pco; }
1621 BitMap& live_in() { return _live_in; }
1622 BitMap& live_out() { return _live_out; }
1623 BitMap& live_gen() { return _live_gen; }
1624 BitMap& live_kill() { return _live_kill; }
1625 BitMap& fpu_register_usage() { return _fpu_register_usage; }
1626 intArray* fpu_stack_state() const { return _fpu_stack_state; }
1627 int first_lir_instruction_id() const { return _first_lir_instruction_id; }
1628 int last_lir_instruction_id() const { return _last_lir_instruction_id; }
1629 int total_preds() const { return _total_preds; }
1630 BitMap& stores_to_locals() { return _stores_to_locals; }
1632 // manipulation
1633 void set_dominator(BlockBegin* dom) { _dominator = dom; }
1634 void set_loop_depth(int d) { _loop_depth = d; }
1635 void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
1636 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; }
1637 void set_end(BlockEnd* end);
1638 void clear_end();
1639 void disconnect_from_graph();
1640 static void disconnect_edge(BlockBegin* from, BlockBegin* to);
1641 BlockBegin* insert_block_between(BlockBegin* sux);
1642 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
1643 void set_lir(LIR_List* lir) { _lir = lir; }
1644 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
1645 void set_live_in (BitMap map) { _live_in = map; }
1646 void set_live_out (BitMap map) { _live_out = map; }
1647 void set_live_gen (BitMap map) { _live_gen = map; }
1648 void set_live_kill (BitMap map) { _live_kill = map; }
1649 void set_fpu_register_usage(BitMap map) { _fpu_register_usage = map; }
1650 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; }
1651 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; }
1652 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; }
1653 void increment_total_preds(int n = 1) { _total_preds += n; }
1654 void init_stores_to_locals(int locals_count) { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); }
1656 // generic
1657 virtual void state_values_do(ValueVisitor* f);
1659 // successors and predecessors
1660 int number_of_sux() const;
1661 BlockBegin* sux_at(int i) const;
1662 void add_successor(BlockBegin* sux);
1663 void remove_successor(BlockBegin* pred);
1664 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); }
1666 void add_predecessor(BlockBegin* pred);
1667 void remove_predecessor(BlockBegin* pred);
1668 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
1669 int number_of_preds() const { return _predecessors.length(); }
1670 BlockBegin* pred_at(int i) const { return _predecessors[i]; }
1672 // exception handlers potentially invoked by this block
1673 void add_exception_handler(BlockBegin* b);
1674 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
1675 int number_of_exception_handlers() const { return _exception_handlers.length(); }
1676 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); }
1678 // states of the instructions that have an edge to this exception handler
1679 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
1680 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
1681 int add_exception_state(ValueStack* state);
1683 // flags
1684 enum Flag {
1685 no_flag = 0,
1686 std_entry_flag = 1 << 0,
1687 osr_entry_flag = 1 << 1,
1688 exception_entry_flag = 1 << 2,
1689 subroutine_entry_flag = 1 << 3,
1690 backward_branch_target_flag = 1 << 4,
1691 is_on_work_list_flag = 1 << 5,
1692 was_visited_flag = 1 << 6,
1693 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand
1694 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split
1695 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan
1696 linear_scan_loop_end_flag = 1 << 10 // set during loop-detection for LinearScan
1697 };
1699 void set(Flag f) { _flags |= f; }
1700 void clear(Flag f) { _flags &= ~f; }
1701 bool is_set(Flag f) const { return (_flags & f) != 0; }
1702 bool is_entry_block() const {
1703 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
1704 return (_flags & entry_mask) != 0;
1705 }
1707 // iteration
1708 void iterate_preorder (BlockClosure* closure);
1709 void iterate_postorder (BlockClosure* closure);
1711 void block_values_do(ValueVisitor* f);
1713 // loops
1714 void set_loop_index(int ix) { _loop_index = ix; }
1715 int loop_index() const { return _loop_index; }
1717 // merging
1718 bool try_merge(ValueStack* state); // try to merge states at block begin
1719 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); }
1721 // debugging
1722 void print_block() PRODUCT_RETURN;
1723 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
1724 };
// Base class for the last instruction of a basic block (goto, if, switch,
// return, throw); owns the successor list and the back-pointer to the
// block's BlockBegin.
BASE(BlockEnd, StateSplit)
 private:
  BlockBegin* _begin;                            // the BlockBegin this instruction terminates
  BlockList*  _sux;                              // the successor blocks; never NULL once set

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _begin(NULL)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  BlockBegin* begin() const                      { return _begin; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const                { return _sux->at(i); }
  // By convention the default successor is the last entry in the list.
  BlockBegin* default_sux() const                { return sux_at(number_of_sux() - 1); }
  BlockBegin** addr_sux_at(int i) const          { return _sux->adr_at(i); }
  int sux_index(BlockBegin* sux) const           { return _sux->find(sux); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};
1770 LEAF(Goto, BlockEnd)
1771 public:
1772 enum Direction {
1773 none, // Just a regular goto
1774 taken, not_taken // Goto produced from If
1775 };
1776 private:
1777 ciMethod* _profiled_method;
1778 int _profiled_bci;
1779 Direction _direction;
1780 public:
1781 // creation
1782 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
1783 : BlockEnd(illegalType, state_before, is_safepoint)
1784 , _direction(none)
1785 , _profiled_method(NULL)
1786 , _profiled_bci(0) {
1787 BlockList* s = new BlockList(1);
1788 s->append(sux);
1789 set_sux(s);
1790 }
1792 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint)
1793 , _direction(none)
1794 , _profiled_method(NULL)
1795 , _profiled_bci(0) {
1796 BlockList* s = new BlockList(1);
1797 s->append(sux);
1798 set_sux(s);
1799 }
1801 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1802 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
1803 int profiled_bci() const { return _profiled_bci; }
1804 Direction direction() const { return _direction; }
1806 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
1807 void set_profiled_method(ciMethod* method) { _profiled_method = method; }
1808 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1809 void set_direction(Direction d) { _direction = d; }
1810 };
// Conditional branch: compares x and y with _cond and jumps to the true
// successor (index 0) or the false successor (index 1).
LEAF(If, BlockEnd)
 private:
  Value       _x;                                // the first compare operand
  Condition   _cond;                             // the compare condition
  Value       _y;                                // the second compare operand
  ciMethod*   _profiled_method;                  // method for MethodData* profiling (may stay NULL)
  int         _profiled_bci;                     // Canonicalizer may alter bci of If node
  bool        _swapped;                          // Is the order reversed with respect to the original If in the
                                                 // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }
  BlockBegin* usux() const                       { return sux_for(unordered_is_true()); }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                        { return _swapped; }

  // manipulation
  // Exchanges the operands and mirrors the condition; the branch targets stay.
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // Exchanges the true/false successors and negates the condition; also
  // flips the unordered-is-true flag so fp-compare semantics are preserved.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value)             { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)      { _profiled_method = method; }
  void set_profiled_bci(int bci)                  { _profiled_bci = bci;       }
  void set_swapped(bool value)                    { _swapped = value;         }
  // generic
  virtual void input_values_do(ValueVisitor* f)   { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};
// Fused InstanceOf + If: branches on whether obj is (or is not) an
// instance of klass. See Note 1/2 below for the exact semantics.
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;                               // the klass tested against
  Value    _obj;                                 // the object being tested
  bool     _test_is_instance;                    // jump if instance
  int      _instanceof_bci;                      // bci of the original instanceof bytecode

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* and instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  int instanceof_bci() const                     { return _instanceof_bci; }
  bool test_is_instance() const                  { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }

  // manipulation
  // Exchanges the successors and inverts the test so overall behavior
  // is unchanged.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_obj); }
};
// Abstract base class of TableSwitch and LookupSwitch. The successor
// list holds one block per case plus the default successor (stored
// last, see BlockEnd::default_sux()).
BASE(Switch, BlockEnd)
 private:
  Value _tag;                                    // the value being dispatched on

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                              { return _tag; }
  // number of non-default cases (the successor list also contains the default)
  int length() const                             { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_tag); }
};
// Switch over the dense key range [lo_key(), hi_key()]; the i-th
// successor handles key lo_key() + i.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;                                   // smallest case key; keys are consecutive from here

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + length() - 1; }
};
// Switch over a sparse set of keys; key_at(i) is handled by the i-th
// successor, the last successor is the default.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;                               // one key per non-default successor; never NULL

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};
// Method return. The instruction's type is voidType when there is no
// result, otherwise the base type of the result value.
LEAF(Return, BlockEnd)
 private:
  Value _result;                                 // NULL for a void return

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    // void returns have no result input to visit
    if (has_result()) f->visit(&_result);
  }
};
// Throws an exception; ends its block and can trap.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;                              // the exception object being thrown

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};
// Ends the artificial entry block of a method; its successors are the
// standard entry and, for OSR compilations, the OSR entry. The std
// entry is appended last so that it becomes the default successor.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  // the OSR entry, when present, sits at index 0; NULL otherwise
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? NULL : sux_at(0); }
};
// Models the OSR entry value; typed longType on LP64 and intType
// otherwise. Always pinned.
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType) { pin(); }
#endif

  // generic
  // no Value inputs
  virtual void input_values_do(ValueVisitor* f) { }
};
// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    // always pinned
    pin();
  }

  // generic
  // no Value inputs
  virtual void input_values_do(ValueVisitor* f) { }
};
// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;                                  // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};
// Abstract base class of the Unsafe* instructions. Carries the
// BasicType of the accessed memory; puts have voidType, gets the
// ValueType corresponding to that BasicType.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;                         // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  // no Value inputs at this level; subclasses add theirs
  virtual void input_values_do(ValueVisitor* f)  { }
};
// Unsafe access to raw memory: address = base + (index << log2_scale),
// where base is a Java long and index is optional (NULL if absent).
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;                                   // Base address (a Java long)
  Value _index;                                  // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;                             // Scale factor: 0, 1, 2, or 3.
                                                 // Indicates log2 of number of bytes (1, 2, 4, or 8)
                                                 // to scale index by.

 protected:
  // address-only form: no index, scale 0
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  // base + scaled-index form
  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool  has_index()                              { return (_index != NULL); }
  int   log2_scale()                             { return _log2_scale; }

  // setters
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_base);
                                                   if (has_index()) f->visit(&_index); }
};
2173 LEAF(UnsafeGetRaw, UnsafeRawOp)
2174 private:
2175 bool _may_be_unaligned, _is_wide; // For OSREntry
2177 public:
2178 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
2179 : UnsafeRawOp(basic_type, addr, false) {
2180 _may_be_unaligned = may_be_unaligned;
2181 _is_wide = is_wide;
2182 }
2184 UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
2185 : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
2186 _may_be_unaligned = may_be_unaligned;
2187 _is_wide = is_wide;
2188 }
2190 bool may_be_unaligned() { return _may_be_unaligned; }
2191 bool is_wide() { return _is_wide; }
2192 };
// Raw-memory store; adds the value to be stored as an input.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;                                  // Value to be stored

 public:
  // address-only form
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // base + scaled-index form
  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeRawOp::input_values_do(f);
                                                   f->visit(&_value); }
};
// Unsafe access addressed by an object reference plus an offset.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;                                 // Object to be fetched from or mutated
  Value _offset;                                 // Offset within object
  bool  _is_volatile;                            // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_object);
                                                   f->visit(&_offset); }
};
// Unsafe load using object + offset addressing.
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};
// Unsafe store using object + offset addressing; adds the stored value
// as an input.
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};
// Abstract base class of the prefetch hints: no value is transferred
// (basic type T_VOID, not a put, not volatile).
BASE(UnsafePrefetch, UnsafeObjectOp)
 public:
  UnsafePrefetch(Value object, Value offset)
  : UnsafeObjectOp(T_VOID, object, offset, false, false)
  {
  }
};
// Prefetch hint for an expected read.
LEAF(UnsafePrefetchRead, UnsafePrefetch)
 public:
  UnsafePrefetchRead(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Prefetch hint for an expected write.
LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Records profile information at a call site.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod* _method;                             // method containing the profiled call site
  int       _bci_of_invoke;                      // bci of the invoke within _method
  ciMethod* _callee;                             // the method that is called at the given bci
  Value     _recv;                               // receiver value; may be NULL
  ciKlass*  _known_holder;

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder)
  : Instruction(voidType)
  , _method(method)
  , _bci_of_invoke(bci)
  , _callee(callee)
  , _recv(recv)
  , _known_holder(known_holder)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method()             { return _method; }
  int bci_of_invoke()            { return _bci_of_invoke; }
  ciMethod* callee()             { return _callee; }
  Value recv()                   { return _recv; }
  ciKlass* known_holder()        { return _known_holder; }

  // the receiver (when present) is the only Value input; the rest is metadata
  virtual void input_values_do(ValueVisitor* f)   { if (_recv != NULL) f->visit(&_recv); }
};
2336 // Call some C runtime function that doesn't safepoint,
2337 // optionally passing the current thread as the first argument.
2338 LEAF(RuntimeCall, Instruction)
2339 private:
2340 const char* _entry_name;
2341 address _entry;
2342 Values* _args;
2343 bool _pass_thread; // Pass the JavaThread* as an implicit first argument
2345 public:
2346 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
2347 : Instruction(type)
2348 , _entry(entry)
2349 , _args(args)
2350 , _entry_name(entry_name)
2351 , _pass_thread(pass_thread) {
2352 ASSERT_VALUES
2353 pin();
2354 }
2356 const char* entry_name() const { return _entry_name; }
2357 address entry() const { return _entry; }
2358 int number_of_arguments() const { return _args->length(); }
2359 Value argument_at(int i) const { return _args->at(i); }
2360 bool pass_thread() const { return _pass_thread; }
2362 virtual void input_values_do(ValueVisitor* f) {
2363 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
2364 }
2365 };
// Use to trip invocation counter of an inlined method
LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;                          // the inlined method being counted
  ValueStack* _state;                            // state at the inlined invoke

 public:
  ProfileInvoke(ciMethod* inlinee, ValueStack* state)
  : Instruction(voidType)
  , _inlinee(inlinee)
  , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located QQQ???
    pin();
  }

  // accessors
  ciMethod* inlinee()      { return _inlinee; }
  ValueStack* state()      { return _state; }
  // no direct Value inputs
  virtual void input_values_do(ValueVisitor*)   {}
  // defined out-of-line; presumably walks the values in _state -- see .cpp
  virtual void state_values_do(ValueVisitor*);
};
// Models an explicit memory barrier; _code identifies the barrier kind.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;                                // the LIR opcode selecting the barrier kind

 public:
  MemBar(LIR_Code code)
  : Instruction(voidType)
  , _code(code)
  {
    // always pinned: a barrier must not be moved
    pin();
  }

  // accessors
  LIR_Code code()          { return _code; }

  // no Value inputs
  virtual void input_values_do(ValueVisitor*)   {}
};
2407 class BlockPair: public CompilationResourceObj {
2408 private:
2409 BlockBegin* _from;
2410 BlockBegin* _to;
2411 public:
2412 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
2413 BlockBegin* from() const { return _from; }
2414 BlockBegin* to() const { return _to; }
2415 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
2416 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); }
2417 void set_to(BlockBegin* b) { _to = b; }
2418 void set_from(BlockBegin* b) { _from = b; }
2419 };
// growable array and stack types holding CFG edges (BlockPair*)
define_array(BlockPairArray, BlockPair*)
define_stack(BlockPairList, BlockPairArray)
// Once a block's BlockEnd is set, the BlockBegin's successor list must
// mirror the BlockEnd's successors; these accessors assert that the two
// views agree (and that no successor is added after the end is set).
inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); }
inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); }
2430 #undef ASSERT_VALUES
2432 #endif // SHARE_VM_C1_C1_INSTRUCTION_HPP