1 /* 2 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
47 48 class Instruction; 49 class Phi; 50 class Local; 51 class Constant; 52 class AccessField; 53 class LoadField; 54 class StoreField; 55 class AccessArray; 56 class ArrayLength; 57 class AccessIndexed; 58 class LoadIndexed; 59 class StoreIndexed; 60 class NegateOp; 61 class Op2; 62 class ArithmeticOp; 63 class ShiftOp; 64 class LogicOp; 65 class CompareOp; 66 class IfOp; 67 class Convert; 68 class NullCheck; 69 class TypeCast; 70 class OsrEntry; 71 class ExceptionObject; 72 class StateSplit; 73 class Invoke; 74 class NewInstance; 75 class NewArray; 76 class NewTypeArray; 77 class NewObjectArray; 78 class NewMultiArray; 79 class TypeCheck; 80 class CheckCast; 81 class InstanceOf; 82 class AccessMonitor; 83 class MonitorEnter; 84 class MonitorExit; 85 class Intrinsic; 86 class BlockBegin; 87 class BlockEnd; 88 class Goto; 89 class If; 90 class IfInstanceOf; 91 class Switch; 92 class TableSwitch; 93 class LookupSwitch; 94 class Return; 95 class Throw; 96 class Base; 97 class RoundFP; 98 class UnsafeOp; 99 class UnsafeRawOp; 100 class UnsafeGetRaw; 101 class UnsafePutRaw; 102 class UnsafeObjectOp; 103 class UnsafeGetObject; 104 class UnsafePutObject; 105 class UnsafeGetAndSetObject; 106 class UnsafePrefetch; 107 class UnsafePrefetchRead; 108 class UnsafePrefetchWrite; 109 class ProfileCall; 110 class ProfileInvoke; 111 class RuntimeCall; 112 class MemBar; 113 class RangeCheckPredicate; 114 #ifdef ASSERT 115 class Assert; 116 #endif 117 118 // A Value is a reference to the instruction creating the value 119 typedef Instruction* Value; 120 define_array(ValueArray, Value) 121 define_stack(Values, ValueArray) 122 123 define_array(ValueStackArray, ValueStack*) 124 define_stack(ValueStackStack, ValueStackArray) 125 126 // BlockClosure is the base class for block traversal/iteration. 
// Base class for closures applied to every block of a control-flow graph.
class BlockClosure: public CompilationResourceObj {
 public:
  // Called once per block during traversal; concrete closures override this.
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // Receives a pointer to the Value slot so the visitor may also substitute it.
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// Growable list of basic blocks with traversal helpers.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure from first to last block
  void iterate_backward(BlockClosure* closure);  // apply closure from last to first block
  void blocks_do(void f(BlockBegin*));           // apply a plain function to every block
  void values_do(ValueVisitor* f);               // visit all values of all blocks
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
class InstructionVisitor: public StackObj {
 public:
  // One do_X per concrete (leaf) Instruction class; see the LEAF macro,
  // whose generated visit() dispatches here.
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// HASHING1: value numbering keyed on one field (plus the instruction name).
#define HASHING1(class_name, enabled, f1)                                  \
  virtual intx hash() const {                                              \
    return (enabled) ? HASH2(name(), f1) : 0;                              \
  }                                                                        \
  virtual bool is_equal(Value v) const {                                   \
    if (!(enabled)  ) return false;                                        \
    class_name* _v = v->as_##class_name();                                 \
    if (_v == NULL  ) return false;                                        \
    if (f1 != _v->f1) return false;                                        \
    return true;                                                           \
  }                                                                        \


// HASHING2: value numbering keyed on two fields.
#define HASHING2(class_name, enabled, f1, f2)                              \
  virtual intx hash() const {                                              \
    return (enabled) ? HASH3(name(), f1, f2) : 0;                          \
  }                                                                        \
  virtual bool is_equal(Value v) const {                                   \
    if (!(enabled)  ) return false;                                        \
    class_name* _v = v->as_##class_name();                                 \
    if (_v == NULL  ) return false;                                        \
    if (f1 != _v->f1) return false;                                        \
    if (f2 != _v->f2) return false;                                        \
    return true;                                                           \
  }                                                                        \


// HASHING3: value numbering keyed on three fields.
#define HASHING3(class_name, enabled, f1, f2, f3)                          \
  virtual intx hash() const {                                              \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0;                      \
  }                                                                        \
  virtual bool is_equal(Value v) const {                                   \
    if (!(enabled)  ) return false;                                        \
    class_name* _v = v->as_##class_name();                                 \
    if (_v == NULL  ) return false;                                        \
    if (f1 != _v->f1) return false;                                        \
    if (f2 != _v->f2) return false;                                        \
    if (f3 != _v->f3) return false;                                        \
    return true;                                                           \
  }                                                                        \


// The mother of all instructions...
293 294 class Instruction: public CompilationResourceObj { 295 private: 296 int _id; // the unique instruction id 297 #ifndef PRODUCT 298 int _printable_bci; // the bci of the instruction for printing 299 #endif 300 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 301 int _pin_state; // set of PinReason describing the reason for pinning 302 ValueType* _type; // the instruction value type 303 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 304 Instruction* _subst; // the substitution instruction if any 305 LIR_Opr _operand; // LIR specific information 306 unsigned int _flags; // Flag bits 307 308 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 309 ValueStack* _exception_state; // Copy of state for exception handling 310 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 311 312 friend class UseCountComputer; 313 friend class BlockBegin; 314 315 void update_exception_state(ValueStack* state); 316 317 protected: 318 BlockBegin* _block; // Block that contains this instruction 319 320 void set_type(ValueType* type) { 321 assert(type != NULL, "type must exist"); 322 _type = type; 323 } 324 325 public: 326 void* operator new(size_t size) throw() { 327 Compilation* c = Compilation::current(); 328 void* res = c->arena()->Amalloc(size); 329 ((Instruction*)res)->_id = c->get_next_id(); 330 return res; 331 } 332 333 static const int no_bci = -99; 334 335 enum InstructionFlag { 336 NeedsNullCheckFlag = 0, 337 CanTrapFlag, 338 DirectCompareFlag, 339 IsEliminatedFlag, 340 IsSafepointFlag, 341 IsStaticFlag, 342 IsStrictfpFlag, 343 NeedsStoreCheckFlag, 344 NeedsWriteBarrierFlag, 345 PreservesStateFlag, 346 TargetIsFinalFlag, 347 TargetIsLoadedFlag, 348 TargetIsStrictfpFlag, 349 UnorderedIsTrueFlag, 350 NeedsPatchingFlag, 351 ThrowIncompatibleClassChangeErrorFlag, 352 ProfileMDOFlag, 353 
IsLinkedInBlockFlag, 354 NeedsRangeCheckFlag, 355 InWorkListFlag, 356 DeoptimizeOnException, 357 InstructionLastFlag 358 }; 359 360 public: 361 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 362 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 363 364 // 'globally' used condition values 365 enum Condition { 366 eql, neq, lss, leq, gtr, geq, aeq, beq 367 }; 368 369 // Instructions may be pinned for many reasons and under certain conditions 370 // with enough knowledge it's possible to safely unpin them. 371 enum PinReason { 372 PinUnknown = 1 << 0 373 , PinExplicitNullCheck = 1 << 3 374 , PinStackForStateSplit= 1 << 12 375 , PinStateSplitConstructor= 1 << 13 376 , PinGlobalValueNumbering= 1 << 14 377 }; 378 379 static Condition mirror(Condition cond); 380 static Condition negate(Condition cond); 381 382 // initialization 383 static int number_of_instructions() { 384 return Compilation::current()->number_of_instructions(); 385 } 386 387 // creation 388 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 389 : _use_count(0) 390 #ifndef PRODUCT 391 , _printable_bci(-99) 392 #endif 393 , _pin_state(0) 394 , _type(type) 395 , _next(NULL) 396 , _block(NULL) 397 , _subst(NULL) 398 , _flags(0) 399 , _operand(LIR_OprFact::illegalOpr) 400 , _state_before(state_before) 401 , _exception_handlers(NULL) 402 { 403 check_state(state_before); 404 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 405 update_exception_state(_state_before); 406 } 407 408 // accessors 409 int id() const { return _id; } 410 #ifndef PRODUCT 411 bool has_printable_bci() const { return _printable_bci != -99; } 412 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 413 void set_printable_bci(int bci) { _printable_bci = bci; } 414 #endif 415 int dominator_depth(); 416 int 
use_count() const { return _use_count; } 417 int pin_state() const { return _pin_state; } 418 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 419 ValueType* type() const { return _type; } 420 BlockBegin *block() const { return _block; } 421 Instruction* prev(); // use carefully, expensive operation 422 Instruction* next() const { return _next; } 423 bool has_subst() const { return _subst != NULL; } 424 Instruction* subst() { return _subst == NULL ? this : _subst->subst(); } 425 LIR_Opr operand() const { return _operand; } 426 427 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 428 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 429 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 430 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 431 432 bool has_uses() const { return use_count() > 0; } 433 ValueStack* state_before() const { return _state_before; } 434 ValueStack* exception_state() const { return _exception_state; } 435 virtual bool needs_exception_state() const { return true; } 436 XHandlers* exception_handlers() const { return _exception_handlers; } 437 438 // manipulation 439 void pin(PinReason reason) { _pin_state |= reason; } 440 void pin() { _pin_state |= PinUnknown; } 441 // DANGEROUS: only used by EliminateStores 442 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 443 444 Instruction* set_next(Instruction* next) { 445 assert(next->has_printable_bci(), "_printable_bci should have been set"); 446 assert(next != NULL, "must not be NULL"); 447 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 448 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 449 450 BlockBegin *block = this->block(); 451 next->_block = block; 452 453 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 454 _next = next; 455 return next; 456 } 457 458 
Instruction* set_next(Instruction* next, int bci) { 459 #ifndef PRODUCT 460 next->set_printable_bci(bci); 461 #endif 462 return set_next(next); 463 } 464 465 // when blocks are merged 466 void fixup_block_pointers() { 467 Instruction *cur = next()->next(); // next()'s block is set in set_next 468 while (cur && cur->_block != block()) { 469 cur->_block = block(); 470 cur = cur->next(); 471 } 472 } 473 474 Instruction *insert_after(Instruction *i) { 475 Instruction* n = _next; 476 set_next(i); 477 i->set_next(n); 478 return _next; 479 } 480 481 Instruction *insert_after_same_bci(Instruction *i) { 482 #ifndef PRODUCT 483 i->set_printable_bci(printable_bci()); 484 #endif 485 return insert_after(i); 486 } 487 488 void set_subst(Instruction* subst) { 489 assert(subst == NULL || 490 type()->base() == subst->type()->base() || 491 subst->type()->base() == illegalType, "type can't change"); 492 _subst = subst; 493 } 494 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 495 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 496 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 497 498 // machine-specifics 499 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 500 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 501 502 // generic 503 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 504 virtual Phi* as_Phi() { return NULL; } 505 virtual Local* as_Local() { return NULL; } 506 virtual Constant* as_Constant() { return NULL; } 507 virtual AccessField* as_AccessField() { return NULL; } 508 virtual LoadField* as_LoadField() { return NULL; } 509 virtual StoreField* as_StoreField() { return NULL; } 510 virtual AccessArray* as_AccessArray() { return NULL; } 511 virtual ArrayLength* as_ArrayLength() { return NULL; } 512 virtual AccessIndexed* as_AccessIndexed() { return NULL; 
} 513 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 514 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 515 virtual NegateOp* as_NegateOp() { return NULL; } 516 virtual Op2* as_Op2() { return NULL; } 517 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 518 virtual ShiftOp* as_ShiftOp() { return NULL; } 519 virtual LogicOp* as_LogicOp() { return NULL; } 520 virtual CompareOp* as_CompareOp() { return NULL; } 521 virtual IfOp* as_IfOp() { return NULL; } 522 virtual Convert* as_Convert() { return NULL; } 523 virtual NullCheck* as_NullCheck() { return NULL; } 524 virtual OsrEntry* as_OsrEntry() { return NULL; } 525 virtual StateSplit* as_StateSplit() { return NULL; } 526 virtual Invoke* as_Invoke() { return NULL; } 527 virtual NewInstance* as_NewInstance() { return NULL; } 528 virtual NewArray* as_NewArray() { return NULL; } 529 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 530 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 531 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 532 virtual TypeCheck* as_TypeCheck() { return NULL; } 533 virtual CheckCast* as_CheckCast() { return NULL; } 534 virtual InstanceOf* as_InstanceOf() { return NULL; } 535 virtual TypeCast* as_TypeCast() { return NULL; } 536 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 537 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 538 virtual MonitorExit* as_MonitorExit() { return NULL; } 539 virtual Intrinsic* as_Intrinsic() { return NULL; } 540 virtual BlockBegin* as_BlockBegin() { return NULL; } 541 virtual BlockEnd* as_BlockEnd() { return NULL; } 542 virtual Goto* as_Goto() { return NULL; } 543 virtual If* as_If() { return NULL; } 544 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 545 virtual TableSwitch* as_TableSwitch() { return NULL; } 546 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 547 virtual Return* as_Return() { return NULL; } 548 virtual Throw* as_Throw() { return NULL; } 549 virtual Base* 
as_Base() { return NULL; } 550 virtual RoundFP* as_RoundFP() { return NULL; } 551 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 552 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 553 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 554 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 555 556 #ifdef ASSERT 557 virtual Assert* as_Assert() { return NULL; } 558 #endif 559 560 virtual void visit(InstructionVisitor* v) = 0; 561 562 virtual bool can_trap() const { return false; } 563 564 virtual void input_values_do(ValueVisitor* f) = 0; 565 virtual void state_values_do(ValueVisitor* f); 566 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 567 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 568 569 virtual ciType* exact_type() const { return NULL; } 570 virtual ciType* declared_type() const { return NULL; } 571 572 // hashing 573 virtual const char* name() const = 0; 574 HASHING1(Instruction, false, id()) // hashing disabled by default 575 576 // debugging 577 static void check_state(ValueStack* state) PRODUCT_RETURN; 578 void print() PRODUCT_RETURN; 579 void print_line() PRODUCT_RETURN; 580 void print(InstructionPrinter& ip) PRODUCT_RETURN; 581 }; 582 583 584 // The following macros are used to define base (i.e., non-leaf) 585 // and leaf instruction classes. They define class-name related 586 // generic functionality in one place. 
// BASE: declares a class plus its as_X() identity cast (non-leaf classes).
#define BASE(class_name, super_class_name)       \
  class class_name: public super_class_name {    \
   public:                                       \
    virtual class_name* as_##class_name()        { return this; }              \


// LEAF: a concrete instruction class; supplies name() for hashing/printing
// and visit() for InstructionVisitor double dispatch.
#define LEAF(class_name, super_class_name)       \
  BASE(class_name, super_class_name)             \
   public:                                       \
    virtual const char* name() const             { return #class_name; }       \
    virtual void visit(InstructionVisitor* v)    { v->do_##class_name(this); } \


// Debugging support


#ifdef ASSERT
// Visitor asserting that every operand slot holds a non-NULL Value.
class AssertValues: public ValueVisitor {
  void visit(Value* x)             { assert((*x) != NULL, "value must exist"); }
};
  #define ASSERT_VALUES                          { AssertValues assert_value; values_do(&assert_value); }
#else
  #define ASSERT_VALUES
#endif // ASSERT


// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int         _pf_flags; // the flags of the phi function
  int         _index;    // to value on operand stack (index < 0) or to local
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  {
    _block = b;
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  // _index >= 0 refers to a local; _index < 0 encodes operand-stack slot -(index+1).
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;  // operand for the i-th incoming branch
  int   operand_count() const;

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // No direct inputs here; phi operands are fetched via operand_at()
  // (presumably from the predecessors' states - see operand_at's definition).
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  ciType*  _declared_type;
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }

  virtual ciType* declared_type() const          { return _declared_type; }
  virtual ciType* exact_type() const;

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};


// A constant value; may carry a state for constants that need patching.
LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, NULL, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  Constant(ValueType* type, ValueStack* state_before):
    Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != NULL, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    // since it's patching it needs to be pinned
    pin();
  }

  // generic
  virtual bool can_trap() const                  { return state_before() != NULL; }
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  // Constant-folds 'this condition right'; not_comparable if undecidable.
  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  // Returns the successor selected by the folded comparison, or NULL if
  // the comparison cannot be decided at compile time.
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return NULL;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }
};


// Common base of field loads and stores (object + offset + ciField).
BASE(AccessField, Instruction)
 private:
  Value       _obj;
  int         _offset;
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;
  ciType* exact_type() const;

  // generic
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};


LEAF(StoreField, AccessField)
 private:
  Value _value;

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    // object stores need a GC write barrier
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};
// Common base of all array accesses (receiver array + implicit null check).
BASE(AccessArray, Instruction)
 private:
  Value       _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};


LEAF(ArrayLength, AccessArray)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


// Common base of indexed array accesses (array + index + optional length
// for range checking).
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;
  BasicType _elt_type;

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  void clear_length()                            { _length = NULL; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  // _length may have been cleared by clear_length(), hence the NULL guard.
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};


LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};


LEAF(StoreIndexed, AccessIndexed)
 private:
  Value       _value;

  ciMethod* _profiled_method;                    // method the store profiles into (MDO)
  int       _profiled_bci;
 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0)
  {
    // object element stores need a GC write barrier and a dynamic store check
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int       profiled_bci() const                 { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Unary arithmetic negation of a value.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};


// Common base of all two-operand instructions (bytecode op + x + y).
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};


LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    // trapping ops (e.g. division) must not be eliminated/moved freely
    if (can_trap()) pin();
  }

  // accessors
  bool        is_strictfp() const                { return check_flag(IsStrictfpFlag); }

  //
generic 1043 virtual bool is_commutative() const; 1044 virtual bool can_trap() const; 1045 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1046 }; 1047 1048 1049 LEAF(ShiftOp, Op2) 1050 public: 1051 // creation 1052 ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {} 1053 1054 // generic 1055 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1056 }; 1057 1058 1059 LEAF(LogicOp, Op2) 1060 public: 1061 // creation 1062 LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {} 1063 1064 // generic 1065 virtual bool is_commutative() const; 1066 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1067 }; 1068 1069 1070 LEAF(CompareOp, Op2) 1071 public: 1072 // creation 1073 CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before) 1074 : Op2(intType, op, x, y, state_before) 1075 {} 1076 1077 // generic 1078 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1079 }; 1080 1081 1082 LEAF(IfOp, Op2) 1083 private: 1084 Value _tval; 1085 Value _fval; 1086 1087 public: 1088 // creation 1089 IfOp(Value x, Condition cond, Value y, Value tval, Value fval) 1090 : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y) 1091 , _tval(tval) 1092 , _fval(fval) 1093 { 1094 ASSERT_VALUES 1095 assert(tval->type()->tag() == fval->type()->tag(), "types must match"); 1096 } 1097 1098 // accessors 1099 virtual bool is_commutative() const; 1100 Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; } 1101 Condition cond() const { return (Condition)Op2::op(); } 1102 Value tval() const { return _tval; } 1103 Value fval() const { return _fval; } 1104 1105 // generic 1106 virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); } 1107 }; 1108 1109 1110 LEAF(Convert, Instruction) 1111 private: 1112 Bytecodes::Code _op; 1113 Value _value; 1114 1115 public: 1116 // creation 1117 Convert(Bytecodes::Code op, Value value, 
ValueType* to_type) : Instruction(to_type), _op(op), _value(value) { 1118 ASSERT_VALUES 1119 } 1120 1121 // accessors 1122 Bytecodes::Code op() const { return _op; } 1123 Value value() const { return _value; } 1124 1125 // generic 1126 virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); } 1127 HASHING2(Convert, true, op(), value()->subst()) 1128 }; 1129 1130 1131 LEAF(NullCheck, Instruction) 1132 private: 1133 Value _obj; 1134 1135 public: 1136 // creation 1137 NullCheck(Value obj, ValueStack* state_before) 1138 : Instruction(obj->type()->base(), state_before) 1139 , _obj(obj) 1140 { 1141 ASSERT_VALUES 1142 set_can_trap(true); 1143 assert(_obj->type()->is_object(), "null check must be applied to objects only"); 1144 pin(Instruction::PinExplicitNullCheck); 1145 } 1146 1147 // accessors 1148 Value obj() const { return _obj; } 1149 1150 // setters 1151 void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); } 1152 1153 // generic 1154 virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ } 1155 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1156 HASHING1(NullCheck, true, obj()->subst()) 1157 }; 1158 1159 1160 // This node is supposed to cast the type of another node to a more precise 1161 // declared type. 1162 LEAF(TypeCast, Instruction) 1163 private: 1164 ciType* _declared_type; 1165 Value _obj; 1166 1167 public: 1168 // The type of this node is the same type as the object type (and it might be constant). 
1169 TypeCast(ciType* type, Value obj, ValueStack* state_before) 1170 : Instruction(obj->type(), state_before, obj->type()->is_constant()), 1171 _declared_type(type), 1172 _obj(obj) {} 1173 1174 // accessors 1175 ciType* declared_type() const { return _declared_type; } 1176 Value obj() const { return _obj; } 1177 1178 // generic 1179 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1180 }; 1181 1182 1183 BASE(StateSplit, Instruction) 1184 private: 1185 ValueStack* _state; 1186 1187 protected: 1188 static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block); 1189 1190 public: 1191 // creation 1192 StateSplit(ValueType* type, ValueStack* state_before = NULL) 1193 : Instruction(type, state_before) 1194 , _state(NULL) 1195 { 1196 pin(PinStateSplitConstructor); 1197 } 1198 1199 // accessors 1200 ValueStack* state() const { return _state; } 1201 IRScope* scope() const; // the state's scope 1202 1203 // manipulation 1204 void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; } 1205 1206 // generic 1207 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 1208 virtual void state_values_do(ValueVisitor* f); 1209 }; 1210 1211 1212 LEAF(Invoke, StateSplit) 1213 private: 1214 Bytecodes::Code _code; 1215 Value _recv; 1216 Values* _args; 1217 BasicTypeList* _signature; 1218 int _vtable_index; 1219 ciMethod* _target; 1220 1221 public: 1222 // creation 1223 Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args, 1224 int vtable_index, ciMethod* target, ValueStack* state_before); 1225 1226 // accessors 1227 Bytecodes::Code code() const { return _code; } 1228 Value receiver() const { return _recv; } 1229 bool has_receiver() const { return receiver() != NULL; } 1230 int number_of_arguments() const { return _args->length(); } 1231 Value argument_at(int i) const { return _args->at(i); } 1232 int vtable_index() const { return 
_vtable_index; } 1233 BasicTypeList* signature() const { return _signature; } 1234 ciMethod* target() const { return _target; } 1235 1236 ciType* declared_type() const; 1237 1238 // Returns false if target is not loaded 1239 bool target_is_final() const { return check_flag(TargetIsFinalFlag); } 1240 bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); } 1241 // Returns false if target is not loaded 1242 bool target_is_strictfp() const { return check_flag(TargetIsStrictfpFlag); } 1243 1244 // JSR 292 support 1245 bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; } 1246 bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); } 1247 1248 virtual bool needs_exception_state() const { return false; } 1249 1250 // generic 1251 virtual bool can_trap() const { return true; } 1252 virtual void input_values_do(ValueVisitor* f) { 1253 StateSplit::input_values_do(f); 1254 if (has_receiver()) f->visit(&_recv); 1255 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1256 } 1257 virtual void state_values_do(ValueVisitor *f); 1258 }; 1259 1260 1261 LEAF(NewInstance, StateSplit) 1262 private: 1263 ciInstanceKlass* _klass; 1264 1265 public: 1266 // creation 1267 NewInstance(ciInstanceKlass* klass, ValueStack* state_before) 1268 : StateSplit(instanceType, state_before) 1269 , _klass(klass) 1270 {} 1271 1272 // accessors 1273 ciInstanceKlass* klass() const { return _klass; } 1274 1275 virtual bool needs_exception_state() const { return false; } 1276 1277 // generic 1278 virtual bool can_trap() const { return true; } 1279 ciType* exact_type() const; 1280 ciType* declared_type() const; 1281 }; 1282 1283 1284 BASE(NewArray, StateSplit) 1285 private: 1286 Value _length; 1287 1288 public: 1289 // creation 1290 NewArray(Value length, ValueStack* state_before) 1291 : StateSplit(objectType, state_before) 1292 , _length(length) 1293 { 1294 // Do not ASSERT_VALUES since length is NULL for 
NewMultiArray 1295 } 1296 1297 // accessors 1298 Value length() const { return _length; } 1299 1300 virtual bool needs_exception_state() const { return false; } 1301 1302 ciType* declared_type() const; 1303 1304 // generic 1305 virtual bool can_trap() const { return true; } 1306 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); } 1307 }; 1308 1309 1310 LEAF(NewTypeArray, NewArray) 1311 private: 1312 BasicType _elt_type; 1313 1314 public: 1315 // creation 1316 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before) 1317 : NewArray(length, state_before) 1318 , _elt_type(elt_type) 1319 {} 1320 1321 // accessors 1322 BasicType elt_type() const { return _elt_type; } 1323 ciType* exact_type() const; 1324 }; 1325 1326 1327 LEAF(NewObjectArray, NewArray) 1328 private: 1329 ciKlass* _klass; 1330 1331 public: 1332 // creation 1333 NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {} 1334 1335 // accessors 1336 ciKlass* klass() const { return _klass; } 1337 ciType* exact_type() const; 1338 }; 1339 1340 1341 LEAF(NewMultiArray, NewArray) 1342 private: 1343 ciKlass* _klass; 1344 Values* _dims; 1345 1346 public: 1347 // creation 1348 NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) { 1349 ASSERT_VALUES 1350 } 1351 1352 // accessors 1353 ciKlass* klass() const { return _klass; } 1354 Values* dims() const { return _dims; } 1355 int rank() const { return dims()->length(); } 1356 1357 // generic 1358 virtual void input_values_do(ValueVisitor* f) { 1359 // NOTE: we do not call NewArray::input_values_do since "length" 1360 // is meaningless for a multi-dimensional array; passing the 1361 // zeroth element down to NewArray as its length is a bad idea 1362 // since there will be a copy in the "dims" array which doesn't 1363 // get updated, and the value must not be traversed 
twice. Was bug 1364 // - kbr 4/10/2001 1365 StateSplit::input_values_do(f); 1366 for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i)); 1367 } 1368 }; 1369 1370 1371 BASE(TypeCheck, StateSplit) 1372 private: 1373 ciKlass* _klass; 1374 Value _obj; 1375 1376 ciMethod* _profiled_method; 1377 int _profiled_bci; 1378 1379 public: 1380 // creation 1381 TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before) 1382 : StateSplit(type, state_before), _klass(klass), _obj(obj), 1383 _profiled_method(NULL), _profiled_bci(0) { 1384 ASSERT_VALUES 1385 set_direct_compare(false); 1386 } 1387 1388 // accessors 1389 ciKlass* klass() const { return _klass; } 1390 Value obj() const { return _obj; } 1391 bool is_loaded() const { return klass() != NULL; } 1392 bool direct_compare() const { return check_flag(DirectCompareFlag); } 1393 1394 // manipulation 1395 void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); } 1396 1397 // generic 1398 virtual bool can_trap() const { return true; } 1399 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1400 1401 // Helpers for MethodData* profiling 1402 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1403 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1404 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1405 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1406 ciMethod* profiled_method() const { return _profiled_method; } 1407 int profiled_bci() const { return _profiled_bci; } 1408 }; 1409 1410 1411 LEAF(CheckCast, TypeCheck) 1412 public: 1413 // creation 1414 CheckCast(ciKlass* klass, Value obj, ValueStack* state_before) 1415 : TypeCheck(klass, obj, objectType, state_before) {} 1416 1417 void set_incompatible_class_change_check() { 1418 set_flag(ThrowIncompatibleClassChangeErrorFlag, true); 1419 } 1420 bool is_incompatible_class_change_check() const { 1421 return 
check_flag(ThrowIncompatibleClassChangeErrorFlag); 1422 } 1423 1424 ciType* declared_type() const; 1425 ciType* exact_type() const; 1426 }; 1427 1428 1429 LEAF(InstanceOf, TypeCheck) 1430 public: 1431 // creation 1432 InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {} 1433 1434 virtual bool needs_exception_state() const { return false; } 1435 }; 1436 1437 1438 BASE(AccessMonitor, StateSplit) 1439 private: 1440 Value _obj; 1441 int _monitor_no; 1442 1443 public: 1444 // creation 1445 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL) 1446 : StateSplit(illegalType, state_before) 1447 , _obj(obj) 1448 , _monitor_no(monitor_no) 1449 { 1450 set_needs_null_check(true); 1451 ASSERT_VALUES 1452 } 1453 1454 // accessors 1455 Value obj() const { return _obj; } 1456 int monitor_no() const { return _monitor_no; } 1457 1458 // generic 1459 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1460 }; 1461 1462 1463 LEAF(MonitorEnter, AccessMonitor) 1464 public: 1465 // creation 1466 MonitorEnter(Value obj, int monitor_no, ValueStack* state_before) 1467 : AccessMonitor(obj, monitor_no, state_before) 1468 { 1469 ASSERT_VALUES 1470 } 1471 1472 // generic 1473 virtual bool can_trap() const { return true; } 1474 }; 1475 1476 1477 LEAF(MonitorExit, AccessMonitor) 1478 public: 1479 // creation 1480 MonitorExit(Value obj, int monitor_no) 1481 : AccessMonitor(obj, monitor_no, NULL) 1482 { 1483 ASSERT_VALUES 1484 } 1485 }; 1486 1487 1488 LEAF(Intrinsic, StateSplit) 1489 private: 1490 vmIntrinsics::ID _id; 1491 Values* _args; 1492 Value _recv; 1493 int _nonnull_state; // mask identifying which args are nonnull 1494 1495 public: 1496 // preserves_state can be set to true for Intrinsics 1497 // which are guaranteed to preserve register state across any slow 1498 // cases; setting it to true does not mean that the Intrinsic can 1499 // not trap, only that if we 
continue execution in the same basic 1500 // block after the Intrinsic, all of the registers are intact. This 1501 // allows load elimination and common expression elimination to be 1502 // performed across the Intrinsic. The default value is false. 1503 Intrinsic(ValueType* type, 1504 vmIntrinsics::ID id, 1505 Values* args, 1506 bool has_receiver, 1507 ValueStack* state_before, 1508 bool preserves_state, 1509 bool cantrap = true) 1510 : StateSplit(type, state_before) 1511 , _id(id) 1512 , _args(args) 1513 , _recv(NULL) 1514 , _nonnull_state(AllBits) 1515 { 1516 assert(args != NULL, "args must exist"); 1517 ASSERT_VALUES 1518 set_flag(PreservesStateFlag, preserves_state); 1519 set_flag(CanTrapFlag, cantrap); 1520 if (has_receiver) { 1521 _recv = argument_at(0); 1522 } 1523 set_needs_null_check(has_receiver); 1524 1525 // some intrinsics can't trap, so don't force them to be pinned 1526 if (!can_trap()) { 1527 unpin(PinStateSplitConstructor); 1528 } 1529 } 1530 1531 // accessors 1532 vmIntrinsics::ID id() const { return _id; } 1533 int number_of_arguments() const { return _args->length(); } 1534 Value argument_at(int i) const { return _args->at(i); } 1535 1536 bool has_receiver() const { return (_recv != NULL); } 1537 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } 1538 bool preserves_state() const { return check_flag(PreservesStateFlag); } 1539 1540 bool arg_needs_null_check(int i) { 1541 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 1542 return is_set_nth_bit(_nonnull_state, i); 1543 } 1544 return true; 1545 } 1546 1547 void set_arg_needs_null_check(int i, bool check) { 1548 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 1549 if (check) { 1550 _nonnull_state |= nth_bit(i); 1551 } else { 1552 _nonnull_state &= ~(nth_bit(i)); 1553 } 1554 } 1555 } 1556 1557 // generic 1558 virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1559 virtual void input_values_do(ValueVisitor* f) { 1560 
StateSplit::input_values_do(f); 1561 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1562 } 1563 }; 1564 1565 1566 class LIR_List; 1567 1568 LEAF(BlockBegin, StateSplit) 1569 private: 1570 int _block_id; // the unique block id 1571 int _bci; // start-bci of block 1572 int _depth_first_number; // number of this block in a depth-first ordering 1573 int _linear_scan_number; // number of this block in linear-scan ordering 1574 int _dominator_depth; 1575 int _loop_depth; // the loop nesting level of this block 1576 int _loop_index; // number of the innermost loop of this block 1577 int _flags; // the flags associated with this block 1578 1579 // fields used by BlockListBuilder 1580 int _total_preds; // number of predecessors found by BlockListBuilder 1581 BitMap _stores_to_locals; // bit is set when a local variable is stored in the block 1582 1583 // SSA specific fields: (factor out later) 1584 BlockList _successors; // the successors of this block 1585 BlockList _predecessors; // the predecessors of this block 1586 BlockList _dominates; // list of blocks that are dominated by this block 1587 BlockBegin* _dominator; // the dominator of this block 1588 // SSA specific ends 1589 BlockEnd* _end; // the last instruction of this block 1590 BlockList _exception_handlers; // the exception handlers potentially invoked by this block 1591 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler 1592 int _exception_handler_pco; // if this block is the start of an exception handler, 1593 // this records the PC offset in the assembly code of the 1594 // first instruction in this block 1595 Label _label; // the label associated with this block 1596 LIR_List* _lir; // the low level intermediate representation for this block 1597 1598 BitMap _live_in; // set of live LIR_Opr registers at entry to this block 1599 BitMap _live_out; // set of live LIR_Opr registers at exit from this block 1600 
BitMap _live_gen; // set of registers used before any redefinition in this block 1601 BitMap _live_kill; // set of registers defined in this block 1602 1603 BitMap _fpu_register_usage; 1604 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan 1605 int _first_lir_instruction_id; // ID of first LIR instruction in this block 1606 int _last_lir_instruction_id; // ID of last LIR instruction in this block 1607 1608 void iterate_preorder (boolArray& mark, BlockClosure* closure); 1609 void iterate_postorder(boolArray& mark, BlockClosure* closure); 1610 1611 friend class SuxAndWeightAdjuster; 1612 1613 public: 1614 void* operator new(size_t size) throw() { 1615 Compilation* c = Compilation::current(); 1616 void* res = c->arena()->Amalloc(size); 1617 ((BlockBegin*)res)->_id = c->get_next_id(); 1618 ((BlockBegin*)res)->_block_id = c->get_next_block_id(); 1619 return res; 1620 } 1621 1622 // initialization/counting 1623 static int number_of_blocks() { 1624 return Compilation::current()->number_of_blocks(); 1625 } 1626 1627 // creation 1628 BlockBegin(int bci) 1629 : StateSplit(illegalType) 1630 , _bci(bci) 1631 , _depth_first_number(-1) 1632 , _linear_scan_number(-1) 1633 , _loop_depth(0) 1634 , _flags(0) 1635 , _dominator_depth(-1) 1636 , _dominator(NULL) 1637 , _end(NULL) 1638 , _predecessors(2) 1639 , _successors(2) 1640 , _dominates(2) 1641 , _exception_handlers(1) 1642 , _exception_states(NULL) 1643 , _exception_handler_pco(-1) 1644 , _lir(NULL) 1645 , _loop_index(-1) 1646 , _live_in() 1647 , _live_out() 1648 , _live_gen() 1649 , _live_kill() 1650 , _fpu_register_usage() 1651 , _fpu_stack_state(NULL) 1652 , _first_lir_instruction_id(-1) 1653 , _last_lir_instruction_id(-1) 1654 , _total_preds(0) 1655 , _stores_to_locals() 1656 { 1657 _block = this; 1658 #ifndef PRODUCT 1659 set_printable_bci(bci); 1660 #endif 1661 } 1662 1663 // accessors 1664 int block_id() const { return _block_id; } 1665 int bci() const { return _bci; } 1666 BlockList* 
successors() { return &_successors; } 1667 BlockList* dominates() { return &_dominates; } 1668 BlockBegin* dominator() const { return _dominator; } 1669 int loop_depth() const { return _loop_depth; } 1670 int dominator_depth() const { return _dominator_depth; } 1671 int depth_first_number() const { return _depth_first_number; } 1672 int linear_scan_number() const { return _linear_scan_number; } 1673 BlockEnd* end() const { return _end; } 1674 Label* label() { return &_label; } 1675 LIR_List* lir() const { return _lir; } 1676 int exception_handler_pco() const { return _exception_handler_pco; } 1677 BitMap& live_in() { return _live_in; } 1678 BitMap& live_out() { return _live_out; } 1679 BitMap& live_gen() { return _live_gen; } 1680 BitMap& live_kill() { return _live_kill; } 1681 BitMap& fpu_register_usage() { return _fpu_register_usage; } 1682 intArray* fpu_stack_state() const { return _fpu_stack_state; } 1683 int first_lir_instruction_id() const { return _first_lir_instruction_id; } 1684 int last_lir_instruction_id() const { return _last_lir_instruction_id; } 1685 int total_preds() const { return _total_preds; } 1686 BitMap& stores_to_locals() { return _stores_to_locals; } 1687 1688 // manipulation 1689 void set_dominator(BlockBegin* dom) { _dominator = dom; } 1690 void set_loop_depth(int d) { _loop_depth = d; } 1691 void set_dominator_depth(int d) { _dominator_depth = d; } 1692 void set_depth_first_number(int dfn) { _depth_first_number = dfn; } 1693 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; } 1694 void set_end(BlockEnd* end); 1695 void clear_end(); 1696 void disconnect_from_graph(); 1697 static void disconnect_edge(BlockBegin* from, BlockBegin* to); 1698 BlockBegin* insert_block_between(BlockBegin* sux); 1699 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1700 void set_lir(LIR_List* lir) { _lir = lir; } 1701 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; } 1702 void set_live_in (BitMap map) { 
_live_in = map; } 1703 void set_live_out (BitMap map) { _live_out = map; } 1704 void set_live_gen (BitMap map) { _live_gen = map; } 1705 void set_live_kill (BitMap map) { _live_kill = map; } 1706 void set_fpu_register_usage(BitMap map) { _fpu_register_usage = map; } 1707 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; } 1708 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; } 1709 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; } 1710 void increment_total_preds(int n = 1) { _total_preds += n; } 1711 void init_stores_to_locals(int locals_count) { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); } 1712 1713 // generic 1714 virtual void state_values_do(ValueVisitor* f); 1715 1716 // successors and predecessors 1717 int number_of_sux() const; 1718 BlockBegin* sux_at(int i) const; 1719 void add_successor(BlockBegin* sux); 1720 void remove_successor(BlockBegin* pred); 1721 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); } 1722 1723 void add_predecessor(BlockBegin* pred); 1724 void remove_predecessor(BlockBegin* pred); 1725 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); } 1726 int number_of_preds() const { return _predecessors.length(); } 1727 BlockBegin* pred_at(int i) const { return _predecessors[i]; } 1728 1729 // exception handlers potentially invoked by this block 1730 void add_exception_handler(BlockBegin* b); 1731 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); } 1732 int number_of_exception_handlers() const { return _exception_handlers.length(); } 1733 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); } 1734 1735 // states of the instructions that have an edge to this exception handler 1736 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 
0 : _exception_states->length(); } 1737 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); } 1738 int add_exception_state(ValueStack* state); 1739 1740 // flags 1741 enum Flag { 1742 no_flag = 0, 1743 std_entry_flag = 1 << 0, 1744 osr_entry_flag = 1 << 1, 1745 exception_entry_flag = 1 << 2, 1746 subroutine_entry_flag = 1 << 3, 1747 backward_branch_target_flag = 1 << 4, 1748 is_on_work_list_flag = 1 << 5, 1749 was_visited_flag = 1 << 6, 1750 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand 1751 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split 1752 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan 1753 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan 1754 donot_eliminate_range_checks = 1 << 11 // Should be try to eliminate range checks in this block 1755 }; 1756 1757 void set(Flag f) { _flags |= f; } 1758 void clear(Flag f) { _flags &= ~f; } 1759 bool is_set(Flag f) const { return (_flags & f) != 0; } 1760 bool is_entry_block() const { 1761 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag; 1762 return (_flags & entry_mask) != 0; 1763 } 1764 1765 // iteration 1766 void iterate_preorder (BlockClosure* closure); 1767 void iterate_postorder (BlockClosure* closure); 1768 1769 void block_values_do(ValueVisitor* f); 1770 1771 // loops 1772 void set_loop_index(int ix) { _loop_index = ix; } 1773 int loop_index() const { return _loop_index; } 1774 1775 // merging 1776 bool try_merge(ValueStack* state); // try to merge states at block begin 1777 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); } 1778 1779 // debugging 1780 void print_block() PRODUCT_RETURN; 1781 void print_block(InstructionPrinter& ip, bool live_only = false) 
PRODUCT_RETURN; 1782 }; 1783 1784 1785 BASE(BlockEnd, StateSplit) 1786 private: 1787 BlockList* _sux; 1788 1789 protected: 1790 BlockList* sux() const { return _sux; } 1791 1792 void set_sux(BlockList* sux) { 1793 #ifdef ASSERT 1794 assert(sux != NULL, "sux must exist"); 1795 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist"); 1796 #endif 1797 _sux = sux; 1798 } 1799 1800 public: 1801 // creation 1802 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1803 : StateSplit(type, state_before) 1804 , _sux(NULL) 1805 { 1806 set_flag(IsSafepointFlag, is_safepoint); 1807 } 1808 1809 // accessors 1810 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1811 // For compatibility with old code, for new code use block() 1812 BlockBegin* begin() const { return _block; } 1813 1814 // manipulation 1815 void set_begin(BlockBegin* begin); 1816 1817 // successors 1818 int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; } 1819 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1820 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1821 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1822 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1823 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1824 }; 1825 1826 1827 LEAF(Goto, BlockEnd) 1828 public: 1829 enum Direction { 1830 none, // Just a regular goto 1831 taken, not_taken // Goto produced from If 1832 }; 1833 private: 1834 ciMethod* _profiled_method; 1835 int _profiled_bci; 1836 Direction _direction; 1837 public: 1838 // creation 1839 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1840 : BlockEnd(illegalType, state_before, is_safepoint) 1841 , _direction(none) 1842 , _profiled_method(NULL) 1843 , _profiled_bci(0) { 1844 BlockList* s = new BlockList(1); 1845 s->append(sux); 1846 set_sux(s); 1847 } 1848 1849 Goto(BlockBegin* sux, bool 
is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1850 , _direction(none) 1851 , _profiled_method(NULL) 1852 , _profiled_bci(0) { 1853 BlockList* s = new BlockList(1); 1854 s->append(sux); 1855 set_sux(s); 1856 } 1857 1858 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1859 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1860 int profiled_bci() const { return _profiled_bci; } 1861 Direction direction() const { return _direction; } 1862 1863 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1864 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1865 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1866 void set_direction(Direction d) { _direction = d; } 1867 }; 1868 1869 #ifdef ASSERT 1870 LEAF(Assert, Instruction) 1871 private: 1872 Value _x; 1873 Condition _cond; 1874 Value _y; 1875 char *_message; 1876 1877 public: 1878 // creation 1879 // unordered_is_true is valid for float/double compares only 1880 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1881 1882 // accessors 1883 Value x() const { return _x; } 1884 Condition cond() const { return _cond; } 1885 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1886 Value y() const { return _y; } 1887 const char *message() const { return _message; } 1888 1889 // generic 1890 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1891 }; 1892 #endif 1893 1894 LEAF(RangeCheckPredicate, StateSplit) 1895 private: 1896 Value _x; 1897 Condition _cond; 1898 Value _y; 1899 1900 void check_state(); 1901 1902 public: 1903 // creation 1904 // unordered_is_true is valid for float/double compares only 1905 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 1906 , _x(x) 1907 , _cond(cond) 1908 , _y(y) 1909 { 1910 ASSERT_VALUES 1911 set_flag(UnorderedIsTrueFlag, 
unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    this->set_state(state);
    check_state();
  }

  // Always deoptimize: a predicate with no operands (see always_fail below).
  RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
  {
    this->set_state(state);
    _x = _y = NULL;
    check_state();
  }

  // accessors
  Value x() const                              { return _x; }
  Condition cond() const                       { return _cond; }
  bool unordered_is_true() const               { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                              { return _y; }

  // Degrade this predicate to an unconditional deoptimization (NULL operands).
  void always_fail()                           { _x = _y = NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
  HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
};


// Conditional two-way branch: compares _x <cond> _y; successor 0 is taken
// when the condition holds, successor 1 otherwise.
LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci;   // Canonicalizer may alter bci of If node
  bool        _swapped;        // Is the order reversed with respect to the original If in the
                               // bytecode stream?

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                              { return _x; }
  Condition cond() const                       { return _cond; }
  bool unordered_is_true() const               { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                              { return _y; }
  BlockBegin* sux_for(bool is_true) const      { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                     { return sux_for(true); }
  BlockBegin* fsux() const                     { return sux_for(false); }
  BlockBegin* usux() const                     { return sux_for(unordered_is_true()); }
  bool should_profile() const                  { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const            { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                     { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                      { return _swapped; }

  // manipulation
  // Swap the operands and mirror the condition so the overall test is unchanged.
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // Swap the successors and negate the condition (plus the unordered flag)
  // so the overall branch behavior is unchanged.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value)          { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)   { _profiled_method = method; }
  void set_profiled_bci(int bci)               { _profiled_bci = bci; }
  void set_swapped(bool value)                 { _swapped = value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;
  Value    _obj;
  bool     _test_is_instance;  // jump if instance
  int      _instanceof_bci;

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* an instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                       { return _klass; }
  Value obj() const                            { return _obj; }
  int instanceof_bci() const                   { return _instanceof_bci; }
  bool test_is_instance() const                { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const      { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                     { return sux_for(true); }
  BlockBegin* fsux() const                     { return sux_for(false); }

  // manipulation
  // Swap the successors and invert the test so behavior is unchanged.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); }
};


// Common base of TableSwitch and LookupSwitch: a multi-way branch on _tag.
BASE(Switch, BlockEnd)
 private:
  Value _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                            { return _tag; }
  // number of explicit cases; the last successor is the default target
  int length() const                           { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const   { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
};


// Switch over a dense key range [lo_key, hi_key].
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                           { return _lo_key; }
  int hi_key() const                           { return _lo_key + length() - 1; }
};


// Switch over an explicit (sparse) list of keys, one per non-default successor.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                      { return _keys->at(i); }
};


// Method return; _result is NULL for a void return.
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                         { return _result; }
  bool has_result() const                      { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Explicit athrow of _exception.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                      { return _exception; }

  // generic
  virtual bool can_trap() const                { return true; }
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// The root of the CFG: branches to the standard entry and, if present,
// the OSR entry.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                { return default_sux(); }
  // NULL when no OSR entry was appended (only one successor exists then)
  BlockBegin* osr_entry() const                { return number_of_sux() < 2 ? NULL : sux_at(0); }
};


// The incoming OSR buffer pointer at an on-stack-replacement entry
// (pointer-sized, hence longType on 64-bit).
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;             // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                          { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { f->visit(&_input); }
};


// Common base of all Unsafe.* intrinsic instructions.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to.
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                       { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Unsafe access through a raw (Java long) address, optionally base + scaled index.
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;              // Base address (a Java long)
  Value _index;             // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;        // Scale factor: 0, 1, 2, or 3.
                            // Indicates log2 of number of bytes (1, 2, 4, or 8)
                            // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                 { return _base; }
  Value index()                                { return _index; }
  bool  has_index()                            { return (_index != NULL); }
  int   log2_scale()                           { return _log2_scale; }

  // setters
  void set_base (Value base)                   { _base = base; }
  void set_index(Value index)                  { _index = index; }
  void set_log2_scale(int log2_scale)          { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
                                                  f->visit(&_base);
                                                  if (has_index()) f->visit(&_index); }
};


LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
  bool _may_be_unaligned, _is_wide;  // For OSREntry

 public:
  UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  bool may_be_unaligned()                      { return _may_be_unaligned; }
  bool is_wide()                               { return _is_wide; }
};


LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;             // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f);
                                                  f->visit(&_value); }
};


// Unsafe access to a field of an object identified by (object, offset).
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;            // Object to be fetched from or mutated
  Value _offset;            // Offset within object
  bool  _is_volatile;       // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                               { return _object; }
  Value offset()                               { return _offset; }
  bool  is_volatile()                          { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
                                                  f->visit(&_object);
                                                  f->visit(&_offset); }
};


LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;             // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
                                                  f->visit(&_value); }
};

// Atomic get-and-set / get-and-add of a field (never volatile, never a put
// in the UnsafeOp sense: it produces the previous value).
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;             // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                          { return _is_add; }
  Value value()                                { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
                                                  f->visit(&_value); }
};

// Common base of the prefetch hints (no data is read or written: T_VOID).
BASE(UnsafePrefetch, UnsafeObjectOp)
 public:
  UnsafePrefetch(Value object, Value offset)
  : UnsafeObjectOp(T_VOID, object, offset, false, false)
  {
  }
};


LEAF(UnsafePrefetchRead, UnsafePrefetch)
 public:
  UnsafePrefetchRead(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};


LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};

LEAF(ProfileCall, Instruction) 2452 private: 2453 ciMethod* _method; 2454 int _bci_of_invoke; 2455 ciMethod* _callee; // the method that is called at the given bci 2456 Value _recv; 2457 ciKlass* _known_holder; 2458 2459 public: 2460 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder) 2461 : Instruction(voidType) 2462 , _method(method) 2463 , _bci_of_invoke(bci) 2464 , _callee(callee) 2465 , _recv(recv) 2466 , _known_holder(known_holder) 2467 { 2468 // The ProfileCall has side-effects and must occur precisely where located 2469 pin(); 2470 } 2471 2472 ciMethod* method() { return _method; } 2473 int bci_of_invoke() { return _bci_of_invoke; } 2474 ciMethod* callee() { return _callee; } 2475 Value recv() { return _recv; } 2476 ciKlass* known_holder() { return _known_holder; } 2477 2478 virtual void input_values_do(ValueVisitor* f) { if (_recv != NULL) f->visit(&_recv); } 2479 }; 2480 2481 2482 // Call some C runtime function that doesn't safepoint, 2483 // optionally passing the current thread as the first argument. 
2484 LEAF(RuntimeCall, Instruction) 2485 private: 2486 const char* _entry_name; 2487 address _entry; 2488 Values* _args; 2489 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2490 2491 public: 2492 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2493 : Instruction(type) 2494 , _entry(entry) 2495 , _args(args) 2496 , _entry_name(entry_name) 2497 , _pass_thread(pass_thread) { 2498 ASSERT_VALUES 2499 pin(); 2500 } 2501 2502 const char* entry_name() const { return _entry_name; } 2503 address entry() const { return _entry; } 2504 int number_of_arguments() const { return _args->length(); } 2505 Value argument_at(int i) const { return _args->at(i); } 2506 bool pass_thread() const { return _pass_thread; } 2507 2508 virtual void input_values_do(ValueVisitor* f) { 2509 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2510 } 2511 }; 2512 2513 // Use to trip invocation counter of an inlined method 2514 2515 LEAF(ProfileInvoke, Instruction) 2516 private: 2517 ciMethod* _inlinee; 2518 ValueStack* _state; 2519 2520 public: 2521 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2522 : Instruction(voidType) 2523 , _inlinee(inlinee) 2524 , _state(state) 2525 { 2526 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
2527 pin(); 2528 } 2529 2530 ciMethod* inlinee() { return _inlinee; } 2531 ValueStack* state() { return _state; } 2532 virtual void input_values_do(ValueVisitor*) {} 2533 virtual void state_values_do(ValueVisitor*); 2534 }; 2535 2536 LEAF(MemBar, Instruction) 2537 private: 2538 LIR_Code _code; 2539 2540 public: 2541 MemBar(LIR_Code code) 2542 : Instruction(voidType) 2543 , _code(code) 2544 { 2545 pin(); 2546 } 2547 2548 LIR_Code code() { return _code; } 2549 2550 virtual void input_values_do(ValueVisitor*) {} 2551 }; 2552 2553 class BlockPair: public CompilationResourceObj { 2554 private: 2555 BlockBegin* _from; 2556 BlockBegin* _to; 2557 public: 2558 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {} 2559 BlockBegin* from() const { return _from; } 2560 BlockBegin* to() const { return _to; } 2561 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; } 2562 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); } 2563 void set_to(BlockBegin* b) { _to = b; } 2564 void set_from(BlockBegin* b) { _from = b; } 2565 }; 2566 2567 2568 define_array(BlockPairArray, BlockPair*) 2569 define_stack(BlockPairList, BlockPairArray) 2570 2571 2572 inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); } 2573 inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); } 2574 inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); } 2575 2576 #undef ASSERT_VALUES 2577 2578 #endif // SHARE_VM_C1_C1_INSTRUCTION_HPP