1 /* 2 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
// Forward declarations for every node class in the HIR (high-level
// intermediate representation) instruction hierarchy. The definitions
// follow below; visitors and the as_X() downcast helpers reference all
// of these names.
class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class IfInstanceOf;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafeGetAndSetObject;
class UnsafePrefetch;
class UnsafePrefetchRead;
class UnsafePrefetchWrite;
class ProfileCall;
class ProfileReturnType;
class ProfileInvoke;
class RuntimeCall;
class MemBar;
class RangeCheckPredicate;
#ifdef ASSERT
class Assert;
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
define_array(ValueArray, Value)
define_stack(Values, ValueArray)

define_array(ValueStackArray, ValueStack*)
define_stack(ValueStackStack, ValueStackArray)

// BlockClosure is the base class for block traversal/iteration.
class BlockClosure: public CompilationResourceObj {
 public:
  // Called once per block by the iteration functions below.
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // Receives a pointer to the Value slot so the visitor may substitute it.
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// A growable list of basic blocks with traversal helpers; the underlying
// storage comes from the define_stack-generated _BlockList.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure front-to-back
  void iterate_backward(BlockClosure* closure);  // apply closure back-to-front
  void blocks_do(void f(BlockBegin*));
  void values_do(ValueVisitor* f);
  // Debug printing only; compiled away in PRODUCT builds (PRODUCT_RETURN).
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// One pure-virtual do_X per concrete (LEAF) instruction class; the
// LEAF macro's visit() implementation dispatches to the matching do_X.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType*  x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!
// Each HASHn shifts the accumulated hash left by 7 and XORs in the
// next component, cast to intx via HASH1.

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
// that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.
//
// NOTE(review): each f argument is expanded both bare (f1) and behind the
// arrow (_v->f1), so it must be a member expression (e.g. x()->subst())
// that stays valid in both positions; the arguments cannot be wrapped in
// hygiene parentheses without breaking the _v->f1 expansion.

#define HASHING1(class_name, enabled, f1)        \
  virtual intx hash() const {                    \
    return (enabled) ? HASH2(name(), f1) : 0;    \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    return true;                                 \
  }                                              \


#define HASHING2(class_name, enabled, f1, f2)    \
  virtual intx hash() const {                    \
    return (enabled) ? HASH3(name(), f1, f2) : 0; \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    if (f2 != _v->f2) return false;              \
    return true;                                 \
  }                                              \


#define HASHING3(class_name, enabled, f1, f2, f3) \
  virtual intx hash() const {                     \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                               \
  virtual bool is_equal(Value v) const {          \
    if (!(enabled)  ) return false;               \
    class_name* _v = v->as_##class_name();        \
    if (_v == NULL  ) return false;               \
    if (f1 != _v->f1) return false;               \
    if (f2 != _v->f2) return false;               \
    if (f3 != _v->f3) return false;               \
    return true;                                  \
  }                                               \


// The mother of all instructions...
295 296 class Instruction: public CompilationResourceObj { 297 private: 298 int _id; // the unique instruction id 299 #ifndef PRODUCT 300 int _printable_bci; // the bci of the instruction for printing 301 #endif 302 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 303 int _pin_state; // set of PinReason describing the reason for pinning 304 ValueType* _type; // the instruction value type 305 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 306 Instruction* _subst; // the substitution instruction if any 307 LIR_Opr _operand; // LIR specific information 308 unsigned int _flags; // Flag bits 309 310 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 311 ValueStack* _exception_state; // Copy of state for exception handling 312 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 313 314 friend class UseCountComputer; 315 friend class BlockBegin; 316 317 void update_exception_state(ValueStack* state); 318 319 protected: 320 BlockBegin* _block; // Block that contains this instruction 321 322 void set_type(ValueType* type) { 323 assert(type != NULL, "type must exist"); 324 _type = type; 325 } 326 327 // Helper class to keep track of which arguments need a null check 328 class ArgsNonNullState { 329 private: 330 int _nonnull_state; // mask identifying which args are nonnull 331 public: 332 ArgsNonNullState() 333 : _nonnull_state(AllBits) {} 334 335 // Does argument number i needs a null check? 336 bool arg_needs_null_check(int i) const { 337 // No data is kept for arguments starting at position 33 so 338 // conservatively assume that they need a null check. 
339 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 340 return is_set_nth_bit(_nonnull_state, i); 341 } 342 return true; 343 } 344 345 // Set whether argument number i needs a null check or not 346 void set_arg_needs_null_check(int i, bool check) { 347 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 348 if (check) { 349 _nonnull_state |= nth_bit(i); 350 } else { 351 _nonnull_state &= ~(nth_bit(i)); 352 } 353 } 354 } 355 }; 356 357 public: 358 void* operator new(size_t size) throw() { 359 Compilation* c = Compilation::current(); 360 void* res = c->arena()->Amalloc(size); 361 ((Instruction*)res)->_id = c->get_next_id(); 362 return res; 363 } 364 365 static const int no_bci = -99; 366 367 enum InstructionFlag { 368 NeedsNullCheckFlag = 0, 369 CanTrapFlag, 370 DirectCompareFlag, 371 IsEliminatedFlag, 372 IsSafepointFlag, 373 IsStaticFlag, 374 IsStrictfpFlag, 375 NeedsStoreCheckFlag, 376 NeedsWriteBarrierFlag, 377 PreservesStateFlag, 378 TargetIsFinalFlag, 379 TargetIsLoadedFlag, 380 TargetIsStrictfpFlag, 381 UnorderedIsTrueFlag, 382 NeedsPatchingFlag, 383 ThrowIncompatibleClassChangeErrorFlag, 384 ProfileMDOFlag, 385 IsLinkedInBlockFlag, 386 NeedsRangeCheckFlag, 387 InWorkListFlag, 388 DeoptimizeOnException, 389 InstructionLastFlag 390 }; 391 392 public: 393 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 394 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 395 396 // 'globally' used condition values 397 enum Condition { 398 eql, neq, lss, leq, gtr, geq, aeq, beq 399 }; 400 401 // Instructions may be pinned for many reasons and under certain conditions 402 // with enough knowledge it's possible to safely unpin them. 
403 enum PinReason { 404 PinUnknown = 1 << 0 405 , PinExplicitNullCheck = 1 << 3 406 , PinStackForStateSplit= 1 << 12 407 , PinStateSplitConstructor= 1 << 13 408 , PinGlobalValueNumbering= 1 << 14 409 }; 410 411 static Condition mirror(Condition cond); 412 static Condition negate(Condition cond); 413 414 // initialization 415 static int number_of_instructions() { 416 return Compilation::current()->number_of_instructions(); 417 } 418 419 // creation 420 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 421 : _use_count(0) 422 #ifndef PRODUCT 423 , _printable_bci(-99) 424 #endif 425 , _pin_state(0) 426 , _type(type) 427 , _next(NULL) 428 , _block(NULL) 429 , _subst(NULL) 430 , _flags(0) 431 , _operand(LIR_OprFact::illegalOpr) 432 , _state_before(state_before) 433 , _exception_handlers(NULL) 434 { 435 check_state(state_before); 436 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 437 update_exception_state(_state_before); 438 } 439 440 // accessors 441 int id() const { return _id; } 442 #ifndef PRODUCT 443 bool has_printable_bci() const { return _printable_bci != -99; } 444 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 445 void set_printable_bci(int bci) { _printable_bci = bci; } 446 #endif 447 int dominator_depth(); 448 int use_count() const { return _use_count; } 449 int pin_state() const { return _pin_state; } 450 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 451 ValueType* type() const { return _type; } 452 BlockBegin *block() const { return _block; } 453 Instruction* prev(); // use carefully, expensive operation 454 Instruction* next() const { return _next; } 455 bool has_subst() const { return _subst != NULL; } 456 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 457 LIR_Opr operand() const { return _operand; } 458 459 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 460 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 461 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 462 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 463 464 bool has_uses() const { return use_count() > 0; } 465 ValueStack* state_before() const { return _state_before; } 466 ValueStack* exception_state() const { return _exception_state; } 467 virtual bool needs_exception_state() const { return true; } 468 XHandlers* exception_handlers() const { return _exception_handlers; } 469 470 // manipulation 471 void pin(PinReason reason) { _pin_state |= reason; } 472 void pin() { _pin_state |= PinUnknown; } 473 // DANGEROUS: only used by EliminateStores 474 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 475 476 Instruction* set_next(Instruction* next) { 477 assert(next->has_printable_bci(), "_printable_bci should have been set"); 478 assert(next != NULL, "must not be NULL"); 479 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 480 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 481 482 BlockBegin *block = this->block(); 483 next->_block = block; 484 485 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 486 _next = next; 487 return next; 488 } 489 490 Instruction* set_next(Instruction* next, int bci) { 491 #ifndef PRODUCT 492 next->set_printable_bci(bci); 493 #endif 494 return set_next(next); 495 } 496 497 // when blocks are merged 498 void fixup_block_pointers() { 499 Instruction *cur = next()->next(); // next()'s block is set in set_next 500 while (cur && cur->_block != block()) { 501 cur->_block = block(); 502 cur = cur->next(); 503 } 504 } 505 506 Instruction *insert_after(Instruction *i) { 507 Instruction* n = 
_next; 508 set_next(i); 509 i->set_next(n); 510 return _next; 511 } 512 513 Instruction *insert_after_same_bci(Instruction *i) { 514 #ifndef PRODUCT 515 i->set_printable_bci(printable_bci()); 516 #endif 517 return insert_after(i); 518 } 519 520 void set_subst(Instruction* subst) { 521 assert(subst == NULL || 522 type()->base() == subst->type()->base() || 523 subst->type()->base() == illegalType, "type can't change"); 524 _subst = subst; 525 } 526 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 527 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 528 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 529 530 // machine-specifics 531 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 532 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 533 534 // generic 535 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 536 virtual Phi* as_Phi() { return NULL; } 537 virtual Local* as_Local() { return NULL; } 538 virtual Constant* as_Constant() { return NULL; } 539 virtual AccessField* as_AccessField() { return NULL; } 540 virtual LoadField* as_LoadField() { return NULL; } 541 virtual StoreField* as_StoreField() { return NULL; } 542 virtual AccessArray* as_AccessArray() { return NULL; } 543 virtual ArrayLength* as_ArrayLength() { return NULL; } 544 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 545 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 546 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 547 virtual NegateOp* as_NegateOp() { return NULL; } 548 virtual Op2* as_Op2() { return NULL; } 549 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 550 virtual ShiftOp* as_ShiftOp() { return NULL; } 551 virtual LogicOp* as_LogicOp() { return NULL; } 552 virtual CompareOp* as_CompareOp() { return NULL; } 553 virtual IfOp* as_IfOp() { return 
NULL; } 554 virtual Convert* as_Convert() { return NULL; } 555 virtual NullCheck* as_NullCheck() { return NULL; } 556 virtual OsrEntry* as_OsrEntry() { return NULL; } 557 virtual StateSplit* as_StateSplit() { return NULL; } 558 virtual Invoke* as_Invoke() { return NULL; } 559 virtual NewInstance* as_NewInstance() { return NULL; } 560 virtual NewArray* as_NewArray() { return NULL; } 561 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 562 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 563 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 564 virtual TypeCheck* as_TypeCheck() { return NULL; } 565 virtual CheckCast* as_CheckCast() { return NULL; } 566 virtual InstanceOf* as_InstanceOf() { return NULL; } 567 virtual TypeCast* as_TypeCast() { return NULL; } 568 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 569 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 570 virtual MonitorExit* as_MonitorExit() { return NULL; } 571 virtual Intrinsic* as_Intrinsic() { return NULL; } 572 virtual BlockBegin* as_BlockBegin() { return NULL; } 573 virtual BlockEnd* as_BlockEnd() { return NULL; } 574 virtual Goto* as_Goto() { return NULL; } 575 virtual If* as_If() { return NULL; } 576 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 577 virtual TableSwitch* as_TableSwitch() { return NULL; } 578 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 579 virtual Return* as_Return() { return NULL; } 580 virtual Throw* as_Throw() { return NULL; } 581 virtual Base* as_Base() { return NULL; } 582 virtual RoundFP* as_RoundFP() { return NULL; } 583 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 584 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 585 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 586 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 587 588 #ifdef ASSERT 589 virtual Assert* as_Assert() { return NULL; } 590 #endif 591 592 virtual void visit(InstructionVisitor* v) = 0; 593 
594 virtual bool can_trap() const { return false; } 595 596 virtual void input_values_do(ValueVisitor* f) = 0; 597 virtual void state_values_do(ValueVisitor* f); 598 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 599 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 600 601 virtual ciType* exact_type() const; 602 virtual ciType* declared_type() const { return NULL; } 603 604 // hashing 605 virtual const char* name() const = 0; 606 HASHING1(Instruction, false, id()) // hashing disabled by default 607 608 // debugging 609 static void check_state(ValueStack* state) PRODUCT_RETURN; 610 void print() PRODUCT_RETURN; 611 void print_line() PRODUCT_RETURN; 612 void print(InstructionPrinter& ip) PRODUCT_RETURN; 613 }; 614 615 616 // The following macros are used to define base (i.e., non-leaf) 617 // and leaf instruction classes. They define class-name related 618 // generic functionality in one place. 619 620 #define BASE(class_name, super_class_name) \ 621 class class_name: public super_class_name { \ 622 public: \ 623 virtual class_name* as_##class_name() { return this; } \ 624 625 626 #define LEAF(class_name, super_class_name) \ 627 BASE(class_name, super_class_name) \ 628 public: \ 629 virtual const char* name() const { return #class_name; } \ 630 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 631 632 633 // Debugging support 634 635 636 #ifdef ASSERT 637 class AssertValues: public ValueVisitor { 638 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 639 }; 640 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 641 #else 642 #define ASSERT_VALUES 643 #endif // ASSERT 644 645 646 // A Phi is a phi function in the sense of SSA form. It stands for 647 // the value of a local variable at the beginning of a join block. 648 // A Phi consists of n operands, one for every incoming branch. 
LEAF(Phi, Instruction)
 private:
  int         _pf_flags; // the flags of the phi function
  int         _index;    // to value on operand stack (index < 0) or to local
 public:
  // creation
  // b is the join block this phi belongs to; index >= 0 selects a local
  // slot, index < 0 encodes an operand-stack slot (see stack_index()).
  Phi(ValueType* type, BlockBegin* b, int index)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  {
    _block = b;
    // Use the block's bci for printing (debug builds only).
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  // Decode the negative stack encoding: _index == -(stack_index + 1).
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;  // value flowing in from the i-th predecessor
  int   operand_count() const;

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal()           {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // Intentionally empty: a phi's operands live in the predecessor blocks'
  // end states (see operand_at), not in this node.
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  ciType*  _declared_type;
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    // Locals have no bytecode of their own; -1 is used for printing.
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }

  virtual ciType* declared_type() const          { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { /* no values */ }
};


// A compile-time constant value. Constants with a state_before need
// patching and are therefore pinned.
LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, NULL, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  Constant(ValueType* type, ValueStack* state_before):
    Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != NULL, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    // since it's patching it needs to be pinned
    pin();
  }

  // generic
  // Only patched constants (those carrying a state_before) can trap.
  virtual bool can_trap() const                  { return state_before() != NULL; }
  virtual void input_values_do(ValueVisitor* f)   { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  // Maps the three-way compile-time comparison onto the matching successor;
  // returns NULL when the comparison cannot be decided at compile time.
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return NULL;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }
};
// Common base of field accesses (LoadField/StoreField).
BASE(AccessField, Instruction)
 private:
  Value       _obj;
  int         _offset;
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    // Instance accesses need a null check on the receiver; static ones don't.
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)   { f->visit(&_obj); }
};


LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};


LEAF(StoreField, AccessField)
 private:
  Value _value;

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    // Object stores need a GC write barrier.
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { AccessField::input_values_do(f); f->visit(&_value); }
};


// Common base of array accesses; holds the array operand.
BASE(AccessArray, Instruction)
 private:
  Value       _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)   { f->visit(&_array); }
};


LEAF(ArrayLength, AccessArray)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


// Common base of indexed array accesses (LoadIndexed/StoreIndexed); adds
// the index, the (optional) known length, and the element type.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;
  BasicType _elt_type;

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  void clear_length()                            { _length = NULL; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f)   { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};


LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};


LEAF(StoreIndexed, AccessIndexed)
 private:
  Value       _value;

  ciMethod* _profiled_method;
  int       _profiled_bci;
 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0)
  {
    // Object-element stores need both a GC write barrier and a store check.
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool      should_profile() const               { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int       profiled_bci() const                 { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)   { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Arithmetic negation of a single operand.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); }
};


// Base class for all binary operations; holds the bytecode and both operands.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value x() const { return _x; }
  Value y() const { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const { return false; }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); }
};


// Arithmetic operation; pinned in the constructor when it can trap
// (subclass-defined, e.g. integer division).
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool is_strictfp() const { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Shift operation (x shifted by s).
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Bitwise logic operation.
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Comparison producing an int result.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional select: (x cond y) ? tval : fval. The condition code is
// stored in the Op2 bytecode slot, hence op() must not be called.
LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const { return (Condition)Op2::op(); }
  Value tval() const { return _tval; }
  Value fval() const { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (i2l, d2i, ...).
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value value() const { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on an object value.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const { return _obj; }

  // setters
  void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;
  Value   _obj;

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const { return _declared_type; }
  Value obj() const { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
};


// Base class for instructions that carry a complete ValueStack state;
// such instructions are pinned by the constructor.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// Method invocation.
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;
  Value           _recv;
  Values*         _args;
  BasicTypeList*  _signature;
  int             _vtable_index;
  ciMethod*       _target;

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const { return _code; }
  Value receiver() const { return _recv; }
  bool has_receiver() const { return receiver() != NULL; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const { return _args->at(i); }
  int vtable_index() const { return _vtable_index; }
  BasicTypeList* signature() const { return _signature; }
  ciMethod* target() const { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Allocation of a class instance.
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
  : StateSplit(instanceType, state_before)
  , _klass(klass)
  {}

  // accessors
  ciInstanceKlass* klass() const { return _klass; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};


// Base class for all array allocations.
BASE(NewArray, StateSplit)
 private:
  Value _length;

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const { return _length; }

  virtual bool needs_exception_state() const { return false; }

  ciType* exact_type() const { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
};


// Allocation of an array with a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const { return _elt_type; }
  ciType* exact_type() const;
};


// Allocation of an array with an object element type.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const { return _klass; }
  ciType* exact_type() const;
};


// Allocation of a multi-dimensional array; dimension lengths are in _dims.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;
  Values*  _dims;

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Values* dims() const { return _dims; }
  int rank() const { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};


// Base class for checkcast/instanceof style type checks.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass* _klass;
  Value    _obj;

  ciMethod* _profiled_method;
  int       _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Value obj() const { return _obj; }
  bool is_loaded() const { return klass() != NULL; }
  bool direct_compare() const { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }
  int profiled_bci() const { return _profiled_bci; }
};


// checkcast bytecode.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
};


// instanceof bytecode.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const { return false; }
};


// Base class for monitorenter/monitorexit.
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;
  int   _monitor_no;

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const { return _obj; }
  int monitor_no() const { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
};


// monitorenter bytecode.
LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const { return true; }
};


// monitorexit bytecode.
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};


// Call to a method the compiler knows how to generate inline (vmIntrinsics).
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;
  Values*          _args;
  Value            _recv;
  ArgsNonNullState _nonnull_state;

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we
continue execution in the same basic 1530 // block after the Intrinsic, all of the registers are intact. This 1531 // allows load elimination and common expression elimination to be 1532 // performed across the Intrinsic. The default value is false. 1533 Intrinsic(ValueType* type, 1534 vmIntrinsics::ID id, 1535 Values* args, 1536 bool has_receiver, 1537 ValueStack* state_before, 1538 bool preserves_state, 1539 bool cantrap = true) 1540 : StateSplit(type, state_before) 1541 , _id(id) 1542 , _args(args) 1543 , _recv(NULL) 1544 { 1545 assert(args != NULL, "args must exist"); 1546 ASSERT_VALUES 1547 set_flag(PreservesStateFlag, preserves_state); 1548 set_flag(CanTrapFlag, cantrap); 1549 if (has_receiver) { 1550 _recv = argument_at(0); 1551 } 1552 set_needs_null_check(has_receiver); 1553 1554 // some intrinsics can't trap, so don't force them to be pinned 1555 if (!can_trap()) { 1556 unpin(PinStateSplitConstructor); 1557 } 1558 } 1559 1560 // accessors 1561 vmIntrinsics::ID id() const { return _id; } 1562 int number_of_arguments() const { return _args->length(); } 1563 Value argument_at(int i) const { return _args->at(i); } 1564 1565 bool has_receiver() const { return (_recv != NULL); } 1566 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } 1567 bool preserves_state() const { return check_flag(PreservesStateFlag); } 1568 1569 bool arg_needs_null_check(int i) const { 1570 return _nonnull_state.arg_needs_null_check(i); 1571 } 1572 1573 void set_arg_needs_null_check(int i, bool check) { 1574 _nonnull_state.set_arg_needs_null_check(i, check); 1575 } 1576 1577 // generic 1578 virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1579 virtual void input_values_do(ValueVisitor* f) { 1580 StateSplit::input_values_do(f); 1581 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1582 } 1583 }; 1584 1585 1586 class LIR_List; 1587 1588 LEAF(BlockBegin, StateSplit) 1589 private: 1590 int _block_id; // the unique block 
id 1591 int _bci; // start-bci of block 1592 int _depth_first_number; // number of this block in a depth-first ordering 1593 int _linear_scan_number; // number of this block in linear-scan ordering 1594 int _dominator_depth; 1595 int _loop_depth; // the loop nesting level of this block 1596 int _loop_index; // number of the innermost loop of this block 1597 int _flags; // the flags associated with this block 1598 1599 // fields used by BlockListBuilder 1600 int _total_preds; // number of predecessors found by BlockListBuilder 1601 BitMap _stores_to_locals; // bit is set when a local variable is stored in the block 1602 1603 // SSA specific fields: (factor out later) 1604 BlockList _successors; // the successors of this block 1605 BlockList _predecessors; // the predecessors of this block 1606 BlockList _dominates; // list of blocks that are dominated by this block 1607 BlockBegin* _dominator; // the dominator of this block 1608 // SSA specific ends 1609 BlockEnd* _end; // the last instruction of this block 1610 BlockList _exception_handlers; // the exception handlers potentially invoked by this block 1611 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler 1612 int _exception_handler_pco; // if this block is the start of an exception handler, 1613 // this records the PC offset in the assembly code of the 1614 // first instruction in this block 1615 Label _label; // the label associated with this block 1616 LIR_List* _lir; // the low level intermediate representation for this block 1617 1618 BitMap _live_in; // set of live LIR_Opr registers at entry to this block 1619 BitMap _live_out; // set of live LIR_Opr registers at exit from this block 1620 BitMap _live_gen; // set of registers used before any redefinition in this block 1621 BitMap _live_kill; // set of registers defined in this block 1622 1623 BitMap _fpu_register_usage; 1624 intArray* _fpu_stack_state; // For x86 FPU code generation 
with UseLinearScan 1625 int _first_lir_instruction_id; // ID of first LIR instruction in this block 1626 int _last_lir_instruction_id; // ID of last LIR instruction in this block 1627 1628 void iterate_preorder (boolArray& mark, BlockClosure* closure); 1629 void iterate_postorder(boolArray& mark, BlockClosure* closure); 1630 1631 friend class SuxAndWeightAdjuster; 1632 1633 public: 1634 void* operator new(size_t size) throw() { 1635 Compilation* c = Compilation::current(); 1636 void* res = c->arena()->Amalloc(size); 1637 ((BlockBegin*)res)->_id = c->get_next_id(); 1638 ((BlockBegin*)res)->_block_id = c->get_next_block_id(); 1639 return res; 1640 } 1641 1642 // initialization/counting 1643 static int number_of_blocks() { 1644 return Compilation::current()->number_of_blocks(); 1645 } 1646 1647 // creation 1648 BlockBegin(int bci) 1649 : StateSplit(illegalType) 1650 , _bci(bci) 1651 , _depth_first_number(-1) 1652 , _linear_scan_number(-1) 1653 , _loop_depth(0) 1654 , _flags(0) 1655 , _dominator_depth(-1) 1656 , _dominator(NULL) 1657 , _end(NULL) 1658 , _predecessors(2) 1659 , _successors(2) 1660 , _dominates(2) 1661 , _exception_handlers(1) 1662 , _exception_states(NULL) 1663 , _exception_handler_pco(-1) 1664 , _lir(NULL) 1665 , _loop_index(-1) 1666 , _live_in() 1667 , _live_out() 1668 , _live_gen() 1669 , _live_kill() 1670 , _fpu_register_usage() 1671 , _fpu_stack_state(NULL) 1672 , _first_lir_instruction_id(-1) 1673 , _last_lir_instruction_id(-1) 1674 , _total_preds(0) 1675 , _stores_to_locals() 1676 { 1677 _block = this; 1678 #ifndef PRODUCT 1679 set_printable_bci(bci); 1680 #endif 1681 } 1682 1683 // accessors 1684 int block_id() const { return _block_id; } 1685 int bci() const { return _bci; } 1686 BlockList* successors() { return &_successors; } 1687 BlockList* dominates() { return &_dominates; } 1688 BlockBegin* dominator() const { return _dominator; } 1689 int loop_depth() const { return _loop_depth; } 1690 int dominator_depth() const { return 
_dominator_depth; } 1691 int depth_first_number() const { return _depth_first_number; } 1692 int linear_scan_number() const { return _linear_scan_number; } 1693 BlockEnd* end() const { return _end; } 1694 Label* label() { return &_label; } 1695 LIR_List* lir() const { return _lir; } 1696 int exception_handler_pco() const { return _exception_handler_pco; } 1697 BitMap& live_in() { return _live_in; } 1698 BitMap& live_out() { return _live_out; } 1699 BitMap& live_gen() { return _live_gen; } 1700 BitMap& live_kill() { return _live_kill; } 1701 BitMap& fpu_register_usage() { return _fpu_register_usage; } 1702 intArray* fpu_stack_state() const { return _fpu_stack_state; } 1703 int first_lir_instruction_id() const { return _first_lir_instruction_id; } 1704 int last_lir_instruction_id() const { return _last_lir_instruction_id; } 1705 int total_preds() const { return _total_preds; } 1706 BitMap& stores_to_locals() { return _stores_to_locals; } 1707 1708 // manipulation 1709 void set_dominator(BlockBegin* dom) { _dominator = dom; } 1710 void set_loop_depth(int d) { _loop_depth = d; } 1711 void set_dominator_depth(int d) { _dominator_depth = d; } 1712 void set_depth_first_number(int dfn) { _depth_first_number = dfn; } 1713 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; } 1714 void set_end(BlockEnd* end); 1715 void clear_end(); 1716 void disconnect_from_graph(); 1717 static void disconnect_edge(BlockBegin* from, BlockBegin* to); 1718 BlockBegin* insert_block_between(BlockBegin* sux); 1719 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1720 void set_lir(LIR_List* lir) { _lir = lir; } 1721 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; } 1722 void set_live_in (BitMap map) { _live_in = map; } 1723 void set_live_out (BitMap map) { _live_out = map; } 1724 void set_live_gen (BitMap map) { _live_gen = map; } 1725 void set_live_kill (BitMap map) { _live_kill = map; } 1726 void set_fpu_register_usage(BitMap map) { 
_fpu_register_usage = map; } 1727 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; } 1728 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; } 1729 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; } 1730 void increment_total_preds(int n = 1) { _total_preds += n; } 1731 void init_stores_to_locals(int locals_count) { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); } 1732 1733 // generic 1734 virtual void state_values_do(ValueVisitor* f); 1735 1736 // successors and predecessors 1737 int number_of_sux() const; 1738 BlockBegin* sux_at(int i) const; 1739 void add_successor(BlockBegin* sux); 1740 void remove_successor(BlockBegin* pred); 1741 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); } 1742 1743 void add_predecessor(BlockBegin* pred); 1744 void remove_predecessor(BlockBegin* pred); 1745 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); } 1746 int number_of_preds() const { return _predecessors.length(); } 1747 BlockBegin* pred_at(int i) const { return _predecessors[i]; } 1748 1749 // exception handlers potentially invoked by this block 1750 void add_exception_handler(BlockBegin* b); 1751 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); } 1752 int number_of_exception_handlers() const { return _exception_handlers.length(); } 1753 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); } 1754 1755 // states of the instructions that have an edge to this exception handler 1756 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 
0 : _exception_states->length(); } 1757 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); } 1758 int add_exception_state(ValueStack* state); 1759 1760 // flags 1761 enum Flag { 1762 no_flag = 0, 1763 std_entry_flag = 1 << 0, 1764 osr_entry_flag = 1 << 1, 1765 exception_entry_flag = 1 << 2, 1766 subroutine_entry_flag = 1 << 3, 1767 backward_branch_target_flag = 1 << 4, 1768 is_on_work_list_flag = 1 << 5, 1769 was_visited_flag = 1 << 6, 1770 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand 1771 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split 1772 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan 1773 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan 1774 donot_eliminate_range_checks = 1 << 11 // Should be try to eliminate range checks in this block 1775 }; 1776 1777 void set(Flag f) { _flags |= f; } 1778 void clear(Flag f) { _flags &= ~f; } 1779 bool is_set(Flag f) const { return (_flags & f) != 0; } 1780 bool is_entry_block() const { 1781 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag; 1782 return (_flags & entry_mask) != 0; 1783 } 1784 1785 // iteration 1786 void iterate_preorder (BlockClosure* closure); 1787 void iterate_postorder (BlockClosure* closure); 1788 1789 void block_values_do(ValueVisitor* f); 1790 1791 // loops 1792 void set_loop_index(int ix) { _loop_index = ix; } 1793 int loop_index() const { return _loop_index; } 1794 1795 // merging 1796 bool try_merge(ValueStack* state); // try to merge states at block begin 1797 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); } 1798 1799 // debugging 1800 void print_block() PRODUCT_RETURN; 1801 void print_block(InstructionPrinter& ip, bool live_only = false) 
PRODUCT_RETURN; 1802 }; 1803 1804 1805 BASE(BlockEnd, StateSplit) 1806 private: 1807 BlockList* _sux; 1808 1809 protected: 1810 BlockList* sux() const { return _sux; } 1811 1812 void set_sux(BlockList* sux) { 1813 #ifdef ASSERT 1814 assert(sux != NULL, "sux must exist"); 1815 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist"); 1816 #endif 1817 _sux = sux; 1818 } 1819 1820 public: 1821 // creation 1822 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1823 : StateSplit(type, state_before) 1824 , _sux(NULL) 1825 { 1826 set_flag(IsSafepointFlag, is_safepoint); 1827 } 1828 1829 // accessors 1830 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1831 // For compatibility with old code, for new code use block() 1832 BlockBegin* begin() const { return _block; } 1833 1834 // manipulation 1835 void set_begin(BlockBegin* begin); 1836 1837 // successors 1838 int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; } 1839 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1840 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1841 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1842 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1843 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1844 }; 1845 1846 1847 LEAF(Goto, BlockEnd) 1848 public: 1849 enum Direction { 1850 none, // Just a regular goto 1851 taken, not_taken // Goto produced from If 1852 }; 1853 private: 1854 ciMethod* _profiled_method; 1855 int _profiled_bci; 1856 Direction _direction; 1857 public: 1858 // creation 1859 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1860 : BlockEnd(illegalType, state_before, is_safepoint) 1861 , _direction(none) 1862 , _profiled_method(NULL) 1863 , _profiled_bci(0) { 1864 BlockList* s = new BlockList(1); 1865 s->append(sux); 1866 set_sux(s); 1867 } 1868 1869 Goto(BlockBegin* sux, bool 
is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1870 , _direction(none) 1871 , _profiled_method(NULL) 1872 , _profiled_bci(0) { 1873 BlockList* s = new BlockList(1); 1874 s->append(sux); 1875 set_sux(s); 1876 } 1877 1878 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1879 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1880 int profiled_bci() const { return _profiled_bci; } 1881 Direction direction() const { return _direction; } 1882 1883 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1884 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1885 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1886 void set_direction(Direction d) { _direction = d; } 1887 }; 1888 1889 #ifdef ASSERT 1890 LEAF(Assert, Instruction) 1891 private: 1892 Value _x; 1893 Condition _cond; 1894 Value _y; 1895 char *_message; 1896 1897 public: 1898 // creation 1899 // unordered_is_true is valid for float/double compares only 1900 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1901 1902 // accessors 1903 Value x() const { return _x; } 1904 Condition cond() const { return _cond; } 1905 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1906 Value y() const { return _y; } 1907 const char *message() const { return _message; } 1908 1909 // generic 1910 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1911 }; 1912 #endif 1913 1914 LEAF(RangeCheckPredicate, StateSplit) 1915 private: 1916 Value _x; 1917 Condition _cond; 1918 Value _y; 1919 1920 void check_state(); 1921 1922 public: 1923 // creation 1924 // unordered_is_true is valid for float/double compares only 1925 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 1926 , _x(x) 1927 , _cond(cond) 1928 , _y(y) 1929 { 1930 ASSERT_VALUES 1931 set_flag(UnorderedIsTrueFlag, 
unordered_is_true); 1932 assert(x->type()->tag() == y->type()->tag(), "types must match"); 1933 this->set_state(state); 1934 check_state(); 1935 } 1936 1937 // Always deoptimize 1938 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 1939 { 1940 this->set_state(state); 1941 _x = _y = NULL; 1942 check_state(); 1943 } 1944 1945 // accessors 1946 Value x() const { return _x; } 1947 Condition cond() const { return _cond; } 1948 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1949 Value y() const { return _y; } 1950 1951 void always_fail() { _x = _y = NULL; } 1952 1953 // generic 1954 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); } 1955 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 1956 }; 1957 1958 LEAF(If, BlockEnd) 1959 private: 1960 Value _x; 1961 Condition _cond; 1962 Value _y; 1963 ciMethod* _profiled_method; 1964 int _profiled_bci; // Canonicalizer may alter bci of If node 1965 bool _swapped; // Is the order reversed with respect to the original If in the 1966 // bytecode stream? 
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    // successor 0 is the true branch, successor 1 the false branch (see sux_for)
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const { return _x; }
  Condition cond() const { return _cond; }
  bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
  Value y() const { return _y; }
  BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const { return sux_for(true); }
  BlockBegin* fsux() const { return sux_for(false); }
  BlockBegin* usux() const { return sux_for(unordered_is_true()); }
  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }    // set only for profiled branches
  int profiled_bci() const { return _profiled_bci; }                // set for profiled branches and tiered
  bool is_swapped() const { return _swapped; }

  // manipulation
  // exchange the operands and mirror the condition (x < y  <==>  y > x)
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // exchange the successors and negate the condition to preserve semantics
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  void set_swapped(bool value) { _swapped = value; }
  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


// Fused InstanceOf + If (see Note 2 below).
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;
  Value    _obj;
  bool     _test_is_instance;                    // jump if instance
  int      _instanceof_bci;

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* and instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const { return _klass; }
  Value obj() const { return _obj; }
  int instanceof_bci() const { return _instanceof_bci; }
  bool test_is_instance() const { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const { return sux_for(true); }
  BlockBegin* fsux() const { return sux_for(false); }

  // manipulation
  // exchange the successors and invert the test to preserve semantics
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); }
};


// Base class for tableswitch/lookupswitch; the last successor is the default.
BASE(Switch, BlockEnd)
 private:
  Value _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const { return _tag; }
  int length() const { return number_of_sux() - 1; }   // number of keys (default successor excluded)

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
};


// tableswitch bytecode: dense key range [lo_key(), hi_key()].
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const { return _lo_key; }
  int hi_key() const { return _lo_key + length() - 1; }
};


// lookupswitch bytecode: sparse keys stored in _keys, one per non-default successor.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }
2131 2132 // accessors 2133 int key_at(int i) const { return _keys->at(i); } 2134 }; 2135 2136 2137 LEAF(Return, BlockEnd) 2138 private: 2139 Value _result; 2140 2141 public: 2142 // creation 2143 Return(Value result) : 2144 BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true), 2145 _result(result) {} 2146 2147 // accessors 2148 Value result() const { return _result; } 2149 bool has_result() const { return result() != NULL; } 2150 2151 // generic 2152 virtual void input_values_do(ValueVisitor* f) { 2153 BlockEnd::input_values_do(f); 2154 if (has_result()) f->visit(&_result); 2155 } 2156 }; 2157 2158 2159 LEAF(Throw, BlockEnd) 2160 private: 2161 Value _exception; 2162 2163 public: 2164 // creation 2165 Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) { 2166 ASSERT_VALUES 2167 } 2168 2169 // accessors 2170 Value exception() const { return _exception; } 2171 2172 // generic 2173 virtual bool can_trap() const { return true; } 2174 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); } 2175 }; 2176 2177 2178 LEAF(Base, BlockEnd) 2179 public: 2180 // creation 2181 Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) { 2182 assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged"); 2183 assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged"); 2184 BlockList* s = new BlockList(2); 2185 if (osr_entry != NULL) s->append(osr_entry); 2186 s->append(std_entry); // must be default sux! 2187 set_sux(s); 2188 } 2189 2190 // accessors 2191 BlockBegin* std_entry() const { return default_sux(); } 2192 BlockBegin* osr_entry() const { return number_of_sux() < 2 ? 
NULL : sux_at(0); } 2193 }; 2194 2195 2196 LEAF(OsrEntry, Instruction) 2197 public: 2198 // creation 2199 #ifdef _LP64 2200 OsrEntry() : Instruction(longType) { pin(); } 2201 #else 2202 OsrEntry() : Instruction(intType) { pin(); } 2203 #endif 2204 2205 // generic 2206 virtual void input_values_do(ValueVisitor* f) { } 2207 }; 2208 2209 2210 // Models the incoming exception at a catch site 2211 LEAF(ExceptionObject, Instruction) 2212 public: 2213 // creation 2214 ExceptionObject() : Instruction(objectType) { 2215 pin(); 2216 } 2217 2218 // generic 2219 virtual void input_values_do(ValueVisitor* f) { } 2220 }; 2221 2222 2223 // Models needed rounding for floating-point values on Intel. 2224 // Currently only used to represent rounding of double-precision 2225 // values stored into local variables, but could be used to model 2226 // intermediate rounding of single-precision values as well. 2227 LEAF(RoundFP, Instruction) 2228 private: 2229 Value _input; // floating-point value to be rounded 2230 2231 public: 2232 RoundFP(Value input) 2233 : Instruction(input->type()) // Note: should not be used for constants 2234 , _input(input) 2235 { 2236 ASSERT_VALUES 2237 } 2238 2239 // accessors 2240 Value input() const { return _input; } 2241 2242 // generic 2243 virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); } 2244 }; 2245 2246 2247 BASE(UnsafeOp, Instruction) 2248 private: 2249 BasicType _basic_type; // ValueType can not express byte-sized integers 2250 2251 protected: 2252 // creation 2253 UnsafeOp(BasicType basic_type, bool is_put) 2254 : Instruction(is_put ? voidType : as_ValueType(basic_type)) 2255 , _basic_type(basic_type) 2256 { 2257 //Note: Unsafe ops are not not guaranteed to throw NPE. 2258 // Convservatively, Unsafe operations must be pinned though we could be 2259 // looser about this if we wanted to.. 
2260 pin(); 2261 } 2262 2263 public: 2264 // accessors 2265 BasicType basic_type() { return _basic_type; } 2266 2267 // generic 2268 virtual void input_values_do(ValueVisitor* f) { } 2269 }; 2270 2271 2272 BASE(UnsafeRawOp, UnsafeOp) 2273 private: 2274 Value _base; // Base address (a Java long) 2275 Value _index; // Index if computed by optimizer; initialized to NULL 2276 int _log2_scale; // Scale factor: 0, 1, 2, or 3. 2277 // Indicates log2 of number of bytes (1, 2, 4, or 8) 2278 // to scale index by. 2279 2280 protected: 2281 UnsafeRawOp(BasicType basic_type, Value addr, bool is_put) 2282 : UnsafeOp(basic_type, is_put) 2283 , _base(addr) 2284 , _index(NULL) 2285 , _log2_scale(0) 2286 { 2287 // Can not use ASSERT_VALUES because index may be NULL 2288 assert(addr != NULL && addr->type()->is_long(), "just checking"); 2289 } 2290 2291 UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put) 2292 : UnsafeOp(basic_type, is_put) 2293 , _base(base) 2294 , _index(index) 2295 , _log2_scale(log2_scale) 2296 { 2297 } 2298 2299 public: 2300 // accessors 2301 Value base() { return _base; } 2302 Value index() { return _index; } 2303 bool has_index() { return (_index != NULL); } 2304 int log2_scale() { return _log2_scale; } 2305 2306 // setters 2307 void set_base (Value base) { _base = base; } 2308 void set_index(Value index) { _index = index; } 2309 void set_log2_scale(int log2_scale) { _log2_scale = log2_scale; } 2310 2311 // generic 2312 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2313 f->visit(&_base); 2314 if (has_index()) f->visit(&_index); } 2315 }; 2316 2317 2318 LEAF(UnsafeGetRaw, UnsafeRawOp) 2319 private: 2320 bool _may_be_unaligned, _is_wide; // For OSREntry 2321 2322 public: 2323 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false) 2324 : UnsafeRawOp(basic_type, addr, false) { 2325 _may_be_unaligned = may_be_unaligned; 2326 _is_wide = is_wide; 2327 } 2328 2329 
UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false) 2330 : UnsafeRawOp(basic_type, base, index, log2_scale, false) { 2331 _may_be_unaligned = may_be_unaligned; 2332 _is_wide = is_wide; 2333 } 2334 2335 bool may_be_unaligned() { return _may_be_unaligned; } 2336 bool is_wide() { return _is_wide; } 2337 }; 2338 2339 2340 LEAF(UnsafePutRaw, UnsafeRawOp) 2341 private: 2342 Value _value; // Value to be stored 2343 2344 public: 2345 UnsafePutRaw(BasicType basic_type, Value addr, Value value) 2346 : UnsafeRawOp(basic_type, addr, true) 2347 , _value(value) 2348 { 2349 assert(value != NULL, "just checking"); 2350 ASSERT_VALUES 2351 } 2352 2353 UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value) 2354 : UnsafeRawOp(basic_type, base, index, log2_scale, true) 2355 , _value(value) 2356 { 2357 assert(value != NULL, "just checking"); 2358 ASSERT_VALUES 2359 } 2360 2361 // accessors 2362 Value value() { return _value; } 2363 2364 // generic 2365 virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f); 2366 f->visit(&_value); } 2367 }; 2368 2369 2370 BASE(UnsafeObjectOp, UnsafeOp) 2371 private: 2372 Value _object; // Object to be fetched from or mutated 2373 Value _offset; // Offset within object 2374 bool _is_volatile; // true if volatile - dl/JSR166 2375 public: 2376 UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile) 2377 : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile) 2378 { 2379 } 2380 2381 // accessors 2382 Value object() { return _object; } 2383 Value offset() { return _offset; } 2384 bool is_volatile() { return _is_volatile; } 2385 // generic 2386 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2387 f->visit(&_object); 2388 f->visit(&_offset); } 2389 }; 2390 2391 2392 LEAF(UnsafeGetObject, UnsafeObjectOp) 2393 public: 2394 
UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile) 2395 : UnsafeObjectOp(basic_type, object, offset, false, is_volatile) 2396 { 2397 ASSERT_VALUES 2398 } 2399 }; 2400 2401 2402 LEAF(UnsafePutObject, UnsafeObjectOp) 2403 private: 2404 Value _value; // Value to be stored 2405 public: 2406 UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile) 2407 : UnsafeObjectOp(basic_type, object, offset, true, is_volatile) 2408 , _value(value) 2409 { 2410 ASSERT_VALUES 2411 } 2412 2413 // accessors 2414 Value value() { return _value; } 2415 2416 // generic 2417 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2418 f->visit(&_value); } 2419 }; 2420 2421 LEAF(UnsafeGetAndSetObject, UnsafeObjectOp) 2422 private: 2423 Value _value; // Value to be stored 2424 bool _is_add; 2425 public: 2426 UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add) 2427 : UnsafeObjectOp(basic_type, object, offset, false, false) 2428 , _value(value) 2429 , _is_add(is_add) 2430 { 2431 ASSERT_VALUES 2432 } 2433 2434 // accessors 2435 bool is_add() const { return _is_add; } 2436 Value value() { return _value; } 2437 2438 // generic 2439 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2440 f->visit(&_value); } 2441 }; 2442 2443 BASE(UnsafePrefetch, UnsafeObjectOp) 2444 public: 2445 UnsafePrefetch(Value object, Value offset) 2446 : UnsafeObjectOp(T_VOID, object, offset, false, false) 2447 { 2448 } 2449 }; 2450 2451 2452 LEAF(UnsafePrefetchRead, UnsafePrefetch) 2453 public: 2454 UnsafePrefetchRead(Value object, Value offset) 2455 : UnsafePrefetch(object, offset) 2456 { 2457 ASSERT_VALUES 2458 } 2459 }; 2460 2461 2462 LEAF(UnsafePrefetchWrite, UnsafePrefetch) 2463 public: 2464 UnsafePrefetchWrite(Value object, Value offset) 2465 : UnsafePrefetch(object, offset) 2466 { 2467 ASSERT_VALUES 2468 } 2469 }; 2470 2471 
// ProfileCall: records profile information (receiver, argument types) at a
// call site; has side-effects on profiling data and is therefore pinned.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;
  int              _bci_of_invoke;
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;
  ciKlass*         _known_holder;
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
    // _nonnull_state is default-constructed (no argument nullness known yet)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method() const                       { return _method; }
  int bci_of_invoke() const                      { return _bci_of_invoke; }
  ciMethod* callee() const                       { return _callee; }
  Value recv() const                             { return _recv; }
  ciKlass* known_holder() const                  { return _known_holder; }
  // number of arguments with a type profile; 0 when no argument profiling
  int nb_profiled_args() const                   { return _obj_args == NULL ? 0 : _obj_args->length(); }
  // precondition: 0 <= i < nb_profiled_args() (i.e. _obj_args != NULL)
  Value profiled_arg_at(int i) const             { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined() const                           { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic: the receiver (if any) and each profiled argument are inputs
  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != NULL) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};

// ProfileReturnType: records type profile information for the value
// returned by a call; _ret may be NULL (input_values_do guards for it).
LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod* _method;
  ciMethod* _callee;
  int       _bci_of_invoke;
  Value     _ret;

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    // NOTE(review): flag set unconditionally even though _ret may be NULL —
    // presumably consumed during code emission; confirm against the LIR generator.
    set_needs_null_check(true);
    // The ProfileType has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method() const                       { return _method; }
  ciMethod* callee() const                       { return _callee; }
  int bci_of_invoke() const                      { return _bci_of_invoke; }
  Value ret() const                              { return _ret; }

  virtual void input_values_do(ValueVisitor* f) {
    if (_ret != NULL) {
      f->visit(&_ret);
    }
  }
};

// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
2557 LEAF(RuntimeCall, Instruction) 2558 private: 2559 const char* _entry_name; 2560 address _entry; 2561 Values* _args; 2562 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2563 2564 public: 2565 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2566 : Instruction(type) 2567 , _entry(entry) 2568 , _args(args) 2569 , _entry_name(entry_name) 2570 , _pass_thread(pass_thread) { 2571 ASSERT_VALUES 2572 pin(); 2573 } 2574 2575 const char* entry_name() const { return _entry_name; } 2576 address entry() const { return _entry; } 2577 int number_of_arguments() const { return _args->length(); } 2578 Value argument_at(int i) const { return _args->at(i); } 2579 bool pass_thread() const { return _pass_thread; } 2580 2581 virtual void input_values_do(ValueVisitor* f) { 2582 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2583 } 2584 }; 2585 2586 // Use to trip invocation counter of an inlined method 2587 2588 LEAF(ProfileInvoke, Instruction) 2589 private: 2590 ciMethod* _inlinee; 2591 ValueStack* _state; 2592 2593 public: 2594 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2595 : Instruction(voidType) 2596 , _inlinee(inlinee) 2597 , _state(state) 2598 { 2599 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
2600 pin(); 2601 } 2602 2603 ciMethod* inlinee() { return _inlinee; } 2604 ValueStack* state() { return _state; } 2605 virtual void input_values_do(ValueVisitor*) {} 2606 virtual void state_values_do(ValueVisitor*); 2607 }; 2608 2609 LEAF(MemBar, Instruction) 2610 private: 2611 LIR_Code _code; 2612 2613 public: 2614 MemBar(LIR_Code code) 2615 : Instruction(voidType) 2616 , _code(code) 2617 { 2618 pin(); 2619 } 2620 2621 LIR_Code code() { return _code; } 2622 2623 virtual void input_values_do(ValueVisitor*) {} 2624 }; 2625 2626 class BlockPair: public CompilationResourceObj { 2627 private: 2628 BlockBegin* _from; 2629 BlockBegin* _to; 2630 public: 2631 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {} 2632 BlockBegin* from() const { return _from; } 2633 BlockBegin* to() const { return _to; } 2634 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; } 2635 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); } 2636 void set_to(BlockBegin* b) { _to = b; } 2637 void set_from(BlockBegin* b) { _from = b; } 2638 }; 2639 2640 2641 define_array(BlockPairArray, BlockPair*) 2642 define_stack(BlockPairList, BlockPairArray) 2643 2644 2645 inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); } 2646 inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); } 2647 inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); } 2648 2649 #undef ASSERT_VALUES 2650 2651 #endif // SHARE_VM_C1_C1_INSTRUCTION_HPP