1 /* 2 * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
47 48 class Instruction; 49 class Phi; 50 class Local; 51 class Constant; 52 class AccessField; 53 class LoadField; 54 class StoreField; 55 class AccessArray; 56 class ArrayLength; 57 class AccessIndexed; 58 class LoadIndexed; 59 class StoreIndexed; 60 class NegateOp; 61 class Op2; 62 class ArithmeticOp; 63 class ShiftOp; 64 class LogicOp; 65 class CompareOp; 66 class IfOp; 67 class Convert; 68 class NullCheck; 69 class TypeCast; 70 class OsrEntry; 71 class ExceptionObject; 72 class StateSplit; 73 class Invoke; 74 class NewInstance; 75 class NewArray; 76 class NewTypeArray; 77 class NewObjectArray; 78 class NewMultiArray; 79 class TypeCheck; 80 class CheckCast; 81 class InstanceOf; 82 class AccessMonitor; 83 class MonitorEnter; 84 class MonitorExit; 85 class Intrinsic; 86 class BlockBegin; 87 class BlockEnd; 88 class Goto; 89 class If; 90 class IfInstanceOf; 91 class Switch; 92 class TableSwitch; 93 class LookupSwitch; 94 class Return; 95 class Throw; 96 class Base; 97 class RoundFP; 98 class UnsafeOp; 99 class UnsafeRawOp; 100 class UnsafeGetRaw; 101 class UnsafePutRaw; 102 class UnsafeObjectOp; 103 class UnsafeGetObject; 104 class UnsafePutObject; 105 class UnsafeGetAndSetObject; 106 class ProfileCall; 107 class ProfileReturnType; 108 class ProfileInvoke; 109 class RuntimeCall; 110 class MemBar; 111 class RangeCheckPredicate; 112 #ifdef ASSERT 113 class Assert; 114 #endif 115 116 // A Value is a reference to the instruction creating the value 117 typedef Instruction* Value; 118 typedef GrowableArray<Value> Values; 119 typedef GrowableArray<ValueStack*> ValueStackStack; 120 121 // BlockClosure is the base class for block traversal/iteration. 

// Abstract callback invoked once per basic block during CFG traversal
// (see BlockList::iterate_forward/iterate_backward below).
class BlockClosure: public CompilationResourceObj {
 public:
  // Called for each block visited; concrete closures implement the per-block work.
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
// (a Value is an Instruction*, so visitors receive a pointer-to-pointer
// and may substitute the referenced instruction in place).
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// A growable list of basic blocks with traversal helpers.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  // Pre-filled list: length == capacity == size, every slot set to init.
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);    // apply closure to blocks front-to-back
  void iterate_backward(BlockClosure* closure);   // apply closure to blocks back-to-front
  void blocks_do(void f(BlockBegin*));            // plain function-pointer variant
  void values_do(ValueVisitor* f);                // visit values of all instructions in all blocks
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.

// Visitor interface over the concrete Instruction hierarchy: one pure-virtual
// do_X per concrete (leaf) instruction class X. Dispatch happens via each
// leaf's visit() override (generated by the LEAF macro further below).
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: This hash functions affect the performance
//       of ValueMap - make changes carefully!
//
// HASHn folds n components: each step shifts the accumulated hash left
// by 7 and XORs in the next component.

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1    ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// One hashed field: hash mixes name() and f1; equality requires same
// dynamic type (via as_##class_name) and equal f1. 'enabled' gates both.
#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


// Two hashed fields (see HASHING1 for the general scheme).
#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


// Three hashed fields (see HASHING1 for the general scheme).
#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
286 287 class Instruction: public CompilationResourceObj { 288 private: 289 int _id; // the unique instruction id 290 #ifndef PRODUCT 291 int _printable_bci; // the bci of the instruction for printing 292 #endif 293 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 294 int _pin_state; // set of PinReason describing the reason for pinning 295 ValueType* _type; // the instruction value type 296 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 297 Instruction* _subst; // the substitution instruction if any 298 LIR_Opr _operand; // LIR specific information 299 unsigned int _flags; // Flag bits 300 301 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 302 ValueStack* _exception_state; // Copy of state for exception handling 303 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 304 305 friend class UseCountComputer; 306 friend class BlockBegin; 307 308 void update_exception_state(ValueStack* state); 309 310 protected: 311 BlockBegin* _block; // Block that contains this instruction 312 313 void set_type(ValueType* type) { 314 assert(type != NULL, "type must exist"); 315 _type = type; 316 } 317 318 // Helper class to keep track of which arguments need a null check 319 class ArgsNonNullState { 320 private: 321 int _nonnull_state; // mask identifying which args are nonnull 322 public: 323 ArgsNonNullState() 324 : _nonnull_state(AllBits) {} 325 326 // Does argument number i needs a null check? 327 bool arg_needs_null_check(int i) const { 328 // No data is kept for arguments starting at position 33 so 329 // conservatively assume that they need a null check. 
330 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 331 return is_set_nth_bit(_nonnull_state, i); 332 } 333 return true; 334 } 335 336 // Set whether argument number i needs a null check or not 337 void set_arg_needs_null_check(int i, bool check) { 338 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 339 if (check) { 340 _nonnull_state |= nth_bit(i); 341 } else { 342 _nonnull_state &= ~(nth_bit(i)); 343 } 344 } 345 } 346 }; 347 348 public: 349 void* operator new(size_t size) throw() { 350 Compilation* c = Compilation::current(); 351 void* res = c->arena()->Amalloc(size); 352 ((Instruction*)res)->_id = c->get_next_id(); 353 return res; 354 } 355 356 static const int no_bci = -99; 357 358 enum InstructionFlag { 359 NeedsNullCheckFlag = 0, 360 CanTrapFlag, 361 DirectCompareFlag, 362 IsEliminatedFlag, 363 IsSafepointFlag, 364 IsStaticFlag, 365 IsStrictfpFlag, 366 NeedsStoreCheckFlag, 367 NeedsWriteBarrierFlag, 368 PreservesStateFlag, 369 TargetIsFinalFlag, 370 TargetIsLoadedFlag, 371 TargetIsStrictfpFlag, 372 UnorderedIsTrueFlag, 373 NeedsPatchingFlag, 374 ThrowIncompatibleClassChangeErrorFlag, 375 InvokeSpecialReceiverCheckFlag, 376 ProfileMDOFlag, 377 IsLinkedInBlockFlag, 378 NeedsRangeCheckFlag, 379 InWorkListFlag, 380 DeoptimizeOnException, 381 InstructionLastFlag 382 }; 383 384 public: 385 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 386 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 387 388 // 'globally' used condition values 389 enum Condition { 390 eql, neq, lss, leq, gtr, geq, aeq, beq 391 }; 392 393 // Instructions may be pinned for many reasons and under certain conditions 394 // with enough knowledge it's possible to safely unpin them. 
395 enum PinReason { 396 PinUnknown = 1 << 0 397 , PinExplicitNullCheck = 1 << 3 398 , PinStackForStateSplit= 1 << 12 399 , PinStateSplitConstructor= 1 << 13 400 , PinGlobalValueNumbering= 1 << 14 401 }; 402 403 static Condition mirror(Condition cond); 404 static Condition negate(Condition cond); 405 406 // initialization 407 static int number_of_instructions() { 408 return Compilation::current()->number_of_instructions(); 409 } 410 411 // creation 412 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 413 : _use_count(0) 414 #ifndef PRODUCT 415 , _printable_bci(-99) 416 #endif 417 , _pin_state(0) 418 , _type(type) 419 , _next(NULL) 420 , _block(NULL) 421 , _subst(NULL) 422 , _flags(0) 423 , _operand(LIR_OprFact::illegalOpr) 424 , _state_before(state_before) 425 , _exception_handlers(NULL) 426 { 427 check_state(state_before); 428 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 429 update_exception_state(_state_before); 430 } 431 432 // accessors 433 int id() const { return _id; } 434 #ifndef PRODUCT 435 bool has_printable_bci() const { return _printable_bci != -99; } 436 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 437 void set_printable_bci(int bci) { _printable_bci = bci; } 438 #endif 439 int dominator_depth(); 440 int use_count() const { return _use_count; } 441 int pin_state() const { return _pin_state; } 442 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 443 ValueType* type() const { return _type; } 444 BlockBegin *block() const { return _block; } 445 Instruction* prev(); // use carefully, expensive operation 446 Instruction* next() const { return _next; } 447 bool has_subst() const { return _subst != NULL; } 448 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 449 LIR_Opr operand() const { return _operand; } 450 451 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 452 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 453 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 454 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 455 456 bool has_uses() const { return use_count() > 0; } 457 ValueStack* state_before() const { return _state_before; } 458 ValueStack* exception_state() const { return _exception_state; } 459 virtual bool needs_exception_state() const { return true; } 460 XHandlers* exception_handlers() const { return _exception_handlers; } 461 462 // manipulation 463 void pin(PinReason reason) { _pin_state |= reason; } 464 void pin() { _pin_state |= PinUnknown; } 465 // DANGEROUS: only used by EliminateStores 466 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 467 468 Instruction* set_next(Instruction* next) { 469 assert(next->has_printable_bci(), "_printable_bci should have been set"); 470 assert(next != NULL, "must not be NULL"); 471 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 472 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 473 474 BlockBegin *block = this->block(); 475 next->_block = block; 476 477 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 478 _next = next; 479 return next; 480 } 481 482 Instruction* set_next(Instruction* next, int bci) { 483 #ifndef PRODUCT 484 next->set_printable_bci(bci); 485 #endif 486 return set_next(next); 487 } 488 489 // when blocks are merged 490 void fixup_block_pointers() { 491 Instruction *cur = next()->next(); // next()'s block is set in set_next 492 while (cur && cur->_block != block()) { 493 cur->_block = block(); 494 cur = cur->next(); 495 } 496 } 497 498 Instruction *insert_after(Instruction *i) { 499 Instruction* n = 
_next; 500 set_next(i); 501 i->set_next(n); 502 return _next; 503 } 504 505 Instruction *insert_after_same_bci(Instruction *i) { 506 #ifndef PRODUCT 507 i->set_printable_bci(printable_bci()); 508 #endif 509 return insert_after(i); 510 } 511 512 void set_subst(Instruction* subst) { 513 assert(subst == NULL || 514 type()->base() == subst->type()->base() || 515 subst->type()->base() == illegalType, "type can't change"); 516 _subst = subst; 517 } 518 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 519 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 520 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 521 522 // machine-specifics 523 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 524 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 525 526 // generic 527 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 528 virtual Phi* as_Phi() { return NULL; } 529 virtual Local* as_Local() { return NULL; } 530 virtual Constant* as_Constant() { return NULL; } 531 virtual AccessField* as_AccessField() { return NULL; } 532 virtual LoadField* as_LoadField() { return NULL; } 533 virtual StoreField* as_StoreField() { return NULL; } 534 virtual AccessArray* as_AccessArray() { return NULL; } 535 virtual ArrayLength* as_ArrayLength() { return NULL; } 536 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 537 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 538 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 539 virtual NegateOp* as_NegateOp() { return NULL; } 540 virtual Op2* as_Op2() { return NULL; } 541 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 542 virtual ShiftOp* as_ShiftOp() { return NULL; } 543 virtual LogicOp* as_LogicOp() { return NULL; } 544 virtual CompareOp* as_CompareOp() { return NULL; } 545 virtual IfOp* as_IfOp() { return 
NULL; } 546 virtual Convert* as_Convert() { return NULL; } 547 virtual NullCheck* as_NullCheck() { return NULL; } 548 virtual OsrEntry* as_OsrEntry() { return NULL; } 549 virtual StateSplit* as_StateSplit() { return NULL; } 550 virtual Invoke* as_Invoke() { return NULL; } 551 virtual NewInstance* as_NewInstance() { return NULL; } 552 virtual NewArray* as_NewArray() { return NULL; } 553 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 554 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 555 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 556 virtual TypeCheck* as_TypeCheck() { return NULL; } 557 virtual CheckCast* as_CheckCast() { return NULL; } 558 virtual InstanceOf* as_InstanceOf() { return NULL; } 559 virtual TypeCast* as_TypeCast() { return NULL; } 560 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 561 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 562 virtual MonitorExit* as_MonitorExit() { return NULL; } 563 virtual Intrinsic* as_Intrinsic() { return NULL; } 564 virtual BlockBegin* as_BlockBegin() { return NULL; } 565 virtual BlockEnd* as_BlockEnd() { return NULL; } 566 virtual Goto* as_Goto() { return NULL; } 567 virtual If* as_If() { return NULL; } 568 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 569 virtual TableSwitch* as_TableSwitch() { return NULL; } 570 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 571 virtual Return* as_Return() { return NULL; } 572 virtual Throw* as_Throw() { return NULL; } 573 virtual Base* as_Base() { return NULL; } 574 virtual RoundFP* as_RoundFP() { return NULL; } 575 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 576 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 577 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 578 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 579 580 #ifdef ASSERT 581 virtual Assert* as_Assert() { return NULL; } 582 #endif 583 584 virtual void visit(InstructionVisitor* v) = 0; 585 
586 virtual bool can_trap() const { return false; } 587 588 virtual void input_values_do(ValueVisitor* f) = 0; 589 virtual void state_values_do(ValueVisitor* f); 590 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 591 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 592 593 virtual ciType* exact_type() const; 594 virtual ciType* declared_type() const { return NULL; } 595 596 // hashing 597 virtual const char* name() const = 0; 598 HASHING1(Instruction, false, id()) // hashing disabled by default 599 600 // debugging 601 static void check_state(ValueStack* state) PRODUCT_RETURN; 602 void print() PRODUCT_RETURN; 603 void print_line() PRODUCT_RETURN; 604 void print(InstructionPrinter& ip) PRODUCT_RETURN; 605 }; 606 607 608 // The following macros are used to define base (i.e., non-leaf) 609 // and leaf instruction classes. They define class-name related 610 // generic functionality in one place. 611 612 #define BASE(class_name, super_class_name) \ 613 class class_name: public super_class_name { \ 614 public: \ 615 virtual class_name* as_##class_name() { return this; } \ 616 617 618 #define LEAF(class_name, super_class_name) \ 619 BASE(class_name, super_class_name) \ 620 public: \ 621 virtual const char* name() const { return #class_name; } \ 622 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 623 624 625 // Debugging support 626 627 628 #ifdef ASSERT 629 class AssertValues: public ValueVisitor { 630 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 631 }; 632 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 633 #else 634 #define ASSERT_VALUES 635 #endif // ASSERT 636 637 638 // A Phi is a phi function in the sense of SSA form. It stands for 639 // the value of a local variable at the beginning of a join block. 640 // A Phi consists of n operands, one for every incoming branch. 

// SSA phi function: merges the values of one local/stack slot at a join block.
LEAF(Phi, Instruction)
 private:
  int         _pf_flags; // the flags of the phi function
  int         _index;    // to value on operand stack (index < 0) or to local
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  {
    _block = b;
    // Value(b) casts the BlockBegin to Instruction* to reach printable_bci().
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  // Stack slots are encoded as negative indices: index = -(slot+1).
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;  // the value flowing in from predecessor i
  int   operand_count() const;    // number of incoming operands

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // No locally-stored input values: operands are fetched via operand_at()
  // (presumably from the predecessors' states — defined out of line).
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
698 LEAF(Local, Instruction) 699 private: 700 int _java_index; // the local index within the method to which the local belongs 701 bool _is_receiver; // if local variable holds the receiver: "this" for non-static methods 702 ciType* _declared_type; 703 public: 704 // creation 705 Local(ciType* declared, ValueType* type, int index, bool receiver) 706 : Instruction(type) 707 , _java_index(index) 708 , _declared_type(declared) 709 , _is_receiver(receiver) 710 { 711 NOT_PRODUCT(set_printable_bci(-1)); 712 } 713 714 // accessors 715 int java_index() const { return _java_index; } 716 bool is_receiver() const { return _is_receiver; } 717 718 virtual ciType* declared_type() const { return _declared_type; } 719 720 // generic 721 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 722 }; 723 724 725 LEAF(Constant, Instruction) 726 public: 727 // creation 728 Constant(ValueType* type): 729 Instruction(type, NULL, /*type_is_constant*/ true) 730 { 731 assert(type->is_constant(), "must be a constant"); 732 } 733 734 Constant(ValueType* type, ValueStack* state_before): 735 Instruction(type, state_before, /*type_is_constant*/ true) 736 { 737 assert(state_before != NULL, "only used for constants which need patching"); 738 assert(type->is_constant(), "must be a constant"); 739 // since it's patching it needs to be pinned 740 pin(); 741 } 742 743 // generic 744 virtual bool can_trap() const { return state_before() != NULL; } 745 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 746 747 virtual intx hash() const; 748 virtual bool is_equal(Value v) const; 749 750 virtual ciType* exact_type() const; 751 752 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 753 754 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 755 BlockBegin* compare(Instruction::Condition cond, Value right, 756 BlockBegin* true_sux, BlockBegin* false_sux) const { 757 switch (compare(cond, right)) { 758 case not_comparable: 759 return 
NULL; 760 case cond_false: 761 return false_sux; 762 case cond_true: 763 return true_sux; 764 default: 765 ShouldNotReachHere(); 766 return NULL; 767 } 768 } 769 }; 770 771 772 BASE(AccessField, Instruction) 773 private: 774 Value _obj; 775 int _offset; 776 ciField* _field; 777 NullCheck* _explicit_null_check; // For explicit null check elimination 778 779 public: 780 // creation 781 AccessField(Value obj, int offset, ciField* field, bool is_static, 782 ValueStack* state_before, bool needs_patching) 783 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 784 , _obj(obj) 785 , _offset(offset) 786 , _field(field) 787 , _explicit_null_check(NULL) 788 { 789 set_needs_null_check(!is_static); 790 set_flag(IsStaticFlag, is_static); 791 set_flag(NeedsPatchingFlag, needs_patching); 792 ASSERT_VALUES 793 // pin of all instructions with memory access 794 pin(); 795 } 796 797 // accessors 798 Value obj() const { return _obj; } 799 int offset() const { return _offset; } 800 ciField* field() const { return _field; } 801 BasicType field_type() const { return _field->type()->basic_type(); } 802 bool is_static() const { return check_flag(IsStaticFlag); } 803 NullCheck* explicit_null_check() const { return _explicit_null_check; } 804 bool needs_patching() const { return check_flag(NeedsPatchingFlag); } 805 806 // Unresolved getstatic and putstatic can cause initialization. 807 // Technically it occurs at the Constant that materializes the base 808 // of the static fields but it's simpler to model it here. 809 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); } 810 811 // manipulation 812 813 // Under certain circumstances, if a previous NullCheck instruction 814 // proved the target object non-null, we can eliminate the explicit 815 // null check and do an implicit one, simply specifying the debug 816 // information from the NullCheck. 
This field should only be consulted 817 // if needs_null_check() is true. 818 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 819 820 // generic 821 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 822 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 823 }; 824 825 826 LEAF(LoadField, AccessField) 827 public: 828 // creation 829 LoadField(Value obj, int offset, ciField* field, bool is_static, 830 ValueStack* state_before, bool needs_patching) 831 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 832 {} 833 834 ciType* declared_type() const; 835 836 // generic 837 HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset()) // cannot be eliminated if needs patching or if volatile 838 }; 839 840 841 LEAF(StoreField, AccessField) 842 private: 843 Value _value; 844 845 public: 846 // creation 847 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 848 ValueStack* state_before, bool needs_patching) 849 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 850 , _value(value) 851 { 852 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); 853 ASSERT_VALUES 854 pin(); 855 } 856 857 // accessors 858 Value value() const { return _value; } 859 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 860 861 // generic 862 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 863 }; 864 865 866 BASE(AccessArray, Instruction) 867 private: 868 Value _array; 869 870 public: 871 // creation 872 AccessArray(ValueType* type, Value array, ValueStack* state_before) 873 : Instruction(type, state_before) 874 , _array(array) 875 { 876 set_needs_null_check(true); 877 ASSERT_VALUES 878 pin(); // instruction with side effect (null exception or range check throwing) 879 } 880 881 Value array() const { return _array; } 
  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};


// Loads the length field of an array.
LEAF(ArrayLength, AccessArray)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


// Base class for indexed array accesses (loads and stores); holds the
// index, the (optional) known array length, and the element type.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;
  BasicType _elt_type;
  bool      _mismatched;                         // access type does not match the declared element type

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  , _mismatched(mismatched)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }
  bool mismatched() const                        { return _mismatched; }

  void clear_length()                            { _length = NULL; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  // _length may be NULL (e.g. cleared via clear_length), so only visit it when present.
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};


LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};


LEAF(StoreIndexed, AccessIndexed)
 private:
  Value       _value;

  ciMethod*   _profiled_method;                  // for MethodData* profiling
  int         _profiled_bci;
  bool        _check_boolean;                    // store to boolean array needs value masked to 0/1

 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before,
               bool check_boolean, bool mismatched = false)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean)
  {
    // object stores need a GC write barrier and a dynamic store (subtype) check
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  bool check_boolean() const                     { return _check_boolean; }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool
should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Arithmetic negation of a single value.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};


// Base class for binary operations on two values.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};


LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool is_strictfp() const                       { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Three-way compare (lcmp/fcmpl/fcmpg/dcmpl/dcmpg); always produces an int.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional move: yields tval if (x cond y), otherwise fval.
LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;

 public:
  // creation
  // NOTE: the condition is smuggled through Op2's bytecode slot (see cond()).
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (e.g. i2l, d2i).
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on an object value; traps if the object is null.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;
  Value   _obj;

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                  { return _declared_type; }
  Value obj() const                              { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


// Base class for instructions that record the value stack state (_state),
// e.g. because they can deoptimize/trap or end a basic block.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;                            // the state after this instruction (set once via set_state)

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// Method invocation (invokevirtual/special/static/interface/dynamic).
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;
  Value           _recv;                         // receiver; NULL for static calls
  Values*         _args;
  BasicTypeList*  _signature;
  int             _vtable_index;
  ciMethod*       _target;

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return
_vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Allocation of a new (non-array) object instance.
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;
  bool             _is_unresolved;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }
  bool is_unresolved() const                     { return _is_unresolved; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};


// Base class for array allocation instructions.
BASE(NewArray, StateSplit)
 private:
  Value _length;                                 // NULL for NewMultiArray (dimensions held separately)

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* exact_type() const                     { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};


LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};


LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};


LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;
  Values*  _dims;                                // one length Value per dimension

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the zeroth
    // element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};


// Base class for type checks (checkcast/instanceof).
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*    _klass;                            // NULL if the klass is not loaded
  Value       _obj;

  ciMethod*   _profiled_method;                  // for MethodData* profiling
  int         _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
};


LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};


// Base class for monitor operations (monitorenter/monitorexit).
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;
  int   _monitor_no;                             // index of the monitor slot used for this lock

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};


LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const                  { return true; }
};


LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
ASSERT_VALUES 1528 } 1529 }; 1530 1531 1532 LEAF(Intrinsic, StateSplit) 1533 private: 1534 vmIntrinsics::ID _id; 1535 Values* _args; 1536 Value _recv; 1537 ArgsNonNullState _nonnull_state; 1538 1539 public: 1540 // preserves_state can be set to true for Intrinsics 1541 // which are guaranteed to preserve register state across any slow 1542 // cases; setting it to true does not mean that the Intrinsic can 1543 // not trap, only that if we continue execution in the same basic 1544 // block after the Intrinsic, all of the registers are intact. This 1545 // allows load elimination and common expression elimination to be 1546 // performed across the Intrinsic. The default value is false. 1547 Intrinsic(ValueType* type, 1548 vmIntrinsics::ID id, 1549 Values* args, 1550 bool has_receiver, 1551 ValueStack* state_before, 1552 bool preserves_state, 1553 bool cantrap = true) 1554 : StateSplit(type, state_before) 1555 , _id(id) 1556 , _args(args) 1557 , _recv(NULL) 1558 { 1559 assert(args != NULL, "args must exist"); 1560 ASSERT_VALUES 1561 set_flag(PreservesStateFlag, preserves_state); 1562 set_flag(CanTrapFlag, cantrap); 1563 if (has_receiver) { 1564 _recv = argument_at(0); 1565 } 1566 set_needs_null_check(has_receiver); 1567 1568 // some intrinsics can't trap, so don't force them to be pinned 1569 if (!can_trap()) { 1570 unpin(PinStateSplitConstructor); 1571 } 1572 } 1573 1574 // accessors 1575 vmIntrinsics::ID id() const { return _id; } 1576 int number_of_arguments() const { return _args->length(); } 1577 Value argument_at(int i) const { return _args->at(i); } 1578 1579 bool has_receiver() const { return (_recv != NULL); } 1580 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } 1581 bool preserves_state() const { return check_flag(PreservesStateFlag); } 1582 1583 bool arg_needs_null_check(int i) const { 1584 return _nonnull_state.arg_needs_null_check(i); 1585 } 1586 1587 void set_arg_needs_null_check(int i, bool check) { 1588 
_nonnull_state.set_arg_needs_null_check(i, check); 1589 } 1590 1591 // generic 1592 virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1593 virtual void input_values_do(ValueVisitor* f) { 1594 StateSplit::input_values_do(f); 1595 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1596 } 1597 }; 1598 1599 1600 class LIR_List; 1601 1602 LEAF(BlockBegin, StateSplit) 1603 private: 1604 int _block_id; // the unique block id 1605 int _bci; // start-bci of block 1606 int _depth_first_number; // number of this block in a depth-first ordering 1607 int _linear_scan_number; // number of this block in linear-scan ordering 1608 int _dominator_depth; 1609 int _loop_depth; // the loop nesting level of this block 1610 int _loop_index; // number of the innermost loop of this block 1611 int _flags; // the flags associated with this block 1612 1613 // fields used by BlockListBuilder 1614 int _total_preds; // number of predecessors found by BlockListBuilder 1615 ResourceBitMap _stores_to_locals; // bit is set when a local variable is stored in the block 1616 1617 // SSA specific fields: (factor out later) 1618 BlockList _successors; // the successors of this block 1619 BlockList _predecessors; // the predecessors of this block 1620 BlockList _dominates; // list of blocks that are dominated by this block 1621 BlockBegin* _dominator; // the dominator of this block 1622 // SSA specific ends 1623 BlockEnd* _end; // the last instruction of this block 1624 BlockList _exception_handlers; // the exception handlers potentially invoked by this block 1625 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler 1626 int _exception_handler_pco; // if this block is the start of an exception handler, 1627 // this records the PC offset in the assembly code of the 1628 // first instruction in this block 1629 Label _label; // the label associated with this block 1630 LIR_List* _lir; // the low 
level intermediate representation for this block 1631 1632 ResourceBitMap _live_in; // set of live LIR_Opr registers at entry to this block 1633 ResourceBitMap _live_out; // set of live LIR_Opr registers at exit from this block 1634 ResourceBitMap _live_gen; // set of registers used before any redefinition in this block 1635 ResourceBitMap _live_kill; // set of registers defined in this block 1636 1637 ResourceBitMap _fpu_register_usage; 1638 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan 1639 int _first_lir_instruction_id; // ID of first LIR instruction in this block 1640 int _last_lir_instruction_id; // ID of last LIR instruction in this block 1641 1642 void iterate_preorder (boolArray& mark, BlockClosure* closure); 1643 void iterate_postorder(boolArray& mark, BlockClosure* closure); 1644 1645 friend class SuxAndWeightAdjuster; 1646 1647 public: 1648 void* operator new(size_t size) throw() { 1649 Compilation* c = Compilation::current(); 1650 void* res = c->arena()->Amalloc(size); 1651 ((BlockBegin*)res)->_id = c->get_next_id(); 1652 ((BlockBegin*)res)->_block_id = c->get_next_block_id(); 1653 return res; 1654 } 1655 1656 // initialization/counting 1657 static int number_of_blocks() { 1658 return Compilation::current()->number_of_blocks(); 1659 } 1660 1661 // creation 1662 BlockBegin(int bci) 1663 : StateSplit(illegalType) 1664 , _bci(bci) 1665 , _depth_first_number(-1) 1666 , _linear_scan_number(-1) 1667 , _loop_depth(0) 1668 , _flags(0) 1669 , _dominator_depth(-1) 1670 , _dominator(NULL) 1671 , _end(NULL) 1672 , _predecessors(2) 1673 , _successors(2) 1674 , _dominates(2) 1675 , _exception_handlers(1) 1676 , _exception_states(NULL) 1677 , _exception_handler_pco(-1) 1678 , _lir(NULL) 1679 , _loop_index(-1) 1680 , _live_in() 1681 , _live_out() 1682 , _live_gen() 1683 , _live_kill() 1684 , _fpu_register_usage() 1685 , _fpu_stack_state(NULL) 1686 , _first_lir_instruction_id(-1) 1687 , _last_lir_instruction_id(-1) 1688 , _total_preds(0) 
1689 , _stores_to_locals() 1690 { 1691 _block = this; 1692 #ifndef PRODUCT 1693 set_printable_bci(bci); 1694 #endif 1695 } 1696 1697 // accessors 1698 int block_id() const { return _block_id; } 1699 int bci() const { return _bci; } 1700 BlockList* successors() { return &_successors; } 1701 BlockList* dominates() { return &_dominates; } 1702 BlockBegin* dominator() const { return _dominator; } 1703 int loop_depth() const { return _loop_depth; } 1704 int dominator_depth() const { return _dominator_depth; } 1705 int depth_first_number() const { return _depth_first_number; } 1706 int linear_scan_number() const { return _linear_scan_number; } 1707 BlockEnd* end() const { return _end; } 1708 Label* label() { return &_label; } 1709 LIR_List* lir() const { return _lir; } 1710 int exception_handler_pco() const { return _exception_handler_pco; } 1711 ResourceBitMap& live_in() { return _live_in; } 1712 ResourceBitMap& live_out() { return _live_out; } 1713 ResourceBitMap& live_gen() { return _live_gen; } 1714 ResourceBitMap& live_kill() { return _live_kill; } 1715 ResourceBitMap& fpu_register_usage() { return _fpu_register_usage; } 1716 intArray* fpu_stack_state() const { return _fpu_stack_state; } 1717 int first_lir_instruction_id() const { return _first_lir_instruction_id; } 1718 int last_lir_instruction_id() const { return _last_lir_instruction_id; } 1719 int total_preds() const { return _total_preds; } 1720 BitMap& stores_to_locals() { return _stores_to_locals; } 1721 1722 // manipulation 1723 void set_dominator(BlockBegin* dom) { _dominator = dom; } 1724 void set_loop_depth(int d) { _loop_depth = d; } 1725 void set_dominator_depth(int d) { _dominator_depth = d; } 1726 void set_depth_first_number(int dfn) { _depth_first_number = dfn; } 1727 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; } 1728 void set_end(BlockEnd* end); 1729 void clear_end(); 1730 void disconnect_from_graph(); 1731 static void disconnect_edge(BlockBegin* from, BlockBegin* to); 1732 
BlockBegin* insert_block_between(BlockBegin* sux); 1733 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1734 void set_lir(LIR_List* lir) { _lir = lir; } 1735 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; } 1736 void set_live_in (const ResourceBitMap& map) { _live_in = map; } 1737 void set_live_out (const ResourceBitMap& map) { _live_out = map; } 1738 void set_live_gen (const ResourceBitMap& map) { _live_gen = map; } 1739 void set_live_kill(const ResourceBitMap& map) { _live_kill = map; } 1740 void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; } 1741 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; } 1742 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; } 1743 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; } 1744 void increment_total_preds(int n = 1) { _total_preds += n; } 1745 void init_stores_to_locals(int locals_count) { _stores_to_locals.initialize(locals_count); } 1746 1747 // generic 1748 virtual void state_values_do(ValueVisitor* f); 1749 1750 // successors and predecessors 1751 int number_of_sux() const; 1752 BlockBegin* sux_at(int i) const; 1753 void add_successor(BlockBegin* sux); 1754 void remove_successor(BlockBegin* pred); 1755 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); } 1756 1757 void add_predecessor(BlockBegin* pred); 1758 void remove_predecessor(BlockBegin* pred); 1759 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); } 1760 int number_of_preds() const { return _predecessors.length(); } 1761 BlockBegin* pred_at(int i) const { return _predecessors.at(i); } 1762 1763 // exception handlers potentially invoked by this block 1764 void add_exception_handler(BlockBegin* b); 1765 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); } 1766 int number_of_exception_handlers() const { return 
_exception_handlers.length(); } 1767 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); } 1768 1769 // states of the instructions that have an edge to this exception handler 1770 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); } 1771 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); } 1772 int add_exception_state(ValueStack* state); 1773 1774 // flags 1775 enum Flag { 1776 no_flag = 0, 1777 std_entry_flag = 1 << 0, 1778 osr_entry_flag = 1 << 1, 1779 exception_entry_flag = 1 << 2, 1780 subroutine_entry_flag = 1 << 3, 1781 backward_branch_target_flag = 1 << 4, 1782 is_on_work_list_flag = 1 << 5, 1783 was_visited_flag = 1 << 6, 1784 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand 1785 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split 1786 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan 1787 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan 1788 donot_eliminate_range_checks = 1 << 11 // Should be try to eliminate range checks in this block 1789 }; 1790 1791 void set(Flag f) { _flags |= f; } 1792 void clear(Flag f) { _flags &= ~f; } 1793 bool is_set(Flag f) const { return (_flags & f) != 0; } 1794 bool is_entry_block() const { 1795 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag; 1796 return (_flags & entry_mask) != 0; 1797 } 1798 1799 // iteration 1800 void iterate_preorder (BlockClosure* closure); 1801 void iterate_postorder (BlockClosure* closure); 1802 1803 void block_values_do(ValueVisitor* f); 1804 1805 // loops 1806 void set_loop_index(int ix) { _loop_index = ix; } 1807 int loop_index() const { return 
_loop_index; } 1808 1809 // merging 1810 bool try_merge(ValueStack* state); // try to merge states at block begin 1811 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); } 1812 1813 // debugging 1814 void print_block() PRODUCT_RETURN; 1815 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN; 1816 }; 1817 1818 1819 BASE(BlockEnd, StateSplit) 1820 private: 1821 BlockList* _sux; 1822 1823 protected: 1824 BlockList* sux() const { return _sux; } 1825 1826 void set_sux(BlockList* sux) { 1827 #ifdef ASSERT 1828 assert(sux != NULL, "sux must exist"); 1829 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist"); 1830 #endif 1831 _sux = sux; 1832 } 1833 1834 public: 1835 // creation 1836 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1837 : StateSplit(type, state_before) 1838 , _sux(NULL) 1839 { 1840 set_flag(IsSafepointFlag, is_safepoint); 1841 } 1842 1843 // accessors 1844 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1845 // For compatibility with old code, for new code use block() 1846 BlockBegin* begin() const { return _block; } 1847 1848 // manipulation 1849 void set_begin(BlockBegin* begin); 1850 1851 // successors 1852 int number_of_sux() const { return _sux != NULL ? 
_sux->length() : 0; } 1853 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1854 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1855 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1856 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1857 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1858 }; 1859 1860 1861 LEAF(Goto, BlockEnd) 1862 public: 1863 enum Direction { 1864 none, // Just a regular goto 1865 taken, not_taken // Goto produced from If 1866 }; 1867 private: 1868 ciMethod* _profiled_method; 1869 int _profiled_bci; 1870 Direction _direction; 1871 public: 1872 // creation 1873 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1874 : BlockEnd(illegalType, state_before, is_safepoint) 1875 , _direction(none) 1876 , _profiled_method(NULL) 1877 , _profiled_bci(0) { 1878 BlockList* s = new BlockList(1); 1879 s->append(sux); 1880 set_sux(s); 1881 } 1882 1883 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1884 , _direction(none) 1885 , _profiled_method(NULL) 1886 , _profiled_bci(0) { 1887 BlockList* s = new BlockList(1); 1888 s->append(sux); 1889 set_sux(s); 1890 } 1891 1892 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1893 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1894 int profiled_bci() const { return _profiled_bci; } 1895 Direction direction() const { return _direction; } 1896 1897 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1898 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1899 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1900 void set_direction(Direction d) { _direction = d; } 1901 }; 1902 1903 #ifdef ASSERT 1904 LEAF(Assert, Instruction) 1905 private: 1906 Value _x; 1907 Condition _cond; 1908 Value _y; 1909 char *_message; 1910 1911 public: 1912 // creation 1913 // 
unordered_is_true is valid for float/double compares only 1914 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1915 1916 // accessors 1917 Value x() const { return _x; } 1918 Condition cond() const { return _cond; } 1919 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1920 Value y() const { return _y; } 1921 const char *message() const { return _message; } 1922 1923 // generic 1924 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1925 }; 1926 #endif 1927 1928 LEAF(RangeCheckPredicate, StateSplit) 1929 private: 1930 Value _x; 1931 Condition _cond; 1932 Value _y; 1933 1934 void check_state(); 1935 1936 public: 1937 // creation 1938 // unordered_is_true is valid for float/double compares only 1939 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 1940 , _x(x) 1941 , _cond(cond) 1942 , _y(y) 1943 { 1944 ASSERT_VALUES 1945 set_flag(UnorderedIsTrueFlag, unordered_is_true); 1946 assert(x->type()->tag() == y->type()->tag(), "types must match"); 1947 this->set_state(state); 1948 check_state(); 1949 } 1950 1951 // Always deoptimize 1952 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 1953 { 1954 this->set_state(state); 1955 _x = _y = NULL; 1956 check_state(); 1957 } 1958 1959 // accessors 1960 Value x() const { return _x; } 1961 Condition cond() const { return _cond; } 1962 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1963 Value y() const { return _y; } 1964 1965 void always_fail() { _x = _y = NULL; } 1966 1967 // generic 1968 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); } 1969 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 1970 }; 1971 1972 LEAF(If, BlockEnd) 1973 private: 1974 Value _x; 1975 Condition _cond; 1976 Value _y; 1977 ciMethod* _profiled_method; 1978 int _profiled_bci; // 
Canonicalizer may alter bci of If node 1979 bool _swapped; // Is the order reversed with respect to the original If in the 1980 // bytecode stream? 1981 public: 1982 // creation 1983 // unordered_is_true is valid for float/double compares only 1984 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint) 1985 : BlockEnd(illegalType, state_before, is_safepoint) 1986 , _x(x) 1987 , _cond(cond) 1988 , _y(y) 1989 , _profiled_method(NULL) 1990 , _profiled_bci(0) 1991 , _swapped(false) 1992 { 1993 ASSERT_VALUES 1994 set_flag(UnorderedIsTrueFlag, unordered_is_true); 1995 assert(x->type()->tag() == y->type()->tag(), "types must match"); 1996 BlockList* s = new BlockList(2); 1997 s->append(tsux); 1998 s->append(fsux); 1999 set_sux(s); 2000 } 2001 2002 // accessors 2003 Value x() const { return _x; } 2004 Condition cond() const { return _cond; } 2005 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2006 Value y() const { return _y; } 2007 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 
0 : 1); } 2008 BlockBegin* tsux() const { return sux_for(true); } 2009 BlockBegin* fsux() const { return sux_for(false); } 2010 BlockBegin* usux() const { return sux_for(unordered_is_true()); } 2011 bool should_profile() const { return check_flag(ProfileMDOFlag); } 2012 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 2013 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered 2014 bool is_swapped() const { return _swapped; } 2015 2016 // manipulation 2017 void swap_operands() { 2018 Value t = _x; _x = _y; _y = t; 2019 _cond = mirror(_cond); 2020 } 2021 2022 void swap_sux() { 2023 assert(number_of_sux() == 2, "wrong number of successors"); 2024 BlockList* s = sux(); 2025 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2026 _cond = negate(_cond); 2027 set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag)); 2028 } 2029 2030 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 2031 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 2032 void set_profiled_bci(int bci) { _profiled_bci = bci; } 2033 void set_swapped(bool value) { _swapped = value; } 2034 // generic 2035 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2036 }; 2037 2038 2039 LEAF(IfInstanceOf, BlockEnd) 2040 private: 2041 ciKlass* _klass; 2042 Value _obj; 2043 bool _test_is_instance; // jump if instance 2044 int _instanceof_bci; 2045 2046 public: 2047 IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux) 2048 : BlockEnd(illegalType, NULL, false) // temporary set to false 2049 , _klass(klass) 2050 , _obj(obj) 2051 , _test_is_instance(test_is_instance) 2052 , _instanceof_bci(instanceof_bci) 2053 { 2054 ASSERT_VALUES 2055 assert(instanceof_bci >= 0, "illegal bci"); 2056 BlockList* s = new BlockList(2); 2057 s->append(tsux); 2058 
s->append(fsux); 2059 set_sux(s); 2060 } 2061 2062 // accessors 2063 // 2064 // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an 2065 // instance of klass; otherwise it tests if it is *not* and instance 2066 // of klass. 2067 // 2068 // Note 2: IfInstanceOf instructions are created by combining an InstanceOf 2069 // and an If instruction. The IfInstanceOf bci() corresponds to the 2070 // bci that the If would have had; the (this->) instanceof_bci() is 2071 // the bci of the original InstanceOf instruction. 2072 ciKlass* klass() const { return _klass; } 2073 Value obj() const { return _obj; } 2074 int instanceof_bci() const { return _instanceof_bci; } 2075 bool test_is_instance() const { return _test_is_instance; } 2076 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); } 2077 BlockBegin* tsux() const { return sux_for(true); } 2078 BlockBegin* fsux() const { return sux_for(false); } 2079 2080 // manipulation 2081 void swap_sux() { 2082 assert(number_of_sux() == 2, "wrong number of successors"); 2083 BlockList* s = sux(); 2084 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2085 _test_is_instance = !_test_is_instance; 2086 } 2087 2088 // generic 2089 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); } 2090 }; 2091 2092 2093 BASE(Switch, BlockEnd) 2094 private: 2095 Value _tag; 2096 2097 public: 2098 // creation 2099 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint) 2100 : BlockEnd(illegalType, state_before, is_safepoint) 2101 , _tag(tag) { 2102 ASSERT_VALUES 2103 set_sux(sux); 2104 } 2105 2106 // accessors 2107 Value tag() const { return _tag; } 2108 int length() const { return number_of_sux() - 1; } 2109 2110 virtual bool needs_exception_state() const { return false; } 2111 2112 // generic 2113 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); } 2114 }; 2115 2116 2117 
LEAF(TableSwitch, Switch) 2118 private: 2119 int _lo_key; 2120 2121 public: 2122 // creation 2123 TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint) 2124 : Switch(tag, sux, state_before, is_safepoint) 2125 , _lo_key(lo_key) {} 2126 2127 // accessors 2128 int lo_key() const { return _lo_key; } 2129 int hi_key() const { return _lo_key + length() - 1; } 2130 }; 2131 2132 2133 LEAF(LookupSwitch, Switch) 2134 private: 2135 intArray* _keys; 2136 2137 public: 2138 // creation 2139 LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint) 2140 : Switch(tag, sux, state_before, is_safepoint) 2141 , _keys(keys) { 2142 assert(keys != NULL, "keys must exist"); 2143 assert(keys->length() == length(), "sux & keys have incompatible lengths"); 2144 } 2145 2146 // accessors 2147 int key_at(int i) const { return _keys->at(i); } 2148 }; 2149 2150 2151 LEAF(Return, BlockEnd) 2152 private: 2153 Value _result; 2154 2155 public: 2156 // creation 2157 Return(Value result) : 2158 BlockEnd(result == NULL ? 
voidType : result->type()->base(), NULL, true), 2159 _result(result) {} 2160 2161 // accessors 2162 Value result() const { return _result; } 2163 bool has_result() const { return result() != NULL; } 2164 2165 // generic 2166 virtual void input_values_do(ValueVisitor* f) { 2167 BlockEnd::input_values_do(f); 2168 if (has_result()) f->visit(&_result); 2169 } 2170 }; 2171 2172 2173 LEAF(Throw, BlockEnd) 2174 private: 2175 Value _exception; 2176 2177 public: 2178 // creation 2179 Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) { 2180 ASSERT_VALUES 2181 } 2182 2183 // accessors 2184 Value exception() const { return _exception; } 2185 2186 // generic 2187 virtual bool can_trap() const { return true; } 2188 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); } 2189 }; 2190 2191 2192 LEAF(Base, BlockEnd) 2193 public: 2194 // creation 2195 Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) { 2196 assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged"); 2197 assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged"); 2198 BlockList* s = new BlockList(2); 2199 if (osr_entry != NULL) s->append(osr_entry); 2200 s->append(std_entry); // must be default sux! 2201 set_sux(s); 2202 } 2203 2204 // accessors 2205 BlockBegin* std_entry() const { return default_sux(); } 2206 BlockBegin* osr_entry() const { return number_of_sux() < 2 ? 
NULL : sux_at(0); } 2207 }; 2208 2209 2210 LEAF(OsrEntry, Instruction) 2211 public: 2212 // creation 2213 #ifdef _LP64 2214 OsrEntry() : Instruction(longType) { pin(); } 2215 #else 2216 OsrEntry() : Instruction(intType) { pin(); } 2217 #endif 2218 2219 // generic 2220 virtual void input_values_do(ValueVisitor* f) { } 2221 }; 2222 2223 2224 // Models the incoming exception at a catch site 2225 LEAF(ExceptionObject, Instruction) 2226 public: 2227 // creation 2228 ExceptionObject() : Instruction(objectType) { 2229 pin(); 2230 } 2231 2232 // generic 2233 virtual void input_values_do(ValueVisitor* f) { } 2234 }; 2235 2236 2237 // Models needed rounding for floating-point values on Intel. 2238 // Currently only used to represent rounding of double-precision 2239 // values stored into local variables, but could be used to model 2240 // intermediate rounding of single-precision values as well. 2241 LEAF(RoundFP, Instruction) 2242 private: 2243 Value _input; // floating-point value to be rounded 2244 2245 public: 2246 RoundFP(Value input) 2247 : Instruction(input->type()) // Note: should not be used for constants 2248 , _input(input) 2249 { 2250 ASSERT_VALUES 2251 } 2252 2253 // accessors 2254 Value input() const { return _input; } 2255 2256 // generic 2257 virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); } 2258 }; 2259 2260 2261 BASE(UnsafeOp, Instruction) 2262 private: 2263 BasicType _basic_type; // ValueType can not express byte-sized integers 2264 2265 protected: 2266 // creation 2267 UnsafeOp(BasicType basic_type, bool is_put) 2268 : Instruction(is_put ? voidType : as_ValueType(basic_type)) 2269 , _basic_type(basic_type) 2270 { 2271 //Note: Unsafe ops are not not guaranteed to throw NPE. 2272 // Convservatively, Unsafe operations must be pinned though we could be 2273 // looser about this if we wanted to.. 
2274 pin(); 2275 } 2276 2277 public: 2278 // accessors 2279 BasicType basic_type() { return _basic_type; } 2280 2281 // generic 2282 virtual void input_values_do(ValueVisitor* f) { } 2283 }; 2284 2285 2286 BASE(UnsafeRawOp, UnsafeOp) 2287 private: 2288 Value _base; // Base address (a Java long) 2289 Value _index; // Index if computed by optimizer; initialized to NULL 2290 int _log2_scale; // Scale factor: 0, 1, 2, or 3. 2291 // Indicates log2 of number of bytes (1, 2, 4, or 8) 2292 // to scale index by. 2293 2294 protected: 2295 UnsafeRawOp(BasicType basic_type, Value addr, bool is_put) 2296 : UnsafeOp(basic_type, is_put) 2297 , _base(addr) 2298 , _index(NULL) 2299 , _log2_scale(0) 2300 { 2301 // Can not use ASSERT_VALUES because index may be NULL 2302 assert(addr != NULL && addr->type()->is_long(), "just checking"); 2303 } 2304 2305 UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put) 2306 : UnsafeOp(basic_type, is_put) 2307 , _base(base) 2308 , _index(index) 2309 , _log2_scale(log2_scale) 2310 { 2311 } 2312 2313 public: 2314 // accessors 2315 Value base() { return _base; } 2316 Value index() { return _index; } 2317 bool has_index() { return (_index != NULL); } 2318 int log2_scale() { return _log2_scale; } 2319 2320 // setters 2321 void set_base (Value base) { _base = base; } 2322 void set_index(Value index) { _index = index; } 2323 void set_log2_scale(int log2_scale) { _log2_scale = log2_scale; } 2324 2325 // generic 2326 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2327 f->visit(&_base); 2328 if (has_index()) f->visit(&_index); } 2329 }; 2330 2331 2332 LEAF(UnsafeGetRaw, UnsafeRawOp) 2333 private: 2334 bool _may_be_unaligned, _is_wide; // For OSREntry 2335 2336 public: 2337 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false) 2338 : UnsafeRawOp(basic_type, addr, false) { 2339 _may_be_unaligned = may_be_unaligned; 2340 _is_wide = is_wide; 2341 } 2342 2343 
UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false) 2344 : UnsafeRawOp(basic_type, base, index, log2_scale, false) { 2345 _may_be_unaligned = may_be_unaligned; 2346 _is_wide = is_wide; 2347 } 2348 2349 bool may_be_unaligned() { return _may_be_unaligned; } 2350 bool is_wide() { return _is_wide; } 2351 }; 2352 2353 2354 LEAF(UnsafePutRaw, UnsafeRawOp) 2355 private: 2356 Value _value; // Value to be stored 2357 2358 public: 2359 UnsafePutRaw(BasicType basic_type, Value addr, Value value) 2360 : UnsafeRawOp(basic_type, addr, true) 2361 , _value(value) 2362 { 2363 assert(value != NULL, "just checking"); 2364 ASSERT_VALUES 2365 } 2366 2367 UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value) 2368 : UnsafeRawOp(basic_type, base, index, log2_scale, true) 2369 , _value(value) 2370 { 2371 assert(value != NULL, "just checking"); 2372 ASSERT_VALUES 2373 } 2374 2375 // accessors 2376 Value value() { return _value; } 2377 2378 // generic 2379 virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f); 2380 f->visit(&_value); } 2381 }; 2382 2383 2384 BASE(UnsafeObjectOp, UnsafeOp) 2385 private: 2386 Value _object; // Object to be fetched from or mutated 2387 Value _offset; // Offset within object 2388 bool _is_volatile; // true if volatile - dl/JSR166 2389 public: 2390 UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile) 2391 : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile) 2392 { 2393 } 2394 2395 // accessors 2396 Value object() { return _object; } 2397 Value offset() { return _offset; } 2398 bool is_volatile() { return _is_volatile; } 2399 // generic 2400 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2401 f->visit(&_object); 2402 f->visit(&_offset); } 2403 }; 2404 2405 2406 LEAF(UnsafeGetObject, UnsafeObjectOp) 2407 public: 2408 
UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile) 2409 : UnsafeObjectOp(basic_type, object, offset, false, is_volatile) 2410 { 2411 ASSERT_VALUES 2412 } 2413 }; 2414 2415 2416 LEAF(UnsafePutObject, UnsafeObjectOp) 2417 private: 2418 Value _value; // Value to be stored 2419 public: 2420 UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile) 2421 : UnsafeObjectOp(basic_type, object, offset, true, is_volatile) 2422 , _value(value) 2423 { 2424 ASSERT_VALUES 2425 } 2426 2427 // accessors 2428 Value value() { return _value; } 2429 2430 // generic 2431 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2432 f->visit(&_value); } 2433 }; 2434 2435 LEAF(UnsafeGetAndSetObject, UnsafeObjectOp) 2436 private: 2437 Value _value; // Value to be stored 2438 bool _is_add; 2439 public: 2440 UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add) 2441 : UnsafeObjectOp(basic_type, object, offset, false, false) 2442 , _value(value) 2443 , _is_add(is_add) 2444 { 2445 ASSERT_VALUES 2446 } 2447 2448 // accessors 2449 bool is_add() const { return _is_add; } 2450 Value value() { return _value; } 2451 2452 // generic 2453 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2454 f->visit(&_value); } 2455 }; 2456 2457 LEAF(ProfileCall, Instruction) 2458 private: 2459 ciMethod* _method; 2460 int _bci_of_invoke; 2461 ciMethod* _callee; // the method that is called at the given bci 2462 Value _recv; 2463 ciKlass* _known_holder; 2464 Values* _obj_args; // arguments for type profiling 2465 ArgsNonNullState _nonnull_state; // Do we know whether some arguments are never null? 
2466 bool _inlined; // Are we profiling a call that is inlined 2467 2468 public: 2469 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) 2470 : Instruction(voidType) 2471 , _method(method) 2472 , _bci_of_invoke(bci) 2473 , _callee(callee) 2474 , _recv(recv) 2475 , _known_holder(known_holder) 2476 , _obj_args(obj_args) 2477 , _inlined(inlined) 2478 { 2479 // The ProfileCall has side-effects and must occur precisely where located 2480 pin(); 2481 } 2482 2483 ciMethod* method() const { return _method; } 2484 int bci_of_invoke() const { return _bci_of_invoke; } 2485 ciMethod* callee() const { return _callee; } 2486 Value recv() const { return _recv; } 2487 ciKlass* known_holder() const { return _known_holder; } 2488 int nb_profiled_args() const { return _obj_args == NULL ? 0 : _obj_args->length(); } 2489 Value profiled_arg_at(int i) const { return _obj_args->at(i); } 2490 bool arg_needs_null_check(int i) const { 2491 return _nonnull_state.arg_needs_null_check(i); 2492 } 2493 bool inlined() const { return _inlined; } 2494 2495 void set_arg_needs_null_check(int i, bool check) { 2496 _nonnull_state.set_arg_needs_null_check(i, check); 2497 } 2498 2499 virtual void input_values_do(ValueVisitor* f) { 2500 if (_recv != NULL) { 2501 f->visit(&_recv); 2502 } 2503 for (int i = 0; i < nb_profiled_args(); i++) { 2504 f->visit(_obj_args->adr_at(i)); 2505 } 2506 } 2507 }; 2508 2509 LEAF(ProfileReturnType, Instruction) 2510 private: 2511 ciMethod* _method; 2512 ciMethod* _callee; 2513 int _bci_of_invoke; 2514 Value _ret; 2515 2516 public: 2517 ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret) 2518 : Instruction(voidType) 2519 , _method(method) 2520 , _callee(callee) 2521 , _bci_of_invoke(bci) 2522 , _ret(ret) 2523 { 2524 set_needs_null_check(true); 2525 // The ProfileType has side-effects and must occur precisely where located 2526 pin(); 2527 } 2528 2529 ciMethod* method() const { return 
_method; } 2530 ciMethod* callee() const { return _callee; } 2531 int bci_of_invoke() const { return _bci_of_invoke; } 2532 Value ret() const { return _ret; } 2533 2534 virtual void input_values_do(ValueVisitor* f) { 2535 if (_ret != NULL) { 2536 f->visit(&_ret); 2537 } 2538 } 2539 }; 2540 2541 // Call some C runtime function that doesn't safepoint, 2542 // optionally passing the current thread as the first argument. 2543 LEAF(RuntimeCall, Instruction) 2544 private: 2545 const char* _entry_name; 2546 address _entry; 2547 Values* _args; 2548 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2549 2550 public: 2551 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2552 : Instruction(type) 2553 , _entry(entry) 2554 , _args(args) 2555 , _entry_name(entry_name) 2556 , _pass_thread(pass_thread) { 2557 ASSERT_VALUES 2558 pin(); 2559 } 2560 2561 const char* entry_name() const { return _entry_name; } 2562 address entry() const { return _entry; } 2563 int number_of_arguments() const { return _args->length(); } 2564 Value argument_at(int i) const { return _args->at(i); } 2565 bool pass_thread() const { return _pass_thread; } 2566 2567 virtual void input_values_do(ValueVisitor* f) { 2568 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2569 } 2570 }; 2571 2572 // Use to trip invocation counter of an inlined method 2573 2574 LEAF(ProfileInvoke, Instruction) 2575 private: 2576 ciMethod* _inlinee; 2577 ValueStack* _state; 2578 2579 public: 2580 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2581 : Instruction(voidType) 2582 , _inlinee(inlinee) 2583 , _state(state) 2584 { 2585 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
2586 pin(); 2587 } 2588 2589 ciMethod* inlinee() { return _inlinee; } 2590 ValueStack* state() { return _state; } 2591 virtual void input_values_do(ValueVisitor*) {} 2592 virtual void state_values_do(ValueVisitor*); 2593 }; 2594 2595 LEAF(MemBar, Instruction) 2596 private: 2597 LIR_Code _code; 2598 2599 public: 2600 MemBar(LIR_Code code) 2601 : Instruction(voidType) 2602 , _code(code) 2603 { 2604 pin(); 2605 } 2606 2607 LIR_Code code() { return _code; } 2608 2609 virtual void input_values_do(ValueVisitor*) {} 2610 }; 2611 2612 class BlockPair: public CompilationResourceObj { 2613 private: 2614 BlockBegin* _from; 2615 BlockBegin* _to; 2616 public: 2617 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {} 2618 BlockBegin* from() const { return _from; } 2619 BlockBegin* to() const { return _to; } 2620 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; } 2621 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); } 2622 void set_to(BlockBegin* b) { _to = b; } 2623 void set_from(BlockBegin* b) { _from = b; } 2624 }; 2625 2626 typedef GrowableArray<BlockPair*> BlockPairList; 2627 2628 inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); } 2629 inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); } 2630 inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); } 2631 2632 #undef ASSERT_VALUES 2633 2634 #endif // SHARE_VM_C1_C1_INSTRUCTION_HPP