1 /* 2 * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
// Forward declarations of every node type in the C1 HIR instruction
// hierarchy, so the Value typedefs and visitor/as_X signatures below
// can refer to them before their full definitions.
class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewValueTypeInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class IfInstanceOf;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafeGetAndSetObject;
class ProfileCall;
class ProfileReturnType;
class ProfileInvoke;
class RuntimeCall;
class MemBar;
class RangeCheckPredicate;
#ifdef ASSERT
class Assert;                  // debug-only IR node, see #ifdef ASSERT sections below
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
typedef GrowableArray<Value> Values;
typedef GrowableArray<ValueStack*> ValueStackStack;

// BlockClosure is the base class for block traversal/iteration.
// Abstract callback invoked once per basic block by the traversal
// routines (see BlockList::iterate_forward/iterate_backward).
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction.
// The visitor receives a Value* (pointer to the slot holding the
// Instruction*), so implementations may replace the referenced value.
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// A growable list of basic blocks with iteration/printing helpers.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  // Pre-sized list with all 'size' slots filled with 'init'.
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure to blocks in list order
  void iterate_backward(BlockClosure* closure);  // apply closure in reverse list order
  void blocks_do(void f(BlockBegin*));           // plain function-pointer variant
  void values_do(ValueVisitor* f);               // visit all values of all blocks
  // Debug printing; compiled out in PRODUCT builds.
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// One pure-virtual hook per concrete (LEAF) instruction class; the
// LEAF macro below generates the matching visit() dispatch that calls
// v->do_ClassName(this).
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewValueTypeInstance(NewValueTypeInstance* x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: This hash functions affect the performance
//       of ValueMap - make changes carefully!

// Combine up to four intx-convertible values into one hash by
// shift-and-xor; the first argument is typically the instruction's
// name() pointer, which distinguishes instruction classes.
#define HASH1(x1                 )                    ((intx)(x1))
#define HASH2(x1, x2             )                    ((HASH1(x1   ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3         )                    ((HASH2(x1, x2) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4     )                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// HASHINGn(class_name, enabled, f1..fn): generates hash()/is_equal()
// over n fields. 'enabled' may be a runtime expression; when false the
// instruction opts out of value numbering (hash() == 0).
#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
288 289 class Instruction: public CompilationResourceObj { 290 private: 291 int _id; // the unique instruction id 292 #ifndef PRODUCT 293 int _printable_bci; // the bci of the instruction for printing 294 #endif 295 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 296 int _pin_state; // set of PinReason describing the reason for pinning 297 ValueType* _type; // the instruction value type 298 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 299 Instruction* _subst; // the substitution instruction if any 300 LIR_Opr _operand; // LIR specific information 301 unsigned int _flags; // Flag bits 302 303 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 304 ValueStack* _exception_state; // Copy of state for exception handling 305 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 306 307 friend class UseCountComputer; 308 friend class BlockBegin; 309 310 void update_exception_state(ValueStack* state); 311 312 protected: 313 BlockBegin* _block; // Block that contains this instruction 314 315 void set_type(ValueType* type) { 316 assert(type != NULL, "type must exist"); 317 _type = type; 318 } 319 320 // Helper class to keep track of which arguments need a null check 321 class ArgsNonNullState { 322 private: 323 int _nonnull_state; // mask identifying which args are nonnull 324 public: 325 ArgsNonNullState() 326 : _nonnull_state(AllBits) {} 327 328 // Does argument number i needs a null check? 329 bool arg_needs_null_check(int i) const { 330 // No data is kept for arguments starting at position 33 so 331 // conservatively assume that they need a null check. 
332 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 333 return is_set_nth_bit(_nonnull_state, i); 334 } 335 return true; 336 } 337 338 // Set whether argument number i needs a null check or not 339 void set_arg_needs_null_check(int i, bool check) { 340 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 341 if (check) { 342 _nonnull_state |= nth_bit(i); 343 } else { 344 _nonnull_state &= ~(nth_bit(i)); 345 } 346 } 347 } 348 }; 349 350 public: 351 void* operator new(size_t size) throw() { 352 Compilation* c = Compilation::current(); 353 void* res = c->arena()->Amalloc(size); 354 ((Instruction*)res)->_id = c->get_next_id(); 355 return res; 356 } 357 358 static const int no_bci = -99; 359 360 enum InstructionFlag { 361 NeedsNullCheckFlag = 0, 362 CanTrapFlag, 363 DirectCompareFlag, 364 IsEliminatedFlag, 365 IsSafepointFlag, 366 IsStaticFlag, 367 IsStrictfpFlag, 368 NeedsStoreCheckFlag, 369 NeedsWriteBarrierFlag, 370 PreservesStateFlag, 371 TargetIsFinalFlag, 372 TargetIsLoadedFlag, 373 TargetIsStrictfpFlag, 374 UnorderedIsTrueFlag, 375 NeedsPatchingFlag, 376 ThrowIncompatibleClassChangeErrorFlag, 377 InvokeSpecialReceiverCheckFlag, 378 ProfileMDOFlag, 379 IsLinkedInBlockFlag, 380 NeedsRangeCheckFlag, 381 InWorkListFlag, 382 DeoptimizeOnException, 383 InstructionLastFlag 384 }; 385 386 public: 387 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 388 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 389 390 // 'globally' used condition values 391 enum Condition { 392 eql, neq, lss, leq, gtr, geq, aeq, beq 393 }; 394 395 // Instructions may be pinned for many reasons and under certain conditions 396 // with enough knowledge it's possible to safely unpin them. 
397 enum PinReason { 398 PinUnknown = 1 << 0 399 , PinExplicitNullCheck = 1 << 3 400 , PinStackForStateSplit= 1 << 12 401 , PinStateSplitConstructor= 1 << 13 402 , PinGlobalValueNumbering= 1 << 14 403 }; 404 405 static Condition mirror(Condition cond); 406 static Condition negate(Condition cond); 407 408 // initialization 409 static int number_of_instructions() { 410 return Compilation::current()->number_of_instructions(); 411 } 412 413 // creation 414 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 415 : 416 #ifndef PRODUCT 417 _printable_bci(-99), 418 #endif 419 _use_count(0) 420 , _pin_state(0) 421 , _type(type) 422 , _next(NULL) 423 , _subst(NULL) 424 , _operand(LIR_OprFact::illegalOpr) 425 , _flags(0) 426 , _state_before(state_before) 427 , _exception_handlers(NULL) 428 , _block(NULL) 429 { 430 check_state(state_before); 431 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 432 update_exception_state(_state_before); 433 } 434 435 // accessors 436 int id() const { return _id; } 437 #ifndef PRODUCT 438 bool has_printable_bci() const { return _printable_bci != -99; } 439 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 440 void set_printable_bci(int bci) { _printable_bci = bci; } 441 #endif 442 int dominator_depth(); 443 int use_count() const { return _use_count; } 444 int pin_state() const { return _pin_state; } 445 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 446 ValueType* type() const { return _type; } 447 BlockBegin *block() const { return _block; } 448 Instruction* prev(); // use carefully, expensive operation 449 Instruction* next() const { return _next; } 450 bool has_subst() const { return _subst != NULL; } 451 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 452 LIR_Opr operand() const { return _operand; } 453 454 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 455 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 456 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 457 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 458 459 bool has_uses() const { return use_count() > 0; } 460 ValueStack* state_before() const { return _state_before; } 461 ValueStack* exception_state() const { return _exception_state; } 462 virtual bool needs_exception_state() const { return true; } 463 XHandlers* exception_handlers() const { return _exception_handlers; } 464 465 // manipulation 466 void pin(PinReason reason) { _pin_state |= reason; } 467 void pin() { _pin_state |= PinUnknown; } 468 // DANGEROUS: only used by EliminateStores 469 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 470 471 Instruction* set_next(Instruction* next) { 472 assert(next->has_printable_bci(), "_printable_bci should have been set"); 473 assert(next != NULL, "must not be NULL"); 474 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 475 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 476 477 BlockBegin *block = this->block(); 478 next->_block = block; 479 480 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 481 _next = next; 482 return next; 483 } 484 485 Instruction* set_next(Instruction* next, int bci) { 486 #ifndef PRODUCT 487 next->set_printable_bci(bci); 488 #endif 489 return set_next(next); 490 } 491 492 // when blocks are merged 493 void fixup_block_pointers() { 494 Instruction *cur = next()->next(); // next()'s block is set in set_next 495 while (cur && cur->_block != block()) { 496 cur->_block = block(); 497 cur = cur->next(); 498 } 499 } 500 501 Instruction *insert_after(Instruction *i) { 502 Instruction* n = 
_next; 503 set_next(i); 504 i->set_next(n); 505 return _next; 506 } 507 508 bool is_flattened_array() const; 509 510 Instruction *insert_after_same_bci(Instruction *i) { 511 #ifndef PRODUCT 512 i->set_printable_bci(printable_bci()); 513 #endif 514 return insert_after(i); 515 } 516 517 void set_subst(Instruction* subst) { 518 assert(subst == NULL || 519 type()->base() == subst->type()->base() || 520 subst->type()->base() == illegalType, "type can't change"); 521 _subst = subst; 522 } 523 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 524 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 525 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 526 527 // machine-specifics 528 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 529 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 530 531 // generic 532 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 533 virtual Phi* as_Phi() { return NULL; } 534 virtual Local* as_Local() { return NULL; } 535 virtual Constant* as_Constant() { return NULL; } 536 virtual AccessField* as_AccessField() { return NULL; } 537 virtual LoadField* as_LoadField() { return NULL; } 538 virtual StoreField* as_StoreField() { return NULL; } 539 virtual AccessArray* as_AccessArray() { return NULL; } 540 virtual ArrayLength* as_ArrayLength() { return NULL; } 541 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 542 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 543 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 544 virtual NegateOp* as_NegateOp() { return NULL; } 545 virtual Op2* as_Op2() { return NULL; } 546 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 547 virtual ShiftOp* as_ShiftOp() { return NULL; } 548 virtual LogicOp* as_LogicOp() { return NULL; } 549 virtual CompareOp* as_CompareOp() { return 
NULL; } 550 virtual IfOp* as_IfOp() { return NULL; } 551 virtual Convert* as_Convert() { return NULL; } 552 virtual NullCheck* as_NullCheck() { return NULL; } 553 virtual OsrEntry* as_OsrEntry() { return NULL; } 554 virtual StateSplit* as_StateSplit() { return NULL; } 555 virtual Invoke* as_Invoke() { return NULL; } 556 virtual NewInstance* as_NewInstance() { return NULL; } 557 virtual NewValueTypeInstance* as_NewValueTypeInstance() { return NULL; } 558 virtual NewArray* as_NewArray() { return NULL; } 559 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 560 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 561 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 562 virtual TypeCheck* as_TypeCheck() { return NULL; } 563 virtual CheckCast* as_CheckCast() { return NULL; } 564 virtual InstanceOf* as_InstanceOf() { return NULL; } 565 virtual TypeCast* as_TypeCast() { return NULL; } 566 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 567 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 568 virtual MonitorExit* as_MonitorExit() { return NULL; } 569 virtual Intrinsic* as_Intrinsic() { return NULL; } 570 virtual BlockBegin* as_BlockBegin() { return NULL; } 571 virtual BlockEnd* as_BlockEnd() { return NULL; } 572 virtual Goto* as_Goto() { return NULL; } 573 virtual If* as_If() { return NULL; } 574 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 575 virtual TableSwitch* as_TableSwitch() { return NULL; } 576 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 577 virtual Return* as_Return() { return NULL; } 578 virtual Throw* as_Throw() { return NULL; } 579 virtual Base* as_Base() { return NULL; } 580 virtual RoundFP* as_RoundFP() { return NULL; } 581 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 582 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 583 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 584 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 585 586 #ifdef 
ASSERT 587 virtual Assert* as_Assert() { return NULL; } 588 #endif 589 590 virtual void visit(InstructionVisitor* v) = 0; 591 592 virtual bool can_trap() const { return false; } 593 594 virtual void input_values_do(ValueVisitor* f) = 0; 595 virtual void state_values_do(ValueVisitor* f); 596 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 597 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 598 599 virtual ciType* exact_type() const; 600 virtual ciType* declared_type() const { return NULL; } 601 602 // hashing 603 virtual const char* name() const = 0; 604 HASHING1(Instruction, false, id()) // hashing disabled by default 605 606 // debugging 607 static void check_state(ValueStack* state) PRODUCT_RETURN; 608 void print() PRODUCT_RETURN; 609 void print_line() PRODUCT_RETURN; 610 void print(InstructionPrinter& ip) PRODUCT_RETURN; 611 }; 612 613 614 // The following macros are used to define base (i.e., non-leaf) 615 // and leaf instruction classes. They define class-name related 616 // generic functionality in one place. 617 618 #define BASE(class_name, super_class_name) \ 619 class class_name: public super_class_name { \ 620 public: \ 621 virtual class_name* as_##class_name() { return this; } \ 622 623 624 #define LEAF(class_name, super_class_name) \ 625 BASE(class_name, super_class_name) \ 626 public: \ 627 virtual const char* name() const { return #class_name; } \ 628 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 629 630 631 // Debugging support 632 633 634 #ifdef ASSERT 635 class AssertValues: public ValueVisitor { 636 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 637 }; 638 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 639 #else 640 #define ASSERT_VALUES 641 #endif // ASSERT 642 643 644 // A Phi is a phi function in the sense of SSA form. 
// It stands for the value of a local variable at the beginning of a
// join block. A Phi consists of n operands, one for every incoming
// branch.

LEAF(Phi, Instruction)
 private:
  int         _pf_flags;   // the flags of the phi function
  int         _index;      // to value on operand stack (index < 0) or to local
  ciType*     _exact_type; // preserve type information for flattened arrays.
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index, ciType* exact_type)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  , _exact_type(exact_type)
  {
    _block = b;
    // Value(b) upcasts the BlockBegin* to Instruction* to reach its bci.
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  virtual ciType* exact_type() const {
    return _exact_type;
  }

  virtual ciType* declared_type() const {
    return _exact_type;
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  // _index encodes the slot: >= 0 is a local index, < 0 encodes stack
  // slot -(index+1).
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;  // defined out of line; operands are not stored in this node
  int   operand_count() const;

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // Intentionally empty: this node stores no operand Values directly
  // (access goes through operand_at(), presumably reading predecessor
  // states — see c1_Instruction.cpp to confirm), so there is nothing
  // for the visitor to rewrite here.
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
714 LEAF(Local, Instruction) 715 private: 716 int _java_index; // the local index within the method to which the local belongs 717 bool _is_receiver; // if local variable holds the receiver: "this" for non-static methods 718 ciType* _declared_type; 719 public: 720 // creation 721 Local(ciType* declared, ValueType* type, int index, bool receiver) 722 : Instruction(type) 723 , _java_index(index) 724 , _is_receiver(receiver) 725 , _declared_type(declared) 726 { 727 NOT_PRODUCT(set_printable_bci(-1)); 728 } 729 730 // accessors 731 int java_index() const { return _java_index; } 732 bool is_receiver() const { return _is_receiver; } 733 734 virtual ciType* declared_type() const { return _declared_type; } 735 736 // generic 737 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 738 }; 739 740 741 LEAF(Constant, Instruction) 742 public: 743 // creation 744 Constant(ValueType* type): 745 Instruction(type, NULL, /*type_is_constant*/ true) 746 { 747 assert(type->is_constant(), "must be a constant"); 748 } 749 750 Constant(ValueType* type, ValueStack* state_before): 751 Instruction(type, state_before, /*type_is_constant*/ true) 752 { 753 assert(state_before != NULL, "only used for constants which need patching"); 754 assert(type->is_constant(), "must be a constant"); 755 // since it's patching it needs to be pinned 756 pin(); 757 } 758 759 // generic 760 virtual bool can_trap() const { return state_before() != NULL; } 761 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 762 763 virtual intx hash() const; 764 virtual bool is_equal(Value v) const; 765 766 virtual ciType* exact_type() const; 767 768 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 769 770 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 771 BlockBegin* compare(Instruction::Condition cond, Value right, 772 BlockBegin* true_sux, BlockBegin* false_sux) const { 773 switch (compare(cond, right)) { 774 case not_comparable: 775 return 
NULL; 776 case cond_false: 777 return false_sux; 778 case cond_true: 779 return true_sux; 780 default: 781 ShouldNotReachHere(); 782 return NULL; 783 } 784 } 785 }; 786 787 788 BASE(AccessField, Instruction) 789 private: 790 Value _obj; 791 int _offset; 792 ciField* _field; 793 NullCheck* _explicit_null_check; // For explicit null check elimination 794 795 public: 796 // creation 797 AccessField(Value obj, int offset, ciField* field, bool is_static, 798 ValueStack* state_before, bool needs_patching) 799 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 800 , _obj(obj) 801 , _offset(offset) 802 , _field(field) 803 , _explicit_null_check(NULL) 804 { 805 set_needs_null_check(!is_static); 806 set_flag(IsStaticFlag, is_static); 807 set_flag(NeedsPatchingFlag, needs_patching); 808 ASSERT_VALUES 809 // pin of all instructions with memory access 810 pin(); 811 } 812 813 // accessors 814 Value obj() const { return _obj; } 815 int offset() const { return _offset; } 816 ciField* field() const { return _field; } 817 BasicType field_type() const { return _field->type()->basic_type(); } 818 bool is_static() const { return check_flag(IsStaticFlag); } 819 NullCheck* explicit_null_check() const { return _explicit_null_check; } 820 bool needs_patching() const { return check_flag(NeedsPatchingFlag); } 821 822 // Unresolved getstatic and putstatic can cause initialization. 823 // Technically it occurs at the Constant that materializes the base 824 // of the static fields but it's simpler to model it here. 825 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); } 826 827 // manipulation 828 829 // Under certain circumstances, if a previous NullCheck instruction 830 // proved the target object non-null, we can eliminate the explicit 831 // null check and do an implicit one, simply specifying the debug 832 // information from the NullCheck. 
This field should only be consulted 833 // if needs_null_check() is true. 834 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 835 836 // generic 837 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 838 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 839 }; 840 841 842 LEAF(LoadField, AccessField) 843 public: 844 // creation 845 LoadField(Value obj, int offset, ciField* field, bool is_static, 846 ValueStack* state_before, bool needs_patching) 847 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 848 {} 849 850 ciType* declared_type() const; 851 852 // generic 853 HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset()) // cannot be eliminated if needs patching or if volatile 854 }; 855 856 857 LEAF(StoreField, AccessField) 858 private: 859 Value _value; 860 861 public: 862 // creation 863 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 864 ValueStack* state_before, bool needs_patching) 865 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 866 , _value(value) 867 { 868 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); 869 ASSERT_VALUES 870 pin(); 871 } 872 873 // accessors 874 Value value() const { return _value; } 875 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 876 877 // generic 878 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 879 }; 880 881 882 BASE(AccessArray, Instruction) 883 private: 884 Value _array; 885 886 public: 887 // creation 888 AccessArray(ValueType* type, Value array, ValueStack* state_before) 889 : Instruction(type, state_before) 890 , _array(array) 891 { 892 set_needs_null_check(true); 893 ASSERT_VALUES 894 pin(); // instruction with side effect (null exception or range check throwing) 895 } 896 897 Value array() const { return _array; } 
898 899 // generic 900 virtual bool can_trap() const { return needs_null_check(); } 901 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 902 }; 903 904 905 LEAF(ArrayLength, AccessArray) 906 private: 907 NullCheck* _explicit_null_check; // For explicit null check elimination 908 909 public: 910 // creation 911 ArrayLength(Value array, ValueStack* state_before) 912 : AccessArray(intType, array, state_before) 913 , _explicit_null_check(NULL) {} 914 915 // accessors 916 NullCheck* explicit_null_check() const { return _explicit_null_check; } 917 918 // setters 919 // See LoadField::set_explicit_null_check for documentation 920 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 921 922 // generic 923 HASHING1(ArrayLength, true, array()->subst()) 924 }; 925 926 927 BASE(AccessIndexed, AccessArray) 928 private: 929 Value _index; 930 Value _length; 931 BasicType _elt_type; 932 bool _mismatched; 933 934 public: 935 // creation 936 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched) 937 : AccessArray(as_ValueType(elt_type), array, state_before) 938 , _index(index) 939 , _length(length) 940 , _elt_type(elt_type) 941 , _mismatched(mismatched) 942 { 943 set_flag(Instruction::NeedsRangeCheckFlag, true); 944 ASSERT_VALUES 945 } 946 947 // accessors 948 Value index() const { return _index; } 949 Value length() const { return _length; } 950 BasicType elt_type() const { return _elt_type; } 951 bool mismatched() const { return _mismatched; } 952 953 void clear_length() { _length = NULL; } 954 // perform elimination of range checks involving constants 955 bool compute_needs_range_check(); 956 957 // generic 958 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); } 959 }; 960 961 962 LEAF(LoadIndexed, AccessIndexed) 963 private: 964 NullCheck* _explicit_null_check; // For explicit null 
check elimination 965 NewValueTypeInstance* _vt; 966 967 public: 968 // creation 969 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false) 970 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 971 , _explicit_null_check(NULL) {} 972 973 // accessors 974 NullCheck* explicit_null_check() const { return _explicit_null_check; } 975 976 // setters 977 // See LoadField::set_explicit_null_check for documentation 978 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 979 980 ciType* exact_type() const; 981 ciType* declared_type() const; 982 983 NewValueTypeInstance* vt() { return _vt; } 984 void set_vt(NewValueTypeInstance* vt) { _vt = vt; } 985 986 // generic 987 HASHING2(LoadIndexed, true, array()->subst(), index()->subst()) 988 }; 989 990 991 LEAF(StoreIndexed, AccessIndexed) 992 private: 993 Value _value; 994 995 ciMethod* _profiled_method; 996 int _profiled_bci; 997 bool _check_boolean; 998 999 public: 1000 // creation 1001 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, 1002 bool check_boolean, bool mismatched = false) 1003 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 1004 , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean) 1005 { 1006 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); 1007 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object())); 1008 ASSERT_VALUES 1009 pin(); 1010 } 1011 1012 // accessors 1013 Value value() const { return _value; } 1014 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 1015 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } 1016 bool check_boolean() const { return _check_boolean; } 1017 // Helpers for MethodData* profiling 1018 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Arithmetic negation of a single operand; result has the operand's base type.
LEAF(NegateOp, Instruction)
 private:
  Value _x;                                      // the operand to negate

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};


// Common base for all two-operand instructions; stores the bytecode
// and the two input values.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;                           // the bytecode this operation corresponds to
  Value           _x;                            // first operand
  Value           _y;                            // second operand

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  // Exchanges the two operands; only legal for commutative operations.
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};


// Binary arithmetic (add/sub/mul/div/rem ...); pinned when it can trap
// (e.g. integer division by zero).
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool is_strictfp() const                       { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Shift operation; result has the base type of the shifted operand.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Bitwise logic operation (and/or/xor).
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Three-way comparison (lcmp/fcmpl/...); always produces an int.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional select: evaluates (x cond y) ? tval : fval.
// The condition is smuggled into Op2's bytecode slot (see cond()).
LEAF(IfOp, Op2)
 private:
  Value _tval;                                   // value if the condition is true
  Value _fval;                                   // value if the condition is false

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match")  ;
  }

  // accessors
  virtual bool is_commutative() const;
  // op() is intentionally unusable here: the bytecode slot holds the condition.
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (i2l, f2d, ...).
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;                           // the conversion bytecode
  Value           _value;                        // the value being converted

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on an object; pinned, and can trap until
// null-check elimination proves it redundant.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;                                    // the object being null-checked

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;                        // the more precise declared type
  Value   _obj;                                  // the value being re-typed

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                  { return _declared_type; }
  Value obj() const                              { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


// Common base for instructions that record a full ValueStack state
// (e.g. calls, allocations, block begin/end); pinned by default.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;                            // the state after this instruction

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  // The state may only be set once.
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// A method invocation (invokestatic/special/virtual/interface/dynamic).
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;                         // the invoke bytecode
  Value           _recv;                         // receiver, NULL for static calls
  Values*         _args;                         // the arguments (excluding receiver)
  BasicTypeList*  _signature;                    // argument basic types
  int             _vtable_index;                 // vtable index for virtual dispatch
  ciMethod*       _target;                       // the resolved target method

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Allocation of a new (non-array) object instance.
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;                       // the class being instantiated
  bool             _is_unresolved;               // klass was not resolved at parse time

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }
  bool is_unresolved() const                     { return _is_unresolved; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};

// Allocation of a new value-type instance (Valhalla).
LEAF(NewValueTypeInstance, StateSplit)
  bool _is_unresolved;                           // klass was not resolved at parse time
  ciValueKlass* _klass;                          // the value klass being instantiated
  Value _depends_on;                             // Link to the instance on which withfield was called

 public:

  // Default creation, always allocated for now
  NewValueTypeInstance(ciValueKlass* klass, ValueStack* state_before, bool is_unresolved, Value depends_on = NULL)
  : StateSplit(instanceType, state_before)
  , _is_unresolved(is_unresolved)
  , _klass(klass)
  {
    // with no explicit dependency, the instance depends on itself
    if (depends_on == NULL) {
      _depends_on = this;
    } else {
      _depends_on = depends_on;
    }
  }

  // accessors
  bool is_unresolved() const                     { return _is_unresolved; }
  Value depends_on();

  ciValueKlass* klass() const                    { return _klass; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;

  // Only done in LIR Generator -> map everything to object
  void set_to_object_type()                      { set_type(instanceType); }
};

// Common base for array allocations; holds the length value.
BASE(NewArray, StateSplit)
 private:
  Value _length;                                 // array length; NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* exact_type() const                     { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};


// Allocation of an array of a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;                           // the primitive element type

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};


// Allocation of an array of object references.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;                               // the element klass

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};


// Allocation of a multi-dimensional array; NewArray's length is NULL here,
// the per-dimension lengths live in _dims.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;                               // the array klass
  Values*  _dims;                                // the length of each dimension

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }

  ciType* exact_type() const;
};


// Common base for checkcast/instanceof: holds the klass to test against
// and the object being tested, plus profiling support.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*  _klass;                              // the klass to check against; NULL if not loaded
  Value     _obj;                                // the object being checked

  ciMethod* _profiled_method;                    // method whose MethodData is updated when profiling
  int       _profiled_bci;                       // bci of the check in the profiled method

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
};


// The checkcast bytecode; throws on failure (unless configured to
// throw IncompatibleClassChangeError instead).
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


// The instanceof bytecode; produces an int, never throws.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};


// Common base for monitorenter/monitorexit.
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;                                    // the object whose monitor is accessed
  int   _monitor_no;                             // the monitor slot number

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};


// The monitorenter bytecode.
LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const                  { return true; }
};


// The monitorexit bytecode.
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};


// A call to a VM intrinsic compiled inline.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;                          // which intrinsic this is
  Values*          _args;                        // the arguments (receiver included when present)
  Value            _recv;                        // receiver, NULL if none
  ArgsNonNullState _nonnull_state;               // per-argument null-check requirements

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag, cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                   { return check_flag(PreservesStateFlag); }

  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1651 virtual void input_values_do(ValueVisitor* f) { 1652 StateSplit::input_values_do(f); 1653 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1654 } 1655 }; 1656 1657 1658 class LIR_List; 1659 1660 LEAF(BlockBegin, StateSplit) 1661 private: 1662 int _block_id; // the unique block id 1663 int _bci; // start-bci of block 1664 int _depth_first_number; // number of this block in a depth-first ordering 1665 int _linear_scan_number; // number of this block in linear-scan ordering 1666 int _dominator_depth; 1667 int _loop_depth; // the loop nesting level of this block 1668 int _loop_index; // number of the innermost loop of this block 1669 int _flags; // the flags associated with this block 1670 1671 // fields used by BlockListBuilder 1672 int _total_preds; // number of predecessors found by BlockListBuilder 1673 ResourceBitMap _stores_to_locals; // bit is set when a local variable is stored in the block 1674 1675 // SSA specific fields: (factor out later) 1676 BlockList _successors; // the successors of this block 1677 BlockList _predecessors; // the predecessors of this block 1678 BlockList _dominates; // list of blocks that are dominated by this block 1679 BlockBegin* _dominator; // the dominator of this block 1680 // SSA specific ends 1681 BlockEnd* _end; // the last instruction of this block 1682 BlockList _exception_handlers; // the exception handlers potentially invoked by this block 1683 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler 1684 int _exception_handler_pco; // if this block is the start of an exception handler, 1685 // this records the PC offset in the assembly code of the 1686 // first instruction in this block 1687 Label _label; // the label associated with this block 1688 LIR_List* _lir; // the low level intermediate representation for this block 1689 1690 ResourceBitMap _live_in; // 
set of live LIR_Opr registers at entry to this block 1691 ResourceBitMap _live_out; // set of live LIR_Opr registers at exit from this block 1692 ResourceBitMap _live_gen; // set of registers used before any redefinition in this block 1693 ResourceBitMap _live_kill; // set of registers defined in this block 1694 1695 ResourceBitMap _fpu_register_usage; 1696 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan 1697 int _first_lir_instruction_id; // ID of first LIR instruction in this block 1698 int _last_lir_instruction_id; // ID of last LIR instruction in this block 1699 1700 void iterate_preorder (boolArray& mark, BlockClosure* closure); 1701 void iterate_postorder(boolArray& mark, BlockClosure* closure); 1702 1703 friend class SuxAndWeightAdjuster; 1704 1705 public: 1706 void* operator new(size_t size) throw() { 1707 Compilation* c = Compilation::current(); 1708 void* res = c->arena()->Amalloc(size); 1709 ((BlockBegin*)res)->_id = c->get_next_id(); 1710 ((BlockBegin*)res)->_block_id = c->get_next_block_id(); 1711 return res; 1712 } 1713 1714 // initialization/counting 1715 static int number_of_blocks() { 1716 return Compilation::current()->number_of_blocks(); 1717 } 1718 1719 // creation 1720 BlockBegin(int bci) 1721 : StateSplit(illegalType) 1722 , _bci(bci) 1723 , _depth_first_number(-1) 1724 , _linear_scan_number(-1) 1725 , _dominator_depth(-1) 1726 , _loop_depth(0) 1727 , _loop_index(-1) 1728 , _flags(0) 1729 , _total_preds(0) 1730 , _stores_to_locals() 1731 , _successors(2) 1732 , _predecessors(2) 1733 , _dominates(2) 1734 , _dominator(NULL) 1735 , _end(NULL) 1736 , _exception_handlers(1) 1737 , _exception_states(NULL) 1738 , _exception_handler_pco(-1) 1739 , _lir(NULL) 1740 , _live_in() 1741 , _live_out() 1742 , _live_gen() 1743 , _live_kill() 1744 , _fpu_register_usage() 1745 , _fpu_stack_state(NULL) 1746 , _first_lir_instruction_id(-1) 1747 , _last_lir_instruction_id(-1) 1748 { 1749 _block = this; 1750 #ifndef PRODUCT 1751 
set_printable_bci(bci); 1752 #endif 1753 } 1754 1755 // accessors 1756 int block_id() const { return _block_id; } 1757 int bci() const { return _bci; } 1758 BlockList* successors() { return &_successors; } 1759 BlockList* dominates() { return &_dominates; } 1760 BlockBegin* dominator() const { return _dominator; } 1761 int loop_depth() const { return _loop_depth; } 1762 int dominator_depth() const { return _dominator_depth; } 1763 int depth_first_number() const { return _depth_first_number; } 1764 int linear_scan_number() const { return _linear_scan_number; } 1765 BlockEnd* end() const { return _end; } 1766 Label* label() { return &_label; } 1767 LIR_List* lir() const { return _lir; } 1768 int exception_handler_pco() const { return _exception_handler_pco; } 1769 ResourceBitMap& live_in() { return _live_in; } 1770 ResourceBitMap& live_out() { return _live_out; } 1771 ResourceBitMap& live_gen() { return _live_gen; } 1772 ResourceBitMap& live_kill() { return _live_kill; } 1773 ResourceBitMap& fpu_register_usage() { return _fpu_register_usage; } 1774 intArray* fpu_stack_state() const { return _fpu_stack_state; } 1775 int first_lir_instruction_id() const { return _first_lir_instruction_id; } 1776 int last_lir_instruction_id() const { return _last_lir_instruction_id; } 1777 int total_preds() const { return _total_preds; } 1778 BitMap& stores_to_locals() { return _stores_to_locals; } 1779 1780 // manipulation 1781 void set_dominator(BlockBegin* dom) { _dominator = dom; } 1782 void set_loop_depth(int d) { _loop_depth = d; } 1783 void set_dominator_depth(int d) { _dominator_depth = d; } 1784 void set_depth_first_number(int dfn) { _depth_first_number = dfn; } 1785 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; } 1786 void set_end(BlockEnd* end); 1787 void clear_end(); 1788 void disconnect_from_graph(); 1789 static void disconnect_edge(BlockBegin* from, BlockBegin* to); 1790 BlockBegin* insert_block_between(BlockBegin* sux); 1791 void 
substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1792 void set_lir(LIR_List* lir) { _lir = lir; } 1793 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; } 1794 void set_live_in (const ResourceBitMap& map) { _live_in = map; } 1795 void set_live_out (const ResourceBitMap& map) { _live_out = map; } 1796 void set_live_gen (const ResourceBitMap& map) { _live_gen = map; } 1797 void set_live_kill(const ResourceBitMap& map) { _live_kill = map; } 1798 void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; } 1799 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; } 1800 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; } 1801 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; } 1802 void increment_total_preds(int n = 1) { _total_preds += n; } 1803 void init_stores_to_locals(int locals_count) { _stores_to_locals.initialize(locals_count); } 1804 1805 // generic 1806 virtual void state_values_do(ValueVisitor* f); 1807 1808 // successors and predecessors 1809 int number_of_sux() const; 1810 BlockBegin* sux_at(int i) const; 1811 void add_successor(BlockBegin* sux); 1812 void remove_successor(BlockBegin* pred); 1813 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); } 1814 1815 void add_predecessor(BlockBegin* pred); 1816 void remove_predecessor(BlockBegin* pred); 1817 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); } 1818 int number_of_preds() const { return _predecessors.length(); } 1819 BlockBegin* pred_at(int i) const { return _predecessors.at(i); } 1820 1821 // exception handlers potentially invoked by this block 1822 void add_exception_handler(BlockBegin* b); 1823 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); } 1824 int number_of_exception_handlers() const { return _exception_handlers.length(); } 1825 BlockBegin* exception_handler_at(int i) const 
{ return _exception_handlers.at(i); }  // completes the exception-handler accessor begun above this chunk

  // States of the instructions that have an edge to this exception handler.
  // Only meaningful for blocks flagged as exception entries (xhandlers).
  int number_of_exception_states()               { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const  { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags — bit set stored in _flags; a block may carry several flags at once
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // should we try to eliminate range checks in this block?
  };

  void set(Flag f)                               { _flags |= f; }
  void clear(Flag f)                             { _flags &= ~f; }
  bool is_set(Flag f) const                      { return (_flags & f) != 0; }
  // true iff this block is a standard, OSR, or exception entry point
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration over the CFG reachable from this block
  void iterate_preorder   (BlockClosure* closure);
  void iterate_postorder  (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                    { _loop_index = ix; }
  int  loop_index() const                        { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state);             // try to merge states at block begin
  void merge(ValueStack* state)                  { bool b = try_merge(state); assert(b, "merge failed"); }

  // debugging (no-ops in product builds)
  void print_block()                             PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};


// Abstract base class of all instructions that terminate a basic block;
// owns the successor list (_sux) queried by the CFG utilities.
BASE(BlockEnd, StateSplit)
 private:
  BlockList*  _sux;

 protected:
  BlockList* sux() const                         { return _sux; }

  // Installs the successor list; in debug builds verifies that the list
  // and every entry in it are non-NULL.
  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const                { return _sux->at(i); }
  BlockBegin* default_sux() const                { return sux_at(number_of_sux() - 1); }
  BlockBegin** addr_sux_at(int i) const          { return _sux->adr_at(i); }
  int sux_index(BlockBegin* sux) const           { return _sux->find(sux); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};


// Unconditional control transfer to a single successor block.
LEAF(Goto, BlockEnd)
 public:
  enum Direction {
    none,             // just a regular goto
    taken, not_taken  // goto produced from an If
  };
 private:
  ciMethod*   _profiled_method;
  int         _profiled_bci;
  Direction   _direction;
 public:
  // creation
  Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
    : BlockEnd(illegalType, state_before, is_safepoint)
    , _profiled_method(NULL)
    , _profiled_bci(0)
    , _direction(none) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  // convenience constructor without a state_before
  Goto(BlockBegin* sux, bool is_safepoint)
    : BlockEnd(illegalType, NULL, is_safepoint)
    , _profiled_method(NULL)
    , _profiled_bci(0)
    , _direction(none) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  // accessors
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }
  Direction direction() const                    { return _direction; }

  // setters
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  void set_direction(Direction d)                { _direction = d; }
};

#ifdef ASSERT
// Debug-build-only instruction asserting a relation (cond) between two values.
LEAF(Assert, Instruction)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  char        *_message;

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  Assert(Value x, Condition cond, bool unordered_is_true, Value y);

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  const char *message() const                    { return _message; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
#endif

// Predicate instruction comparing two values; used by range-check
// elimination (the no-argument form below unconditionally fails,
// presumably forcing deoptimization — confirm in the range-check pass).
LEAF(RangeCheckPredicate, StateSplit)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;

  void check_state();

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType)
  , _x(x)
  , _cond(cond)
  , _y(y)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    this->set_state(state);
    check_state();
  }

  // Always deoptimize
  RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
  {
    this->set_state(state);
    _x = _y = NULL;  // NULL operands mark the always-failing predicate
    check_state();
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }

  // degrade this predicate to the always-failing form
  void always_fail()                             { _x = _y = NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
  HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
};

// Two-way conditional branch: compares _x and _y with _cond and
// transfers control to the true or false successor.
LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci;
                             // (the Canonicalizer may alter the bci of an If node)
  bool        _swapped;      // is the order reversed with respect to the original If in the
                             // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);  // successor 0: branch taken
    s->append(fsux);  // successor 1: branch not taken
    set_sux(s);
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }
  BlockBegin* usux() const                       { return sux_for(unordered_is_true()); }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                        { return _swapped; }

  // manipulation

  // Exchanges the operands and mirrors the condition, preserving semantics
  // (x < y  becomes  y > x).
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // Exchanges the two successors and negates the condition, preserving
  // semantics; the unordered-taken flag must flip with the condition.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  // setters
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  void set_swapped(bool value)                   { _swapped = value; }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


// Fused InstanceOf + If: branches on whether _obj is (or is not) an
// instance of _klass; see Notes 1 and 2 in the accessor section below.
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;
  Value    _obj;
  bool     _test_is_instance;  // jump if instance
  int      _instanceof_bci;

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false)  // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* an instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  int instanceof_bci() const                     { return _instanceof_bci; }
  bool test_is_instance() const                  { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }

  // manipulation

  // Exchanges the two successors and inverts the test, preserving semantics.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_obj); }
};


// Abstract base class of multi-way branches on an integer tag; the last
// successor is the default target (hence length() excludes it).
BASE(Switch, BlockEnd)
 private:
  Value       _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                              { return _tag; }
  int length() const                             { return number_of_sux() - 1; }  // number of cases, excluding default

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_tag); }
};
// Switch over a dense key range [lo_key, hi_key]; successor i handles
// key lo_key + i (models the tableswitch bytecode).
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); }

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + (length() - 1); }
};


// Switch over an explicit, sparse key list; key_at(i) is handled by
// successor i (models the lookupswitch bytecode).
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};


// Method return; _result is NULL for void returns.
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Explicit throw of an exception object; always a safepoint and can trap.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// Synthetic start block end: its successors are the standard entry and,
// if present, the OSR entry. The standard entry is appended last so it
// is the default successor.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? NULL : sux_at(0); }
};


// Models the incoming OSR buffer pointer at an on-stack-replacement
// entry; pointer-sized, hence long on LP64 and int otherwise.
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;  // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type())  // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};


// Abstract base class of all sun.misc.Unsafe-style memory accesses;
// a put produces no value (voidType), a get produces the accessed value.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;  // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to.
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Abstract base class of Unsafe accesses addressed by a raw (Java long)
// base address, optionally plus a scaled index.
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;        // Base address (a Java long)
  Value _index;       // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;  // Scale factor: 0, 1, 2, or 3.
                      // Indicates log2 of number of bytes (1, 2, 4, or 8)
                      // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool  has_index()                              { return (_index != NULL); }
  int   log2_scale()                             { return _log2_scale; }

  // setters
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_base);
                                                   if (has_index()) f->visit(&_index); }
};


// Raw-address load.
LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
  bool _may_be_unaligned, _is_wide;  // For OSREntry

 public:
  UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  // accessors
  bool may_be_unaligned()                        { return _may_be_unaligned; }
  bool is_wide()                                 { return _is_wide; }
};


// Raw-address store.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;  // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeRawOp::input_values_do(f);
                                                   f->visit(&_value); }
};


// Abstract base class of Unsafe accesses addressed by object + offset.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;       // Object to be fetched from or mutated
  Value _offset;       // Offset within object
  bool  _is_volatile;  // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_object);
                                                   f->visit(&_offset); }
};


// Field load via object + offset.
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


// Field store via object + offset.
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;  // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
    , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};

// Atomic get-and-set / get-and-add via object + offset; produces the
// previous value (constructed as a get: is_put == false).
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;  // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
    , _value(value)
    , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                            { return _is_add; }
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};

// Records receiver-type / argument-type profiling data at a call site.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;
  int              _bci_of_invoke;
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;
  ciKlass*         _known_holder;
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined
 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method() const                       { return _method; }
  int bci_of_invoke() const                      { return _bci_of_invoke; }
  ciMethod* callee() const                       { return _callee; }
  Value recv() const                             { return _recv; }
  ciKlass* known_holder() const                  { return _known_holder; }
  int nb_profiled_args() const                   { return _obj_args == NULL ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i) const             { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined() const                           { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != NULL) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};

// Records the type of a method's return value for profiling.
LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod* _method;
  ciMethod* _callee;
  int       _bci_of_invoke;
  Value     _ret;

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileType has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method() const                       { return _method; }
  ciMethod* callee() const                       { return _callee; }
  int bci_of_invoke() const                      { return _bci_of_invoke; }
  Value ret() const                              { return _ret; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    if (_ret != NULL) {
      f->visit(&_ret);
    }
  }
};

// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
LEAF(RuntimeCall, Instruction)
 private:
  const char* _entry_name;
  address     _entry;
  Values*     _args;
  bool        _pass_thread;  // Pass the JavaThread* as an implicit first argument

 public:
  RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
    : Instruction(type)
    , _entry_name(entry_name)
    , _entry(entry)
    , _args(args)
    , _pass_thread(pass_thread) {
    ASSERT_VALUES
    pin();
  }

  // accessors
  const char* entry_name() const                 { return _entry_name; }
  address entry() const                          { return _entry; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  bool pass_thread() const                       { return _pass_thread; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};

// Use to trip invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;
  ValueStack* _state;

 public:
  ProfileInvoke(ciMethod* inlinee, ValueStack* state)
    : Instruction(voidType)
    , _inlinee(inlinee)
    , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where
    // located (QQQ — original author's open question; confirm whether
    // pinning is strictly required here)
    pin();
  }

  // accessors
  ciMethod* inlinee()                            { return _inlinee; }
  ValueStack* state()                            { return _state; }
  // generic
  virtual void input_values_do(ValueVisitor*)    {}
  virtual void state_values_do(ValueVisitor*);
};

// Explicit memory barrier; the barrier kind is encoded as a LIR_Code.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  // accessors
  LIR_Code code()                                { return _code; }

  // generic
  virtual void input_values_do(ValueVisitor*)    {}
};

// A (from, to) edge in the CFG; used when recording/splitting edges.
class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  BlockBegin* _to;
 public:
  BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
  BlockBegin* from() const                       { return _from; }
  BlockBegin* to() const                         { return _to; }
  bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
  bool is_same(BlockPair* p) const               { return _from == p->from() && _to == p->to(); }
  void set_to(BlockBegin* b)                     { _to = b; }
  void set_from(BlockBegin* b)                   { _from = b; }
};

typedef GrowableArray<BlockPair*> BlockPairList;

// Out-of-line BlockBegin accessors: defined here because they need the full
// BlockEnd declaration; the asserts cross-check the cached successor list
// against the block's end instruction.
inline int         BlockBegin::number_of_sux() const          { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const            { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); }

#undef ASSERT_VALUES

#endif // SHARE_VM_C1_C1_INSTRUCTION_HPP