1 /* 2 * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
47 48 class Instruction; 49 class Phi; 50 class Local; 51 class Constant; 52 class AccessField; 53 class LoadField; 54 class StoreField; 55 class AccessArray; 56 class ArrayLength; 57 class AccessIndexed; 58 class LoadIndexed; 59 class StoreIndexed; 60 class NegateOp; 61 class Op2; 62 class ArithmeticOp; 63 class ShiftOp; 64 class LogicOp; 65 class CompareOp; 66 class IfOp; 67 class Convert; 68 class NullCheck; 69 class TypeCast; 70 class OsrEntry; 71 class ExceptionObject; 72 class StateSplit; 73 class Invoke; 74 class NewInstance; 75 class NewValueTypeInstance; 76 class NewArray; 77 class NewTypeArray; 78 class NewObjectArray; 79 class NewMultiArray; 80 class TypeCheck; 81 class CheckCast; 82 class InstanceOf; 83 class AccessMonitor; 84 class MonitorEnter; 85 class MonitorExit; 86 class Intrinsic; 87 class BlockBegin; 88 class BlockEnd; 89 class Goto; 90 class If; 91 class IfInstanceOf; 92 class Switch; 93 class TableSwitch; 94 class LookupSwitch; 95 class Return; 96 class Throw; 97 class Base; 98 class RoundFP; 99 class UnsafeOp; 100 class UnsafeRawOp; 101 class UnsafeGetRaw; 102 class UnsafePutRaw; 103 class UnsafeObjectOp; 104 class UnsafeGetObject; 105 class UnsafePutObject; 106 class UnsafeGetAndSetObject; 107 class ProfileCall; 108 class ProfileReturnType; 109 class ProfileInvoke; 110 class RuntimeCall; 111 class MemBar; 112 class RangeCheckPredicate; 113 #ifdef ASSERT 114 class Assert; 115 #endif 116 117 // A Value is a reference to the instruction creating the value 118 typedef Instruction* Value; 119 typedef GrowableArray<Value> Values; 120 typedef GrowableArray<ValueStack*> ValueStackStack; 121 122 // BlockClosure is the base class for block traversal/iteration. 
// BlockClosure is the callback interface used by CFG traversals: implement
// block_do() and hand the closure to an iteration routine (e.g. BlockList below).
class BlockClosure: public CompilationResourceObj {
 public:
  // Called once per basic block visited by the traversal.
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
// (the pointer-to-Value parameter allows the visitor to substitute the value in place).
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// A growable list of basic blocks with traversal helpers.
// iterate_forward/iterate_backward apply a BlockClosure in list order /
// reverse list order; blocks_do takes a plain function pointer instead.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  // Pre-filled list: 'size' elements, each initialized to 'init'.
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);
  void iterate_backward(BlockClosure* closure);
  void blocks_do(void f(BlockBegin*));
  void values_do(ValueVisitor* f);
  // Debug printing; compiled away in PRODUCT builds (PRODUCT_RETURN).
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Double-dispatch visitor over the concrete Instruction classes. Each concrete
// leaf class X overrides Instruction::visit() to call back do_X(this) here
// (see the LEAF macro below). One pure-virtual hook per concrete class.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewValueTypeInstance(NewValueTypeInstance* x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType*  x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: This hash functions affect the performance
//       of ValueMap - make changes carefully!

// Combine up to four values into one intx hash by shift-and-xor.
#define HASH1(x1                 )                    ((intx)(x1))
#define HASH2(x1, x2             )                    ((HASH1(x1         ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3         )                    ((HASH2(x1, x2     ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4     )                    ((HASH3(x1, x2, x3 ) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// HASHINGn(class_name, enabled, f1..fn): defines hash() over name() plus the
// n fields, and is_equal() comparing the same n fields after downcasting the
// other value with as_##class_name(). 'enabled' may be a runtime expression.
#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
288 289 class Instruction: public CompilationResourceObj { 290 private: 291 int _id; // the unique instruction id 292 #ifndef PRODUCT 293 int _printable_bci; // the bci of the instruction for printing 294 #endif 295 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 296 int _pin_state; // set of PinReason describing the reason for pinning 297 ValueType* _type; // the instruction value type 298 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 299 Instruction* _subst; // the substitution instruction if any 300 LIR_Opr _operand; // LIR specific information 301 unsigned int _flags; // Flag bits 302 303 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 304 ValueStack* _exception_state; // Copy of state for exception handling 305 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 306 307 friend class UseCountComputer; 308 friend class BlockBegin; 309 310 void update_exception_state(ValueStack* state); 311 312 protected: 313 BlockBegin* _block; // Block that contains this instruction 314 315 void set_type(ValueType* type) { 316 assert(type != NULL, "type must exist"); 317 _type = type; 318 } 319 320 // Helper class to keep track of which arguments need a null check 321 class ArgsNonNullState { 322 private: 323 int _nonnull_state; // mask identifying which args are nonnull 324 public: 325 ArgsNonNullState() 326 : _nonnull_state(AllBits) {} 327 328 // Does argument number i needs a null check? 329 bool arg_needs_null_check(int i) const { 330 // No data is kept for arguments starting at position 33 so 331 // conservatively assume that they need a null check. 
332 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 333 return is_set_nth_bit(_nonnull_state, i); 334 } 335 return true; 336 } 337 338 // Set whether argument number i needs a null check or not 339 void set_arg_needs_null_check(int i, bool check) { 340 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 341 if (check) { 342 _nonnull_state |= nth_bit(i); 343 } else { 344 _nonnull_state &= ~(nth_bit(i)); 345 } 346 } 347 } 348 }; 349 350 public: 351 void* operator new(size_t size) throw() { 352 Compilation* c = Compilation::current(); 353 void* res = c->arena()->Amalloc(size); 354 ((Instruction*)res)->_id = c->get_next_id(); 355 return res; 356 } 357 358 static const int no_bci = -99; 359 360 enum InstructionFlag { 361 NeedsNullCheckFlag = 0, 362 CanTrapFlag, 363 DirectCompareFlag, 364 IsEliminatedFlag, 365 IsSafepointFlag, 366 IsStaticFlag, 367 IsStrictfpFlag, 368 NeedsStoreCheckFlag, 369 NeedsWriteBarrierFlag, 370 PreservesStateFlag, 371 TargetIsFinalFlag, 372 TargetIsLoadedFlag, 373 TargetIsStrictfpFlag, 374 UnorderedIsTrueFlag, 375 NeedsPatchingFlag, 376 ThrowIncompatibleClassChangeErrorFlag, 377 InvokeSpecialReceiverCheckFlag, 378 ProfileMDOFlag, 379 IsLinkedInBlockFlag, 380 NeedsRangeCheckFlag, 381 InWorkListFlag, 382 DeoptimizeOnException, 383 InstructionLastFlag 384 }; 385 386 public: 387 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 388 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 389 390 // 'globally' used condition values 391 enum Condition { 392 eql, neq, lss, leq, gtr, geq, aeq, beq 393 }; 394 395 // Instructions may be pinned for many reasons and under certain conditions 396 // with enough knowledge it's possible to safely unpin them. 
397 enum PinReason { 398 PinUnknown = 1 << 0 399 , PinExplicitNullCheck = 1 << 3 400 , PinStackForStateSplit= 1 << 12 401 , PinStateSplitConstructor= 1 << 13 402 , PinGlobalValueNumbering= 1 << 14 403 }; 404 405 static Condition mirror(Condition cond); 406 static Condition negate(Condition cond); 407 408 // initialization 409 static int number_of_instructions() { 410 return Compilation::current()->number_of_instructions(); 411 } 412 413 // creation 414 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 415 : 416 #ifndef PRODUCT 417 _printable_bci(-99), 418 #endif 419 _use_count(0) 420 , _pin_state(0) 421 , _type(type) 422 , _next(NULL) 423 , _subst(NULL) 424 , _operand(LIR_OprFact::illegalOpr) 425 , _flags(0) 426 , _state_before(state_before) 427 , _exception_handlers(NULL) 428 , _block(NULL) 429 { 430 check_state(state_before); 431 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 432 update_exception_state(_state_before); 433 } 434 435 // accessors 436 int id() const { return _id; } 437 #ifndef PRODUCT 438 bool has_printable_bci() const { return _printable_bci != -99; } 439 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 440 void set_printable_bci(int bci) { _printable_bci = bci; } 441 #endif 442 int dominator_depth(); 443 int use_count() const { return _use_count; } 444 int pin_state() const { return _pin_state; } 445 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 446 ValueType* type() const { return _type; } 447 BlockBegin *block() const { return _block; } 448 Instruction* prev(); // use carefully, expensive operation 449 Instruction* next() const { return _next; } 450 bool has_subst() const { return _subst != NULL; } 451 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 452 LIR_Opr operand() const { return _operand; } 453 454 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 455 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 456 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 457 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 458 459 bool has_uses() const { return use_count() > 0; } 460 ValueStack* state_before() const { return _state_before; } 461 ValueStack* exception_state() const { return _exception_state; } 462 virtual bool needs_exception_state() const { return true; } 463 XHandlers* exception_handlers() const { return _exception_handlers; } 464 465 // manipulation 466 void pin(PinReason reason) { _pin_state |= reason; } 467 void pin() { _pin_state |= PinUnknown; } 468 // DANGEROUS: only used by EliminateStores 469 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 470 471 Instruction* set_next(Instruction* next) { 472 assert(next->has_printable_bci(), "_printable_bci should have been set"); 473 assert(next != NULL, "must not be NULL"); 474 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 475 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 476 477 BlockBegin *block = this->block(); 478 next->_block = block; 479 480 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 481 _next = next; 482 return next; 483 } 484 485 Instruction* set_next(Instruction* next, int bci) { 486 #ifndef PRODUCT 487 next->set_printable_bci(bci); 488 #endif 489 return set_next(next); 490 } 491 492 // when blocks are merged 493 void fixup_block_pointers() { 494 Instruction *cur = next()->next(); // next()'s block is set in set_next 495 while (cur && cur->_block != block()) { 496 cur->_block = block(); 497 cur = cur->next(); 498 } 499 } 500 501 Instruction *insert_after(Instruction *i) { 502 Instruction* n = 
_next; 503 set_next(i); 504 i->set_next(n); 505 return _next; 506 } 507 508 bool is_flattened_array() const; 509 510 Instruction *insert_after_same_bci(Instruction *i) { 511 #ifndef PRODUCT 512 i->set_printable_bci(printable_bci()); 513 #endif 514 return insert_after(i); 515 } 516 517 void set_subst(Instruction* subst) { 518 assert(subst == NULL || 519 type()->base() == subst->type()->base() || 520 subst->type()->base() == illegalType, "type can't change"); 521 _subst = subst; 522 } 523 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 524 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 525 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 526 527 // machine-specifics 528 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 529 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 530 531 // generic 532 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 533 virtual Phi* as_Phi() { return NULL; } 534 virtual Local* as_Local() { return NULL; } 535 virtual Constant* as_Constant() { return NULL; } 536 virtual AccessField* as_AccessField() { return NULL; } 537 virtual LoadField* as_LoadField() { return NULL; } 538 virtual StoreField* as_StoreField() { return NULL; } 539 virtual AccessArray* as_AccessArray() { return NULL; } 540 virtual ArrayLength* as_ArrayLength() { return NULL; } 541 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 542 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 543 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 544 virtual NegateOp* as_NegateOp() { return NULL; } 545 virtual Op2* as_Op2() { return NULL; } 546 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 547 virtual ShiftOp* as_ShiftOp() { return NULL; } 548 virtual LogicOp* as_LogicOp() { return NULL; } 549 virtual CompareOp* as_CompareOp() { return 
NULL; } 550 virtual IfOp* as_IfOp() { return NULL; } 551 virtual Convert* as_Convert() { return NULL; } 552 virtual NullCheck* as_NullCheck() { return NULL; } 553 virtual OsrEntry* as_OsrEntry() { return NULL; } 554 virtual StateSplit* as_StateSplit() { return NULL; } 555 virtual Invoke* as_Invoke() { return NULL; } 556 virtual NewInstance* as_NewInstance() { return NULL; } 557 virtual NewValueTypeInstance* as_NewValueTypeInstance() { return NULL; } 558 virtual NewArray* as_NewArray() { return NULL; } 559 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 560 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 561 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 562 virtual TypeCheck* as_TypeCheck() { return NULL; } 563 virtual CheckCast* as_CheckCast() { return NULL; } 564 virtual InstanceOf* as_InstanceOf() { return NULL; } 565 virtual TypeCast* as_TypeCast() { return NULL; } 566 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 567 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 568 virtual MonitorExit* as_MonitorExit() { return NULL; } 569 virtual Intrinsic* as_Intrinsic() { return NULL; } 570 virtual BlockBegin* as_BlockBegin() { return NULL; } 571 virtual BlockEnd* as_BlockEnd() { return NULL; } 572 virtual Goto* as_Goto() { return NULL; } 573 virtual If* as_If() { return NULL; } 574 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 575 virtual TableSwitch* as_TableSwitch() { return NULL; } 576 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 577 virtual Return* as_Return() { return NULL; } 578 virtual Throw* as_Throw() { return NULL; } 579 virtual Base* as_Base() { return NULL; } 580 virtual RoundFP* as_RoundFP() { return NULL; } 581 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 582 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 583 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 584 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 585 586 #ifdef 
ASSERT 587 virtual Assert* as_Assert() { return NULL; } 588 #endif 589 590 virtual void visit(InstructionVisitor* v) = 0; 591 592 virtual bool can_trap() const { return false; } 593 594 virtual void input_values_do(ValueVisitor* f) = 0; 595 virtual void state_values_do(ValueVisitor* f); 596 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 597 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 598 599 virtual ciType* exact_type() const; 600 virtual ciType* declared_type() const { return NULL; } 601 602 // hashing 603 virtual const char* name() const = 0; 604 HASHING1(Instruction, false, id()) // hashing disabled by default 605 606 // debugging 607 static void check_state(ValueStack* state) PRODUCT_RETURN; 608 void print() PRODUCT_RETURN; 609 void print_line() PRODUCT_RETURN; 610 void print(InstructionPrinter& ip) PRODUCT_RETURN; 611 }; 612 613 614 // The following macros are used to define base (i.e., non-leaf) 615 // and leaf instruction classes. They define class-name related 616 // generic functionality in one place. 617 618 #define BASE(class_name, super_class_name) \ 619 class class_name: public super_class_name { \ 620 public: \ 621 virtual class_name* as_##class_name() { return this; } \ 622 623 624 #define LEAF(class_name, super_class_name) \ 625 BASE(class_name, super_class_name) \ 626 public: \ 627 virtual const char* name() const { return #class_name; } \ 628 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 629 630 631 // Debugging support 632 633 634 #ifdef ASSERT 635 class AssertValues: public ValueVisitor { 636 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 637 }; 638 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 639 #else 640 #define ASSERT_VALUES 641 #endif // ASSERT 642 643 644 // A Phi is a phi function in the sense of SSA form. 
// It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int         _pf_flags; // the flags of the phi function
  int         _index;    // to value on operand stack (index < 0) or to local
  ciType*     _exact_type; // currently is set only for flattened arrays, NULL otherwise.
 public:
  // creation
  // b is the join block this phi belongs to; index encodes local vs. stack
  // slot (see is_local()/stack_index() below).
  Phi(ValueType* type, BlockBegin* b, int index, ciType* exact_type)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  , _exact_type(exact_type)
  {
    _block = b;
    // Inherit the printable bci from the block for debug printing.
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  virtual ciType* exact_type() const {
    return _exact_type;
  }

  virtual ciType* declared_type() const {
    return _exact_type;
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  // Stack slots are encoded as negative indices: index = -(slot + 1).
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;
  int   operand_count() const;

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // No direct inputs here: phi operands live in the predecessors' states
  // and are reached via operand_at()/operand_count().
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function
// call.
LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  bool     _is_receiver;                         // if local variable holds the receiver: "this" for non-static methods
  ciType*  _declared_type;                       // statically declared type of the argument
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index, bool receiver)
    : Instruction(type)
    , _java_index(index)
    , _is_receiver(receiver)
    , _declared_type(declared)
  {
    // Locals have no bytecode of their own; use -1 for printing.
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }
  bool is_receiver() const                       { return _is_receiver; }

  virtual ciType* declared_type() const          { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};


// A compile-time constant value. Patched constants carry a state_before and
// are pinned; unpatched ones are freely value-numbered (hash/is_equal below).
LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, NULL, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  Constant(ValueType* type, ValueStack* state_before):
    Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != NULL, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    // since it's patching it needs to be pinned
    pin();
  }

  // generic
  // A patched constant (state_before != NULL) can trap during patching.
  virtual bool can_trap() const                  { return state_before() != NULL; }
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  // Fold a conditional branch against 'right': returns the taken successor
  // if the comparison is decidable at compile time, NULL otherwise.
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return NULL;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }
};


// Common base of LoadField/StoreField: object (or holder), offset, ciField.
BASE(AccessField, Instruction)
 private:
  Value       _obj;
  int         _offset;
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck.
This field should only be consulted 833 // if needs_null_check() is true. 834 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 835 836 // generic 837 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 838 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 839 }; 840 841 842 LEAF(LoadField, AccessField) 843 public: 844 // creation 845 LoadField(Value obj, int offset, ciField* field, bool is_static, 846 ValueStack* state_before, bool needs_patching) 847 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 848 {} 849 850 ciType* declared_type() const; 851 852 // generic 853 HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset()) // cannot be eliminated if needs patching or if volatile 854 }; 855 856 857 LEAF(StoreField, AccessField) 858 private: 859 Value _value; 860 861 public: 862 // creation 863 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 864 ValueStack* state_before, bool needs_patching) 865 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 866 , _value(value) 867 { 868 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); 869 ASSERT_VALUES 870 pin(); 871 } 872 873 // accessors 874 Value value() const { return _value; } 875 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 876 877 // generic 878 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 879 }; 880 881 882 BASE(AccessArray, Instruction) 883 private: 884 Value _array; 885 886 public: 887 // creation 888 AccessArray(ValueType* type, Value array, ValueStack* state_before) 889 : Instruction(type, state_before) 890 , _array(array) 891 { 892 set_needs_null_check(true); 893 ASSERT_VALUES 894 pin(); // instruction with side effect (null exception or range check throwing) 895 } 896 897 Value array() const { return _array; } 
898 899 // generic 900 virtual bool can_trap() const { return needs_null_check(); } 901 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 902 }; 903 904 905 LEAF(ArrayLength, AccessArray) 906 private: 907 NullCheck* _explicit_null_check; // For explicit null check elimination 908 909 public: 910 // creation 911 ArrayLength(Value array, ValueStack* state_before) 912 : AccessArray(intType, array, state_before) 913 , _explicit_null_check(NULL) {} 914 915 // accessors 916 NullCheck* explicit_null_check() const { return _explicit_null_check; } 917 918 // setters 919 // See LoadField::set_explicit_null_check for documentation 920 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 921 922 // generic 923 HASHING1(ArrayLength, true, array()->subst()) 924 }; 925 926 927 BASE(AccessIndexed, AccessArray) 928 private: 929 Value _index; 930 Value _length; 931 BasicType _elt_type; 932 bool _mismatched; 933 934 public: 935 // creation 936 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched) 937 : AccessArray(as_ValueType(elt_type), array, state_before) 938 , _index(index) 939 , _length(length) 940 , _elt_type(elt_type) 941 , _mismatched(mismatched) 942 { 943 set_flag(Instruction::NeedsRangeCheckFlag, true); 944 ASSERT_VALUES 945 } 946 947 // accessors 948 Value index() const { return _index; } 949 Value length() const { return _length; } 950 BasicType elt_type() const { return _elt_type; } 951 bool mismatched() const { return _mismatched; } 952 953 void clear_length() { _length = NULL; } 954 // perform elimination of range checks involving constants 955 bool compute_needs_range_check(); 956 957 // generic 958 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); } 959 }; 960 961 962 LEAF(LoadIndexed, AccessIndexed) 963 private: 964 NullCheck* _explicit_null_check; // For explicit null 
check elimination 965 NewValueTypeInstance* _vt; 966 967 public: 968 // creation 969 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false) 970 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 971 , _explicit_null_check(NULL) {} 972 973 // accessors 974 NullCheck* explicit_null_check() const { return _explicit_null_check; } 975 976 // setters 977 // See LoadField::set_explicit_null_check for documentation 978 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 979 980 ciType* exact_type() const; 981 ciType* declared_type() const; 982 983 NewValueTypeInstance* vt() { return _vt; } 984 void set_vt(NewValueTypeInstance* vt) { _vt = vt; } 985 986 // generic 987 HASHING2(LoadIndexed, true, array()->subst(), index()->subst()) 988 }; 989 990 991 LEAF(StoreIndexed, AccessIndexed) 992 private: 993 Value _value; 994 995 ciMethod* _profiled_method; 996 int _profiled_bci; 997 bool _check_boolean; 998 999 public: 1000 // creation 1001 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, 1002 bool check_boolean, bool mismatched = false) 1003 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 1004 , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean) 1005 { 1006 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); 1007 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object())); 1008 ASSERT_VALUES 1009 pin(); 1010 } 1011 1012 // accessors 1013 Value value() const { return _value; } 1014 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 1015 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } 1016 bool check_boolean() const { return _check_boolean; } 1017 // Helpers for MethodData* profiling 1018 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 
1019 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1020 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1021 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1022 ciMethod* profiled_method() const { return _profiled_method; } 1023 int profiled_bci() const { return _profiled_bci; } 1024 // generic 1025 virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); } 1026 }; 1027 1028 1029 LEAF(NegateOp, Instruction) 1030 private: 1031 Value _x; 1032 1033 public: 1034 // creation 1035 NegateOp(Value x) : Instruction(x->type()->base()), _x(x) { 1036 ASSERT_VALUES 1037 } 1038 1039 // accessors 1040 Value x() const { return _x; } 1041 1042 // generic 1043 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); } 1044 }; 1045 1046 1047 BASE(Op2, Instruction) 1048 private: 1049 Bytecodes::Code _op; 1050 Value _x; 1051 Value _y; 1052 1053 public: 1054 // creation 1055 Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL) 1056 : Instruction(type, state_before) 1057 , _op(op) 1058 , _x(x) 1059 , _y(y) 1060 { 1061 ASSERT_VALUES 1062 } 1063 1064 // accessors 1065 Bytecodes::Code op() const { return _op; } 1066 Value x() const { return _x; } 1067 Value y() const { return _y; } 1068 1069 // manipulators 1070 void swap_operands() { 1071 assert(is_commutative(), "operation must be commutative"); 1072 Value t = _x; _x = _y; _y = t; 1073 } 1074 1075 // generic 1076 virtual bool is_commutative() const { return false; } 1077 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1078 }; 1079 1080 1081 LEAF(ArithmeticOp, Op2) 1082 public: 1083 // creation 1084 ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before) 1085 : Op2(x->type()->meet(y->type()), op, x, y, state_before) 1086 { 1087 set_flag(IsStrictfpFlag, is_strictfp); 1088 if (can_trap()) pin(); 1089 } 1090 1091 // accessors 
1092 bool is_strictfp() const { return check_flag(IsStrictfpFlag); } 1093 1094 // generic 1095 virtual bool is_commutative() const; 1096 virtual bool can_trap() const; 1097 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1098 }; 1099 1100 1101 LEAF(ShiftOp, Op2) 1102 public: 1103 // creation 1104 ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {} 1105 1106 // generic 1107 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1108 }; 1109 1110 1111 LEAF(LogicOp, Op2) 1112 public: 1113 // creation 1114 LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {} 1115 1116 // generic 1117 virtual bool is_commutative() const; 1118 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1119 }; 1120 1121 1122 LEAF(CompareOp, Op2) 1123 public: 1124 // creation 1125 CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before) 1126 : Op2(intType, op, x, y, state_before) 1127 {} 1128 1129 // generic 1130 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1131 }; 1132 1133 1134 LEAF(IfOp, Op2) 1135 private: 1136 Value _tval; 1137 Value _fval; 1138 1139 public: 1140 // creation 1141 IfOp(Value x, Condition cond, Value y, Value tval, Value fval) 1142 : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y) 1143 , _tval(tval) 1144 , _fval(fval) 1145 { 1146 ASSERT_VALUES 1147 assert(tval->type()->tag() == fval->type()->tag(), "types must match"); 1148 } 1149 1150 // accessors 1151 virtual bool is_commutative() const; 1152 Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; } 1153 Condition cond() const { return (Condition)Op2::op(); } 1154 Value tval() const { return _tval; } 1155 Value fval() const { return _fval; } 1156 1157 // generic 1158 virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); } 1159 }; 1160 1161 1162 LEAF(Convert, Instruction) 1163 private: 1164 Bytecodes::Code _op; 1165 Value _value; 
1166 1167 public: 1168 // creation 1169 Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) { 1170 ASSERT_VALUES 1171 } 1172 1173 // accessors 1174 Bytecodes::Code op() const { return _op; } 1175 Value value() const { return _value; } 1176 1177 // generic 1178 virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); } 1179 HASHING2(Convert, true, op(), value()->subst()) 1180 }; 1181 1182 1183 LEAF(NullCheck, Instruction) 1184 private: 1185 Value _obj; 1186 1187 public: 1188 // creation 1189 NullCheck(Value obj, ValueStack* state_before) 1190 : Instruction(obj->type()->base(), state_before) 1191 , _obj(obj) 1192 { 1193 ASSERT_VALUES 1194 set_can_trap(true); 1195 assert(_obj->type()->is_object(), "null check must be applied to objects only"); 1196 pin(Instruction::PinExplicitNullCheck); 1197 } 1198 1199 // accessors 1200 Value obj() const { return _obj; } 1201 1202 // setters 1203 void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); } 1204 1205 // generic 1206 virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ } 1207 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1208 HASHING1(NullCheck, true, obj()->subst()) 1209 }; 1210 1211 1212 // This node is supposed to cast the type of another node to a more precise 1213 // declared type. 1214 LEAF(TypeCast, Instruction) 1215 private: 1216 ciType* _declared_type; 1217 Value _obj; 1218 1219 public: 1220 // The type of this node is the same type as the object type (and it might be constant). 
1221 TypeCast(ciType* type, Value obj, ValueStack* state_before) 1222 : Instruction(obj->type(), state_before, obj->type()->is_constant()), 1223 _declared_type(type), 1224 _obj(obj) {} 1225 1226 // accessors 1227 ciType* declared_type() const { return _declared_type; } 1228 Value obj() const { return _obj; } 1229 1230 // generic 1231 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1232 }; 1233 1234 1235 BASE(StateSplit, Instruction) 1236 private: 1237 ValueStack* _state; 1238 1239 protected: 1240 static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block); 1241 1242 public: 1243 // creation 1244 StateSplit(ValueType* type, ValueStack* state_before = NULL) 1245 : Instruction(type, state_before) 1246 , _state(NULL) 1247 { 1248 pin(PinStateSplitConstructor); 1249 } 1250 1251 // accessors 1252 ValueStack* state() const { return _state; } 1253 IRScope* scope() const; // the state's scope 1254 1255 // manipulation 1256 void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; } 1257 1258 // generic 1259 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 1260 virtual void state_values_do(ValueVisitor* f); 1261 }; 1262 1263 1264 LEAF(Invoke, StateSplit) 1265 private: 1266 Bytecodes::Code _code; 1267 Value _recv; 1268 Values* _args; 1269 BasicTypeList* _signature; 1270 int _vtable_index; 1271 ciMethod* _target; 1272 1273 public: 1274 // creation 1275 Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args, 1276 int vtable_index, ciMethod* target, ValueStack* state_before); 1277 1278 // accessors 1279 Bytecodes::Code code() const { return _code; } 1280 Value receiver() const { return _recv; } 1281 bool has_receiver() const { return receiver() != NULL; } 1282 int number_of_arguments() const { return _args->length(); } 1283 Value argument_at(int i) const { return _args->at(i); } 1284 int vtable_index() const { return 
_vtable_index; } 1285 BasicTypeList* signature() const { return _signature; } 1286 ciMethod* target() const { return _target; } 1287 1288 ciType* declared_type() const; 1289 1290 // Returns false if target is not loaded 1291 bool target_is_final() const { return check_flag(TargetIsFinalFlag); } 1292 bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); } 1293 // Returns false if target is not loaded 1294 bool target_is_strictfp() const { return check_flag(TargetIsStrictfpFlag); } 1295 1296 // JSR 292 support 1297 bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; } 1298 bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); } 1299 1300 virtual bool needs_exception_state() const { return false; } 1301 1302 // generic 1303 virtual bool can_trap() const { return true; } 1304 virtual void input_values_do(ValueVisitor* f) { 1305 StateSplit::input_values_do(f); 1306 if (has_receiver()) f->visit(&_recv); 1307 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1308 } 1309 virtual void state_values_do(ValueVisitor *f); 1310 }; 1311 1312 1313 LEAF(NewInstance, StateSplit) 1314 private: 1315 ciInstanceKlass* _klass; 1316 bool _is_unresolved; 1317 1318 public: 1319 // creation 1320 NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved) 1321 : StateSplit(instanceType, state_before) 1322 , _klass(klass), _is_unresolved(is_unresolved) 1323 {} 1324 1325 // accessors 1326 ciInstanceKlass* klass() const { return _klass; } 1327 bool is_unresolved() const { return _is_unresolved; } 1328 1329 virtual bool needs_exception_state() const { return false; } 1330 1331 // generic 1332 virtual bool can_trap() const { return true; } 1333 ciType* exact_type() const; 1334 ciType* declared_type() const; 1335 }; 1336 1337 LEAF(NewValueTypeInstance, StateSplit) 1338 bool _is_unresolved; 1339 ciValueKlass* _klass; 1340 Value _depends_on; // Link to instance on with withfield was 
called on 1341 1342 public: 1343 1344 // Default creation, always allocated for now 1345 NewValueTypeInstance(ciValueKlass* klass, ValueStack* state_before, bool is_unresolved, Value depends_on = NULL) 1346 : StateSplit(instanceType, state_before) 1347 , _is_unresolved(is_unresolved) 1348 , _klass(klass) 1349 { 1350 if (depends_on == NULL) { 1351 _depends_on = this; 1352 } else { 1353 _depends_on = depends_on; 1354 } 1355 } 1356 1357 // accessors 1358 bool is_unresolved() const { return _is_unresolved; } 1359 Value depends_on(); 1360 1361 ciValueKlass* klass() const { return _klass; } 1362 1363 virtual bool needs_exception_state() const { return false; } 1364 1365 // generic 1366 virtual bool can_trap() const { return true; } 1367 ciType* exact_type() const; 1368 ciType* declared_type() const; 1369 1370 // Only done in LIR Generator -> map everything to object 1371 void set_to_object_type() { set_type(instanceType); } 1372 }; 1373 1374 BASE(NewArray, StateSplit) 1375 private: 1376 Value _length; 1377 1378 public: 1379 // creation 1380 NewArray(Value length, ValueStack* state_before) 1381 : StateSplit(objectType, state_before) 1382 , _length(length) 1383 { 1384 // Do not ASSERT_VALUES since length is NULL for NewMultiArray 1385 } 1386 1387 // accessors 1388 Value length() const { return _length; } 1389 1390 virtual bool needs_exception_state() const { return false; } 1391 1392 ciType* exact_type() const { return NULL; } 1393 ciType* declared_type() const; 1394 1395 // generic 1396 virtual bool can_trap() const { return true; } 1397 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); } 1398 }; 1399 1400 1401 LEAF(NewTypeArray, NewArray) 1402 private: 1403 BasicType _elt_type; 1404 1405 public: 1406 // creation 1407 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before) 1408 : NewArray(length, state_before) 1409 , _elt_type(elt_type) 1410 {} 1411 1412 // accessors 1413 BasicType elt_type() const { 
return _elt_type; } 1414 ciType* exact_type() const; 1415 }; 1416 1417 1418 LEAF(NewObjectArray, NewArray) 1419 private: 1420 ciKlass* _klass; 1421 1422 public: 1423 // creation 1424 NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {} 1425 1426 // accessors 1427 ciKlass* klass() const { return _klass; } 1428 ciType* exact_type() const; 1429 }; 1430 1431 1432 LEAF(NewMultiArray, NewArray) 1433 private: 1434 ciKlass* _klass; 1435 Values* _dims; 1436 1437 public: 1438 // creation 1439 NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) { 1440 ASSERT_VALUES 1441 } 1442 1443 // accessors 1444 ciKlass* klass() const { return _klass; } 1445 Values* dims() const { return _dims; } 1446 int rank() const { return dims()->length(); } 1447 1448 // generic 1449 virtual void input_values_do(ValueVisitor* f) { 1450 // NOTE: we do not call NewArray::input_values_do since "length" 1451 // is meaningless for a multi-dimensional array; passing the 1452 // zeroth element down to NewArray as its length is a bad idea 1453 // since there will be a copy in the "dims" array which doesn't 1454 // get updated, and the value must not be traversed twice. 
Was bug 1455 // - kbr 4/10/2001 1456 StateSplit::input_values_do(f); 1457 for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i)); 1458 } 1459 1460 ciType* exact_type() const; 1461 }; 1462 1463 1464 BASE(TypeCheck, StateSplit) 1465 private: 1466 ciKlass* _klass; 1467 Value _obj; 1468 1469 ciMethod* _profiled_method; 1470 int _profiled_bci; 1471 1472 public: 1473 // creation 1474 TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before) 1475 : StateSplit(type, state_before), _klass(klass), _obj(obj), 1476 _profiled_method(NULL), _profiled_bci(0) { 1477 ASSERT_VALUES 1478 set_direct_compare(false); 1479 } 1480 1481 // accessors 1482 ciKlass* klass() const { return _klass; } 1483 Value obj() const { return _obj; } 1484 bool is_loaded() const { return klass() != NULL; } 1485 bool direct_compare() const { return check_flag(DirectCompareFlag); } 1486 1487 // manipulation 1488 void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); } 1489 1490 // generic 1491 virtual bool can_trap() const { return true; } 1492 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1493 1494 // Helpers for MethodData* profiling 1495 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1496 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1497 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1498 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1499 ciMethod* profiled_method() const { return _profiled_method; } 1500 int profiled_bci() const { return _profiled_bci; } 1501 }; 1502 1503 1504 LEAF(CheckCast, TypeCheck) 1505 bool _is_never_null; 1506 public: 1507 // creation 1508 CheckCast(ciKlass* klass, Value obj, ValueStack* state_before, bool never_null = false) 1509 : TypeCheck(klass, obj, objectType, state_before), _is_never_null(never_null) {} 1510 1511 void set_incompatible_class_change_check() { 1512 
set_flag(ThrowIncompatibleClassChangeErrorFlag, true); 1513 } 1514 bool is_incompatible_class_change_check() const { 1515 return check_flag(ThrowIncompatibleClassChangeErrorFlag); 1516 } 1517 void set_invokespecial_receiver_check() { 1518 set_flag(InvokeSpecialReceiverCheckFlag, true); 1519 } 1520 bool is_invokespecial_receiver_check() const { 1521 return check_flag(InvokeSpecialReceiverCheckFlag); 1522 } 1523 bool is_never_null() const { 1524 return _is_never_null; 1525 } 1526 1527 virtual bool needs_exception_state() const { 1528 return !is_invokespecial_receiver_check(); 1529 } 1530 1531 ciType* declared_type() const; 1532 }; 1533 1534 1535 LEAF(InstanceOf, TypeCheck) 1536 public: 1537 // creation 1538 InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {} 1539 1540 virtual bool needs_exception_state() const { return false; } 1541 }; 1542 1543 1544 BASE(AccessMonitor, StateSplit) 1545 private: 1546 Value _obj; 1547 int _monitor_no; 1548 1549 public: 1550 // creation 1551 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL) 1552 : StateSplit(illegalType, state_before) 1553 , _obj(obj) 1554 , _monitor_no(monitor_no) 1555 { 1556 set_needs_null_check(true); 1557 ASSERT_VALUES 1558 } 1559 1560 // accessors 1561 Value obj() const { return _obj; } 1562 int monitor_no() const { return _monitor_no; } 1563 1564 // generic 1565 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1566 }; 1567 1568 1569 LEAF(MonitorEnter, AccessMonitor) 1570 public: 1571 // creation 1572 MonitorEnter(Value obj, int monitor_no, ValueStack* state_before) 1573 : AccessMonitor(obj, monitor_no, state_before) 1574 { 1575 ASSERT_VALUES 1576 } 1577 1578 // generic 1579 virtual bool can_trap() const { return true; } 1580 }; 1581 1582 1583 LEAF(MonitorExit, AccessMonitor) 1584 public: 1585 // creation 1586 MonitorExit(Value obj, int monitor_no) 1587 : AccessMonitor(obj, 
monitor_no, NULL) 1588 { 1589 ASSERT_VALUES 1590 } 1591 }; 1592 1593 1594 LEAF(Intrinsic, StateSplit) 1595 private: 1596 vmIntrinsics::ID _id; 1597 Values* _args; 1598 Value _recv; 1599 ArgsNonNullState _nonnull_state; 1600 1601 public: 1602 // preserves_state can be set to true for Intrinsics 1603 // which are guaranteed to preserve register state across any slow 1604 // cases; setting it to true does not mean that the Intrinsic can 1605 // not trap, only that if we continue execution in the same basic 1606 // block after the Intrinsic, all of the registers are intact. This 1607 // allows load elimination and common expression elimination to be 1608 // performed across the Intrinsic. The default value is false. 1609 Intrinsic(ValueType* type, 1610 vmIntrinsics::ID id, 1611 Values* args, 1612 bool has_receiver, 1613 ValueStack* state_before, 1614 bool preserves_state, 1615 bool cantrap = true) 1616 : StateSplit(type, state_before) 1617 , _id(id) 1618 , _args(args) 1619 , _recv(NULL) 1620 { 1621 assert(args != NULL, "args must exist"); 1622 ASSERT_VALUES 1623 set_flag(PreservesStateFlag, preserves_state); 1624 set_flag(CanTrapFlag, cantrap); 1625 if (has_receiver) { 1626 _recv = argument_at(0); 1627 } 1628 set_needs_null_check(has_receiver); 1629 1630 // some intrinsics can't trap, so don't force them to be pinned 1631 if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) { 1632 unpin(PinStateSplitConstructor); 1633 } 1634 } 1635 1636 // accessors 1637 vmIntrinsics::ID id() const { return _id; } 1638 int number_of_arguments() const { return _args->length(); } 1639 Value argument_at(int i) const { return _args->at(i); } 1640 1641 bool has_receiver() const { return (_recv != NULL); } 1642 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } 1643 bool preserves_state() const { return check_flag(PreservesStateFlag); } 1644 1645 bool arg_needs_null_check(int i) const { 1646 return _nonnull_state.arg_needs_null_check(i); 1647 } 1648 
1649 void set_arg_needs_null_check(int i, bool check) { 1650 _nonnull_state.set_arg_needs_null_check(i, check); 1651 } 1652 1653 // generic 1654 virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1655 virtual void input_values_do(ValueVisitor* f) { 1656 StateSplit::input_values_do(f); 1657 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1658 } 1659 }; 1660 1661 1662 class LIR_List; 1663 1664 LEAF(BlockBegin, StateSplit) 1665 private: 1666 int _block_id; // the unique block id 1667 int _bci; // start-bci of block 1668 int _depth_first_number; // number of this block in a depth-first ordering 1669 int _linear_scan_number; // number of this block in linear-scan ordering 1670 int _dominator_depth; 1671 int _loop_depth; // the loop nesting level of this block 1672 int _loop_index; // number of the innermost loop of this block 1673 int _flags; // the flags associated with this block 1674 1675 // fields used by BlockListBuilder 1676 int _total_preds; // number of predecessors found by BlockListBuilder 1677 ResourceBitMap _stores_to_locals; // bit is set when a local variable is stored in the block 1678 1679 // SSA specific fields: (factor out later) 1680 BlockList _successors; // the successors of this block 1681 BlockList _predecessors; // the predecessors of this block 1682 BlockList _dominates; // list of blocks that are dominated by this block 1683 BlockBegin* _dominator; // the dominator of this block 1684 // SSA specific ends 1685 BlockEnd* _end; // the last instruction of this block 1686 BlockList _exception_handlers; // the exception handlers potentially invoked by this block 1687 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler 1688 int _exception_handler_pco; // if this block is the start of an exception handler, 1689 // this records the PC offset in the assembly code of the 1690 // first instruction in this block 1691 Label _label; // the label 
associated with this block 1692 LIR_List* _lir; // the low level intermediate representation for this block 1693 1694 ResourceBitMap _live_in; // set of live LIR_Opr registers at entry to this block 1695 ResourceBitMap _live_out; // set of live LIR_Opr registers at exit from this block 1696 ResourceBitMap _live_gen; // set of registers used before any redefinition in this block 1697 ResourceBitMap _live_kill; // set of registers defined in this block 1698 1699 ResourceBitMap _fpu_register_usage; 1700 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan 1701 int _first_lir_instruction_id; // ID of first LIR instruction in this block 1702 int _last_lir_instruction_id; // ID of last LIR instruction in this block 1703 1704 void iterate_preorder (boolArray& mark, BlockClosure* closure); 1705 void iterate_postorder(boolArray& mark, BlockClosure* closure); 1706 1707 friend class SuxAndWeightAdjuster; 1708 1709 public: 1710 void* operator new(size_t size) throw() { 1711 Compilation* c = Compilation::current(); 1712 void* res = c->arena()->Amalloc(size); 1713 ((BlockBegin*)res)->_id = c->get_next_id(); 1714 ((BlockBegin*)res)->_block_id = c->get_next_block_id(); 1715 return res; 1716 } 1717 1718 // initialization/counting 1719 static int number_of_blocks() { 1720 return Compilation::current()->number_of_blocks(); 1721 } 1722 1723 // creation 1724 BlockBegin(int bci) 1725 : StateSplit(illegalType) 1726 , _bci(bci) 1727 , _depth_first_number(-1) 1728 , _linear_scan_number(-1) 1729 , _dominator_depth(-1) 1730 , _loop_depth(0) 1731 , _loop_index(-1) 1732 , _flags(0) 1733 , _total_preds(0) 1734 , _stores_to_locals() 1735 , _successors(2) 1736 , _predecessors(2) 1737 , _dominates(2) 1738 , _dominator(NULL) 1739 , _end(NULL) 1740 , _exception_handlers(1) 1741 , _exception_states(NULL) 1742 , _exception_handler_pco(-1) 1743 , _lir(NULL) 1744 , _live_in() 1745 , _live_out() 1746 , _live_gen() 1747 , _live_kill() 1748 , _fpu_register_usage() 1749 , 
_fpu_stack_state(NULL) 1750 , _first_lir_instruction_id(-1) 1751 , _last_lir_instruction_id(-1) 1752 { 1753 _block = this; 1754 #ifndef PRODUCT 1755 set_printable_bci(bci); 1756 #endif 1757 } 1758 1759 // accessors 1760 int block_id() const { return _block_id; } 1761 int bci() const { return _bci; } 1762 BlockList* successors() { return &_successors; } 1763 BlockList* dominates() { return &_dominates; } 1764 BlockBegin* dominator() const { return _dominator; } 1765 int loop_depth() const { return _loop_depth; } 1766 int dominator_depth() const { return _dominator_depth; } 1767 int depth_first_number() const { return _depth_first_number; } 1768 int linear_scan_number() const { return _linear_scan_number; } 1769 BlockEnd* end() const { return _end; } 1770 Label* label() { return &_label; } 1771 LIR_List* lir() const { return _lir; } 1772 int exception_handler_pco() const { return _exception_handler_pco; } 1773 ResourceBitMap& live_in() { return _live_in; } 1774 ResourceBitMap& live_out() { return _live_out; } 1775 ResourceBitMap& live_gen() { return _live_gen; } 1776 ResourceBitMap& live_kill() { return _live_kill; } 1777 ResourceBitMap& fpu_register_usage() { return _fpu_register_usage; } 1778 intArray* fpu_stack_state() const { return _fpu_stack_state; } 1779 int first_lir_instruction_id() const { return _first_lir_instruction_id; } 1780 int last_lir_instruction_id() const { return _last_lir_instruction_id; } 1781 int total_preds() const { return _total_preds; } 1782 BitMap& stores_to_locals() { return _stores_to_locals; } 1783 1784 // manipulation 1785 void set_dominator(BlockBegin* dom) { _dominator = dom; } 1786 void set_loop_depth(int d) { _loop_depth = d; } 1787 void set_dominator_depth(int d) { _dominator_depth = d; } 1788 void set_depth_first_number(int dfn) { _depth_first_number = dfn; } 1789 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; } 1790 void set_end(BlockEnd* end); 1791 void clear_end(); 1792 void disconnect_from_graph(); 1793 
static void disconnect_edge(BlockBegin* from, BlockBegin* to); 1794 BlockBegin* insert_block_between(BlockBegin* sux); 1795 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1796 void set_lir(LIR_List* lir) { _lir = lir; } 1797 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; } 1798 void set_live_in (const ResourceBitMap& map) { _live_in = map; } 1799 void set_live_out (const ResourceBitMap& map) { _live_out = map; } 1800 void set_live_gen (const ResourceBitMap& map) { _live_gen = map; } 1801 void set_live_kill(const ResourceBitMap& map) { _live_kill = map; } 1802 void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; } 1803 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; } 1804 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; } 1805 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; } 1806 void increment_total_preds(int n = 1) { _total_preds += n; } 1807 void init_stores_to_locals(int locals_count) { _stores_to_locals.initialize(locals_count); } 1808 1809 // generic 1810 virtual void state_values_do(ValueVisitor* f); 1811 1812 // successors and predecessors 1813 int number_of_sux() const; 1814 BlockBegin* sux_at(int i) const; 1815 void add_successor(BlockBegin* sux); 1816 void remove_successor(BlockBegin* pred); 1817 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); } 1818 1819 void add_predecessor(BlockBegin* pred); 1820 void remove_predecessor(BlockBegin* pred); 1821 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); } 1822 int number_of_preds() const { return _predecessors.length(); } 1823 BlockBegin* pred_at(int i) const { return _predecessors.at(i); } 1824 1825 // exception handlers potentially invoked by this block 1826 void add_exception_handler(BlockBegin* b); 1827 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); } 1828 int 
number_of_exception_handlers() const { return _exception_handlers.length(); } 1829 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); } 1830 1831 // states of the instructions that have an edge to this exception handler 1832 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); } 1833 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); } 1834 int add_exception_state(ValueStack* state); 1835 1836 // flags 1837 enum Flag { 1838 no_flag = 0, 1839 std_entry_flag = 1 << 0, 1840 osr_entry_flag = 1 << 1, 1841 exception_entry_flag = 1 << 2, 1842 subroutine_entry_flag = 1 << 3, 1843 backward_branch_target_flag = 1 << 4, 1844 is_on_work_list_flag = 1 << 5, 1845 was_visited_flag = 1 << 6, 1846 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand 1847 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split 1848 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan 1849 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan 1850 donot_eliminate_range_checks = 1 << 11 // Should be try to eliminate range checks in this block 1851 }; 1852 1853 void set(Flag f) { _flags |= f; } 1854 void clear(Flag f) { _flags &= ~f; } 1855 bool is_set(Flag f) const { return (_flags & f) != 0; } 1856 bool is_entry_block() const { 1857 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag; 1858 return (_flags & entry_mask) != 0; 1859 } 1860 1861 // iteration 1862 void iterate_preorder (BlockClosure* closure); 1863 void iterate_postorder (BlockClosure* closure); 1864 1865 void block_values_do(ValueVisitor* f); 1866 1867 // loops 1868 void set_loop_index(int ix) { _loop_index = ix; 
} 1869 int loop_index() const { return _loop_index; } 1870 1871 // merging 1872 bool try_merge(ValueStack* state); // try to merge states at block begin 1873 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); } 1874 1875 // debugging 1876 void print_block() PRODUCT_RETURN; 1877 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN; 1878 }; 1879 1880 1881 BASE(BlockEnd, StateSplit) 1882 private: 1883 BlockList* _sux; 1884 1885 protected: 1886 BlockList* sux() const { return _sux; } 1887 1888 void set_sux(BlockList* sux) { 1889 #ifdef ASSERT 1890 assert(sux != NULL, "sux must exist"); 1891 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist"); 1892 #endif 1893 _sux = sux; 1894 } 1895 1896 public: 1897 // creation 1898 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1899 : StateSplit(type, state_before) 1900 , _sux(NULL) 1901 { 1902 set_flag(IsSafepointFlag, is_safepoint); 1903 } 1904 1905 // accessors 1906 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1907 // For compatibility with old code, for new code use block() 1908 BlockBegin* begin() const { return _block; } 1909 1910 // manipulation 1911 void set_begin(BlockBegin* begin); 1912 1913 // successors 1914 int number_of_sux() const { return _sux != NULL ? 
_sux->length() : 0; } 1915 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1916 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1917 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1918 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1919 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1920 }; 1921 1922 1923 LEAF(Goto, BlockEnd) 1924 public: 1925 enum Direction { 1926 none, // Just a regular goto 1927 taken, not_taken // Goto produced from If 1928 }; 1929 private: 1930 ciMethod* _profiled_method; 1931 int _profiled_bci; 1932 Direction _direction; 1933 public: 1934 // creation 1935 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1936 : BlockEnd(illegalType, state_before, is_safepoint) 1937 , _profiled_method(NULL) 1938 , _profiled_bci(0) 1939 , _direction(none) { 1940 BlockList* s = new BlockList(1); 1941 s->append(sux); 1942 set_sux(s); 1943 } 1944 1945 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1946 , _profiled_method(NULL) 1947 , _profiled_bci(0) 1948 , _direction(none) { 1949 BlockList* s = new BlockList(1); 1950 s->append(sux); 1951 set_sux(s); 1952 } 1953 1954 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1955 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1956 int profiled_bci() const { return _profiled_bci; } 1957 Direction direction() const { return _direction; } 1958 1959 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1960 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1961 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1962 void set_direction(Direction d) { _direction = d; } 1963 }; 1964 1965 #ifdef ASSERT 1966 LEAF(Assert, Instruction) 1967 private: 1968 Value _x; 1969 Condition _cond; 1970 Value _y; 1971 char *_message; 1972 1973 public: 1974 // creation 1975 // 
unordered_is_true is valid for float/double compares only 1976 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1977 1978 // accessors 1979 Value x() const { return _x; } 1980 Condition cond() const { return _cond; } 1981 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1982 Value y() const { return _y; } 1983 const char *message() const { return _message; } 1984 1985 // generic 1986 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1987 }; 1988 #endif 1989 1990 LEAF(RangeCheckPredicate, StateSplit) 1991 private: 1992 Value _x; 1993 Condition _cond; 1994 Value _y; 1995 1996 void check_state(); 1997 1998 public: 1999 // creation 2000 // unordered_is_true is valid for float/double compares only 2001 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 2002 , _x(x) 2003 , _cond(cond) 2004 , _y(y) 2005 { 2006 ASSERT_VALUES 2007 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2008 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2009 this->set_state(state); 2010 check_state(); 2011 } 2012 2013 // Always deoptimize 2014 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 2015 { 2016 this->set_state(state); 2017 _x = _y = NULL; 2018 check_state(); 2019 } 2020 2021 // accessors 2022 Value x() const { return _x; } 2023 Condition cond() const { return _cond; } 2024 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2025 Value y() const { return _y; } 2026 2027 void always_fail() { _x = _y = NULL; } 2028 2029 // generic 2030 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2031 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 2032 }; 2033 2034 LEAF(If, BlockEnd) 2035 private: 2036 Value _x; 2037 Condition _cond; 2038 Value _y; 2039 ciMethod* _profiled_method; 2040 int _profiled_bci; // 
Canonicalizer may alter bci of If node 2041 bool _swapped; // Is the order reversed with respect to the original If in the 2042 // bytecode stream? 2043 public: 2044 // creation 2045 // unordered_is_true is valid for float/double compares only 2046 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint) 2047 : BlockEnd(illegalType, state_before, is_safepoint) 2048 , _x(x) 2049 , _cond(cond) 2050 , _y(y) 2051 , _profiled_method(NULL) 2052 , _profiled_bci(0) 2053 , _swapped(false) 2054 { 2055 ASSERT_VALUES 2056 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2057 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2058 BlockList* s = new BlockList(2); 2059 s->append(tsux); 2060 s->append(fsux); 2061 set_sux(s); 2062 } 2063 2064 // accessors 2065 Value x() const { return _x; } 2066 Condition cond() const { return _cond; } 2067 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2068 Value y() const { return _y; } 2069 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 
0 : 1); } 2070 BlockBegin* tsux() const { return sux_for(true); } 2071 BlockBegin* fsux() const { return sux_for(false); } 2072 BlockBegin* usux() const { return sux_for(unordered_is_true()); } 2073 bool should_profile() const { return check_flag(ProfileMDOFlag); } 2074 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 2075 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered 2076 bool is_swapped() const { return _swapped; } 2077 2078 // manipulation 2079 void swap_operands() { 2080 Value t = _x; _x = _y; _y = t; 2081 _cond = mirror(_cond); 2082 } 2083 2084 void swap_sux() { 2085 assert(number_of_sux() == 2, "wrong number of successors"); 2086 BlockList* s = sux(); 2087 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2088 _cond = negate(_cond); 2089 set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag)); 2090 } 2091 2092 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 2093 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 2094 void set_profiled_bci(int bci) { _profiled_bci = bci; } 2095 void set_swapped(bool value) { _swapped = value; } 2096 // generic 2097 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2098 }; 2099 2100 2101 LEAF(IfInstanceOf, BlockEnd) 2102 private: 2103 ciKlass* _klass; 2104 Value _obj; 2105 bool _test_is_instance; // jump if instance 2106 int _instanceof_bci; 2107 2108 public: 2109 IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux) 2110 : BlockEnd(illegalType, NULL, false) // temporary set to false 2111 , _klass(klass) 2112 , _obj(obj) 2113 , _test_is_instance(test_is_instance) 2114 , _instanceof_bci(instanceof_bci) 2115 { 2116 ASSERT_VALUES 2117 assert(instanceof_bci >= 0, "illegal bci"); 2118 BlockList* s = new BlockList(2); 2119 s->append(tsux); 2120 
s->append(fsux); 2121 set_sux(s); 2122 } 2123 2124 // accessors 2125 // 2126 // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an 2127 // instance of klass; otherwise it tests if it is *not* and instance 2128 // of klass. 2129 // 2130 // Note 2: IfInstanceOf instructions are created by combining an InstanceOf 2131 // and an If instruction. The IfInstanceOf bci() corresponds to the 2132 // bci that the If would have had; the (this->) instanceof_bci() is 2133 // the bci of the original InstanceOf instruction. 2134 ciKlass* klass() const { return _klass; } 2135 Value obj() const { return _obj; } 2136 int instanceof_bci() const { return _instanceof_bci; } 2137 bool test_is_instance() const { return _test_is_instance; } 2138 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); } 2139 BlockBegin* tsux() const { return sux_for(true); } 2140 BlockBegin* fsux() const { return sux_for(false); } 2141 2142 // manipulation 2143 void swap_sux() { 2144 assert(number_of_sux() == 2, "wrong number of successors"); 2145 BlockList* s = sux(); 2146 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2147 _test_is_instance = !_test_is_instance; 2148 } 2149 2150 // generic 2151 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); } 2152 }; 2153 2154 2155 BASE(Switch, BlockEnd) 2156 private: 2157 Value _tag; 2158 2159 public: 2160 // creation 2161 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint) 2162 : BlockEnd(illegalType, state_before, is_safepoint) 2163 , _tag(tag) { 2164 ASSERT_VALUES 2165 set_sux(sux); 2166 } 2167 2168 // accessors 2169 Value tag() const { return _tag; } 2170 int length() const { return number_of_sux() - 1; } 2171 2172 virtual bool needs_exception_state() const { return false; } 2173 2174 // generic 2175 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); } 2176 }; 2177 2178 2179 
// Switch over a dense key range; successor i handles key lo_key + i,
// the last successor is the default.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;                                   // smallest key in the table

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); } // hi_key() < lo_key only if lo_key + length - 1 wrapped

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + (length() - 1); }
};


// Switch over a sparse, sorted key set; _keys->at(i) is handled by successor i,
// the last successor is the default.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};


// Method return; _result is NULL for a void return. The instruction's type
// is derived from the result (voidType when there is none).
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Explicit throw of an exception object; always ends its block.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// Artificial root of the CFG: branches to the standard entry and, if present,
// the OSR entry. The standard entry is always the default (last) successor.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? NULL : sux_at(0); }
};


// Models the incoming OSR buffer pointer at an on-stack-replacement entry;
// pointer-sized, hence longType on 64-bit platforms.
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin(); // must stay at the handler entry
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;             // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};


// Abstract base class of all sun.misc.Unsafe memory operations.
// A put produces no value (voidType); a get produces the accessed value.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Unsafe access to a raw address, optionally base + (index << log2_scale).
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;                                   // Base address (a Java long)
  Value _index;                                  // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;                             // Scale factor: 0, 1, 2, or 3.
                                                 // Indicates log2 of number of bytes (1, 2, 4, or 8)
                                                 // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool  has_index()                              { return (_index != NULL); }
  int   log2_scale()                             { return _log2_scale; }

  // setters
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_base);
                                                   if (has_index()) f->visit(&_index); }
};


// Raw (address-based) unsafe load.
LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
  bool _may_be_unaligned, _is_wide;              // For OSREntry

 public:
  UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  bool may_be_unaligned()                        { return _may_be_unaligned; }
  bool is_wide()                                 { return _is_wide; }
};


// Raw (address-based) unsafe store.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;                                  // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeRawOp::input_values_do(f);
                                                   f->visit(&_value); }
};


// Unsafe access to a field of an object identified by (object, offset).
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;                                 // Object to be fetched from or mutated
  Value _offset;                                 // Offset within object
  bool  _is_volatile;                            // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_object);
                                                   f->visit(&_offset); }
};


// Object-based unsafe load.
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


// Object-based unsafe store.
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};

// Atomic unsafe get-and-set / get-and-add on an object field.
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
  bool  _is_add;                                 // true for get-and-add, false for get-and-set
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                            { return _is_add; }
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};

// Records receiver/argument type profile data at a call site.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;                      // method containing the call
  int              _bci_of_invoke;
  ciMethod*        _callee;                      // the method that is called at the given bci
  Value            _recv;                        // receiver, or NULL
  ciKlass*         _known_holder;
  Values*          _obj_args;                    // arguments for type profiling
  ArgsNonNullState _nonnull_state;               // Do we know whether some arguments are never null?
  bool             _inlined;                     // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method() const                       { return _method; }
  int bci_of_invoke() const                      { return _bci_of_invoke; }
  ciMethod* callee() const                       { return _callee; }
  Value recv() const                             { return _recv; }
  ciKlass* known_holder() const                  { return _known_holder; }
  int nb_profiled_args() const                   { return _obj_args == NULL ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i) const             { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined() const                           { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != NULL) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};

// Records the type of a method's return value for type profiling.
LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod* _method;                             // method containing the invoke
  ciMethod* _callee;                             // the method whose return value is profiled
  int       _bci_of_invoke;
  Value     _ret;                                // the returned value, or NULL

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileType has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method() const                       { return _method; }
  ciMethod* callee() const                       { return _callee; }
  int bci_of_invoke() const                      { return _bci_of_invoke; }
  Value ret() const                              { return _ret; }

  virtual void input_values_do(ValueVisitor* f) {
    if (_ret != NULL) {
      f->visit(&_ret);
    }
  }
};

// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
LEAF(RuntimeCall, Instruction)
 private:
  const char* _entry_name;                       // symbolic name for debugging/printing
  address     _entry;                            // entry point of the C function
  Values*     _args;
  bool        _pass_thread;                      // Pass the JavaThread* as an implicit first argument

 public:
  RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
    : Instruction(type)
    , _entry_name(entry_name)
    , _entry(entry)
    , _args(args)
    , _pass_thread(pass_thread) {
    ASSERT_VALUES
    pin();
  }

  // accessors
  const char* entry_name() const                 { return _entry_name; }
  address entry() const                          { return _entry; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  bool pass_thread() const                       { return _pass_thread; }

  virtual void input_values_do(ValueVisitor* f) {
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};

// Use to trip invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;
  ValueStack* _state;

 public:
  ProfileInvoke(ciMethod* inlinee, ValueStack* state)
    : Instruction(voidType)
    , _inlinee(inlinee)
    , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where
    // located (NOTE(review): pinning presumed required, as for ProfileCall —
    // original comment marked this with "QQQ???")
    pin();
  }

  // accessors
  ciMethod* inlinee()      { return _inlinee; }
  ValueStack* state()      { return _state; }
  virtual void input_values_do(ValueVisitor*)   {}
  virtual void state_values_do(ValueVisitor*);
};

// Explicit memory barrier; _code selects the barrier kind as a LIR_Code.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  // accessors
  LIR_Code code()          { return _code; }

  virtual void input_values_do(ValueVisitor*)   {}
};

// A directed CFG edge (from -> to), used when collecting/splitting edges.
class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  BlockBegin* _to;
 public:
  BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
  BlockBegin* from() const { return _from; }
  BlockBegin* to() const   { return _to;   }
  bool is_same(BlockBegin* from, BlockBegin* to) const { return  _from == from && _to == to; }
  bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); }
  void set_to(BlockBegin* b)   { _to = b; }
  void set_from(BlockBegin* b) { _from = b; }
};

typedef GrowableArray<BlockPair*> BlockPairList;

// BlockBegin's successor list must mirror its BlockEnd's once an end is set;
// the asserts below enforce that invariant.
inline int         BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const              { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); }

#undef ASSERT_VALUES

#endif // SHARE_VM_C1_C1_INSTRUCTION_HPP