1 /* 2 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_C1_C1_INSTRUCTION_HPP 26 #define SHARE_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
47 48 class Instruction; 49 class Phi; 50 class Local; 51 class Constant; 52 class AccessField; 53 class LoadField; 54 class StoreField; 55 class AccessArray; 56 class ArrayLength; 57 class AccessIndexed; 58 class LoadIndexed; 59 class StoreIndexed; 60 class NegateOp; 61 class Op2; 62 class ArithmeticOp; 63 class ShiftOp; 64 class LogicOp; 65 class CompareOp; 66 class IfOp; 67 class Convert; 68 class NullCheck; 69 class TypeCast; 70 class OsrEntry; 71 class ExceptionObject; 72 class StateSplit; 73 class Invoke; 74 class NewInstance; 75 class NewValueTypeInstance; 76 class NewArray; 77 class NewTypeArray; 78 class NewObjectArray; 79 class NewMultiArray; 80 class TypeCheck; 81 class CheckCast; 82 class InstanceOf; 83 class AccessMonitor; 84 class MonitorEnter; 85 class MonitorExit; 86 class Intrinsic; 87 class BlockBegin; 88 class BlockEnd; 89 class Goto; 90 class If; 91 class IfInstanceOf; 92 class Switch; 93 class TableSwitch; 94 class LookupSwitch; 95 class Return; 96 class Throw; 97 class Base; 98 class RoundFP; 99 class UnsafeOp; 100 class UnsafeRawOp; 101 class UnsafeGetRaw; 102 class UnsafePutRaw; 103 class UnsafeObjectOp; 104 class UnsafeGetObject; 105 class UnsafePutObject; 106 class UnsafeGetAndSetObject; 107 class ProfileCall; 108 class ProfileReturnType; 109 class ProfileInvoke; 110 class RuntimeCall; 111 class MemBar; 112 class RangeCheckPredicate; 113 #ifdef ASSERT 114 class Assert; 115 #endif 116 117 // A Value is a reference to the instruction creating the value 118 typedef Instruction* Value; 119 typedef GrowableArray<Value> Values; 120 typedef GrowableArray<ValueStack*> ValueStackStack; 121 122 // BlockClosure is the base class for block traversal/iteration. 
// Abstract visitor invoked once per basic block during CFG traversal.
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// Growable list of basic blocks with traversal helpers used by the
// optimizer and code-generation phases.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  // Pre-fills the list with 'size' copies of 'init'.
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure from first to last block
  void iterate_backward(BlockClosure* closure);  // apply closure from last to first block
  void blocks_do(void f(BlockBegin*));           // apply a plain function to every block
  void values_do(ValueVisitor* f);               // visit all values in all blocks
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Type-based double dispatch over the concrete Instruction classes.
// Each LEAF class's visit() calls back into the matching do_X method.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewValueTypeInstance(NewValueTypeInstance* x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1                 )                    ((intx)(x1))
#define HASH2(x1, x2             )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3         )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4     )                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
288 289 class Instruction: public CompilationResourceObj { 290 private: 291 int _id; // the unique instruction id 292 #ifndef PRODUCT 293 int _printable_bci; // the bci of the instruction for printing 294 #endif 295 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 296 int _pin_state; // set of PinReason describing the reason for pinning 297 ValueType* _type; // the instruction value type 298 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 299 Instruction* _subst; // the substitution instruction if any 300 LIR_Opr _operand; // LIR specific information 301 unsigned int _flags; // Flag bits 302 303 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 304 ValueStack* _exception_state; // Copy of state for exception handling 305 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 306 307 friend class UseCountComputer; 308 friend class BlockBegin; 309 310 void update_exception_state(ValueStack* state); 311 312 protected: 313 BlockBegin* _block; // Block that contains this instruction 314 315 void set_type(ValueType* type) { 316 assert(type != NULL, "type must exist"); 317 _type = type; 318 } 319 320 // Helper class to keep track of which arguments need a null check 321 class ArgsNonNullState { 322 private: 323 int _nonnull_state; // mask identifying which args are nonnull 324 public: 325 ArgsNonNullState() 326 : _nonnull_state(AllBits) {} 327 328 // Does argument number i needs a null check? 329 bool arg_needs_null_check(int i) const { 330 // No data is kept for arguments starting at position 33 so 331 // conservatively assume that they need a null check. 
332 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 333 return is_set_nth_bit(_nonnull_state, i); 334 } 335 return true; 336 } 337 338 // Set whether argument number i needs a null check or not 339 void set_arg_needs_null_check(int i, bool check) { 340 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 341 if (check) { 342 _nonnull_state |= nth_bit(i); 343 } else { 344 _nonnull_state &= ~(nth_bit(i)); 345 } 346 } 347 } 348 }; 349 350 public: 351 void* operator new(size_t size) throw() { 352 Compilation* c = Compilation::current(); 353 void* res = c->arena()->Amalloc(size); 354 ((Instruction*)res)->_id = c->get_next_id(); 355 return res; 356 } 357 358 static const int no_bci = -99; 359 360 enum InstructionFlag { 361 NeedsNullCheckFlag = 0, 362 NeverNullFlag, // For "Q" signatures 363 CanTrapFlag, 364 DirectCompareFlag, 365 IsEliminatedFlag, 366 IsSafepointFlag, 367 IsStaticFlag, 368 IsStrictfpFlag, 369 NeedsStoreCheckFlag, 370 NeedsWriteBarrierFlag, 371 PreservesStateFlag, 372 TargetIsFinalFlag, 373 TargetIsLoadedFlag, 374 TargetIsStrictfpFlag, 375 UnorderedIsTrueFlag, 376 NeedsPatchingFlag, 377 ThrowIncompatibleClassChangeErrorFlag, 378 InvokeSpecialReceiverCheckFlag, 379 ProfileMDOFlag, 380 IsLinkedInBlockFlag, 381 NeedsRangeCheckFlag, 382 InWorkListFlag, 383 DeoptimizeOnException, 384 InstructionLastFlag 385 }; 386 387 public: 388 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 389 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 390 391 // 'globally' used condition values 392 enum Condition { 393 eql, neq, lss, leq, gtr, geq, aeq, beq 394 }; 395 396 // Instructions may be pinned for many reasons and under certain conditions 397 // with enough knowledge it's possible to safely unpin them. 
398 enum PinReason { 399 PinUnknown = 1 << 0 400 , PinExplicitNullCheck = 1 << 3 401 , PinStackForStateSplit= 1 << 12 402 , PinStateSplitConstructor= 1 << 13 403 , PinGlobalValueNumbering= 1 << 14 404 }; 405 406 static Condition mirror(Condition cond); 407 static Condition negate(Condition cond); 408 409 // initialization 410 static int number_of_instructions() { 411 return Compilation::current()->number_of_instructions(); 412 } 413 414 // creation 415 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 416 : 417 #ifndef PRODUCT 418 _printable_bci(-99), 419 #endif 420 _use_count(0) 421 , _pin_state(0) 422 , _type(type) 423 , _next(NULL) 424 , _subst(NULL) 425 , _operand(LIR_OprFact::illegalOpr) 426 , _flags(0) 427 , _state_before(state_before) 428 , _exception_handlers(NULL) 429 , _block(NULL) 430 { 431 check_state(state_before); 432 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 433 update_exception_state(_state_before); 434 } 435 436 // accessors 437 int id() const { return _id; } 438 #ifndef PRODUCT 439 bool has_printable_bci() const { return _printable_bci != -99; } 440 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 441 void set_printable_bci(int bci) { _printable_bci = bci; } 442 #endif 443 int dominator_depth(); 444 int use_count() const { return _use_count; } 445 int pin_state() const { return _pin_state; } 446 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 447 ValueType* type() const { return _type; } 448 BlockBegin *block() const { return _block; } 449 Instruction* prev(); // use carefully, expensive operation 450 Instruction* next() const { return _next; } 451 bool has_subst() const { return _subst != NULL; } 452 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 453 LIR_Opr operand() const { return _operand; } 454 455 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 456 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 457 void set_never_null(bool f) { set_flag(NeverNullFlag, f); } 458 bool is_never_null() const { return check_flag(NeverNullFlag); } 459 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 460 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 461 462 bool has_uses() const { return use_count() > 0; } 463 ValueStack* state_before() const { return _state_before; } 464 ValueStack* exception_state() const { return _exception_state; } 465 virtual bool needs_exception_state() const { return true; } 466 XHandlers* exception_handlers() const { return _exception_handlers; } 467 ciKlass* as_loaded_klass_or_null() const; 468 469 // manipulation 470 void pin(PinReason reason) { _pin_state |= reason; } 471 void pin() { _pin_state |= PinUnknown; } 472 // DANGEROUS: only used by EliminateStores 473 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 474 475 Instruction* set_next(Instruction* next) { 476 assert(next->has_printable_bci(), "_printable_bci should have been set"); 477 assert(next != NULL, "must not be NULL"); 478 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 479 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 480 481 BlockBegin *block = this->block(); 482 next->_block = block; 483 484 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 485 _next = next; 486 return next; 487 } 488 489 Instruction* set_next(Instruction* next, int bci) { 490 #ifndef PRODUCT 491 next->set_printable_bci(bci); 492 #endif 493 return set_next(next); 494 } 495 496 // when blocks are merged 497 void fixup_block_pointers() { 498 Instruction *cur = next()->next(); // next()'s block is set in set_next 499 
while (cur && cur->_block != block()) { 500 cur->_block = block(); 501 cur = cur->next(); 502 } 503 } 504 505 Instruction *insert_after(Instruction *i) { 506 Instruction* n = _next; 507 set_next(i); 508 i->set_next(n); 509 return _next; 510 } 511 512 bool is_flattened_array() const; // FIXME -- remove it 513 514 bool is_loaded_flattened_array() const; 515 bool maybe_flattened_array(); 516 bool maybe_null_free_array(); 517 518 Instruction *insert_after_same_bci(Instruction *i) { 519 #ifndef PRODUCT 520 i->set_printable_bci(printable_bci()); 521 #endif 522 return insert_after(i); 523 } 524 525 void set_subst(Instruction* subst) { 526 assert(subst == NULL || 527 type()->base() == subst->type()->base() || 528 subst->type()->base() == illegalType, "type can't change"); 529 _subst = subst; 530 } 531 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 532 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 533 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 534 535 // machine-specifics 536 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 537 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 538 539 // generic 540 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 541 virtual Phi* as_Phi() { return NULL; } 542 virtual Local* as_Local() { return NULL; } 543 virtual Constant* as_Constant() { return NULL; } 544 virtual AccessField* as_AccessField() { return NULL; } 545 virtual LoadField* as_LoadField() { return NULL; } 546 virtual StoreField* as_StoreField() { return NULL; } 547 virtual AccessArray* as_AccessArray() { return NULL; } 548 virtual ArrayLength* as_ArrayLength() { return NULL; } 549 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 550 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 551 virtual StoreIndexed* as_StoreIndexed() { return NULL; 
} 552 virtual NegateOp* as_NegateOp() { return NULL; } 553 virtual Op2* as_Op2() { return NULL; } 554 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 555 virtual ShiftOp* as_ShiftOp() { return NULL; } 556 virtual LogicOp* as_LogicOp() { return NULL; } 557 virtual CompareOp* as_CompareOp() { return NULL; } 558 virtual IfOp* as_IfOp() { return NULL; } 559 virtual Convert* as_Convert() { return NULL; } 560 virtual NullCheck* as_NullCheck() { return NULL; } 561 virtual OsrEntry* as_OsrEntry() { return NULL; } 562 virtual StateSplit* as_StateSplit() { return NULL; } 563 virtual Invoke* as_Invoke() { return NULL; } 564 virtual NewInstance* as_NewInstance() { return NULL; } 565 virtual NewValueTypeInstance* as_NewValueTypeInstance() { return NULL; } 566 virtual NewArray* as_NewArray() { return NULL; } 567 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 568 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 569 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 570 virtual TypeCheck* as_TypeCheck() { return NULL; } 571 virtual CheckCast* as_CheckCast() { return NULL; } 572 virtual InstanceOf* as_InstanceOf() { return NULL; } 573 virtual TypeCast* as_TypeCast() { return NULL; } 574 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 575 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 576 virtual MonitorExit* as_MonitorExit() { return NULL; } 577 virtual Intrinsic* as_Intrinsic() { return NULL; } 578 virtual BlockBegin* as_BlockBegin() { return NULL; } 579 virtual BlockEnd* as_BlockEnd() { return NULL; } 580 virtual Goto* as_Goto() { return NULL; } 581 virtual If* as_If() { return NULL; } 582 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 583 virtual TableSwitch* as_TableSwitch() { return NULL; } 584 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 585 virtual Return* as_Return() { return NULL; } 586 virtual Throw* as_Throw() { return NULL; } 587 virtual Base* as_Base() { return NULL; } 588 virtual 
RoundFP* as_RoundFP() { return NULL; } 589 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 590 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 591 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 592 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 593 594 #ifdef ASSERT 595 virtual Assert* as_Assert() { return NULL; } 596 #endif 597 598 virtual void visit(InstructionVisitor* v) = 0; 599 600 virtual bool can_trap() const { return false; } 601 602 virtual void input_values_do(ValueVisitor* f) = 0; 603 virtual void state_values_do(ValueVisitor* f); 604 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 605 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 606 607 virtual ciType* exact_type() const; 608 virtual ciType* declared_type() const { return NULL; } 609 610 // hashing 611 virtual const char* name() const = 0; 612 HASHING1(Instruction, false, id()) // hashing disabled by default 613 614 // debugging 615 static void check_state(ValueStack* state) PRODUCT_RETURN; 616 void print() PRODUCT_RETURN; 617 void print_line() PRODUCT_RETURN; 618 void print(InstructionPrinter& ip) PRODUCT_RETURN; 619 }; 620 621 622 // The following macros are used to define base (i.e., non-leaf) 623 // and leaf instruction classes. They define class-name related 624 // generic functionality in one place. 
// BASE opens a non-leaf instruction class and provides its as_X() cast.
// Note: it intentionally leaves the class body open; the using code
// supplies members and the closing brace.
#define BASE(class_name, super_class_name)                   \
  class class_name: public super_class_name {                \
   public:                                                   \
    virtual class_name* as_##class_name() { return this; }   \


// LEAF additionally supplies name() and the visitor double-dispatch hook.
#define LEAF(class_name, super_class_name)                   \
  BASE(class_name, super_class_name)                         \
   public:                                                   \
    virtual const char* name() const { return #class_name; } \
    virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \


// Debugging support


#ifdef ASSERT
// Visitor asserting that every operand of an instruction is non-NULL.
class AssertValues: public ValueVisitor {
  void visit(Value* x)             { assert((*x) != NULL, "value must exist"); }
};
  #define ASSERT_VALUES                          { AssertValues assert_value; values_do(&assert_value); }
#else
  #define ASSERT_VALUES
#endif // ASSERT


// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int         _pf_flags;    // the flags of the phi function
  int         _index;       // to value on operand stack (index < 0) or to local
  ciType*     _exact_type;  // currently is set only for flattened arrays, NULL otherwise.
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index, ciType* exact_type)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  , _exact_type(exact_type)
  {
    _block = b;
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    // merging locals of incompatible types yields an illegal phi
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  virtual ciType* exact_type() const {
    return _exact_type;
  }

  virtual ciType* declared_type() const {
    return _exact_type;
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;  // value flowing in from predecessor i
  int   operand_count() const;    // number of predecessors/operands

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // Phi operands live in the predecessors' states, not here.
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  bool     _is_receiver;                         // if local variable holds the receiver: "this" for non-static methods
  ciType*  _declared_type;                       // static type from the method signature
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index, bool receiver, bool never_null)
    : Instruction(type)
    , _java_index(index)
    , _is_receiver(receiver)
    , _declared_type(declared)
  {
    set_never_null(never_null);
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }
  bool is_receiver() const                       { return _is_receiver; }

  virtual ciType* declared_type() const          { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};


// A compile-time constant; its value is carried in the (constant) ValueType.
LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, NULL, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  // Variant for constants that require patching (unresolved at compile time).
  Constant(ValueType* type, ValueStack* state_before):
    Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != NULL, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    // since it's patching it needs to be pinned
    pin();
  }

  // generic
  virtual bool can_trap() const                  { return state_before() != NULL; }
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  // Folds a conditional branch against 'right': returns the successor
  // taken if the outcome is known, NULL if the values are not comparable.
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return NULL;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }
};


// Common base of field loads and stores.
BASE(AccessField, Instruction)
 private:
  Value       _obj;                              // receiver object (or holder mirror for statics)
  int         _offset;                           // field offset within the object
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching,
            ciValueKlass* value_klass = NULL, Value default_value = NULL )
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};


LEAF(StoreField, AccessField)
 private:
  Value _value;                                  // the value being stored

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};


// Common base of all array accesses (null check + pinned side effect).
BASE(AccessArray, Instruction)
 private:
  Value       _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};


LEAF(ArrayLength, AccessArray)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


// Common base of indexed array loads and stores (adds index/range check).
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;                              // element index
  Value     _length;                             // array length for range checking (may be cleared)
  BasicType _elt_type;                           // element basic type
  bool      _mismatched;                         // access type differs from declared element type

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  , _mismatched(mismatched)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }
  bool mismatched() const                        { return _mismatched; }

  void clear_length()                            { _length = NULL; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};


LEAF(LoadIndexed, AccessIndexed)
973 private: 974 NullCheck* _explicit_null_check; // For explicit null check elimination 975 NewValueTypeInstance* _vt; 976 977 public: 978 // creation 979 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false) 980 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 981 , _explicit_null_check(NULL) {} 982 983 // accessors 984 NullCheck* explicit_null_check() const { return _explicit_null_check; } 985 986 // setters 987 // See LoadField::set_explicit_null_check for documentation 988 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 989 990 ciType* exact_type() const; 991 ciType* declared_type() const; 992 993 NewValueTypeInstance* vt() { return _vt; } 994 void set_vt(NewValueTypeInstance* vt) { _vt = vt; } 995 996 // generic 997 HASHING2(LoadIndexed, true, array()->subst(), index()->subst()) 998 }; 999 1000 1001 LEAF(StoreIndexed, AccessIndexed) 1002 private: 1003 Value _value; 1004 1005 ciMethod* _profiled_method; 1006 int _profiled_bci; 1007 bool _check_boolean; 1008 1009 public: 1010 // creation 1011 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, 1012 bool check_boolean, bool mismatched = false) 1013 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 1014 , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean) 1015 { 1016 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); 1017 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object())); 1018 ASSERT_VALUES 1019 pin(); 1020 } 1021 1022 // accessors 1023 Value value() const { return _value; } 1024 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 1025 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } 1026 bool check_boolean() const { return _check_boolean; } 1027 // Helpers for MethodData* 
// profiling
  void set_should_profile(bool value)              { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)       { _profiled_method = method; }
  void set_profiled_bci(int bci)                   { _profiled_bci = bci; }
  bool should_profile() const                      { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const                { return _profiled_method; }
  int profiled_bci() const                         { return _profiled_bci; }
  // Flattened array support
  bool is_exact_flattened_array_store() const;
  // generic
  virtual void input_values_do(ValueVisitor* f)    { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Unary operation on a single input value; the result type is the
// base type of the input.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                  { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)    { f->visit(&_x); }
};


// Base class for all binary operations; stores the bytecode and the
// two operands.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                       { return _op; }
  Value x() const                                  { return _x; }
  Value y() const                                  { return _y; }

  // manipulators
  // Exchanges the two operands; only legal for commutative operations.
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const              { return false; }
  virtual void input_values_do(ValueVisitor* f)    { f->visit(&_x); f->visit(&_y); }
};


// Arithmetic binary operation (add, sub, mul, div, rem, ...); the
// result type is the meet of the operand types.
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    // Trapping ops (e.g. division) must not be moved or eliminated.
    if (can_trap()) pin();
  }

  // accessors
  bool is_strictfp() const                         { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  // Hashed under the Op2 base name so equivalent binary ops value-number together.
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Shift operation; the result type is the base type of the shifted value.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Bitwise logic operation (and/or/xor).
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Three-way comparison (lcmp, fcmpl/fcmpg, dcmpl/dcmpg) producing an int.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional expression (select): yields tval if (x cond y), else fval.
// The condition is smuggled into the Op2 bytecode slot, hence the
// overridden op() accessor below.
LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;
  bool  _substitutability_check;                   // Valhalla: compare via substitutability test — confirm exact semantics in the LIR generator

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval, ValueStack* state_before, bool substitutability_check)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  , _substitutability_check(substitutability_check)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
    set_state_before(state_before);
  }

  // accessors
  virtual bool is_commutative() const;
  // The Op2 op slot holds a Condition, not a bytecode — never call op() on an IfOp.
  Bytecodes::Code op() const                       { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const { return
(Condition)Op2::op(); }
  Value tval() const                               { return _tval; }
  Value fval() const                               { return _fval; }
  bool substitutability_check() const              { return _substitutability_check; }
  // generic
  virtual void input_values_do(ValueVisitor* f)    { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (i2l, f2d, ...); result has the target type.
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;                             // the conversion bytecode
  Value           _value;                          // the value being converted

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                       { return _op; }
  Value value() const                              { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)    { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on an object reference; result is the (non-null)
// object itself.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                                { return _obj; }

  // setters
  void set_can_trap(bool can_trap)                 { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                    { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)    { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;                          // the more precise type asserted for _obj
  Value   _obj;

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                    { return _declared_type; }
  Value obj() const                                { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)    { f->visit(&_obj); }
};


// Base class for instructions that carry a full JVM state (locals/stack)
// after execution; such instructions are pinned by construction.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;                              // the state after this instruction; set exactly once

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                        { return _state; }
  IRScope* scope() const;                          // the state's scope

  // manipulation
  void set_state(ValueStack* state)                { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)    { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// Method invocation (invokestatic/special/virtual/interface/dynamic).
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;                           // the invoke bytecode
  Value           _recv;                           // receiver; NULL for static calls
  Values*         _args;                           // the arguments (excluding the receiver)
  BasicTypeList*  _signature;
  int             _vtable_index;
  ciMethod*       _target;                         // the resolved callee

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before, bool never_null);

  // accessors
  Bytecodes::Code code() const                     { return _code; }
  Value receiver() const                           { return _recv; }
  bool
has_receiver() const { return receiver() != NULL; }
  int number_of_arguments() const                  { return _args->length(); }
  Value argument_at(int i) const                   { return _args->at(i); }
  int vtable_index() const                         { return _vtable_index; }
  BasicTypeList* signature() const                 { return _signature; }
  ciMethod* target() const                         { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                     { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                    { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                  { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                    { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const          { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const       { return false; }

  // generic
  // Calls can always trap (e.g. exceptions thrown by the callee).
  virtual bool can_trap() const                    { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Allocation of a plain object instance (the 'new' bytecode).
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;                         // the class being instantiated
  bool _is_unresolved;                             // klass was not resolved at compile time

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const                   { return _klass; }
  bool is_unresolved() const                       { return _is_unresolved; }

  virtual bool needs_exception_state() const       { return false; }

  // generic
  virtual bool can_trap() const                    { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};

// Valhalla: allocation (buffering) of a value-type instance.
LEAF(NewValueTypeInstance, StateSplit)
  bool _is_unresolved;
  ciValueKlass* _klass;
  Value _depends_on;      // Link to instance on with withfield was called on

 public:

  // Default creation, always allocated for now
  NewValueTypeInstance(ciValueKlass* klass, ValueStack* state_before, bool is_unresolved, Value depends_on = NULL)
  : StateSplit(instanceType, state_before)
  , _is_unresolved(is_unresolved)
  , _klass(klass)
  {
    // With no explicit dependency this instance depends on itself
    // (sentinel meaning "no other instance").
    if (depends_on == NULL) {
      _depends_on = this;
    } else {
      _depends_on = depends_on;
    }
    set_never_null(true);                          // value-type instances are never null
  }

  // accessors
  bool is_unresolved() const                       { return _is_unresolved; }
  Value depends_on();

  ciValueKlass* klass() const                      { return _klass; }

  virtual bool needs_exception_state() const       { return false; }

  // generic
  virtual bool can_trap() const                    { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;

  // Only done in LIR Generator -> map everything to object
  void set_to_object_type()                        { set_type(instanceType); }
};

// Base class for array allocations; holds the array length.
BASE(NewArray, StateSplit)
 private:
  Value _length;                                   // NULL for NewMultiArray (see below)

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                             { return _length; }

  virtual bool needs_exception_state() const       { return false; }

  ciType* exact_type() const                       { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                    { return true; }
  virtual void input_values_do(ValueVisitor* f)    { StateSplit::input_values_do(f); f->visit(&_length); }
};


// Allocation of a primitive-element array (newarray bytecode).
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;
public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                       { return _elt_type; }
  ciType* exact_type() const;
};


// Allocation of an object-element array (anewarray bytecode).
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;                                 // the element class

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before, bool never_null)
  : NewArray(length, state_before), _klass(klass) {
    set_never_null(never_null);
  }

  // accessors
  ciKlass* klass() const                           { return _klass; }
  ciType* exact_type() const;
};


// Allocation of a multi-dimensional array (multianewarray bytecode);
// the per-dimension sizes are kept in _dims and NewArray's length is NULL.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;                                 // the array class
  Values*  _dims;                                  // one length value per dimension

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                           { return _klass; }
  Values* dims() const                             { return _dims; }
  int rank() const                                 { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }

  ciType* exact_type() const;
};


// Base class for dynamic type checks (checkcast/instanceof) against a klass.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass* _klass;                                 // the klass tested against; NULL if not loaded
  Value    _obj;                                   // the object being tested

  ciMethod* _profiled_method;                      // for MethodData* profiling (see helpers below)
  int       _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                           { return _klass; }
  Value obj() const                                { return _obj; }
  bool is_loaded() const                           { return klass() != NULL; }
  bool direct_compare() const                      { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)               { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                    { return true; }
  virtual void input_values_do(ValueVisitor* f)    { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)              { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)       { _profiled_method = method; }
  void set_profiled_bci(int bci)                   { _profiled_bci = bci; }
  bool should_profile() const                      { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const                { return _profiled_method; }
  int profiled_bci() const                         { return _profiled_bci; }
};


// checkcast bytecode: result is the object, typed to the checked class.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before, bool never_null = false)
  : TypeCheck(klass, obj, objectType, state_before) {
    set_never_null(never_null);
  }

  void set_incompatible_class_change_check() {
set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  // Marks this cast as the implicit receiver check done for invokespecial.
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


// instanceof bytecode: result is an int (0/1).
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const       { return false; }
};


// Base class for monitorenter/monitorexit; holds the locked object and
// its monitor slot number.
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;                                      // the object being locked/unlocked
  int   _monitor_no;                               // index of the monitor slot used

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                                { return _obj; }
  int monitor_no() const                           { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)    { StateSplit::input_values_do(f); f->visit(&_obj); }
};


// monitorenter bytecode.
LEAF(MonitorEnter, AccessMonitor)
  bool _maybe_valuetype;                           // Valhalla: object may be a value type (locking then fails) — confirm in LIR generator
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before, bool maybe_valuetype)
  : AccessMonitor(obj, monitor_no, state_before)
  , _maybe_valuetype(maybe_valuetype)
  {
    ASSERT_VALUES
  }

  // accessors
  bool maybe_valuetype() const                     { return _maybe_valuetype; }

  // generic
  virtual bool can_trap() const                    { return true; }
};


// monitorexit bytecode.
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};


// A call to a JDK method that C1 implements directly (intrinsified).
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;                            // which intrinsic this is
  Values*          _args;                          // all arguments, including the receiver if present
  Value            _recv;                          // alias of argument 0 when has_receiver, else NULL
  ArgsNonNullState _nonnull_state;                 // per-argument null-check bookkeeping

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic. The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag, cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                      { return _id; }
  int number_of_arguments() const                  { return _args->length(); }
  Value argument_at(int i) const                   { return _args->at(i); }

  bool has_receiver() const                        { return (_recv != NULL); }
  Value receiver() const                           { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                     { return check_flag(PreservesStateFlag); }
bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const                    { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


class LIR_List;

// The first instruction of a basic block; owns the block's CFG links,
// dominator information, exception-handler lists and the per-block
// LIR/liveness data used by later compilation phases.
LEAF(BlockBegin, StateSplit)
 private:
  int        _block_id;                            // the unique block id
  int        _bci;                                 // start-bci of block
  int        _depth_first_number;                  // number of this block in a depth-first ordering
  int        _linear_scan_number;                  // number of this block in linear-scan ordering
  int        _dominator_depth;
  int        _loop_depth;                          // the loop nesting level of this block
  int        _loop_index;                          // number of the innermost loop of this block
  int        _flags;                               // the flags associated with this block

  // fields used by BlockListBuilder
  int            _total_preds;                     // number of predecessors found by BlockListBuilder
  ResourceBitMap _stores_to_locals;                // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList   _successors;                         // the successors of this block
  BlockList   _predecessors;                       // the predecessors of this block
  BlockList   _dominates;                          // list of blocks that are dominated by this block
  BlockBegin* _dominator;                          // the dominator of this block
  // SSA specific ends
  BlockEnd*  _end;                                 // the last instruction of this block
  BlockList  _exception_handlers;                  // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states;              // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int        _exception_handler_pco;               // if this block is the start of an exception handler,
                                                   // this records the PC offset in the assembly code of the
                                                   // first instruction in this block
  Label      _label;                               // the label associated with this block
  LIR_List*  _lir;                                 // the low level intermediate representation for this block

  ResourceBitMap _live_in;                         // set of live LIR_Opr registers at entry to this block
  ResourceBitMap _live_out;                        // set of live LIR_Opr registers at exit from this block
  ResourceBitMap _live_gen;                        // set of registers used before any redefinition in this block
  ResourceBitMap _live_kill;                       // set of registers defined in this block

  ResourceBitMap _fpu_register_usage;
  intArray*      _fpu_stack_state;                 // For x86 FPU code generation with UseLinearScan
  int            _first_lir_instruction_id;        // ID of first LIR instruction in this block
  int            _last_lir_instruction_id;         // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  // Blocks are arena-allocated; ids are assigned here (before the
  // constructor runs) so they are valid even during construction.
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    ((BlockBegin*)res)->_id = c->get_next_id();
    ((BlockBegin*)res)->_block_id = c->get_next_block_id();
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  // Note: _block_id (and the inherited _id) are set in operator new above.
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _dominator_depth(-1)
  , _loop_depth(0)
  , _loop_index(-1)
  , _flags(0)
  , _total_preds(0)
  , _stores_to_locals()
  , _successors(2)
  , _predecessors(2)
  , _dominates(2)
  , _dominator(NULL)
  , _end(NULL)
  , _exception_handlers(1)
  , _exception_states(NULL)
  , _exception_handler_pco(-1)
  , _lir(NULL)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(NULL)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  {
    _block = this;
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }

  // accessors
  int block_id() const                             { return _block_id; }
  int bci() const                                  { return _bci; }
  BlockList* successors()                          { return &_successors; }
  BlockList* dominates()                           { return &_dominates; }
  BlockBegin* dominator() const                    { return _dominator; }
  int loop_depth() const                           { return _loop_depth; }
  int dominator_depth() const                      { return _dominator_depth; }
  int depth_first_number() const                   { return _depth_first_number; }
  int linear_scan_number() const                   { return _linear_scan_number; }
  BlockEnd* end() const                            { return _end; }
  Label* label()                                   { return &_label; }
  LIR_List* lir() const                            { return _lir; }
  int exception_handler_pco() const                { return _exception_handler_pco; }
  ResourceBitMap& live_in()                        { return _live_in; }
  ResourceBitMap& live_out()                       { return _live_out; }
  ResourceBitMap& live_gen()                       { return _live_gen; }
  ResourceBitMap& live_kill()                      { return _live_kill; }
  ResourceBitMap& fpu_register_usage()             { return _fpu_register_usage; }
  intArray* fpu_stack_state() const                { return _fpu_stack_state; }
  int first_lir_instruction_id() const             { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const              { return _last_lir_instruction_id; }
  int total_preds() const                          { return _total_preds; }
  BitMap& stores_to_locals()                       { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom)              { _dominator = dom; }
  void set_loop_depth(int d)                       { _loop_depth = d; }
  void set_dominator_depth(int d)                  { _dominator_depth = d; }
  void set_depth_first_number(int dfn)             { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn)             { _linear_scan_number = lsn; }
  void set_end(BlockEnd* end);
  void clear_end();
  void disconnect_from_graph();
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir)                      { _lir = lir; }
  void set_exception_handler_pco(int pco)          { _exception_handler_pco = pco; }
  void set_live_in (const ResourceBitMap& map)     { _live_in = map; }
  void set_live_out (const ResourceBitMap& map)    { _live_out = map; }
  void set_live_gen (const ResourceBitMap& map)    { _live_gen = map; }
  void set_live_kill(const ResourceBitMap& map)    { _live_kill = map; }
  void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state)        { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id)        { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id)         { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1)            { _total_preds += n; }
  void init_stores_to_locals(int locals_count)     { _stores_to_locals.initialize(locals_count); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_successor(BlockBegin* sux);
  void remove_successor(BlockBegin* pred);
  bool is_successor(BlockBegin* sux) const         { return _successors.contains(sux); }

  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const      { return _predecessors.contains(pred); }
  int number_of_preds() const                      { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const                 { return _predecessors.at(i); }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const   { return _exception_handlers.contains(b); }
  int  number_of_exception_handlers() const        { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const    { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states()                 { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const    { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                      = 0,
    std_entry_flag               = 1 << 0,
    osr_entry_flag               = 1 << 1,
    exception_entry_flag         = 1 << 2,
    subroutine_entry_flag        = 1 << 3,
    backward_branch_target_flag  = 1 << 4,
    is_on_work_list_flag         = 1 << 5,
    was_visited_flag             = 1 << 6,
    parser_loop_header_flag      = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag     = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag    = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks = 1 << 11  // Should be try to eliminate range checks in this block
  };

  void set(Flag f)                                 { _flags |= f; }
  void clear(Flag f)                               { _flags &= ~f; }
  bool is_set(Flag f) const                        { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder  (BlockClosure* closure);
  void iterate_postorder (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                      { _loop_index = ix; }
  int  loop_index() const                          { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state);               // try to merge states at block begin
  void merge(ValueStack* state)                    { bool b = try_merge(state); assert(b, "merge failed"); }

  // debugging
  void print_block() PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};


// Base class for the last instruction of a basic block (goto, if,
// switch, return, throw); owns the successor list.
BASE(BlockEnd, StateSplit)
 private:
  BlockList* _sux;                                 // the successor blocks; NULL until set_sux

 protected:
  BlockList* sux() const                           { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                        { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                        { return _block; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const { return _sux != NULL ?
_sux->length() : 0; } 1937 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1938 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1939 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1940 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1941 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1942 }; 1943 1944 1945 LEAF(Goto, BlockEnd) 1946 public: 1947 enum Direction { 1948 none, // Just a regular goto 1949 taken, not_taken // Goto produced from If 1950 }; 1951 private: 1952 ciMethod* _profiled_method; 1953 int _profiled_bci; 1954 Direction _direction; 1955 public: 1956 // creation 1957 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1958 : BlockEnd(illegalType, state_before, is_safepoint) 1959 , _profiled_method(NULL) 1960 , _profiled_bci(0) 1961 , _direction(none) { 1962 BlockList* s = new BlockList(1); 1963 s->append(sux); 1964 set_sux(s); 1965 } 1966 1967 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1968 , _profiled_method(NULL) 1969 , _profiled_bci(0) 1970 , _direction(none) { 1971 BlockList* s = new BlockList(1); 1972 s->append(sux); 1973 set_sux(s); 1974 } 1975 1976 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1977 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1978 int profiled_bci() const { return _profiled_bci; } 1979 Direction direction() const { return _direction; } 1980 1981 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1982 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1983 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1984 void set_direction(Direction d) { _direction = d; } 1985 }; 1986 1987 #ifdef ASSERT 1988 LEAF(Assert, Instruction) 1989 private: 1990 Value _x; 1991 Condition _cond; 1992 Value _y; 1993 char *_message; 1994 1995 public: 1996 // creation 1997 // 
unordered_is_true is valid for float/double compares only 1998 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1999 2000 // accessors 2001 Value x() const { return _x; } 2002 Condition cond() const { return _cond; } 2003 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2004 Value y() const { return _y; } 2005 const char *message() const { return _message; } 2006 2007 // generic 2008 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 2009 }; 2010 #endif 2011 2012 LEAF(RangeCheckPredicate, StateSplit) 2013 private: 2014 Value _x; 2015 Condition _cond; 2016 Value _y; 2017 2018 void check_state(); 2019 2020 public: 2021 // creation 2022 // unordered_is_true is valid for float/double compares only 2023 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 2024 , _x(x) 2025 , _cond(cond) 2026 , _y(y) 2027 { 2028 ASSERT_VALUES 2029 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2030 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2031 this->set_state(state); 2032 check_state(); 2033 } 2034 2035 // Always deoptimize 2036 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 2037 { 2038 this->set_state(state); 2039 _x = _y = NULL; 2040 check_state(); 2041 } 2042 2043 // accessors 2044 Value x() const { return _x; } 2045 Condition cond() const { return _cond; } 2046 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2047 Value y() const { return _y; } 2048 2049 void always_fail() { _x = _y = NULL; } 2050 2051 // generic 2052 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2053 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 2054 }; 2055 2056 LEAF(If, BlockEnd) 2057 private: 2058 Value _x; 2059 Condition _cond; 2060 Value _y; 2061 ciMethod* _profiled_method; 2062 int _profiled_bci; // 
Canonicalizer may alter bci of If node 2063 bool _swapped; // Is the order reversed with respect to the original If in the 2064 // bytecode stream? 2065 bool _substitutability_check; 2066 public: 2067 // creation 2068 // unordered_is_true is valid for float/double compares only 2069 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint, bool substitutability_check=false) 2070 : BlockEnd(illegalType, state_before, is_safepoint) 2071 , _x(x) 2072 , _cond(cond) 2073 , _y(y) 2074 , _profiled_method(NULL) 2075 , _profiled_bci(0) 2076 , _swapped(false) 2077 , _substitutability_check(substitutability_check) 2078 { 2079 ASSERT_VALUES 2080 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2081 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2082 BlockList* s = new BlockList(2); 2083 s->append(tsux); 2084 s->append(fsux); 2085 set_sux(s); 2086 } 2087 2088 // accessors 2089 Value x() const { return _x; } 2090 Condition cond() const { return _cond; } 2091 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2092 Value y() const { return _y; } 2093 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 
0 : 1); } 2094 BlockBegin* tsux() const { return sux_for(true); } 2095 BlockBegin* fsux() const { return sux_for(false); } 2096 BlockBegin* usux() const { return sux_for(unordered_is_true()); } 2097 bool should_profile() const { return check_flag(ProfileMDOFlag); } 2098 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 2099 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered 2100 bool is_swapped() const { return _swapped; } 2101 2102 // manipulation 2103 void swap_operands() { 2104 Value t = _x; _x = _y; _y = t; 2105 _cond = mirror(_cond); 2106 } 2107 2108 void swap_sux() { 2109 assert(number_of_sux() == 2, "wrong number of successors"); 2110 BlockList* s = sux(); 2111 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2112 _cond = negate(_cond); 2113 set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag)); 2114 } 2115 2116 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 2117 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 2118 void set_profiled_bci(int bci) { _profiled_bci = bci; } 2119 void set_swapped(bool value) { _swapped = value; } 2120 bool substitutability_check() const { return _substitutability_check; } 2121 // generic 2122 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2123 }; 2124 2125 2126 LEAF(IfInstanceOf, BlockEnd) 2127 private: 2128 ciKlass* _klass; 2129 Value _obj; 2130 bool _test_is_instance; // jump if instance 2131 int _instanceof_bci; 2132 2133 public: 2134 IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux) 2135 : BlockEnd(illegalType, NULL, false) // temporary set to false 2136 , _klass(klass) 2137 , _obj(obj) 2138 , _test_is_instance(test_is_instance) 2139 , _instanceof_bci(instanceof_bci) 2140 { 2141 ASSERT_VALUES 2142 assert(instanceof_bci >= 0, 
"illegal bci"); 2143 BlockList* s = new BlockList(2); 2144 s->append(tsux); 2145 s->append(fsux); 2146 set_sux(s); 2147 } 2148 2149 // accessors 2150 // 2151 // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an 2152 // instance of klass; otherwise it tests if it is *not* and instance 2153 // of klass. 2154 // 2155 // Note 2: IfInstanceOf instructions are created by combining an InstanceOf 2156 // and an If instruction. The IfInstanceOf bci() corresponds to the 2157 // bci that the If would have had; the (this->) instanceof_bci() is 2158 // the bci of the original InstanceOf instruction. 2159 ciKlass* klass() const { return _klass; } 2160 Value obj() const { return _obj; } 2161 int instanceof_bci() const { return _instanceof_bci; } 2162 bool test_is_instance() const { return _test_is_instance; } 2163 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); } 2164 BlockBegin* tsux() const { return sux_for(true); } 2165 BlockBegin* fsux() const { return sux_for(false); } 2166 2167 // manipulation 2168 void swap_sux() { 2169 assert(number_of_sux() == 2, "wrong number of successors"); 2170 BlockList* s = sux(); 2171 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2172 _test_is_instance = !_test_is_instance; 2173 } 2174 2175 // generic 2176 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); } 2177 }; 2178 2179 2180 BASE(Switch, BlockEnd) 2181 private: 2182 Value _tag; 2183 2184 public: 2185 // creation 2186 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint) 2187 : BlockEnd(illegalType, state_before, is_safepoint) 2188 , _tag(tag) { 2189 ASSERT_VALUES 2190 set_sux(sux); 2191 } 2192 2193 // accessors 2194 Value tag() const { return _tag; } 2195 int length() const { return number_of_sux() - 1; } 2196 2197 virtual bool needs_exception_state() const { return false; } 2198 2199 // generic 2200 virtual void input_values_do(ValueVisitor* f) { 
BlockEnd::input_values_do(f); f->visit(&_tag); } 2201 }; 2202 2203 2204 LEAF(TableSwitch, Switch) 2205 private: 2206 int _lo_key; 2207 2208 public: 2209 // creation 2210 TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint) 2211 : Switch(tag, sux, state_before, is_safepoint) 2212 , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); } 2213 2214 // accessors 2215 int lo_key() const { return _lo_key; } 2216 int hi_key() const { return _lo_key + (length() - 1); } 2217 }; 2218 2219 2220 LEAF(LookupSwitch, Switch) 2221 private: 2222 intArray* _keys; 2223 2224 public: 2225 // creation 2226 LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint) 2227 : Switch(tag, sux, state_before, is_safepoint) 2228 , _keys(keys) { 2229 assert(keys != NULL, "keys must exist"); 2230 assert(keys->length() == length(), "sux & keys have incompatible lengths"); 2231 } 2232 2233 // accessors 2234 int key_at(int i) const { return _keys->at(i); } 2235 }; 2236 2237 2238 LEAF(Return, BlockEnd) 2239 private: 2240 Value _result; 2241 2242 public: 2243 // creation 2244 Return(Value result) : 2245 BlockEnd(result == NULL ? 
voidType : result->type()->base(), NULL, true), 2246 _result(result) {} 2247 2248 // accessors 2249 Value result() const { return _result; } 2250 bool has_result() const { return result() != NULL; } 2251 2252 // generic 2253 virtual void input_values_do(ValueVisitor* f) { 2254 BlockEnd::input_values_do(f); 2255 if (has_result()) f->visit(&_result); 2256 } 2257 }; 2258 2259 2260 LEAF(Throw, BlockEnd) 2261 private: 2262 Value _exception; 2263 2264 public: 2265 // creation 2266 Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) { 2267 ASSERT_VALUES 2268 } 2269 2270 // accessors 2271 Value exception() const { return _exception; } 2272 2273 // generic 2274 virtual bool can_trap() const { return true; } 2275 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); } 2276 }; 2277 2278 2279 LEAF(Base, BlockEnd) 2280 public: 2281 // creation 2282 Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) { 2283 assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged"); 2284 assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged"); 2285 BlockList* s = new BlockList(2); 2286 if (osr_entry != NULL) s->append(osr_entry); 2287 s->append(std_entry); // must be default sux! 2288 set_sux(s); 2289 } 2290 2291 // accessors 2292 BlockBegin* std_entry() const { return default_sux(); } 2293 BlockBegin* osr_entry() const { return number_of_sux() < 2 ? 
NULL : sux_at(0); } 2294 }; 2295 2296 2297 LEAF(OsrEntry, Instruction) 2298 public: 2299 // creation 2300 #ifdef _LP64 2301 OsrEntry() : Instruction(longType) { pin(); } 2302 #else 2303 OsrEntry() : Instruction(intType) { pin(); } 2304 #endif 2305 2306 // generic 2307 virtual void input_values_do(ValueVisitor* f) { } 2308 }; 2309 2310 2311 // Models the incoming exception at a catch site 2312 LEAF(ExceptionObject, Instruction) 2313 public: 2314 // creation 2315 ExceptionObject() : Instruction(objectType) { 2316 pin(); 2317 } 2318 2319 // generic 2320 virtual void input_values_do(ValueVisitor* f) { } 2321 }; 2322 2323 2324 // Models needed rounding for floating-point values on Intel. 2325 // Currently only used to represent rounding of double-precision 2326 // values stored into local variables, but could be used to model 2327 // intermediate rounding of single-precision values as well. 2328 LEAF(RoundFP, Instruction) 2329 private: 2330 Value _input; // floating-point value to be rounded 2331 2332 public: 2333 RoundFP(Value input) 2334 : Instruction(input->type()) // Note: should not be used for constants 2335 , _input(input) 2336 { 2337 ASSERT_VALUES 2338 } 2339 2340 // accessors 2341 Value input() const { return _input; } 2342 2343 // generic 2344 virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); } 2345 }; 2346 2347 2348 BASE(UnsafeOp, Instruction) 2349 private: 2350 BasicType _basic_type; // ValueType can not express byte-sized integers 2351 2352 protected: 2353 // creation 2354 UnsafeOp(BasicType basic_type, bool is_put) 2355 : Instruction(is_put ? voidType : as_ValueType(basic_type)) 2356 , _basic_type(basic_type) 2357 { 2358 //Note: Unsafe ops are not not guaranteed to throw NPE. 2359 // Convservatively, Unsafe operations must be pinned though we could be 2360 // looser about this if we wanted to.. 
2361 pin(); 2362 } 2363 2364 public: 2365 // accessors 2366 BasicType basic_type() { return _basic_type; } 2367 2368 // generic 2369 virtual void input_values_do(ValueVisitor* f) { } 2370 }; 2371 2372 2373 BASE(UnsafeRawOp, UnsafeOp) 2374 private: 2375 Value _base; // Base address (a Java long) 2376 Value _index; // Index if computed by optimizer; initialized to NULL 2377 int _log2_scale; // Scale factor: 0, 1, 2, or 3. 2378 // Indicates log2 of number of bytes (1, 2, 4, or 8) 2379 // to scale index by. 2380 2381 protected: 2382 UnsafeRawOp(BasicType basic_type, Value addr, bool is_put) 2383 : UnsafeOp(basic_type, is_put) 2384 , _base(addr) 2385 , _index(NULL) 2386 , _log2_scale(0) 2387 { 2388 // Can not use ASSERT_VALUES because index may be NULL 2389 assert(addr != NULL && addr->type()->is_long(), "just checking"); 2390 } 2391 2392 UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put) 2393 : UnsafeOp(basic_type, is_put) 2394 , _base(base) 2395 , _index(index) 2396 , _log2_scale(log2_scale) 2397 { 2398 } 2399 2400 public: 2401 // accessors 2402 Value base() { return _base; } 2403 Value index() { return _index; } 2404 bool has_index() { return (_index != NULL); } 2405 int log2_scale() { return _log2_scale; } 2406 2407 // setters 2408 void set_base (Value base) { _base = base; } 2409 void set_index(Value index) { _index = index; } 2410 void set_log2_scale(int log2_scale) { _log2_scale = log2_scale; } 2411 2412 // generic 2413 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2414 f->visit(&_base); 2415 if (has_index()) f->visit(&_index); } 2416 }; 2417 2418 2419 LEAF(UnsafeGetRaw, UnsafeRawOp) 2420 private: 2421 bool _may_be_unaligned, _is_wide; // For OSREntry 2422 2423 public: 2424 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false) 2425 : UnsafeRawOp(basic_type, addr, false) { 2426 _may_be_unaligned = may_be_unaligned; 2427 _is_wide = is_wide; 2428 } 2429 2430 
UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false) 2431 : UnsafeRawOp(basic_type, base, index, log2_scale, false) { 2432 _may_be_unaligned = may_be_unaligned; 2433 _is_wide = is_wide; 2434 } 2435 2436 bool may_be_unaligned() { return _may_be_unaligned; } 2437 bool is_wide() { return _is_wide; } 2438 }; 2439 2440 2441 LEAF(UnsafePutRaw, UnsafeRawOp) 2442 private: 2443 Value _value; // Value to be stored 2444 2445 public: 2446 UnsafePutRaw(BasicType basic_type, Value addr, Value value) 2447 : UnsafeRawOp(basic_type, addr, true) 2448 , _value(value) 2449 { 2450 assert(value != NULL, "just checking"); 2451 ASSERT_VALUES 2452 } 2453 2454 UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value) 2455 : UnsafeRawOp(basic_type, base, index, log2_scale, true) 2456 , _value(value) 2457 { 2458 assert(value != NULL, "just checking"); 2459 ASSERT_VALUES 2460 } 2461 2462 // accessors 2463 Value value() { return _value; } 2464 2465 // generic 2466 virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f); 2467 f->visit(&_value); } 2468 }; 2469 2470 2471 BASE(UnsafeObjectOp, UnsafeOp) 2472 private: 2473 Value _object; // Object to be fetched from or mutated 2474 Value _offset; // Offset within object 2475 bool _is_volatile; // true if volatile - dl/JSR166 2476 public: 2477 UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile) 2478 : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile) 2479 { 2480 } 2481 2482 // accessors 2483 Value object() { return _object; } 2484 Value offset() { return _offset; } 2485 bool is_volatile() { return _is_volatile; } 2486 // generic 2487 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2488 f->visit(&_object); 2489 f->visit(&_offset); } 2490 }; 2491 2492 2493 LEAF(UnsafeGetObject, UnsafeObjectOp) 2494 public: 2495 
UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile) 2496 : UnsafeObjectOp(basic_type, object, offset, false, is_volatile) 2497 { 2498 ASSERT_VALUES 2499 } 2500 }; 2501 2502 2503 LEAF(UnsafePutObject, UnsafeObjectOp) 2504 private: 2505 Value _value; // Value to be stored 2506 public: 2507 UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile) 2508 : UnsafeObjectOp(basic_type, object, offset, true, is_volatile) 2509 , _value(value) 2510 { 2511 ASSERT_VALUES 2512 } 2513 2514 // accessors 2515 Value value() { return _value; } 2516 2517 // generic 2518 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2519 f->visit(&_value); } 2520 }; 2521 2522 LEAF(UnsafeGetAndSetObject, UnsafeObjectOp) 2523 private: 2524 Value _value; // Value to be stored 2525 bool _is_add; 2526 public: 2527 UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add) 2528 : UnsafeObjectOp(basic_type, object, offset, false, false) 2529 , _value(value) 2530 , _is_add(is_add) 2531 { 2532 ASSERT_VALUES 2533 } 2534 2535 // accessors 2536 bool is_add() const { return _is_add; } 2537 Value value() { return _value; } 2538 2539 // generic 2540 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2541 f->visit(&_value); } 2542 }; 2543 2544 LEAF(ProfileCall, Instruction) 2545 private: 2546 ciMethod* _method; 2547 int _bci_of_invoke; 2548 ciMethod* _callee; // the method that is called at the given bci 2549 Value _recv; 2550 ciKlass* _known_holder; 2551 Values* _obj_args; // arguments for type profiling 2552 ArgsNonNullState _nonnull_state; // Do we know whether some arguments are never null? 
2553 bool _inlined; // Are we profiling a call that is inlined 2554 2555 public: 2556 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) 2557 : Instruction(voidType) 2558 , _method(method) 2559 , _bci_of_invoke(bci) 2560 , _callee(callee) 2561 , _recv(recv) 2562 , _known_holder(known_holder) 2563 , _obj_args(obj_args) 2564 , _inlined(inlined) 2565 { 2566 // The ProfileCall has side-effects and must occur precisely where located 2567 pin(); 2568 } 2569 2570 ciMethod* method() const { return _method; } 2571 int bci_of_invoke() const { return _bci_of_invoke; } 2572 ciMethod* callee() const { return _callee; } 2573 Value recv() const { return _recv; } 2574 ciKlass* known_holder() const { return _known_holder; } 2575 int nb_profiled_args() const { return _obj_args == NULL ? 0 : _obj_args->length(); } 2576 Value profiled_arg_at(int i) const { return _obj_args->at(i); } 2577 bool arg_needs_null_check(int i) const { 2578 return _nonnull_state.arg_needs_null_check(i); 2579 } 2580 bool inlined() const { return _inlined; } 2581 2582 void set_arg_needs_null_check(int i, bool check) { 2583 _nonnull_state.set_arg_needs_null_check(i, check); 2584 } 2585 2586 virtual void input_values_do(ValueVisitor* f) { 2587 if (_recv != NULL) { 2588 f->visit(&_recv); 2589 } 2590 for (int i = 0; i < nb_profiled_args(); i++) { 2591 f->visit(_obj_args->adr_at(i)); 2592 } 2593 } 2594 }; 2595 2596 LEAF(ProfileReturnType, Instruction) 2597 private: 2598 ciMethod* _method; 2599 ciMethod* _callee; 2600 int _bci_of_invoke; 2601 Value _ret; 2602 2603 public: 2604 ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret) 2605 : Instruction(voidType) 2606 , _method(method) 2607 , _callee(callee) 2608 , _bci_of_invoke(bci) 2609 , _ret(ret) 2610 { 2611 set_needs_null_check(true); 2612 // The ProfileType has side-effects and must occur precisely where located 2613 pin(); 2614 } 2615 2616 ciMethod* method() const { return 
_method; } 2617 ciMethod* callee() const { return _callee; } 2618 int bci_of_invoke() const { return _bci_of_invoke; } 2619 Value ret() const { return _ret; } 2620 2621 virtual void input_values_do(ValueVisitor* f) { 2622 if (_ret != NULL) { 2623 f->visit(&_ret); 2624 } 2625 } 2626 }; 2627 2628 // Call some C runtime function that doesn't safepoint, 2629 // optionally passing the current thread as the first argument. 2630 LEAF(RuntimeCall, Instruction) 2631 private: 2632 const char* _entry_name; 2633 address _entry; 2634 Values* _args; 2635 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2636 2637 public: 2638 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2639 : Instruction(type) 2640 , _entry_name(entry_name) 2641 , _entry(entry) 2642 , _args(args) 2643 , _pass_thread(pass_thread) { 2644 ASSERT_VALUES 2645 pin(); 2646 } 2647 2648 const char* entry_name() const { return _entry_name; } 2649 address entry() const { return _entry; } 2650 int number_of_arguments() const { return _args->length(); } 2651 Value argument_at(int i) const { return _args->at(i); } 2652 bool pass_thread() const { return _pass_thread; } 2653 2654 virtual void input_values_do(ValueVisitor* f) { 2655 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2656 } 2657 }; 2658 2659 // Use to trip invocation counter of an inlined method 2660 2661 LEAF(ProfileInvoke, Instruction) 2662 private: 2663 ciMethod* _inlinee; 2664 ValueStack* _state; 2665 2666 public: 2667 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2668 : Instruction(voidType) 2669 , _inlinee(inlinee) 2670 , _state(state) 2671 { 2672 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
2673 pin(); 2674 } 2675 2676 ciMethod* inlinee() { return _inlinee; } 2677 ValueStack* state() { return _state; } 2678 virtual void input_values_do(ValueVisitor*) {} 2679 virtual void state_values_do(ValueVisitor*); 2680 }; 2681 2682 LEAF(MemBar, Instruction) 2683 private: 2684 LIR_Code _code; 2685 2686 public: 2687 MemBar(LIR_Code code) 2688 : Instruction(voidType) 2689 , _code(code) 2690 { 2691 pin(); 2692 } 2693 2694 LIR_Code code() { return _code; } 2695 2696 virtual void input_values_do(ValueVisitor*) {} 2697 }; 2698 2699 class BlockPair: public CompilationResourceObj { 2700 private: 2701 BlockBegin* _from; 2702 BlockBegin* _to; 2703 public: 2704 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {} 2705 BlockBegin* from() const { return _from; } 2706 BlockBegin* to() const { return _to; } 2707 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; } 2708 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); } 2709 void set_to(BlockBegin* b) { _to = b; } 2710 void set_from(BlockBegin* b) { _from = b; } 2711 }; 2712 2713 typedef GrowableArray<BlockPair*> BlockPairList; 2714 2715 inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); } 2716 inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); } 2717 inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); } 2718 2719 #undef ASSERT_VALUES 2720 2721 #endif // SHARE_C1_C1_INSTRUCTION_HPP