/*
 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_C1_C1_INSTRUCTION_HPP
#define SHARE_C1_C1_INSTRUCTION_HPP

#include "c1/c1_Compilation.hpp"
#include "c1/c1_LIR.hpp"
#include "c1/c1_ValueType.hpp"
#include "ci/ciField.hpp"

// Predefined classes
class ciField;
class ValueStack;
class InstructionPrinter;
class IRScope;
class LIR_OprDesc;
typedef LIR_OprDesc* LIR_Opr;


// Instruction class hierarchy
//
// All leaf classes in the class hierarchy are concrete classes
// (i.e., are instantiated). All other classes are abstract and
// serve factoring.

// Forward declarations for every node in the HIR instruction hierarchy,
// so the visitor and the as_X() casts below can be declared before the
// individual class definitions.
class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewValueTypeInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class IfInstanceOf;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafeGetAndSetObject;
class ProfileCall;
class ProfileReturnType;
class ProfileInvoke;
class RuntimeCall;
class MemBar;
class RangeCheckPredicate;
#ifdef ASSERT
class Assert;
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
typedef GrowableArray<Value> Values;
typedef GrowableArray<ValueStack*> ValueStackStack;

// BlockClosure is the base class for block traversal/iteration.
// Abstract callback invoked once per basic block during CFG traversal
// (see BlockList::iterate_forward/iterate_backward below).
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// A growable list of basic blocks with convenience traversal helpers.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure from first to last block
  void iterate_backward(BlockClosure* closure);  // apply closure from last to first block
  void blocks_do(void f(BlockBegin*));           // apply a plain function pointer to each block
  void values_do(ValueVisitor* f);               // visit the values of all instructions in all blocks
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Type-based double dispatch: each concrete instruction's visit() calls
// back into the matching do_X() below (wired up by the LEAF macro).
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewValueTypeInstance(NewValueTypeInstance* x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType*  x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: This hash functions affect the performance
//       of ValueMap - make changes carefully!

// Combine 1..4 values into one intx hash by shift-and-xor folding.
#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// HASHING1: enable value numbering keyed on the instruction name plus one
// field/value f1. `enabled` is evaluated at runtime and disables both the
// hash and the equality test when false.
#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


// HASHING2: same as HASHING1, keyed on two fields/values f1 and f2.
#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


// HASHING3: same as HASHING1, keyed on three fields/values f1, f2 and f3.
#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
288 289 class Instruction: public CompilationResourceObj { 290 private: 291 int _id; // the unique instruction id 292 #ifndef PRODUCT 293 int _printable_bci; // the bci of the instruction for printing 294 #endif 295 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 296 int _pin_state; // set of PinReason describing the reason for pinning 297 ValueType* _type; // the instruction value type 298 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 299 Instruction* _subst; // the substitution instruction if any 300 LIR_Opr _operand; // LIR specific information 301 unsigned int _flags; // Flag bits 302 303 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 304 ValueStack* _exception_state; // Copy of state for exception handling 305 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 306 307 friend class UseCountComputer; 308 friend class BlockBegin; 309 310 void update_exception_state(ValueStack* state); 311 312 protected: 313 BlockBegin* _block; // Block that contains this instruction 314 315 void set_type(ValueType* type) { 316 assert(type != NULL, "type must exist"); 317 _type = type; 318 } 319 320 // Helper class to keep track of which arguments need a null check 321 class ArgsNonNullState { 322 private: 323 int _nonnull_state; // mask identifying which args are nonnull 324 public: 325 ArgsNonNullState() 326 : _nonnull_state(AllBits) {} 327 328 // Does argument number i needs a null check? 329 bool arg_needs_null_check(int i) const { 330 // No data is kept for arguments starting at position 33 so 331 // conservatively assume that they need a null check. 
332 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 333 return is_set_nth_bit(_nonnull_state, i); 334 } 335 return true; 336 } 337 338 // Set whether argument number i needs a null check or not 339 void set_arg_needs_null_check(int i, bool check) { 340 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 341 if (check) { 342 _nonnull_state |= nth_bit(i); 343 } else { 344 _nonnull_state &= ~(nth_bit(i)); 345 } 346 } 347 } 348 }; 349 350 public: 351 void* operator new(size_t size) throw() { 352 Compilation* c = Compilation::current(); 353 void* res = c->arena()->Amalloc(size); 354 ((Instruction*)res)->_id = c->get_next_id(); 355 return res; 356 } 357 358 static const int no_bci = -99; 359 360 enum InstructionFlag { 361 NeedsNullCheckFlag = 0, 362 CanTrapFlag, 363 DirectCompareFlag, 364 IsEliminatedFlag, 365 IsSafepointFlag, 366 IsStaticFlag, 367 IsStrictfpFlag, 368 NeedsStoreCheckFlag, 369 NeedsWriteBarrierFlag, 370 PreservesStateFlag, 371 TargetIsFinalFlag, 372 TargetIsLoadedFlag, 373 TargetIsStrictfpFlag, 374 UnorderedIsTrueFlag, 375 NeedsPatchingFlag, 376 ThrowIncompatibleClassChangeErrorFlag, 377 InvokeSpecialReceiverCheckFlag, 378 ProfileMDOFlag, 379 IsLinkedInBlockFlag, 380 NeedsRangeCheckFlag, 381 InWorkListFlag, 382 DeoptimizeOnException, 383 InstructionLastFlag 384 }; 385 386 public: 387 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 388 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 389 390 // 'globally' used condition values 391 enum Condition { 392 eql, neq, lss, leq, gtr, geq, aeq, beq 393 }; 394 395 // Instructions may be pinned for many reasons and under certain conditions 396 // with enough knowledge it's possible to safely unpin them. 
397 enum PinReason { 398 PinUnknown = 1 << 0 399 , PinExplicitNullCheck = 1 << 3 400 , PinStackForStateSplit= 1 << 12 401 , PinStateSplitConstructor= 1 << 13 402 , PinGlobalValueNumbering= 1 << 14 403 }; 404 405 static Condition mirror(Condition cond); 406 static Condition negate(Condition cond); 407 408 // initialization 409 static int number_of_instructions() { 410 return Compilation::current()->number_of_instructions(); 411 } 412 413 // creation 414 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 415 : 416 #ifndef PRODUCT 417 _printable_bci(-99), 418 #endif 419 _use_count(0) 420 , _pin_state(0) 421 , _type(type) 422 , _next(NULL) 423 , _subst(NULL) 424 , _operand(LIR_OprFact::illegalOpr) 425 , _flags(0) 426 , _state_before(state_before) 427 , _exception_handlers(NULL) 428 , _block(NULL) 429 { 430 check_state(state_before); 431 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 432 update_exception_state(_state_before); 433 } 434 435 // accessors 436 int id() const { return _id; } 437 #ifndef PRODUCT 438 bool has_printable_bci() const { return _printable_bci != -99; } 439 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 440 void set_printable_bci(int bci) { _printable_bci = bci; } 441 #endif 442 int dominator_depth(); 443 int use_count() const { return _use_count; } 444 int pin_state() const { return _pin_state; } 445 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 446 ValueType* type() const { return _type; } 447 BlockBegin *block() const { return _block; } 448 Instruction* prev(); // use carefully, expensive operation 449 Instruction* next() const { return _next; } 450 bool has_subst() const { return _subst != NULL; } 451 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 452 LIR_Opr operand() const { return _operand; } 453 454 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 455 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 456 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 457 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 458 459 bool has_uses() const { return use_count() > 0; } 460 ValueStack* state_before() const { return _state_before; } 461 ValueStack* exception_state() const { return _exception_state; } 462 virtual bool needs_exception_state() const { return true; } 463 XHandlers* exception_handlers() const { return _exception_handlers; } 464 465 // manipulation 466 void pin(PinReason reason) { _pin_state |= reason; } 467 void pin() { _pin_state |= PinUnknown; } 468 // DANGEROUS: only used by EliminateStores 469 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 470 471 Instruction* set_next(Instruction* next) { 472 assert(next->has_printable_bci(), "_printable_bci should have been set"); 473 assert(next != NULL, "must not be NULL"); 474 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 475 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 476 477 BlockBegin *block = this->block(); 478 next->_block = block; 479 480 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 481 _next = next; 482 return next; 483 } 484 485 Instruction* set_next(Instruction* next, int bci) { 486 #ifndef PRODUCT 487 next->set_printable_bci(bci); 488 #endif 489 return set_next(next); 490 } 491 492 // when blocks are merged 493 void fixup_block_pointers() { 494 Instruction *cur = next()->next(); // next()'s block is set in set_next 495 while (cur && cur->_block != block()) { 496 cur->_block = block(); 497 cur = cur->next(); 498 } 499 } 500 501 Instruction *insert_after(Instruction *i) { 502 Instruction* n = 
_next; 503 set_next(i); 504 i->set_next(n); 505 return _next; 506 } 507 508 bool is_flattened_array() const; 509 510 Instruction *insert_after_same_bci(Instruction *i) { 511 #ifndef PRODUCT 512 i->set_printable_bci(printable_bci()); 513 #endif 514 return insert_after(i); 515 } 516 517 void set_subst(Instruction* subst) { 518 assert(subst == NULL || 519 type()->base() == subst->type()->base() || 520 subst->type()->base() == illegalType, "type can't change"); 521 _subst = subst; 522 } 523 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 524 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 525 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 526 527 // machine-specifics 528 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 529 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 530 531 // generic 532 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 533 virtual Phi* as_Phi() { return NULL; } 534 virtual Local* as_Local() { return NULL; } 535 virtual Constant* as_Constant() { return NULL; } 536 virtual AccessField* as_AccessField() { return NULL; } 537 virtual LoadField* as_LoadField() { return NULL; } 538 virtual StoreField* as_StoreField() { return NULL; } 539 virtual AccessArray* as_AccessArray() { return NULL; } 540 virtual ArrayLength* as_ArrayLength() { return NULL; } 541 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 542 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 543 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 544 virtual NegateOp* as_NegateOp() { return NULL; } 545 virtual Op2* as_Op2() { return NULL; } 546 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 547 virtual ShiftOp* as_ShiftOp() { return NULL; } 548 virtual LogicOp* as_LogicOp() { return NULL; } 549 virtual CompareOp* as_CompareOp() { return 
NULL; } 550 virtual IfOp* as_IfOp() { return NULL; } 551 virtual Convert* as_Convert() { return NULL; } 552 virtual NullCheck* as_NullCheck() { return NULL; } 553 virtual OsrEntry* as_OsrEntry() { return NULL; } 554 virtual StateSplit* as_StateSplit() { return NULL; } 555 virtual Invoke* as_Invoke() { return NULL; } 556 virtual NewInstance* as_NewInstance() { return NULL; } 557 virtual NewValueTypeInstance* as_NewValueTypeInstance() { return NULL; } 558 virtual NewArray* as_NewArray() { return NULL; } 559 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 560 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 561 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 562 virtual TypeCheck* as_TypeCheck() { return NULL; } 563 virtual CheckCast* as_CheckCast() { return NULL; } 564 virtual InstanceOf* as_InstanceOf() { return NULL; } 565 virtual TypeCast* as_TypeCast() { return NULL; } 566 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 567 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 568 virtual MonitorExit* as_MonitorExit() { return NULL; } 569 virtual Intrinsic* as_Intrinsic() { return NULL; } 570 virtual BlockBegin* as_BlockBegin() { return NULL; } 571 virtual BlockEnd* as_BlockEnd() { return NULL; } 572 virtual Goto* as_Goto() { return NULL; } 573 virtual If* as_If() { return NULL; } 574 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 575 virtual TableSwitch* as_TableSwitch() { return NULL; } 576 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 577 virtual Return* as_Return() { return NULL; } 578 virtual Throw* as_Throw() { return NULL; } 579 virtual Base* as_Base() { return NULL; } 580 virtual RoundFP* as_RoundFP() { return NULL; } 581 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 582 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 583 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 584 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 585 586 #ifdef 
ASSERT 587 virtual Assert* as_Assert() { return NULL; } 588 #endif 589 590 virtual void visit(InstructionVisitor* v) = 0; 591 592 virtual bool can_trap() const { return false; } 593 594 virtual void input_values_do(ValueVisitor* f) = 0; 595 virtual void state_values_do(ValueVisitor* f); 596 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 597 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 598 599 virtual ciType* exact_type() const; 600 virtual ciType* declared_type() const { return NULL; } 601 602 // hashing 603 virtual const char* name() const = 0; 604 HASHING1(Instruction, false, id()) // hashing disabled by default 605 606 // debugging 607 static void check_state(ValueStack* state) PRODUCT_RETURN; 608 void print() PRODUCT_RETURN; 609 void print_line() PRODUCT_RETURN; 610 void print(InstructionPrinter& ip) PRODUCT_RETURN; 611 }; 612 613 614 // The following macros are used to define base (i.e., non-leaf) 615 // and leaf instruction classes. They define class-name related 616 // generic functionality in one place. 617 618 #define BASE(class_name, super_class_name) \ 619 class class_name: public super_class_name { \ 620 public: \ 621 virtual class_name* as_##class_name() { return this; } \ 622 623 624 #define LEAF(class_name, super_class_name) \ 625 BASE(class_name, super_class_name) \ 626 public: \ 627 virtual const char* name() const { return #class_name; } \ 628 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 629 630 631 // Debugging support 632 633 634 #ifdef ASSERT 635 class AssertValues: public ValueVisitor { 636 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 637 }; 638 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 639 #else 640 #define ASSERT_VALUES 641 #endif // ASSERT 642 643 644 // A Phi is a phi function in the sense of SSA form. 
// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int         _pf_flags;                // the flags of the phi function
  int         _index;                   // to value on operand stack (index < 0) or to local
  ciType*     _exact_type;              // currently is set only for flattened arrays, NULL otherwise.
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index, ciType* exact_type)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  , _exact_type(exact_type)
  {
    _block = b;
    // printable bci is taken from the join block this phi belongs to
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  virtual ciType* exact_type() const {
    return _exact_type;
  }

  virtual ciType* declared_type() const {
    return _exact_type;
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  // _index encodes both cases: >= 0 means local slot, < 0 means stack slot
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;  // value flowing in from the i-th predecessor
  int   operand_count() const;    // number of incoming branches

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // a Phi's operands live in the predecessors' states, not here
  virtual void input_values_do(ValueVisitor* f) {
  }
};
call. 714 LEAF(Local, Instruction) 715 private: 716 int _java_index; // the local index within the method to which the local belongs 717 bool _is_receiver; // if local variable holds the receiver: "this" for non-static methods 718 ciType* _declared_type; 719 public: 720 // creation 721 Local(ciType* declared, ValueType* type, int index, bool receiver) 722 : Instruction(type) 723 , _java_index(index) 724 , _is_receiver(receiver) 725 , _declared_type(declared) 726 { 727 NOT_PRODUCT(set_printable_bci(-1)); 728 } 729 730 // accessors 731 int java_index() const { return _java_index; } 732 bool is_receiver() const { return _is_receiver; } 733 734 virtual ciType* declared_type() const { return _declared_type; } 735 736 // generic 737 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 738 }; 739 740 741 LEAF(Constant, Instruction) 742 public: 743 // creation 744 Constant(ValueType* type): 745 Instruction(type, NULL, /*type_is_constant*/ true) 746 { 747 assert(type->is_constant(), "must be a constant"); 748 } 749 750 Constant(ValueType* type, ValueStack* state_before): 751 Instruction(type, state_before, /*type_is_constant*/ true) 752 { 753 assert(state_before != NULL, "only used for constants which need patching"); 754 assert(type->is_constant(), "must be a constant"); 755 // since it's patching it needs to be pinned 756 pin(); 757 } 758 759 // generic 760 virtual bool can_trap() const { return state_before() != NULL; } 761 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 762 763 virtual intx hash() const; 764 virtual bool is_equal(Value v) const; 765 766 virtual ciType* exact_type() const; 767 768 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 769 770 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 771 BlockBegin* compare(Instruction::Condition cond, Value right, 772 BlockBegin* true_sux, BlockBegin* false_sux) const { 773 switch (compare(cond, right)) { 774 case not_comparable: 775 
return NULL; 776 case cond_false: 777 return false_sux; 778 case cond_true: 779 return true_sux; 780 default: 781 ShouldNotReachHere(); 782 return NULL; 783 } 784 } 785 }; 786 787 788 BASE(AccessField, Instruction) 789 private: 790 Value _obj; 791 int _offset; 792 ciField* _field; 793 NullCheck* _explicit_null_check; // For explicit null check elimination 794 795 public: 796 // creation 797 AccessField(Value obj, int offset, ciField* field, bool is_static, 798 ValueStack* state_before, bool needs_patching) 799 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 800 , _obj(obj) 801 , _offset(offset) 802 , _field(field) 803 , _explicit_null_check(NULL) 804 { 805 set_needs_null_check(!is_static); 806 set_flag(IsStaticFlag, is_static); 807 set_flag(NeedsPatchingFlag, needs_patching); 808 ASSERT_VALUES 809 // pin of all instructions with memory access 810 pin(); 811 } 812 813 // accessors 814 Value obj() const { return _obj; } 815 int offset() const { return _offset; } 816 ciField* field() const { return _field; } 817 BasicType field_type() const { return _field->type()->basic_type(); } 818 bool is_static() const { return check_flag(IsStaticFlag); } 819 NullCheck* explicit_null_check() const { return _explicit_null_check; } 820 bool needs_patching() const { return check_flag(NeedsPatchingFlag); } 821 822 // Unresolved getstatic and putstatic can cause initialization. 823 // Technically it occurs at the Constant that materializes the base 824 // of the static fields but it's simpler to model it here. 825 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); } 826 827 // manipulation 828 829 // Under certain circumstances, if a previous NullCheck instruction 830 // proved the target object non-null, we can eliminate the explicit 831 // null check and do an implicit one, simply specifying the debug 832 // information from the NullCheck. 
This field should only be consulted 833 // if needs_null_check() is true. 834 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 835 836 // generic 837 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 838 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 839 }; 840 841 842 LEAF(LoadField, AccessField) 843 ciValueKlass* _value_klass; 844 Value _default_value; 845 public: 846 // creation 847 LoadField(Value obj, int offset, ciField* field, bool is_static, 848 ValueStack* state_before, bool needs_patching, 849 ciValueKlass* value_klass = NULL, Value default_value = NULL ) 850 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 851 , _value_klass(value_klass), _default_value(default_value) 852 {} 853 854 ciType* declared_type() const; 855 856 // generic 857 HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset()) // cannot be eliminated if needs patching or if volatile 858 859 ciValueKlass* value_klass() const { return _value_klass;} 860 Value default_value() const { return _default_value; } 861 }; 862 863 864 LEAF(StoreField, AccessField) 865 private: 866 Value _value; 867 868 public: 869 // creation 870 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 871 ValueStack* state_before, bool needs_patching) 872 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 873 , _value(value) 874 { 875 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); 876 ASSERT_VALUES 877 pin(); 878 } 879 880 // accessors 881 Value value() const { return _value; } 882 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 883 884 // generic 885 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 886 }; 887 888 889 BASE(AccessArray, Instruction) 890 private: 891 Value _array; 892 893 public: 894 // creation 895 
AccessArray(ValueType* type, Value array, ValueStack* state_before) 896 : Instruction(type, state_before) 897 , _array(array) 898 { 899 set_needs_null_check(true); 900 ASSERT_VALUES 901 pin(); // instruction with side effect (null exception or range check throwing) 902 } 903 904 Value array() const { return _array; } 905 906 // generic 907 virtual bool can_trap() const { return needs_null_check(); } 908 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 909 }; 910 911 912 LEAF(ArrayLength, AccessArray) 913 private: 914 NullCheck* _explicit_null_check; // For explicit null check elimination 915 916 public: 917 // creation 918 ArrayLength(Value array, ValueStack* state_before) 919 : AccessArray(intType, array, state_before) 920 , _explicit_null_check(NULL) {} 921 922 // accessors 923 NullCheck* explicit_null_check() const { return _explicit_null_check; } 924 925 // setters 926 // See LoadField::set_explicit_null_check for documentation 927 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 928 929 // generic 930 HASHING1(ArrayLength, true, array()->subst()) 931 }; 932 933 934 BASE(AccessIndexed, AccessArray) 935 private: 936 Value _index; 937 Value _length; 938 BasicType _elt_type; 939 bool _mismatched; 940 941 public: 942 // creation 943 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched) 944 : AccessArray(as_ValueType(elt_type), array, state_before) 945 , _index(index) 946 , _length(length) 947 , _elt_type(elt_type) 948 , _mismatched(mismatched) 949 { 950 set_flag(Instruction::NeedsRangeCheckFlag, true); 951 ASSERT_VALUES 952 } 953 954 // accessors 955 Value index() const { return _index; } 956 Value length() const { return _length; } 957 BasicType elt_type() const { return _elt_type; } 958 bool mismatched() const { return _mismatched; } 959 960 void clear_length() { _length = NULL; } 961 // perform elimination of range checks involving constants 962 
bool compute_needs_range_check(); 963 964 // generic 965 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); } 966 }; 967 968 969 LEAF(LoadIndexed, AccessIndexed) 970 private: 971 NullCheck* _explicit_null_check; // For explicit null check elimination 972 NewValueTypeInstance* _vt; 973 974 public: 975 // creation 976 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false) 977 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 978 , _explicit_null_check(NULL) {} 979 980 // accessors 981 NullCheck* explicit_null_check() const { return _explicit_null_check; } 982 983 // setters 984 // See LoadField::set_explicit_null_check for documentation 985 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 986 987 ciType* exact_type() const; 988 ciType* declared_type() const; 989 990 NewValueTypeInstance* vt() { return _vt; } 991 void set_vt(NewValueTypeInstance* vt) { _vt = vt; } 992 993 // generic 994 HASHING2(LoadIndexed, true, array()->subst(), index()->subst()) 995 }; 996 997 998 LEAF(StoreIndexed, AccessIndexed) 999 private: 1000 Value _value; 1001 1002 ciMethod* _profiled_method; 1003 int _profiled_bci; 1004 bool _check_boolean; 1005 1006 public: 1007 // creation 1008 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, 1009 bool check_boolean, bool mismatched = false) 1010 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 1011 , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean) 1012 { 1013 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); 1014 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object())); 1015 ASSERT_VALUES 1016 pin(); 1017 } 1018 1019 // accessors 1020 Value value() const { return _value; } 1021 bool 
needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 1022 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } 1023 bool check_boolean() const { return _check_boolean; } 1024 // Helpers for MethodData* profiling 1025 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1026 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1027 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1028 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1029 ciMethod* profiled_method() const { return _profiled_method; } 1030 int profiled_bci() const { return _profiled_bci; } 1031 // generic 1032 virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); } 1033 }; 1034 1035 1036 LEAF(NegateOp, Instruction) 1037 private: 1038 Value _x; 1039 1040 public: 1041 // creation 1042 NegateOp(Value x) : Instruction(x->type()->base()), _x(x) { 1043 ASSERT_VALUES 1044 } 1045 1046 // accessors 1047 Value x() const { return _x; } 1048 1049 // generic 1050 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); } 1051 }; 1052 1053 1054 BASE(Op2, Instruction) 1055 private: 1056 Bytecodes::Code _op; 1057 Value _x; 1058 Value _y; 1059 1060 public: 1061 // creation 1062 Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL) 1063 : Instruction(type, state_before) 1064 , _op(op) 1065 , _x(x) 1066 , _y(y) 1067 { 1068 ASSERT_VALUES 1069 } 1070 1071 // accessors 1072 Bytecodes::Code op() const { return _op; } 1073 Value x() const { return _x; } 1074 Value y() const { return _y; } 1075 1076 // manipulators 1077 void swap_operands() { 1078 assert(is_commutative(), "operation must be commutative"); 1079 Value t = _x; _x = _y; _y = t; 1080 } 1081 1082 // generic 1083 virtual bool is_commutative() const { return false; } 1084 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1085 }; 1086 1087 
1088 LEAF(ArithmeticOp, Op2) 1089 public: 1090 // creation 1091 ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before) 1092 : Op2(x->type()->meet(y->type()), op, x, y, state_before) 1093 { 1094 set_flag(IsStrictfpFlag, is_strictfp); 1095 if (can_trap()) pin(); 1096 } 1097 1098 // accessors 1099 bool is_strictfp() const { return check_flag(IsStrictfpFlag); } 1100 1101 // generic 1102 virtual bool is_commutative() const; 1103 virtual bool can_trap() const; 1104 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1105 }; 1106 1107 1108 LEAF(ShiftOp, Op2) 1109 public: 1110 // creation 1111 ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {} 1112 1113 // generic 1114 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1115 }; 1116 1117 1118 LEAF(LogicOp, Op2) 1119 public: 1120 // creation 1121 LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {} 1122 1123 // generic 1124 virtual bool is_commutative() const; 1125 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1126 }; 1127 1128 1129 LEAF(CompareOp, Op2) 1130 public: 1131 // creation 1132 CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before) 1133 : Op2(intType, op, x, y, state_before) 1134 {} 1135 1136 // generic 1137 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1138 }; 1139 1140 1141 LEAF(IfOp, Op2) 1142 private: 1143 Value _tval; 1144 Value _fval; 1145 1146 public: 1147 // creation 1148 IfOp(Value x, Condition cond, Value y, Value tval, Value fval) 1149 : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y) 1150 , _tval(tval) 1151 , _fval(fval) 1152 { 1153 ASSERT_VALUES 1154 assert(tval->type()->tag() == fval->type()->tag(), "types must match"); 1155 } 1156 1157 // accessors 1158 virtual bool is_commutative() const; 1159 Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; } 1160 Condition cond() const { return (Condition)Op2::op(); 
} 1161 Value tval() const { return _tval; } 1162 Value fval() const { return _fval; } 1163 1164 // generic 1165 virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); } 1166 }; 1167 1168 1169 LEAF(Convert, Instruction) 1170 private: 1171 Bytecodes::Code _op; 1172 Value _value; 1173 1174 public: 1175 // creation 1176 Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) { 1177 ASSERT_VALUES 1178 } 1179 1180 // accessors 1181 Bytecodes::Code op() const { return _op; } 1182 Value value() const { return _value; } 1183 1184 // generic 1185 virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); } 1186 HASHING2(Convert, true, op(), value()->subst()) 1187 }; 1188 1189 1190 LEAF(NullCheck, Instruction) 1191 private: 1192 Value _obj; 1193 1194 public: 1195 // creation 1196 NullCheck(Value obj, ValueStack* state_before) 1197 : Instruction(obj->type()->base(), state_before) 1198 , _obj(obj) 1199 { 1200 ASSERT_VALUES 1201 set_can_trap(true); 1202 assert(_obj->type()->is_object(), "null check must be applied to objects only"); 1203 pin(Instruction::PinExplicitNullCheck); 1204 } 1205 1206 // accessors 1207 Value obj() const { return _obj; } 1208 1209 // setters 1210 void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); } 1211 1212 // generic 1213 virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ } 1214 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1215 HASHING1(NullCheck, true, obj()->subst()) 1216 }; 1217 1218 1219 // This node is supposed to cast the type of another node to a more precise 1220 // declared type. 1221 LEAF(TypeCast, Instruction) 1222 private: 1223 ciType* _declared_type; 1224 Value _obj; 1225 1226 public: 1227 // The type of this node is the same type as the object type (and it might be constant). 
1228 TypeCast(ciType* type, Value obj, ValueStack* state_before) 1229 : Instruction(obj->type(), state_before, obj->type()->is_constant()), 1230 _declared_type(type), 1231 _obj(obj) {} 1232 1233 // accessors 1234 ciType* declared_type() const { return _declared_type; } 1235 Value obj() const { return _obj; } 1236 1237 // generic 1238 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1239 }; 1240 1241 1242 BASE(StateSplit, Instruction) 1243 private: 1244 ValueStack* _state; 1245 1246 protected: 1247 static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block); 1248 1249 public: 1250 // creation 1251 StateSplit(ValueType* type, ValueStack* state_before = NULL) 1252 : Instruction(type, state_before) 1253 , _state(NULL) 1254 { 1255 pin(PinStateSplitConstructor); 1256 } 1257 1258 // accessors 1259 ValueStack* state() const { return _state; } 1260 IRScope* scope() const; // the state's scope 1261 1262 // manipulation 1263 void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; } 1264 1265 // generic 1266 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 1267 virtual void state_values_do(ValueVisitor* f); 1268 }; 1269 1270 1271 LEAF(Invoke, StateSplit) 1272 private: 1273 Bytecodes::Code _code; 1274 Value _recv; 1275 Values* _args; 1276 BasicTypeList* _signature; 1277 int _vtable_index; 1278 ciMethod* _target; 1279 1280 public: 1281 // creation 1282 Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args, 1283 int vtable_index, ciMethod* target, ValueStack* state_before); 1284 1285 // accessors 1286 Bytecodes::Code code() const { return _code; } 1287 Value receiver() const { return _recv; } 1288 bool has_receiver() const { return receiver() != NULL; } 1289 int number_of_arguments() const { return _args->length(); } 1290 Value argument_at(int i) const { return _args->at(i); } 1291 int vtable_index() const { return 
_vtable_index; } 1292 BasicTypeList* signature() const { return _signature; } 1293 ciMethod* target() const { return _target; } 1294 1295 ciType* declared_type() const; 1296 1297 // Returns false if target is not loaded 1298 bool target_is_final() const { return check_flag(TargetIsFinalFlag); } 1299 bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); } 1300 // Returns false if target is not loaded 1301 bool target_is_strictfp() const { return check_flag(TargetIsStrictfpFlag); } 1302 1303 // JSR 292 support 1304 bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; } 1305 bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); } 1306 1307 virtual bool needs_exception_state() const { return false; } 1308 1309 // generic 1310 virtual bool can_trap() const { return true; } 1311 virtual void input_values_do(ValueVisitor* f) { 1312 StateSplit::input_values_do(f); 1313 if (has_receiver()) f->visit(&_recv); 1314 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1315 } 1316 virtual void state_values_do(ValueVisitor *f); 1317 }; 1318 1319 1320 LEAF(NewInstance, StateSplit) 1321 private: 1322 ciInstanceKlass* _klass; 1323 bool _is_unresolved; 1324 1325 public: 1326 // creation 1327 NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved) 1328 : StateSplit(instanceType, state_before) 1329 , _klass(klass), _is_unresolved(is_unresolved) 1330 {} 1331 1332 // accessors 1333 ciInstanceKlass* klass() const { return _klass; } 1334 bool is_unresolved() const { return _is_unresolved; } 1335 1336 virtual bool needs_exception_state() const { return false; } 1337 1338 // generic 1339 virtual bool can_trap() const { return true; } 1340 ciType* exact_type() const; 1341 ciType* declared_type() const; 1342 }; 1343 1344 LEAF(NewValueTypeInstance, StateSplit) 1345 bool _is_unresolved; 1346 ciValueKlass* _klass; 1347 Value _depends_on; // Link to instance on with withfield was 
called on 1348 1349 public: 1350 1351 // Default creation, always allocated for now 1352 NewValueTypeInstance(ciValueKlass* klass, ValueStack* state_before, bool is_unresolved, Value depends_on = NULL) 1353 : StateSplit(instanceType, state_before) 1354 , _is_unresolved(is_unresolved) 1355 , _klass(klass) 1356 { 1357 if (depends_on == NULL) { 1358 _depends_on = this; 1359 } else { 1360 _depends_on = depends_on; 1361 } 1362 } 1363 1364 // accessors 1365 bool is_unresolved() const { return _is_unresolved; } 1366 Value depends_on(); 1367 1368 ciValueKlass* klass() const { return _klass; } 1369 1370 virtual bool needs_exception_state() const { return false; } 1371 1372 // generic 1373 virtual bool can_trap() const { return true; } 1374 ciType* exact_type() const; 1375 ciType* declared_type() const; 1376 1377 // Only done in LIR Generator -> map everything to object 1378 void set_to_object_type() { set_type(instanceType); } 1379 }; 1380 1381 BASE(NewArray, StateSplit) 1382 private: 1383 Value _length; 1384 1385 public: 1386 // creation 1387 NewArray(Value length, ValueStack* state_before) 1388 : StateSplit(objectType, state_before) 1389 , _length(length) 1390 { 1391 // Do not ASSERT_VALUES since length is NULL for NewMultiArray 1392 } 1393 1394 // accessors 1395 Value length() const { return _length; } 1396 1397 virtual bool needs_exception_state() const { return false; } 1398 1399 ciType* exact_type() const { return NULL; } 1400 ciType* declared_type() const; 1401 1402 // generic 1403 virtual bool can_trap() const { return true; } 1404 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); } 1405 }; 1406 1407 1408 LEAF(NewTypeArray, NewArray) 1409 private: 1410 BasicType _elt_type; 1411 1412 public: 1413 // creation 1414 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before) 1415 : NewArray(length, state_before) 1416 , _elt_type(elt_type) 1417 {} 1418 1419 // accessors 1420 BasicType elt_type() const { 
return _elt_type; } 1421 ciType* exact_type() const; 1422 }; 1423 1424 1425 LEAF(NewObjectArray, NewArray) 1426 private: 1427 ciKlass* _klass; 1428 1429 public: 1430 // creation 1431 NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {} 1432 1433 // accessors 1434 ciKlass* klass() const { return _klass; } 1435 ciType* exact_type() const; 1436 }; 1437 1438 1439 LEAF(NewMultiArray, NewArray) 1440 private: 1441 ciKlass* _klass; 1442 Values* _dims; 1443 1444 public: 1445 // creation 1446 NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) { 1447 ASSERT_VALUES 1448 } 1449 1450 // accessors 1451 ciKlass* klass() const { return _klass; } 1452 Values* dims() const { return _dims; } 1453 int rank() const { return dims()->length(); } 1454 1455 // generic 1456 virtual void input_values_do(ValueVisitor* f) { 1457 // NOTE: we do not call NewArray::input_values_do since "length" 1458 // is meaningless for a multi-dimensional array; passing the 1459 // zeroth element down to NewArray as its length is a bad idea 1460 // since there will be a copy in the "dims" array which doesn't 1461 // get updated, and the value must not be traversed twice. 
Was bug 1462 // - kbr 4/10/2001 1463 StateSplit::input_values_do(f); 1464 for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i)); 1465 } 1466 1467 ciType* exact_type() const; 1468 }; 1469 1470 1471 BASE(TypeCheck, StateSplit) 1472 private: 1473 ciKlass* _klass; 1474 Value _obj; 1475 1476 ciMethod* _profiled_method; 1477 int _profiled_bci; 1478 1479 public: 1480 // creation 1481 TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before) 1482 : StateSplit(type, state_before), _klass(klass), _obj(obj), 1483 _profiled_method(NULL), _profiled_bci(0) { 1484 ASSERT_VALUES 1485 set_direct_compare(false); 1486 } 1487 1488 // accessors 1489 ciKlass* klass() const { return _klass; } 1490 Value obj() const { return _obj; } 1491 bool is_loaded() const { return klass() != NULL; } 1492 bool direct_compare() const { return check_flag(DirectCompareFlag); } 1493 1494 // manipulation 1495 void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); } 1496 1497 // generic 1498 virtual bool can_trap() const { return true; } 1499 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1500 1501 // Helpers for MethodData* profiling 1502 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1503 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1504 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1505 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1506 ciMethod* profiled_method() const { return _profiled_method; } 1507 int profiled_bci() const { return _profiled_bci; } 1508 }; 1509 1510 1511 LEAF(CheckCast, TypeCheck) 1512 bool _is_never_null; 1513 public: 1514 // creation 1515 CheckCast(ciKlass* klass, Value obj, ValueStack* state_before, bool never_null = false) 1516 : TypeCheck(klass, obj, objectType, state_before), _is_never_null(never_null) {} 1517 1518 void set_incompatible_class_change_check() { 1519 
set_flag(ThrowIncompatibleClassChangeErrorFlag, true); 1520 } 1521 bool is_incompatible_class_change_check() const { 1522 return check_flag(ThrowIncompatibleClassChangeErrorFlag); 1523 } 1524 void set_invokespecial_receiver_check() { 1525 set_flag(InvokeSpecialReceiverCheckFlag, true); 1526 } 1527 bool is_invokespecial_receiver_check() const { 1528 return check_flag(InvokeSpecialReceiverCheckFlag); 1529 } 1530 bool is_never_null() const { 1531 return _is_never_null; 1532 } 1533 1534 virtual bool needs_exception_state() const { 1535 return !is_invokespecial_receiver_check(); 1536 } 1537 1538 ciType* declared_type() const; 1539 }; 1540 1541 1542 LEAF(InstanceOf, TypeCheck) 1543 public: 1544 // creation 1545 InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {} 1546 1547 virtual bool needs_exception_state() const { return false; } 1548 }; 1549 1550 1551 BASE(AccessMonitor, StateSplit) 1552 private: 1553 Value _obj; 1554 int _monitor_no; 1555 1556 public: 1557 // creation 1558 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL) 1559 : StateSplit(illegalType, state_before) 1560 , _obj(obj) 1561 , _monitor_no(monitor_no) 1562 { 1563 set_needs_null_check(true); 1564 ASSERT_VALUES 1565 } 1566 1567 // accessors 1568 Value obj() const { return _obj; } 1569 int monitor_no() const { return _monitor_no; } 1570 1571 // generic 1572 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1573 }; 1574 1575 1576 LEAF(MonitorEnter, AccessMonitor) 1577 bool _maybe_valuetype; 1578 public: 1579 // creation 1580 MonitorEnter(Value obj, int monitor_no, ValueStack* state_before, bool maybe_valuetype) 1581 : AccessMonitor(obj, monitor_no, state_before) 1582 , _maybe_valuetype(maybe_valuetype) 1583 { 1584 ASSERT_VALUES 1585 } 1586 1587 // accessors 1588 bool maybe_valuetype() const { return _maybe_valuetype; } 1589 1590 // generic 1591 virtual bool can_trap() const { 
return true; } 1592 }; 1593 1594 1595 LEAF(MonitorExit, AccessMonitor) 1596 public: 1597 // creation 1598 MonitorExit(Value obj, int monitor_no) 1599 : AccessMonitor(obj, monitor_no, NULL) 1600 { 1601 ASSERT_VALUES 1602 } 1603 }; 1604 1605 1606 LEAF(Intrinsic, StateSplit) 1607 private: 1608 vmIntrinsics::ID _id; 1609 Values* _args; 1610 Value _recv; 1611 ArgsNonNullState _nonnull_state; 1612 1613 public: 1614 // preserves_state can be set to true for Intrinsics 1615 // which are guaranteed to preserve register state across any slow 1616 // cases; setting it to true does not mean that the Intrinsic can 1617 // not trap, only that if we continue execution in the same basic 1618 // block after the Intrinsic, all of the registers are intact. This 1619 // allows load elimination and common expression elimination to be 1620 // performed across the Intrinsic. The default value is false. 1621 Intrinsic(ValueType* type, 1622 vmIntrinsics::ID id, 1623 Values* args, 1624 bool has_receiver, 1625 ValueStack* state_before, 1626 bool preserves_state, 1627 bool cantrap = true) 1628 : StateSplit(type, state_before) 1629 , _id(id) 1630 , _args(args) 1631 , _recv(NULL) 1632 { 1633 assert(args != NULL, "args must exist"); 1634 ASSERT_VALUES 1635 set_flag(PreservesStateFlag, preserves_state); 1636 set_flag(CanTrapFlag, cantrap); 1637 if (has_receiver) { 1638 _recv = argument_at(0); 1639 } 1640 set_needs_null_check(has_receiver); 1641 1642 // some intrinsics can't trap, so don't force them to be pinned 1643 if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) { 1644 unpin(PinStateSplitConstructor); 1645 } 1646 } 1647 1648 // accessors 1649 vmIntrinsics::ID id() const { return _id; } 1650 int number_of_arguments() const { return _args->length(); } 1651 Value argument_at(int i) const { return _args->at(i); } 1652 1653 bool has_receiver() const { return (_recv != NULL); } 1654 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } 1655 bool 
preserves_state() const { return check_flag(PreservesStateFlag); } 1656 1657 bool arg_needs_null_check(int i) const { 1658 return _nonnull_state.arg_needs_null_check(i); 1659 } 1660 1661 void set_arg_needs_null_check(int i, bool check) { 1662 _nonnull_state.set_arg_needs_null_check(i, check); 1663 } 1664 1665 // generic 1666 virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1667 virtual void input_values_do(ValueVisitor* f) { 1668 StateSplit::input_values_do(f); 1669 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1670 } 1671 }; 1672 1673 1674 class LIR_List; 1675 1676 LEAF(BlockBegin, StateSplit) 1677 private: 1678 int _block_id; // the unique block id 1679 int _bci; // start-bci of block 1680 int _depth_first_number; // number of this block in a depth-first ordering 1681 int _linear_scan_number; // number of this block in linear-scan ordering 1682 int _dominator_depth; 1683 int _loop_depth; // the loop nesting level of this block 1684 int _loop_index; // number of the innermost loop of this block 1685 int _flags; // the flags associated with this block 1686 1687 // fields used by BlockListBuilder 1688 int _total_preds; // number of predecessors found by BlockListBuilder 1689 ResourceBitMap _stores_to_locals; // bit is set when a local variable is stored in the block 1690 1691 // SSA specific fields: (factor out later) 1692 BlockList _successors; // the successors of this block 1693 BlockList _predecessors; // the predecessors of this block 1694 BlockList _dominates; // list of blocks that are dominated by this block 1695 BlockBegin* _dominator; // the dominator of this block 1696 // SSA specific ends 1697 BlockEnd* _end; // the last instruction of this block 1698 BlockList _exception_handlers; // the exception handlers potentially invoked by this block 1699 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler 1700 int _exception_handler_pco; // if this 
block is the start of an exception handler, 1701 // this records the PC offset in the assembly code of the 1702 // first instruction in this block 1703 Label _label; // the label associated with this block 1704 LIR_List* _lir; // the low level intermediate representation for this block 1705 1706 ResourceBitMap _live_in; // set of live LIR_Opr registers at entry to this block 1707 ResourceBitMap _live_out; // set of live LIR_Opr registers at exit from this block 1708 ResourceBitMap _live_gen; // set of registers used before any redefinition in this block 1709 ResourceBitMap _live_kill; // set of registers defined in this block 1710 1711 ResourceBitMap _fpu_register_usage; 1712 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan 1713 int _first_lir_instruction_id; // ID of first LIR instruction in this block 1714 int _last_lir_instruction_id; // ID of last LIR instruction in this block 1715 1716 void iterate_preorder (boolArray& mark, BlockClosure* closure); 1717 void iterate_postorder(boolArray& mark, BlockClosure* closure); 1718 1719 friend class SuxAndWeightAdjuster; 1720 1721 public: 1722 void* operator new(size_t size) throw() { 1723 Compilation* c = Compilation::current(); 1724 void* res = c->arena()->Amalloc(size); 1725 ((BlockBegin*)res)->_id = c->get_next_id(); 1726 ((BlockBegin*)res)->_block_id = c->get_next_block_id(); 1727 return res; 1728 } 1729 1730 // initialization/counting 1731 static int number_of_blocks() { 1732 return Compilation::current()->number_of_blocks(); 1733 } 1734 1735 // creation 1736 BlockBegin(int bci) 1737 : StateSplit(illegalType) 1738 , _bci(bci) 1739 , _depth_first_number(-1) 1740 , _linear_scan_number(-1) 1741 , _dominator_depth(-1) 1742 , _loop_depth(0) 1743 , _loop_index(-1) 1744 , _flags(0) 1745 , _total_preds(0) 1746 , _stores_to_locals() 1747 , _successors(2) 1748 , _predecessors(2) 1749 , _dominates(2) 1750 , _dominator(NULL) 1751 , _end(NULL) 1752 , _exception_handlers(1) 1753 , 
_exception_states(NULL) 1754 , _exception_handler_pco(-1) 1755 , _lir(NULL) 1756 , _live_in() 1757 , _live_out() 1758 , _live_gen() 1759 , _live_kill() 1760 , _fpu_register_usage() 1761 , _fpu_stack_state(NULL) 1762 , _first_lir_instruction_id(-1) 1763 , _last_lir_instruction_id(-1) 1764 { 1765 _block = this; 1766 #ifndef PRODUCT 1767 set_printable_bci(bci); 1768 #endif 1769 } 1770 1771 // accessors 1772 int block_id() const { return _block_id; } 1773 int bci() const { return _bci; } 1774 BlockList* successors() { return &_successors; } 1775 BlockList* dominates() { return &_dominates; } 1776 BlockBegin* dominator() const { return _dominator; } 1777 int loop_depth() const { return _loop_depth; } 1778 int dominator_depth() const { return _dominator_depth; } 1779 int depth_first_number() const { return _depth_first_number; } 1780 int linear_scan_number() const { return _linear_scan_number; } 1781 BlockEnd* end() const { return _end; } 1782 Label* label() { return &_label; } 1783 LIR_List* lir() const { return _lir; } 1784 int exception_handler_pco() const { return _exception_handler_pco; } 1785 ResourceBitMap& live_in() { return _live_in; } 1786 ResourceBitMap& live_out() { return _live_out; } 1787 ResourceBitMap& live_gen() { return _live_gen; } 1788 ResourceBitMap& live_kill() { return _live_kill; } 1789 ResourceBitMap& fpu_register_usage() { return _fpu_register_usage; } 1790 intArray* fpu_stack_state() const { return _fpu_stack_state; } 1791 int first_lir_instruction_id() const { return _first_lir_instruction_id; } 1792 int last_lir_instruction_id() const { return _last_lir_instruction_id; } 1793 int total_preds() const { return _total_preds; } 1794 BitMap& stores_to_locals() { return _stores_to_locals; } 1795 1796 // manipulation 1797 void set_dominator(BlockBegin* dom) { _dominator = dom; } 1798 void set_loop_depth(int d) { _loop_depth = d; } 1799 void set_dominator_depth(int d) { _dominator_depth = d; } 1800 void set_depth_first_number(int dfn) { 
_depth_first_number = dfn; } 1801 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; } 1802 void set_end(BlockEnd* end); 1803 void clear_end(); 1804 void disconnect_from_graph(); 1805 static void disconnect_edge(BlockBegin* from, BlockBegin* to); 1806 BlockBegin* insert_block_between(BlockBegin* sux); 1807 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1808 void set_lir(LIR_List* lir) { _lir = lir; } 1809 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; } 1810 void set_live_in (const ResourceBitMap& map) { _live_in = map; } 1811 void set_live_out (const ResourceBitMap& map) { _live_out = map; } 1812 void set_live_gen (const ResourceBitMap& map) { _live_gen = map; } 1813 void set_live_kill(const ResourceBitMap& map) { _live_kill = map; } 1814 void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; } 1815 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; } 1816 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; } 1817 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; } 1818 void increment_total_preds(int n = 1) { _total_preds += n; } 1819 void init_stores_to_locals(int locals_count) { _stores_to_locals.initialize(locals_count); } 1820 1821 // generic 1822 virtual void state_values_do(ValueVisitor* f); 1823 1824 // successors and predecessors 1825 int number_of_sux() const; 1826 BlockBegin* sux_at(int i) const; 1827 void add_successor(BlockBegin* sux); 1828 void remove_successor(BlockBegin* pred); 1829 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); } 1830 1831 void add_predecessor(BlockBegin* pred); 1832 void remove_predecessor(BlockBegin* pred); 1833 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); } 1834 int number_of_preds() const { return _predecessors.length(); } 1835 BlockBegin* pred_at(int i) const { return _predecessors.at(i); } 1836 1837 // 
exception handlers potentially invoked by this block 1838 void add_exception_handler(BlockBegin* b); 1839 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); } 1840 int number_of_exception_handlers() const { return _exception_handlers.length(); } 1841 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); } 1842 1843 // states of the instructions that have an edge to this exception handler 1844 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); } 1845 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); } 1846 int add_exception_state(ValueStack* state); 1847 1848 // flags 1849 enum Flag { 1850 no_flag = 0, 1851 std_entry_flag = 1 << 0, 1852 osr_entry_flag = 1 << 1, 1853 exception_entry_flag = 1 << 2, 1854 subroutine_entry_flag = 1 << 3, 1855 backward_branch_target_flag = 1 << 4, 1856 is_on_work_list_flag = 1 << 5, 1857 was_visited_flag = 1 << 6, 1858 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand 1859 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split 1860 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan 1861 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan 1862 donot_eliminate_range_checks = 1 << 11 // Should be try to eliminate range checks in this block 1863 }; 1864 1865 void set(Flag f) { _flags |= f; } 1866 void clear(Flag f) { _flags &= ~f; } 1867 bool is_set(Flag f) const { return (_flags & f) != 0; } 1868 bool is_entry_block() const { 1869 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag; 1870 return (_flags & entry_mask) != 0; 1871 } 1872 1873 // iteration 1874 void 
iterate_preorder (BlockClosure* closure); 1875 void iterate_postorder (BlockClosure* closure); 1876 1877 void block_values_do(ValueVisitor* f); 1878 1879 // loops 1880 void set_loop_index(int ix) { _loop_index = ix; } 1881 int loop_index() const { return _loop_index; } 1882 1883 // merging 1884 bool try_merge(ValueStack* state); // try to merge states at block begin 1885 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); } 1886 1887 // debugging 1888 void print_block() PRODUCT_RETURN; 1889 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN; 1890 }; 1891 1892 1893 BASE(BlockEnd, StateSplit) 1894 private: 1895 BlockList* _sux; 1896 1897 protected: 1898 BlockList* sux() const { return _sux; } 1899 1900 void set_sux(BlockList* sux) { 1901 #ifdef ASSERT 1902 assert(sux != NULL, "sux must exist"); 1903 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist"); 1904 #endif 1905 _sux = sux; 1906 } 1907 1908 public: 1909 // creation 1910 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1911 : StateSplit(type, state_before) 1912 , _sux(NULL) 1913 { 1914 set_flag(IsSafepointFlag, is_safepoint); 1915 } 1916 1917 // accessors 1918 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1919 // For compatibility with old code, for new code use block() 1920 BlockBegin* begin() const { return _block; } 1921 1922 // manipulation 1923 void set_begin(BlockBegin* begin); 1924 1925 // successors 1926 int number_of_sux() const { return _sux != NULL ? 
_sux->length() : 0; } 1927 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1928 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1929 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1930 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1931 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1932 }; 1933 1934 1935 LEAF(Goto, BlockEnd) 1936 public: 1937 enum Direction { 1938 none, // Just a regular goto 1939 taken, not_taken // Goto produced from If 1940 }; 1941 private: 1942 ciMethod* _profiled_method; 1943 int _profiled_bci; 1944 Direction _direction; 1945 public: 1946 // creation 1947 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1948 : BlockEnd(illegalType, state_before, is_safepoint) 1949 , _profiled_method(NULL) 1950 , _profiled_bci(0) 1951 , _direction(none) { 1952 BlockList* s = new BlockList(1); 1953 s->append(sux); 1954 set_sux(s); 1955 } 1956 1957 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1958 , _profiled_method(NULL) 1959 , _profiled_bci(0) 1960 , _direction(none) { 1961 BlockList* s = new BlockList(1); 1962 s->append(sux); 1963 set_sux(s); 1964 } 1965 1966 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1967 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1968 int profiled_bci() const { return _profiled_bci; } 1969 Direction direction() const { return _direction; } 1970 1971 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1972 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1973 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1974 void set_direction(Direction d) { _direction = d; } 1975 }; 1976 1977 #ifdef ASSERT 1978 LEAF(Assert, Instruction) 1979 private: 1980 Value _x; 1981 Condition _cond; 1982 Value _y; 1983 char *_message; 1984 1985 public: 1986 // creation 1987 // 
unordered_is_true is valid for float/double compares only 1988 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1989 1990 // accessors 1991 Value x() const { return _x; } 1992 Condition cond() const { return _cond; } 1993 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1994 Value y() const { return _y; } 1995 const char *message() const { return _message; } 1996 1997 // generic 1998 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1999 }; 2000 #endif 2001 2002 LEAF(RangeCheckPredicate, StateSplit) 2003 private: 2004 Value _x; 2005 Condition _cond; 2006 Value _y; 2007 2008 void check_state(); 2009 2010 public: 2011 // creation 2012 // unordered_is_true is valid for float/double compares only 2013 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 2014 , _x(x) 2015 , _cond(cond) 2016 , _y(y) 2017 { 2018 ASSERT_VALUES 2019 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2020 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2021 this->set_state(state); 2022 check_state(); 2023 } 2024 2025 // Always deoptimize 2026 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 2027 { 2028 this->set_state(state); 2029 _x = _y = NULL; 2030 check_state(); 2031 } 2032 2033 // accessors 2034 Value x() const { return _x; } 2035 Condition cond() const { return _cond; } 2036 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2037 Value y() const { return _y; } 2038 2039 void always_fail() { _x = _y = NULL; } 2040 2041 // generic 2042 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2043 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 2044 }; 2045 2046 LEAF(If, BlockEnd) 2047 private: 2048 Value _x; 2049 Condition _cond; 2050 Value _y; 2051 ciMethod* _profiled_method; 2052 int _profiled_bci; // 
Canonicalizer may alter bci of If node 2053 bool _swapped; // Is the order reversed with respect to the original If in the 2054 // bytecode stream? 2055 public: 2056 // creation 2057 // unordered_is_true is valid for float/double compares only 2058 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint) 2059 : BlockEnd(illegalType, state_before, is_safepoint) 2060 , _x(x) 2061 , _cond(cond) 2062 , _y(y) 2063 , _profiled_method(NULL) 2064 , _profiled_bci(0) 2065 , _swapped(false) 2066 { 2067 ASSERT_VALUES 2068 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2069 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2070 BlockList* s = new BlockList(2); 2071 s->append(tsux); 2072 s->append(fsux); 2073 set_sux(s); 2074 } 2075 2076 // accessors 2077 Value x() const { return _x; } 2078 Condition cond() const { return _cond; } 2079 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2080 Value y() const { return _y; } 2081 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 
0 : 1); } 2082 BlockBegin* tsux() const { return sux_for(true); } 2083 BlockBegin* fsux() const { return sux_for(false); } 2084 BlockBegin* usux() const { return sux_for(unordered_is_true()); } 2085 bool should_profile() const { return check_flag(ProfileMDOFlag); } 2086 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 2087 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered 2088 bool is_swapped() const { return _swapped; } 2089 2090 // manipulation 2091 void swap_operands() { 2092 Value t = _x; _x = _y; _y = t; 2093 _cond = mirror(_cond); 2094 } 2095 2096 void swap_sux() { 2097 assert(number_of_sux() == 2, "wrong number of successors"); 2098 BlockList* s = sux(); 2099 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2100 _cond = negate(_cond); 2101 set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag)); 2102 } 2103 2104 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 2105 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 2106 void set_profiled_bci(int bci) { _profiled_bci = bci; } 2107 void set_swapped(bool value) { _swapped = value; } 2108 // generic 2109 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2110 }; 2111 2112 2113 LEAF(IfInstanceOf, BlockEnd) 2114 private: 2115 ciKlass* _klass; 2116 Value _obj; 2117 bool _test_is_instance; // jump if instance 2118 int _instanceof_bci; 2119 2120 public: 2121 IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux) 2122 : BlockEnd(illegalType, NULL, false) // temporary set to false 2123 , _klass(klass) 2124 , _obj(obj) 2125 , _test_is_instance(test_is_instance) 2126 , _instanceof_bci(instanceof_bci) 2127 { 2128 ASSERT_VALUES 2129 assert(instanceof_bci >= 0, "illegal bci"); 2130 BlockList* s = new BlockList(2); 2131 s->append(tsux); 2132 
s->append(fsux); 2133 set_sux(s); 2134 } 2135 2136 // accessors 2137 // 2138 // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an 2139 // instance of klass; otherwise it tests if it is *not* and instance 2140 // of klass. 2141 // 2142 // Note 2: IfInstanceOf instructions are created by combining an InstanceOf 2143 // and an If instruction. The IfInstanceOf bci() corresponds to the 2144 // bci that the If would have had; the (this->) instanceof_bci() is 2145 // the bci of the original InstanceOf instruction. 2146 ciKlass* klass() const { return _klass; } 2147 Value obj() const { return _obj; } 2148 int instanceof_bci() const { return _instanceof_bci; } 2149 bool test_is_instance() const { return _test_is_instance; } 2150 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); } 2151 BlockBegin* tsux() const { return sux_for(true); } 2152 BlockBegin* fsux() const { return sux_for(false); } 2153 2154 // manipulation 2155 void swap_sux() { 2156 assert(number_of_sux() == 2, "wrong number of successors"); 2157 BlockList* s = sux(); 2158 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2159 _test_is_instance = !_test_is_instance; 2160 } 2161 2162 // generic 2163 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); } 2164 }; 2165 2166 2167 BASE(Switch, BlockEnd) 2168 private: 2169 Value _tag; 2170 2171 public: 2172 // creation 2173 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint) 2174 : BlockEnd(illegalType, state_before, is_safepoint) 2175 , _tag(tag) { 2176 ASSERT_VALUES 2177 set_sux(sux); 2178 } 2179 2180 // accessors 2181 Value tag() const { return _tag; } 2182 int length() const { return number_of_sux() - 1; } 2183 2184 virtual bool needs_exception_state() const { return false; } 2185 2186 // generic 2187 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); } 2188 }; 2189 2190 2191 
// Switch over a dense key range [lo_key, hi_key]; successor i handles key lo_key + i.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); }

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + (length() - 1); }
};


// Switch over a sparse set of keys; _keys->at(i) is handled by successor i.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};


// Method return; _result is NULL for a void return.
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Throws _exception; always a safepoint and always a potential trap.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// Artificial end of the method-entry block; its successors are the standard
// entry (always the default, i.e. last, successor) and optionally the OSR entry.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  // osr entry, when present, was appended first and sits at index 0
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? NULL : sux_at(0); }
};


// Models the OSR buffer pointer passed into an on-stack-replacement entry
// (pointer-sized, hence longType on 64-bit platforms).
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;             // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { f->visit(&_input); }
};


// Abstract base of all Unsafe intrinsic accesses; result type is void for
// puts, otherwise derived from the accessed basic type.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    //Note:  Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Unsafe access through a raw address: base (a Java long) plus an optional
// scaled index computed by the optimizer.
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;                                   // Base address (a Java long)
  Value _index;                                  // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;                             // Scale factor: 0, 1, 2, or 3.
                                                 // Indicates log2 of number of bytes (1, 2, 4, or 8)
                                                 // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool  has_index()                              { return (_index != NULL); }
  int   log2_scale()                             { return _log2_scale; }

  // setters
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeOp::input_values_do(f);
                                                    f->visit(&_base);
                                                    if (has_index()) f->visit(&_index); }
};


// Raw-memory load; alignment/width flags are needed for OSR entry loads.
LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
 bool _may_be_unaligned, _is_wide;  // For OSREntry

 public:
 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

 UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  bool may_be_unaligned()                         { return _may_be_unaligned; }
  bool is_wide()                                  { return _is_wide; }
};


// Raw-memory store of _value.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;                                  // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeRawOp::input_values_do(f);
                                                    f->visit(&_value); }
};


// Unsafe access to a field of a Java object, addressed by object + offset.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;                                 // Object to be fetched from or mutated
  Value _offset;                                 // Offset within object
  bool  _is_volatile;                            // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeOp::input_values_do(f);
                                                    f->visit(&_object);
                                                    f->visit(&_offset); }
};


// Unsafe.get* on an object field.
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


// Unsafe.put* on an object field.
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
    , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeObjectOp::input_values_do(f);
                                                    f->visit(&_value); }
};

// Atomic getAndSet/getAndAdd on an object field (_is_add distinguishes the two).
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
    , _value(value)
    , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                            { return _is_add; }
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeObjectOp::input_values_do(f);
                                                    f->visit(&_value); }
};

// Records receiver-type and argument-type profiling information at a call site.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;
  int              _bci_of_invoke;
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;
  ciKlass*         _known_holder;
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  ciMethod* callee()             const { return _callee; }
  Value recv()                   const { return _recv; }
  ciKlass* known_holder()        const { return _known_holder; }
  int nb_profiled_args()         const { return _obj_args == NULL ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i)   const { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined()                 const { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  virtual void input_values_do(ValueVisitor* f)   {
    if (_recv != NULL) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};

// Records return-type profiling information for a call.
LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod*        _method;
  ciMethod*        _callee;
  int              _bci_of_invoke;
  Value            _ret;

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileType has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  ciMethod* callee()             const { return _callee; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  Value ret()                    const { return _ret; }

  virtual void input_values_do(ValueVisitor* f)   {
    if (_ret != NULL) {
      f->visit(&_ret);
    }
  }
};

// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
LEAF(RuntimeCall, Instruction)
 private:
  const char* _entry_name;
  address     _entry;
  Values*     _args;
  bool        _pass_thread;  // Pass the JavaThread* as an implicit first argument

 public:
  RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
    : Instruction(type)
    , _entry_name(entry_name)
    , _entry(entry)
    , _args(args)
    , _pass_thread(pass_thread) {
    ASSERT_VALUES
    pin();
  }

  const char* entry_name() const  { return _entry_name; }
  address entry() const           { return _entry; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const  { return _args->at(i); }
  bool pass_thread() const        { return _pass_thread; }

  virtual void input_values_do(ValueVisitor* f)   {
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};

// Use to trip invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;
  ValueStack* _state;

 public:
  ProfileInvoke(ciMethod* inlinee,  ValueStack* state)
    : Instruction(voidType)
    , _inlinee(inlinee)
    , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located QQQ???
    pin();
  }

  ciMethod* inlinee()      { return _inlinee; }
  ValueStack* state()      { return _state; }
  virtual void input_values_do(ValueVisitor*)   {}
  virtual void state_values_do(ValueVisitor*);
};

// Explicit memory barrier of the kind given by the LIR_Code.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  LIR_Code code()           { return _code; }

  virtual void input_values_do(ValueVisitor*)   {}
};

// A directed control-flow edge between two blocks, used by CFG utilities.
class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  BlockBegin* _to;
 public:
  BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
  BlockBegin* from() const { return _from; }
  BlockBegin* to() const   { return _to;   }
  bool is_same(BlockBegin* from, BlockBegin* to) const { return  _from == from && _to == to; }
  bool is_same(BlockPair* p) const { return  _from == p->from() && _to == p->to(); }
  void set_to(BlockBegin* b)   { _to = b; }
  void set_from(BlockBegin* b) { _from = b; }
};

typedef GrowableArray<BlockPair*> BlockPairList;

// Successor queries on BlockBegin delegate to its own list and cross-check it
// against the successors recorded on the terminating BlockEnd.
inline int         BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const              { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch");          return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd");         _successors.append(sux); }

#undef ASSERT_VALUES

#endif // SHARE_C1_C1_INSTRUCTION_HPP