1 /* 2 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_C1_C1_INSTRUCTION_HPP 26 #define SHARE_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
47 48 class Instruction; 49 class Phi; 50 class Local; 51 class Constant; 52 class AccessField; 53 class LoadField; 54 class StoreField; 55 class AccessArray; 56 class ArrayLength; 57 class AccessIndexed; 58 class LoadIndexed; 59 class StoreIndexed; 60 class NegateOp; 61 class Op2; 62 class ArithmeticOp; 63 class ShiftOp; 64 class LogicOp; 65 class CompareOp; 66 class IfOp; 67 class Convert; 68 class NullCheck; 69 class TypeCast; 70 class OsrEntry; 71 class ExceptionObject; 72 class StateSplit; 73 class Invoke; 74 class NewInstance; 75 class NewValueTypeInstance; 76 class NewArray; 77 class NewTypeArray; 78 class NewObjectArray; 79 class NewMultiArray; 80 class TypeCheck; 81 class CheckCast; 82 class InstanceOf; 83 class AccessMonitor; 84 class MonitorEnter; 85 class MonitorExit; 86 class Intrinsic; 87 class BlockBegin; 88 class BlockEnd; 89 class Goto; 90 class If; 91 class IfInstanceOf; 92 class Switch; 93 class TableSwitch; 94 class LookupSwitch; 95 class Return; 96 class Throw; 97 class Base; 98 class RoundFP; 99 class UnsafeOp; 100 class UnsafeRawOp; 101 class UnsafeGetRaw; 102 class UnsafePutRaw; 103 class UnsafeObjectOp; 104 class UnsafeGetObject; 105 class UnsafePutObject; 106 class UnsafeGetAndSetObject; 107 class ProfileCall; 108 class ProfileReturnType; 109 class ProfileInvoke; 110 class RuntimeCall; 111 class MemBar; 112 class RangeCheckPredicate; 113 #ifdef ASSERT 114 class Assert; 115 #endif 116 117 // A Value is a reference to the instruction creating the value 118 typedef Instruction* Value; 119 typedef GrowableArray<Value> Values; 120 typedef GrowableArray<ValueStack*> ValueStackStack; 121 122 // BlockClosure is the base class for block traversal/iteration. 
// Abstract visitor invoked once per basic block by CFG traversals.
class BlockClosure: public CompilationResourceObj {
 public:
  // Called for each block visited during iteration.
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // Receives the address of each Value so the visitor may inspect or replace it.
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// A growable list of basic blocks with traversal and bulk-apply helpers.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure from first to last block
  void iterate_backward(BlockClosure* closure);  // apply closure from last to first block
  void blocks_do(void f(BlockBegin*));           // apply a plain function to every block
  void values_do(ValueVisitor* f);               // visit the values of every block
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Type-based double dispatch over the HIR: each concrete Instruction class X
// routes its visit() call to the matching do_X() below.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewValueTypeInstance(NewValueTypeInstance* x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType*  x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!
// Each HASHn folds n inputs together, shifting the running hash left by 7
// bits before xor-ing in the next input.

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// HASHING1/2/3 define hash() and is_equal(Value) over 1/2/3 identity fields.
// `enabled` guards both: a disabled instruction hashes to 0 and never
// compares equal, which excludes it from value numbering.

#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
288 289 class Instruction: public CompilationResourceObj { 290 private: 291 int _id; // the unique instruction id 292 #ifndef PRODUCT 293 int _printable_bci; // the bci of the instruction for printing 294 #endif 295 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 296 int _pin_state; // set of PinReason describing the reason for pinning 297 ValueType* _type; // the instruction value type 298 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 299 Instruction* _subst; // the substitution instruction if any 300 LIR_Opr _operand; // LIR specific information 301 unsigned int _flags; // Flag bits 302 303 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 304 ValueStack* _exception_state; // Copy of state for exception handling 305 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 306 307 friend class UseCountComputer; 308 friend class BlockBegin; 309 310 void update_exception_state(ValueStack* state); 311 312 protected: 313 BlockBegin* _block; // Block that contains this instruction 314 315 void set_type(ValueType* type) { 316 assert(type != NULL, "type must exist"); 317 _type = type; 318 } 319 320 // Helper class to keep track of which arguments need a null check 321 class ArgsNonNullState { 322 private: 323 int _nonnull_state; // mask identifying which args are nonnull 324 public: 325 ArgsNonNullState() 326 : _nonnull_state(AllBits) {} 327 328 // Does argument number i needs a null check? 329 bool arg_needs_null_check(int i) const { 330 // No data is kept for arguments starting at position 33 so 331 // conservatively assume that they need a null check. 
332 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 333 return is_set_nth_bit(_nonnull_state, i); 334 } 335 return true; 336 } 337 338 // Set whether argument number i needs a null check or not 339 void set_arg_needs_null_check(int i, bool check) { 340 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 341 if (check) { 342 _nonnull_state |= nth_bit(i); 343 } else { 344 _nonnull_state &= ~(nth_bit(i)); 345 } 346 } 347 } 348 }; 349 350 public: 351 void* operator new(size_t size) throw() { 352 Compilation* c = Compilation::current(); 353 void* res = c->arena()->Amalloc(size); 354 ((Instruction*)res)->_id = c->get_next_id(); 355 return res; 356 } 357 358 static const int no_bci = -99; 359 360 enum InstructionFlag { 361 NeedsNullCheckFlag = 0, 362 NeverNullFlag, // For "Q" signatures 363 CanTrapFlag, 364 DirectCompareFlag, 365 IsEliminatedFlag, 366 IsSafepointFlag, 367 IsStaticFlag, 368 IsStrictfpFlag, 369 NeedsStoreCheckFlag, 370 NeedsWriteBarrierFlag, 371 PreservesStateFlag, 372 TargetIsFinalFlag, 373 TargetIsLoadedFlag, 374 TargetIsStrictfpFlag, 375 UnorderedIsTrueFlag, 376 NeedsPatchingFlag, 377 ThrowIncompatibleClassChangeErrorFlag, 378 InvokeSpecialReceiverCheckFlag, 379 ProfileMDOFlag, 380 IsLinkedInBlockFlag, 381 NeedsRangeCheckFlag, 382 InWorkListFlag, 383 DeoptimizeOnException, 384 InstructionLastFlag 385 }; 386 387 public: 388 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 389 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 390 391 // 'globally' used condition values 392 enum Condition { 393 eql, neq, lss, leq, gtr, geq, aeq, beq 394 }; 395 396 // Instructions may be pinned for many reasons and under certain conditions 397 // with enough knowledge it's possible to safely unpin them. 
398 enum PinReason { 399 PinUnknown = 1 << 0 400 , PinExplicitNullCheck = 1 << 3 401 , PinStackForStateSplit= 1 << 12 402 , PinStateSplitConstructor= 1 << 13 403 , PinGlobalValueNumbering= 1 << 14 404 }; 405 406 static Condition mirror(Condition cond); 407 static Condition negate(Condition cond); 408 409 // initialization 410 static int number_of_instructions() { 411 return Compilation::current()->number_of_instructions(); 412 } 413 414 // creation 415 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 416 : 417 #ifndef PRODUCT 418 _printable_bci(-99), 419 #endif 420 _use_count(0) 421 , _pin_state(0) 422 , _type(type) 423 , _next(NULL) 424 , _subst(NULL) 425 , _operand(LIR_OprFact::illegalOpr) 426 , _flags(0) 427 , _state_before(state_before) 428 , _exception_handlers(NULL) 429 , _block(NULL) 430 { 431 check_state(state_before); 432 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 433 update_exception_state(_state_before); 434 } 435 436 // accessors 437 int id() const { return _id; } 438 #ifndef PRODUCT 439 bool has_printable_bci() const { return _printable_bci != -99; } 440 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 441 void set_printable_bci(int bci) { _printable_bci = bci; } 442 #endif 443 int dominator_depth(); 444 int use_count() const { return _use_count; } 445 int pin_state() const { return _pin_state; } 446 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 447 ValueType* type() const { return _type; } 448 BlockBegin *block() const { return _block; } 449 Instruction* prev(); // use carefully, expensive operation 450 Instruction* next() const { return _next; } 451 bool has_subst() const { return _subst != NULL; } 452 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 453 LIR_Opr operand() const { return _operand; } 454 455 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 456 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 457 void set_never_null(bool f) { set_flag(NeverNullFlag, f); } 458 bool is_never_null() const { return check_flag(NeverNullFlag); } 459 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 460 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 461 462 bool has_uses() const { return use_count() > 0; } 463 ValueStack* state_before() const { return _state_before; } 464 ValueStack* exception_state() const { return _exception_state; } 465 virtual bool needs_exception_state() const { return true; } 466 XHandlers* exception_handlers() const { return _exception_handlers; } 467 468 // manipulation 469 void pin(PinReason reason) { _pin_state |= reason; } 470 void pin() { _pin_state |= PinUnknown; } 471 // DANGEROUS: only used by EliminateStores 472 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 473 474 Instruction* set_next(Instruction* next) { 475 assert(next->has_printable_bci(), "_printable_bci should have been set"); 476 assert(next != NULL, "must not be NULL"); 477 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 478 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 479 480 BlockBegin *block = this->block(); 481 next->_block = block; 482 483 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 484 _next = next; 485 return next; 486 } 487 488 Instruction* set_next(Instruction* next, int bci) { 489 #ifndef PRODUCT 490 next->set_printable_bci(bci); 491 #endif 492 return set_next(next); 493 } 494 495 // when blocks are merged 496 void fixup_block_pointers() { 497 Instruction *cur = next()->next(); // next()'s block is set in set_next 498 while (cur && cur->_block != block()) { 499 
cur->_block = block(); 500 cur = cur->next(); 501 } 502 } 503 504 Instruction *insert_after(Instruction *i) { 505 Instruction* n = _next; 506 set_next(i); 507 i->set_next(n); 508 return _next; 509 } 510 511 bool is_flattened_array() const; // FIXME -- remove it 512 513 bool is_loaded_flattened_array() const; 514 bool maybe_flattened_array(); 515 516 Instruction *insert_after_same_bci(Instruction *i) { 517 #ifndef PRODUCT 518 i->set_printable_bci(printable_bci()); 519 #endif 520 return insert_after(i); 521 } 522 523 void set_subst(Instruction* subst) { 524 assert(subst == NULL || 525 type()->base() == subst->type()->base() || 526 subst->type()->base() == illegalType, "type can't change"); 527 _subst = subst; 528 } 529 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 530 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 531 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 532 533 // machine-specifics 534 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 535 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 536 537 // generic 538 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 539 virtual Phi* as_Phi() { return NULL; } 540 virtual Local* as_Local() { return NULL; } 541 virtual Constant* as_Constant() { return NULL; } 542 virtual AccessField* as_AccessField() { return NULL; } 543 virtual LoadField* as_LoadField() { return NULL; } 544 virtual StoreField* as_StoreField() { return NULL; } 545 virtual AccessArray* as_AccessArray() { return NULL; } 546 virtual ArrayLength* as_ArrayLength() { return NULL; } 547 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 548 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 549 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 550 virtual NegateOp* as_NegateOp() { return NULL; } 551 virtual Op2* 
as_Op2() { return NULL; } 552 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 553 virtual ShiftOp* as_ShiftOp() { return NULL; } 554 virtual LogicOp* as_LogicOp() { return NULL; } 555 virtual CompareOp* as_CompareOp() { return NULL; } 556 virtual IfOp* as_IfOp() { return NULL; } 557 virtual Convert* as_Convert() { return NULL; } 558 virtual NullCheck* as_NullCheck() { return NULL; } 559 virtual OsrEntry* as_OsrEntry() { return NULL; } 560 virtual StateSplit* as_StateSplit() { return NULL; } 561 virtual Invoke* as_Invoke() { return NULL; } 562 virtual NewInstance* as_NewInstance() { return NULL; } 563 virtual NewValueTypeInstance* as_NewValueTypeInstance() { return NULL; } 564 virtual NewArray* as_NewArray() { return NULL; } 565 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 566 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 567 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 568 virtual TypeCheck* as_TypeCheck() { return NULL; } 569 virtual CheckCast* as_CheckCast() { return NULL; } 570 virtual InstanceOf* as_InstanceOf() { return NULL; } 571 virtual TypeCast* as_TypeCast() { return NULL; } 572 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 573 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 574 virtual MonitorExit* as_MonitorExit() { return NULL; } 575 virtual Intrinsic* as_Intrinsic() { return NULL; } 576 virtual BlockBegin* as_BlockBegin() { return NULL; } 577 virtual BlockEnd* as_BlockEnd() { return NULL; } 578 virtual Goto* as_Goto() { return NULL; } 579 virtual If* as_If() { return NULL; } 580 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 581 virtual TableSwitch* as_TableSwitch() { return NULL; } 582 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 583 virtual Return* as_Return() { return NULL; } 584 virtual Throw* as_Throw() { return NULL; } 585 virtual Base* as_Base() { return NULL; } 586 virtual RoundFP* as_RoundFP() { return NULL; } 587 virtual ExceptionObject* 
as_ExceptionObject() { return NULL; } 588 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 589 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 590 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 591 592 #ifdef ASSERT 593 virtual Assert* as_Assert() { return NULL; } 594 #endif 595 596 virtual void visit(InstructionVisitor* v) = 0; 597 598 virtual bool can_trap() const { return false; } 599 600 virtual void input_values_do(ValueVisitor* f) = 0; 601 virtual void state_values_do(ValueVisitor* f); 602 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 603 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 604 605 virtual ciType* exact_type() const; 606 virtual ciType* declared_type() const { return NULL; } 607 608 // hashing 609 virtual const char* name() const = 0; 610 HASHING1(Instruction, false, id()) // hashing disabled by default 611 612 // debugging 613 static void check_state(ValueStack* state) PRODUCT_RETURN; 614 void print() PRODUCT_RETURN; 615 void print_line() PRODUCT_RETURN; 616 void print(InstructionPrinter& ip) PRODUCT_RETURN; 617 }; 618 619 620 // The following macros are used to define base (i.e., non-leaf) 621 // and leaf instruction classes. They define class-name related 622 // generic functionality in one place. 
// BASE opens (and deliberately does not close) a class deriving from
// super_class_name and overrides the matching as_X() checked downcast.
// The instruction definition using the macro supplies the body and "};".
#define BASE(class_name, super_class_name)        \
  class class_name: public super_class_name {     \
   public:                                        \
    virtual class_name* as_##class_name()         { return this; }              \


// LEAF additionally wires name() and visitor dispatch for concrete classes.
#define LEAF(class_name, super_class_name)        \
  BASE(class_name, super_class_name)              \
   public:                                        \
    virtual const char* name() const              { return #class_name; }       \
    virtual void visit(InstructionVisitor* v)     { v->do_##class_name(this); } \


// Debugging support


#ifdef ASSERT
// Asserts that every visited value slot is non-NULL.
class AssertValues: public ValueVisitor {
  void visit(Value* x)             { assert((*x) != NULL, "value must exist"); }
};
#define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); }
#else
#define ASSERT_VALUES
#endif // ASSERT


// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int      _pf_flags;    // the flags of the phi function
  int      _index;       // to value on operand stack (index < 0) or to local
  ciType*  _exact_type;  // currently is set only for flattened arrays, NULL otherwise.
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index, ciType* exact_type)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  , _exact_type(exact_type)
  {
    _block = b;
    // For printing, a phi inherits the bci of its join block.
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  virtual ciType* exact_type() const {
    return _exact_type;
  }

  virtual ciType* declared_type() const {
    return _exact_type;
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  // _index encodes both cases: >= 0 is a local index, < 0 is a stack slot
  // stored as -(stack_index + 1).
  bool  is_local() const      { return _index >= 0; }
  bool  is_on_stack() const   { return !is_local(); }
  int   local_index() const   { assert(is_local(), ""); return _index; }
  int   stack_index() const   { assert(is_on_stack(), ""); return -(_index+1); }

  // Operands (one per incoming branch) are retrieved via these .cpp-defined
  // helpers rather than stored in this node.
  Value operand_at(int i) const;
  int   operand_count() const;

  void  set(Flag f)           { _pf_flags |=  f; }
  void  clear(Flag f)         { _pf_flags &= ~f; }
  bool  is_set(Flag f) const  { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // No stored input values to visit; operands live in the predecessors'
  // states and are accessed through operand_at()/operand_count().
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
720 LEAF(Local, Instruction) 721 private: 722 int _java_index; // the local index within the method to which the local belongs 723 bool _is_receiver; // if local variable holds the receiver: "this" for non-static methods 724 ciType* _declared_type; 725 public: 726 // creation 727 Local(ciType* declared, ValueType* type, int index, bool receiver, bool never_null) 728 : Instruction(type) 729 , _java_index(index) 730 , _is_receiver(receiver) 731 , _declared_type(declared) 732 { 733 set_never_null(never_null); 734 NOT_PRODUCT(set_printable_bci(-1)); 735 } 736 737 // accessors 738 int java_index() const { return _java_index; } 739 bool is_receiver() const { return _is_receiver; } 740 741 virtual ciType* declared_type() const { return _declared_type; } 742 743 // generic 744 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 745 }; 746 747 748 LEAF(Constant, Instruction) 749 public: 750 // creation 751 Constant(ValueType* type): 752 Instruction(type, NULL, /*type_is_constant*/ true) 753 { 754 assert(type->is_constant(), "must be a constant"); 755 } 756 757 Constant(ValueType* type, ValueStack* state_before): 758 Instruction(type, state_before, /*type_is_constant*/ true) 759 { 760 assert(state_before != NULL, "only used for constants which need patching"); 761 assert(type->is_constant(), "must be a constant"); 762 // since it's patching it needs to be pinned 763 pin(); 764 } 765 766 // generic 767 virtual bool can_trap() const { return state_before() != NULL; } 768 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 769 770 virtual intx hash() const; 771 virtual bool is_equal(Value v) const; 772 773 virtual ciType* exact_type() const; 774 775 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 776 777 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 778 BlockBegin* compare(Instruction::Condition cond, Value right, 779 BlockBegin* true_sux, BlockBegin* false_sux) const { 780 switch 
(compare(cond, right)) { 781 case not_comparable: 782 return NULL; 783 case cond_false: 784 return false_sux; 785 case cond_true: 786 return true_sux; 787 default: 788 ShouldNotReachHere(); 789 return NULL; 790 } 791 } 792 }; 793 794 795 BASE(AccessField, Instruction) 796 private: 797 Value _obj; 798 int _offset; 799 ciField* _field; 800 NullCheck* _explicit_null_check; // For explicit null check elimination 801 802 public: 803 // creation 804 AccessField(Value obj, int offset, ciField* field, bool is_static, 805 ValueStack* state_before, bool needs_patching) 806 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 807 , _obj(obj) 808 , _offset(offset) 809 , _field(field) 810 , _explicit_null_check(NULL) 811 { 812 set_needs_null_check(!is_static); 813 set_flag(IsStaticFlag, is_static); 814 set_flag(NeedsPatchingFlag, needs_patching); 815 ASSERT_VALUES 816 // pin of all instructions with memory access 817 pin(); 818 } 819 820 // accessors 821 Value obj() const { return _obj; } 822 int offset() const { return _offset; } 823 ciField* field() const { return _field; } 824 BasicType field_type() const { return _field->type()->basic_type(); } 825 bool is_static() const { return check_flag(IsStaticFlag); } 826 NullCheck* explicit_null_check() const { return _explicit_null_check; } 827 bool needs_patching() const { return check_flag(NeedsPatchingFlag); } 828 829 // Unresolved getstatic and putstatic can cause initialization. 830 // Technically it occurs at the Constant that materializes the base 831 // of the static fields but it's simpler to model it here. 832 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); } 833 834 // manipulation 835 836 // Under certain circumstances, if a previous NullCheck instruction 837 // proved the target object non-null, we can eliminate the explicit 838 // null check and do an implicit one, simply specifying the debug 839 // information from the NullCheck. 
This field should only be consulted 840 // if needs_null_check() is true. 841 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 842 843 // generic 844 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 845 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 846 }; 847 848 849 LEAF(LoadField, AccessField) 850 public: 851 // creation 852 LoadField(Value obj, int offset, ciField* field, bool is_static, 853 ValueStack* state_before, bool needs_patching, 854 ciValueKlass* value_klass = NULL, Value default_value = NULL ) 855 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 856 {} 857 858 ciType* declared_type() const; 859 860 // generic 861 HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset()) // cannot be eliminated if needs patching or if volatile 862 }; 863 864 865 LEAF(StoreField, AccessField) 866 private: 867 Value _value; 868 869 public: 870 // creation 871 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 872 ValueStack* state_before, bool needs_patching) 873 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 874 , _value(value) 875 { 876 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); 877 ASSERT_VALUES 878 pin(); 879 } 880 881 // accessors 882 Value value() const { return _value; } 883 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 884 885 // generic 886 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 887 }; 888 889 890 BASE(AccessArray, Instruction) 891 private: 892 Value _array; 893 894 public: 895 // creation 896 AccessArray(ValueType* type, Value array, ValueStack* state_before) 897 : Instruction(type, state_before) 898 , _array(array) 899 { 900 set_needs_null_check(true); 901 ASSERT_VALUES 902 pin(); // instruction with side effect (null exception or range 
check throwing) 903 } 904 905 Value array() const { return _array; } 906 907 // generic 908 virtual bool can_trap() const { return needs_null_check(); } 909 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 910 }; 911 912 913 LEAF(ArrayLength, AccessArray) 914 private: 915 NullCheck* _explicit_null_check; // For explicit null check elimination 916 917 public: 918 // creation 919 ArrayLength(Value array, ValueStack* state_before) 920 : AccessArray(intType, array, state_before) 921 , _explicit_null_check(NULL) {} 922 923 // accessors 924 NullCheck* explicit_null_check() const { return _explicit_null_check; } 925 926 // setters 927 // See LoadField::set_explicit_null_check for documentation 928 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 929 930 // generic 931 HASHING1(ArrayLength, true, array()->subst()) 932 }; 933 934 935 BASE(AccessIndexed, AccessArray) 936 private: 937 Value _index; 938 Value _length; 939 BasicType _elt_type; 940 bool _mismatched; 941 942 public: 943 // creation 944 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched) 945 : AccessArray(as_ValueType(elt_type), array, state_before) 946 , _index(index) 947 , _length(length) 948 , _elt_type(elt_type) 949 , _mismatched(mismatched) 950 { 951 set_flag(Instruction::NeedsRangeCheckFlag, true); 952 ASSERT_VALUES 953 } 954 955 // accessors 956 Value index() const { return _index; } 957 Value length() const { return _length; } 958 BasicType elt_type() const { return _elt_type; } 959 bool mismatched() const { return _mismatched; } 960 961 void clear_length() { _length = NULL; } 962 // perform elimination of range checks involving constants 963 bool compute_needs_range_check(); 964 965 // generic 966 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); } 967 }; 968 969 970 LEAF(LoadIndexed, AccessIndexed) 
971 private: 972 NullCheck* _explicit_null_check; // For explicit null check elimination 973 NewValueTypeInstance* _vt; 974 975 public: 976 // creation 977 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false) 978 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 979 , _explicit_null_check(NULL) {} 980 981 // accessors 982 NullCheck* explicit_null_check() const { return _explicit_null_check; } 983 984 // setters 985 // See LoadField::set_explicit_null_check for documentation 986 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 987 988 ciType* exact_type() const; 989 ciType* declared_type() const; 990 991 NewValueTypeInstance* vt() { return _vt; } 992 void set_vt(NewValueTypeInstance* vt) { _vt = vt; } 993 994 // generic 995 HASHING2(LoadIndexed, true, array()->subst(), index()->subst()) 996 }; 997 998 999 LEAF(StoreIndexed, AccessIndexed) 1000 private: 1001 Value _value; 1002 1003 ciMethod* _profiled_method; 1004 int _profiled_bci; 1005 bool _check_boolean; 1006 1007 public: 1008 // creation 1009 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, 1010 bool check_boolean, bool mismatched = false) 1011 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 1012 , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean) 1013 { 1014 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); 1015 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object())); 1016 ASSERT_VALUES 1017 pin(); 1018 } 1019 1020 // accessors 1021 Value value() const { return _value; } 1022 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 1023 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } 1024 bool check_boolean() const { return _check_boolean; } 1025 // Helpers for MethodData* 
profiling 1026 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1027 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1028 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1029 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1030 ciMethod* profiled_method() const { return _profiled_method; } 1031 int profiled_bci() const { return _profiled_bci; } 1032 // Flattened array support 1033 bool is_exact_flattened_array_store() const; 1034 // generic 1035 virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); } 1036 }; 1037 1038 1039 LEAF(NegateOp, Instruction) 1040 private: 1041 Value _x; 1042 1043 public: 1044 // creation 1045 NegateOp(Value x) : Instruction(x->type()->base()), _x(x) { 1046 ASSERT_VALUES 1047 } 1048 1049 // accessors 1050 Value x() const { return _x; } 1051 1052 // generic 1053 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); } 1054 }; 1055 1056 1057 BASE(Op2, Instruction) 1058 private: 1059 Bytecodes::Code _op; 1060 Value _x; 1061 Value _y; 1062 1063 public: 1064 // creation 1065 Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL) 1066 : Instruction(type, state_before) 1067 , _op(op) 1068 , _x(x) 1069 , _y(y) 1070 { 1071 ASSERT_VALUES 1072 } 1073 1074 // accessors 1075 Bytecodes::Code op() const { return _op; } 1076 Value x() const { return _x; } 1077 Value y() const { return _y; } 1078 1079 // manipulators 1080 void swap_operands() { 1081 assert(is_commutative(), "operation must be commutative"); 1082 Value t = _x; _x = _y; _y = t; 1083 } 1084 1085 // generic 1086 virtual bool is_commutative() const { return false; } 1087 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1088 }; 1089 1090 1091 LEAF(ArithmeticOp, Op2) 1092 public: 1093 // creation 1094 ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before) 1095 
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    // Pin only the operations that may raise an exception.
    if (can_trap()) pin();
  }

  // accessors
  bool is_strictfp() const                       { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Shift operation; result type is the base type of the shifted operand.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Bitwise logic operation (and, or, xor).
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Three-way comparison (cmp, fcmpl/fcmpg, ...); always produces an int.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional-move style select: (x cond y) ? tval : fval.
// The condition is stored in the Op2 bytecode slot.
LEAF(IfOp, Op2)
 private:
  Value _tval;                                   // value if the condition is true
  Value _fval;                                   // value if the condition is false

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  // op() is hidden on purpose: the Op2 slot holds a Condition, not a bytecode.
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (i2l, d2i, ...).
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;                           // the conversion bytecode
  Value           _value;                        // the value being converted

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on an object reference.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;                                    // the object being checked

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;                        // the more precise type being asserted
  Value   _obj;                                  // the value being cast

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                  { return _declared_type; }
  Value   obj() const                            { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


// Common base for instructions that carry a full JVM state (locals/stack)
// at their position; pinned by default via the constructor.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;                            // the JVM state at this instruction

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  // The state may only be set once; check_state() validates it.
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// A method invocation (any of the invoke* bytecodes).
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;                         // which invoke bytecode
  Value           _recv;                         // receiver, NULL for static calls
  Values*         _args;                         // the arguments (excluding receiver)
  BasicTypeList*  _signature;                    // argument basic types
  int             _vtable_index;                 // vtable index for virtual dispatch
  ciMethod*       _target;                       // the resolved target method

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before, bool never_null);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Allocation of a new (non-value) instance.
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;                       // the klass being instantiated
  bool             _is_unresolved;               // true if the klass is not yet resolved

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }
  bool is_unresolved() const                     { return _is_unresolved; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};

// Allocation of a new value-type instance (Valhalla).
LEAF(NewValueTypeInstance, StateSplit)
  bool _is_unresolved;                           // true if the klass is not yet resolved
  ciValueKlass* _klass;                          // the value klass being instantiated
  Value _depends_on;                             // Link to instance on with
                                                 // withfield was called on

 public:

  // Default creation, always allocated for now
  NewValueTypeInstance(ciValueKlass* klass, ValueStack* state_before, bool is_unresolved, Value depends_on = NULL)
  : StateSplit(instanceType, state_before)
  , _is_unresolved(is_unresolved)
  , _klass(klass)
  {
    // With no explicit dependency, the instance depends on itself.
    if (depends_on == NULL) {
      _depends_on = this;
    } else {
      _depends_on = depends_on;
    }
    set_never_null(true);
  }

  // accessors
  bool is_unresolved() const                     { return _is_unresolved; }
  Value depends_on();

  ciValueKlass* klass() const                    { return _klass; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;

  // Only done in LIR Generator -> map everything to object
  void set_to_object_type()                      { set_type(instanceType); }
};

// Common base for array allocations.
BASE(NewArray, StateSplit)
 private:
  Value _length;                                 // the array length; NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* exact_type() const                     { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};


// Allocation of an array of a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;                           // the primitive element type

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};


// Allocation of an array of object references.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;                               // the element klass

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};


// Allocation of a multi-dimensional array; the per-dimension sizes live in _dims.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;                               // the array klass
  Values*  _dims;                                // dimension sizes, one Value per dimension

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }

  ciType* exact_type() const;
};


// Common base for dynamic type checks (checkcast, instanceof).
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*  _klass;                              // the klass to check against; NULL if not loaded
  Value     _obj;                                // the object being checked

  ciMethod* _profiled_method;                    // method for MethodData* profiling
  int       _profiled_bci;                       // bci for MethodData* profiling

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
};


// The checkcast bytecode: throws on type mismatch.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before, bool never_null = false)
  : TypeCheck(klass, obj, objectType, state_before) {
    set_never_null(never_null);
  }

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


// The instanceof bytecode: produces an int (0/1), never throws.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};


// Common base for monitorenter/monitorexit.
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;                                    // the object being locked/unlocked
  int   _monitor_no;                             // the monitor slot number

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};


// The monitorenter bytecode.
LEAF(MonitorEnter, AccessMonitor)
  bool _maybe_valuetype;                         // receiver may be a value type (which cannot be locked)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before, bool maybe_valuetype)
  : AccessMonitor(obj, monitor_no, state_before)
  , _maybe_valuetype(maybe_valuetype)
  {
    ASSERT_VALUES
  }

  // accessors
  bool maybe_valuetype() const                   { return _maybe_valuetype; }

  // generic
  virtual bool can_trap() const                  { return true; }
};


// The monitorexit bytecode.
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};


// An intrinsified method call, replacing the Invoke with inline code.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;                          // which intrinsic
  Values*          _args;                        // all arguments (including receiver, if any)
  Value            _recv;                        // the receiver, or NULL
  ArgsNonNullState _nonnull_state;               // per-argument null-check requirements

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag,        cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                   { return check_flag(PreservesStateFlag); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


class LIR_List;

// The entry instruction of a basic block; carries all per-block metadata
// (CFG edges, dominator info, liveness sets, LIR, exception handlers).
LEAF(BlockBegin, StateSplit)
 private:
  int        _block_id;                          // the unique block id
  int        _bci;                               // start-bci of block
  int        _depth_first_number;                // number of this block in a depth-first ordering
  int        _linear_scan_number;                // number of this block in linear-scan ordering
  int        _dominator_depth;
  int        _loop_depth;                        // the loop nesting level of this block
  int        _loop_index;                        // number of the innermost loop of this block
  int        _flags;                             // the flags associated with this block

  // fields used by BlockListBuilder
  int            _total_preds;                   // number of predecessors found by BlockListBuilder
  ResourceBitMap _stores_to_locals;              // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList   _successors;                       // the successors of this block
  BlockList   _predecessors;                     // the predecessors of this block
  BlockList   _dominates;                        // list of blocks that are dominated by this block
  BlockBegin* _dominator;                        // the dominator of this block
  // SSA specific ends
  BlockEnd*  _end;                               // the last instruction of this block
  BlockList  _exception_handlers;                // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states;            // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int        _exception_handler_pco;             // if this block is the start of an exception handler,
                                                 // this records the PC offset in the assembly code of the
                                                 // first instruction in this block
  Label      _label;                             // the label associated with this block
  LIR_List*  _lir;                               // the low level intermediate representation for this block

  ResourceBitMap _live_in;                       // set of live LIR_Opr registers at entry to this block
  ResourceBitMap _live_out;                      // set of live LIR_Opr registers at exit from this block
  ResourceBitMap _live_gen;                      // set of registers used before any redefinition in this block
  ResourceBitMap _live_kill;                     // set of registers defined in this block

  ResourceBitMap _fpu_register_usage;
  intArray*      _fpu_stack_state;               // For x86 FPU code generation with UseLinearScan
  int            _first_lir_instruction_id;      // ID of first LIR instruction in this block
  int            _last_lir_instruction_id;       // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  // Arena-allocated; ids are assigned here, before the constructor runs,
  // so that they are available even during construction.
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    ((BlockBegin*)res)->_id = c->get_next_id();
    ((BlockBegin*)res)->_block_id = c->get_next_block_id();
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  // NOTE: _id and _block_id are intentionally NOT initialized here;
  // they are set in operator new above.
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _dominator_depth(-1)
  , _loop_depth(0)
  , _loop_index(-1)
  , _flags(0)
  , _total_preds(0)
  , _stores_to_locals()
  , _successors(2)
  , _predecessors(2)
  , _dominates(2)
  , _dominator(NULL)
  , _end(NULL)
  , _exception_handlers(1)
  , _exception_states(NULL)
  , _exception_handler_pco(-1)
  , _lir(NULL)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(NULL)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  {
    _block = this;
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }

  // accessors
  int block_id() const                           { return _block_id; }
  int bci() const                                { return _bci; }
  BlockList* successors()                        { return &_successors; }
  BlockList* dominates()                         { return &_dominates; }
  BlockBegin* dominator() const                  { return _dominator; }
  int loop_depth() const                         { return _loop_depth; }
  int dominator_depth() const                    { return _dominator_depth; }
  int depth_first_number() const                 { return _depth_first_number; }
  int linear_scan_number() const                 { return _linear_scan_number; }
  BlockEnd* end() const                          { return _end; }
  Label* label()                                 { return &_label; }
  LIR_List* lir() const                          { return _lir; }
  int exception_handler_pco() const              { return _exception_handler_pco; }
  ResourceBitMap& live_in()                      { return _live_in;        }
  ResourceBitMap& live_out()                     { return _live_out;       }
  ResourceBitMap& live_gen()                     { return _live_gen;       }
  ResourceBitMap& live_kill()                    { return _live_kill;      }
  ResourceBitMap& fpu_register_usage()           { return _fpu_register_usage; }
  intArray* fpu_stack_state() const              { return _fpu_stack_state;    }
  int first_lir_instruction_id() const           { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const            { return _last_lir_instruction_id; }
  int total_preds() const                        { return _total_preds; }
  BitMap& stores_to_locals()                     { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom)            { _dominator = dom; }
  void set_loop_depth(int d)                     { _loop_depth = d; }
  void set_dominator_depth(int d)                { _dominator_depth = d; }
  void set_depth_first_number(int dfn)           { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn)           { _linear_scan_number = lsn; }
  void set_end(BlockEnd* end);
  void clear_end();
  void disconnect_from_graph();
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir)                    { _lir = lir; }
  void set_exception_handler_pco(int pco)        { _exception_handler_pco = pco; }
  void set_live_in  (const ResourceBitMap& map)  { _live_in = map;   }
  void set_live_out (const ResourceBitMap& map)  { _live_out = map;  }
  void set_live_gen (const ResourceBitMap& map)  { _live_gen = map;  }
  void set_live_kill(const ResourceBitMap& map)  { _live_kill = map; }
  void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state)      { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id)      { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id)       { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1)          { _total_preds += n; }
  void init_stores_to_locals(int locals_count)   { _stores_to_locals.initialize(locals_count); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_successor(BlockBegin* sux);
  void remove_successor(BlockBegin* pred);
  bool is_successor(BlockBegin* sux) const       { return _successors.contains(sux); }

  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const    { return _predecessors.contains(pred); }
  int number_of_preds() const                    { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const               { return _predecessors.at(i); }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int  number_of_exception_handlers() const      { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const  { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states()               { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const  { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // Should be try to eliminate range checks in this block
  };

  void set(Flag f)                               { _flags |= f; }
  void clear(Flag f)                             { _flags &= ~f; }
  bool is_set(Flag f) const                      { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder   (BlockClosure* closure);
  void iterate_postorder  (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                    { _loop_index = ix;        }
  int  loop_index() const                        { return _loop_index;      }

  // merging
  bool try_merge(ValueStack* state);             // try to merge states at block begin
  void merge(ValueStack* state)                  { bool b = try_merge(state); assert(b, "merge failed"); }

  // debugging
  void print_block()                             PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};


// Common base for the last instruction of a basic block; owns the
// successor list.
BASE(BlockEnd, StateSplit)
 private:
  BlockList* _sux;                               // successor blocks; NULL until set_sux is called

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ?
_sux->length() : 0; } 1929 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1930 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1931 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1932 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1933 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1934 }; 1935 1936 1937 LEAF(Goto, BlockEnd) 1938 public: 1939 enum Direction { 1940 none, // Just a regular goto 1941 taken, not_taken // Goto produced from If 1942 }; 1943 private: 1944 ciMethod* _profiled_method; 1945 int _profiled_bci; 1946 Direction _direction; 1947 public: 1948 // creation 1949 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1950 : BlockEnd(illegalType, state_before, is_safepoint) 1951 , _profiled_method(NULL) 1952 , _profiled_bci(0) 1953 , _direction(none) { 1954 BlockList* s = new BlockList(1); 1955 s->append(sux); 1956 set_sux(s); 1957 } 1958 1959 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1960 , _profiled_method(NULL) 1961 , _profiled_bci(0) 1962 , _direction(none) { 1963 BlockList* s = new BlockList(1); 1964 s->append(sux); 1965 set_sux(s); 1966 } 1967 1968 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1969 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1970 int profiled_bci() const { return _profiled_bci; } 1971 Direction direction() const { return _direction; } 1972 1973 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1974 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1975 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1976 void set_direction(Direction d) { _direction = d; } 1977 }; 1978 1979 #ifdef ASSERT 1980 LEAF(Assert, Instruction) 1981 private: 1982 Value _x; 1983 Condition _cond; 1984 Value _y; 1985 char *_message; 1986 1987 public: 1988 // creation 1989 // 
unordered_is_true is valid for float/double compares only 1990 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1991 1992 // accessors 1993 Value x() const { return _x; } 1994 Condition cond() const { return _cond; } 1995 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1996 Value y() const { return _y; } 1997 const char *message() const { return _message; } 1998 1999 // generic 2000 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 2001 }; 2002 #endif 2003 2004 LEAF(RangeCheckPredicate, StateSplit) 2005 private: 2006 Value _x; 2007 Condition _cond; 2008 Value _y; 2009 2010 void check_state(); 2011 2012 public: 2013 // creation 2014 // unordered_is_true is valid for float/double compares only 2015 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 2016 , _x(x) 2017 , _cond(cond) 2018 , _y(y) 2019 { 2020 ASSERT_VALUES 2021 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2022 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2023 this->set_state(state); 2024 check_state(); 2025 } 2026 2027 // Always deoptimize 2028 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 2029 { 2030 this->set_state(state); 2031 _x = _y = NULL; 2032 check_state(); 2033 } 2034 2035 // accessors 2036 Value x() const { return _x; } 2037 Condition cond() const { return _cond; } 2038 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2039 Value y() const { return _y; } 2040 2041 void always_fail() { _x = _y = NULL; } 2042 2043 // generic 2044 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2045 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 2046 }; 2047 2048 LEAF(If, BlockEnd) 2049 private: 2050 Value _x; 2051 Condition _cond; 2052 Value _y; 2053 ciMethod* _profiled_method; 2054 int _profiled_bci; // 
Canonicalizer may alter bci of If node 2055 bool _swapped; // Is the order reversed with respect to the original If in the 2056 // bytecode stream? 2057 public: 2058 // creation 2059 // unordered_is_true is valid for float/double compares only 2060 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint) 2061 : BlockEnd(illegalType, state_before, is_safepoint) 2062 , _x(x) 2063 , _cond(cond) 2064 , _y(y) 2065 , _profiled_method(NULL) 2066 , _profiled_bci(0) 2067 , _swapped(false) 2068 { 2069 ASSERT_VALUES 2070 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2071 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2072 BlockList* s = new BlockList(2); 2073 s->append(tsux); 2074 s->append(fsux); 2075 set_sux(s); 2076 } 2077 2078 // accessors 2079 Value x() const { return _x; } 2080 Condition cond() const { return _cond; } 2081 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2082 Value y() const { return _y; } 2083 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 
0 : 1); } 2084 BlockBegin* tsux() const { return sux_for(true); } 2085 BlockBegin* fsux() const { return sux_for(false); } 2086 BlockBegin* usux() const { return sux_for(unordered_is_true()); } 2087 bool should_profile() const { return check_flag(ProfileMDOFlag); } 2088 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 2089 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered 2090 bool is_swapped() const { return _swapped; } 2091 2092 // manipulation 2093 void swap_operands() { 2094 Value t = _x; _x = _y; _y = t; 2095 _cond = mirror(_cond); 2096 } 2097 2098 void swap_sux() { 2099 assert(number_of_sux() == 2, "wrong number of successors"); 2100 BlockList* s = sux(); 2101 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2102 _cond = negate(_cond); 2103 set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag)); 2104 } 2105 2106 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 2107 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 2108 void set_profiled_bci(int bci) { _profiled_bci = bci; } 2109 void set_swapped(bool value) { _swapped = value; } 2110 // generic 2111 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2112 }; 2113 2114 2115 LEAF(IfInstanceOf, BlockEnd) 2116 private: 2117 ciKlass* _klass; 2118 Value _obj; 2119 bool _test_is_instance; // jump if instance 2120 int _instanceof_bci; 2121 2122 public: 2123 IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux) 2124 : BlockEnd(illegalType, NULL, false) // temporary set to false 2125 , _klass(klass) 2126 , _obj(obj) 2127 , _test_is_instance(test_is_instance) 2128 , _instanceof_bci(instanceof_bci) 2129 { 2130 ASSERT_VALUES 2131 assert(instanceof_bci >= 0, "illegal bci"); 2132 BlockList* s = new BlockList(2); 2133 s->append(tsux); 2134 
s->append(fsux); 2135 set_sux(s); 2136 } 2137 2138 // accessors 2139 // 2140 // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an 2141 // instance of klass; otherwise it tests if it is *not* and instance 2142 // of klass. 2143 // 2144 // Note 2: IfInstanceOf instructions are created by combining an InstanceOf 2145 // and an If instruction. The IfInstanceOf bci() corresponds to the 2146 // bci that the If would have had; the (this->) instanceof_bci() is 2147 // the bci of the original InstanceOf instruction. 2148 ciKlass* klass() const { return _klass; } 2149 Value obj() const { return _obj; } 2150 int instanceof_bci() const { return _instanceof_bci; } 2151 bool test_is_instance() const { return _test_is_instance; } 2152 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); } 2153 BlockBegin* tsux() const { return sux_for(true); } 2154 BlockBegin* fsux() const { return sux_for(false); } 2155 2156 // manipulation 2157 void swap_sux() { 2158 assert(number_of_sux() == 2, "wrong number of successors"); 2159 BlockList* s = sux(); 2160 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2161 _test_is_instance = !_test_is_instance; 2162 } 2163 2164 // generic 2165 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); } 2166 }; 2167 2168 2169 BASE(Switch, BlockEnd) 2170 private: 2171 Value _tag; 2172 2173 public: 2174 // creation 2175 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint) 2176 : BlockEnd(illegalType, state_before, is_safepoint) 2177 , _tag(tag) { 2178 ASSERT_VALUES 2179 set_sux(sux); 2180 } 2181 2182 // accessors 2183 Value tag() const { return _tag; } 2184 int length() const { return number_of_sux() - 1; } 2185 2186 virtual bool needs_exception_state() const { return false; } 2187 2188 // generic 2189 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); } 2190 }; 2191 2192 2193 
// TableSwitch: dense switch over the contiguous key range [lo_key, hi_key].
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); }

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + (length() - 1); }
};


// LookupSwitch: sparse switch; _keys holds one key per non-default successor.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};


// Return: method return, with an optional result value (NULL for void).
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Throw: explicitly throws the given exception object.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// Base: the artificial root block end of the IR; its successors are the
// standard entry and (optionally) the OSR entry.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? NULL : sux_at(0); }
};


// OsrEntry: models the incoming OSR buffer pointer (pointer-sized, hence
// longType on 64-bit and intType on 32-bit).
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;             // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};


// UnsafeOp is the base class of all sun.misc.Unsafe-style raw memory
// accesses; puts produce no value (voidType), gets produce the loaded value.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to...
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// UnsafeRawOp: Unsafe access addressed as base + (index << log2_scale).
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;                                   // Base address (a Java long)
  Value _index;                                  // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;                             // Scale factor: 0, 1, 2, or 3.
                                                 // Indicates log2 of number of bytes (1, 2, 4, or 8)
                                                 // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool  has_index()                              { return (_index != NULL); }
  int   log2_scale()                             { return _log2_scale; }

  // setters
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_base);
                                                   if (has_index()) f->visit(&_index); }
};


// UnsafeGetRaw: raw memory load.
LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
 bool _may_be_unaligned, _is_wide;               // For OSREntry

 public:
 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

 UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  bool may_be_unaligned()                        { return _may_be_unaligned; }
  bool is_wide()                                 { return _is_wide; }
};


// UnsafePutRaw: raw memory store.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;                                  // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeRawOp::input_values_do(f);
                                                   f->visit(&_value); }
};


// UnsafeObjectOp: Unsafe access addressed as object + offset.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;                                 // Object to be fetched from or mutated
  Value _offset;                                 // Offset within object
  bool  _is_volatile;                            // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_object);
                                                   f->visit(&_offset); }
};


// UnsafeGetObject: field/element load via Unsafe.
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


// UnsafePutObject: field/element store via Unsafe.
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};

// UnsafeGetAndSetObject: atomic get-and-set or get-and-add via Unsafe
// (_is_add distinguishes the two).
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                            { return _is_add; }
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};

// ProfileCall: records receiver/argument type profiles for a call site.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;
  int              _bci_of_invoke;
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;
  ciKlass*         _known_holder;
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  ciMethod* callee()             const { return _callee; }
  Value recv()                   const { return _recv; }
  ciKlass* known_holder()        const { return _known_holder; }
  int nb_profiled_args()         const { return _obj_args == NULL ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i)   const { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined()                 const { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != NULL) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};

// ProfileReturnType: records the type of a call's return value for profiling.
LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod*        _method;
  ciMethod*        _callee;
  int              _bci_of_invoke;
  Value            _ret;

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileType has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  ciMethod* callee()             const { return _callee; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  Value ret()                    const { return _ret; }

  virtual void input_values_do(ValueVisitor* f) {
    if (_ret != NULL) {
      f->visit(&_ret);
    }
  }
};

// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
LEAF(RuntimeCall, Instruction)
 private:
  const char* _entry_name;
  address     _entry;
  Values*     _args;
  bool        _pass_thread;  // Pass the JavaThread* as an implicit first argument

 public:
  RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
    : Instruction(type)
    , _entry_name(entry_name)
    , _entry(entry)
    , _args(args)
    , _pass_thread(pass_thread) {
    ASSERT_VALUES
    pin();
  }

  const char* entry_name() const  { return _entry_name; }
  address entry() const           { return _entry; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const  { return _args->at(i); }
  bool pass_thread() const        { return _pass_thread; }

  virtual void input_values_do(ValueVisitor* f)   {
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};

// Use to trip invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;
  ValueStack* _state;

 public:
  ProfileInvoke(ciMethod* inlinee,  ValueStack* state)
    : Instruction(voidType)
    , _inlinee(inlinee)
    , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located QQQ???
    pin();
  }

  ciMethod* inlinee()      { return _inlinee; }
  ValueStack* state()      { return _state; }
  virtual void input_values_do(ValueVisitor*)   {}
  virtual void state_values_do(ValueVisitor*);
};

// MemBar: explicit memory barrier; the concrete barrier kind is carried
// as a LIR_Code.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  LIR_Code code()          { return _code; }

  virtual void input_values_do(ValueVisitor*)   {}
};

// BlockPair: a (from, to) edge in the block graph.
class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  BlockBegin* _to;
 public:
  BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
  BlockBegin* from() const { return _from; }
  BlockBegin* to() const   { return _to;   }
  bool is_same(BlockBegin* from, BlockBegin* to) const { return  _from == from && _to == to; }
  bool is_same(BlockPair* p) const { return  _from == p->from() && _to == p->to(); }
  void set_to(BlockBegin* b)   { _to = b; }
  void set_from(BlockBegin* b) { _from = b; }
};

typedef GrowableArray<BlockPair*> BlockPairList;

// These accessors assert that BlockBegin's cached successor list stays in
// sync with the successor list of its BlockEnd (once an end is set).
inline int  BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const       { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch");         return _successors.at(i); }
inline void BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd");         _successors.append(sux); }

#undef ASSERT_VALUES

#endif // SHARE_C1_C1_INSTRUCTION_HPP