1 /* 2 * Copyright (c) 1999, 2015, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
// Forward declarations of every class in the Instruction hierarchy
// (leaf classes are concrete; the others are abstract factoring bases).
class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class IfInstanceOf;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafeGetAndSetObject;
class ProfileCall;
class ProfileReturnType;
class ProfileInvoke;
class RuntimeCall;
class MemBar;
class RangeCheckPredicate;
#ifdef ASSERT
class Assert;
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
define_array(ValueArray, Value)
define_stack(Values, ValueArray)

define_array(ValueStackArray, ValueStack*)
define_stack(ValueStackStack, ValueStackArray)

// BlockClosure is the base class for block traversal/iteration.
class BlockClosure: public CompilationResourceObj {
 public:
  // Called once per basic block by the iteration functions in BlockList.
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // Receives a pointer to the Value slot so the visitor may also rewrite it.
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// A growable list of basic blocks with traversal helpers.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure first-to-last
  void iterate_backward(BlockClosure* closure);  // apply closure last-to-first
  void blocks_do(void f(BlockBegin*));           // apply a plain function to each block
  void values_do(ValueVisitor* f);
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes themselves.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default
// implementation disables value numbering. Each instruction which can be
// value-numbered, should define corresponding hash() and is_equal(Value)
// functions via the macros below. The f arguments specify all the values/op
// codes, etc. that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
290 291 class Instruction: public CompilationResourceObj { 292 private: 293 int _id; // the unique instruction id 294 #ifndef PRODUCT 295 int _printable_bci; // the bci of the instruction for printing 296 #endif 297 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 298 int _pin_state; // set of PinReason describing the reason for pinning 299 ValueType* _type; // the instruction value type 300 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 301 Instruction* _subst; // the substitution instruction if any 302 LIR_Opr _operand; // LIR specific information 303 unsigned int _flags; // Flag bits 304 305 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 306 ValueStack* _exception_state; // Copy of state for exception handling 307 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 308 309 friend class UseCountComputer; 310 friend class BlockBegin; 311 312 void update_exception_state(ValueStack* state); 313 314 protected: 315 BlockBegin* _block; // Block that contains this instruction 316 317 void set_type(ValueType* type) { 318 assert(type != NULL, "type must exist"); 319 _type = type; 320 } 321 322 // Helper class to keep track of which arguments need a null check 323 class ArgsNonNullState { 324 private: 325 int _nonnull_state; // mask identifying which args are nonnull 326 public: 327 ArgsNonNullState() 328 : _nonnull_state(AllBits) {} 329 330 // Does argument number i needs a null check? 331 bool arg_needs_null_check(int i) const { 332 // No data is kept for arguments starting at position 33 so 333 // conservatively assume that they need a null check. 
334 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 335 return is_set_nth_bit(_nonnull_state, i); 336 } 337 return true; 338 } 339 340 // Set whether argument number i needs a null check or not 341 void set_arg_needs_null_check(int i, bool check) { 342 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 343 if (check) { 344 _nonnull_state |= nth_bit(i); 345 } else { 346 _nonnull_state &= ~(nth_bit(i)); 347 } 348 } 349 } 350 }; 351 352 public: 353 void* operator new(size_t size) throw() { 354 Compilation* c = Compilation::current(); 355 void* res = c->arena()->Amalloc(size); 356 ((Instruction*)res)->_id = c->get_next_id(); 357 return res; 358 } 359 360 static const int no_bci = -99; 361 362 enum InstructionFlag { 363 NeedsNullCheckFlag = 0, 364 CanTrapFlag, 365 DirectCompareFlag, 366 IsEliminatedFlag, 367 IsSafepointFlag, 368 IsStaticFlag, 369 IsStrictfpFlag, 370 NeedsStoreCheckFlag, 371 NeedsWriteBarrierFlag, 372 PreservesStateFlag, 373 TargetIsFinalFlag, 374 TargetIsLoadedFlag, 375 TargetIsStrictfpFlag, 376 UnorderedIsTrueFlag, 377 NeedsPatchingFlag, 378 ThrowIncompatibleClassChangeErrorFlag, 379 ProfileMDOFlag, 380 IsLinkedInBlockFlag, 381 NeedsRangeCheckFlag, 382 InWorkListFlag, 383 DeoptimizeOnException, 384 InstructionLastFlag 385 }; 386 387 public: 388 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 389 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 390 391 // 'globally' used condition values 392 enum Condition { 393 eql, neq, lss, leq, gtr, geq, aeq, beq 394 }; 395 396 // Instructions may be pinned for many reasons and under certain conditions 397 // with enough knowledge it's possible to safely unpin them. 
398 enum PinReason { 399 PinUnknown = 1 << 0 400 , PinExplicitNullCheck = 1 << 3 401 , PinStackForStateSplit= 1 << 12 402 , PinStateSplitConstructor= 1 << 13 403 , PinGlobalValueNumbering= 1 << 14 404 }; 405 406 static Condition mirror(Condition cond); 407 static Condition negate(Condition cond); 408 409 // initialization 410 static int number_of_instructions() { 411 return Compilation::current()->number_of_instructions(); 412 } 413 414 // creation 415 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 416 : _use_count(0) 417 #ifndef PRODUCT 418 , _printable_bci(-99) 419 #endif 420 , _pin_state(0) 421 , _type(type) 422 , _next(NULL) 423 , _block(NULL) 424 , _subst(NULL) 425 , _flags(0) 426 , _operand(LIR_OprFact::illegalOpr) 427 , _state_before(state_before) 428 , _exception_handlers(NULL) 429 { 430 check_state(state_before); 431 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 432 update_exception_state(_state_before); 433 } 434 435 // accessors 436 int id() const { return _id; } 437 #ifndef PRODUCT 438 bool has_printable_bci() const { return _printable_bci != -99; } 439 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 440 void set_printable_bci(int bci) { _printable_bci = bci; } 441 #endif 442 int dominator_depth(); 443 int use_count() const { return _use_count; } 444 int pin_state() const { return _pin_state; } 445 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 446 ValueType* type() const { return _type; } 447 BlockBegin *block() const { return _block; } 448 Instruction* prev(); // use carefully, expensive operation 449 Instruction* next() const { return _next; } 450 bool has_subst() const { return _subst != NULL; } 451 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 452 LIR_Opr operand() const { return _operand; } 453 454 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 455 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 456 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 457 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 458 459 bool has_uses() const { return use_count() > 0; } 460 ValueStack* state_before() const { return _state_before; } 461 ValueStack* exception_state() const { return _exception_state; } 462 virtual bool needs_exception_state() const { return true; } 463 XHandlers* exception_handlers() const { return _exception_handlers; } 464 465 // manipulation 466 void pin(PinReason reason) { _pin_state |= reason; } 467 void pin() { _pin_state |= PinUnknown; } 468 // DANGEROUS: only used by EliminateStores 469 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 470 471 Instruction* set_next(Instruction* next) { 472 assert(next->has_printable_bci(), "_printable_bci should have been set"); 473 assert(next != NULL, "must not be NULL"); 474 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 475 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 476 477 BlockBegin *block = this->block(); 478 next->_block = block; 479 480 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 481 _next = next; 482 return next; 483 } 484 485 Instruction* set_next(Instruction* next, int bci) { 486 #ifndef PRODUCT 487 next->set_printable_bci(bci); 488 #endif 489 return set_next(next); 490 } 491 492 // when blocks are merged 493 void fixup_block_pointers() { 494 Instruction *cur = next()->next(); // next()'s block is set in set_next 495 while (cur && cur->_block != block()) { 496 cur->_block = block(); 497 cur = cur->next(); 498 } 499 } 500 501 Instruction *insert_after(Instruction *i) { 502 Instruction* n = 
_next; 503 set_next(i); 504 i->set_next(n); 505 return _next; 506 } 507 508 Instruction *insert_after_same_bci(Instruction *i) { 509 #ifndef PRODUCT 510 i->set_printable_bci(printable_bci()); 511 #endif 512 return insert_after(i); 513 } 514 515 void set_subst(Instruction* subst) { 516 assert(subst == NULL || 517 type()->base() == subst->type()->base() || 518 subst->type()->base() == illegalType, "type can't change"); 519 _subst = subst; 520 } 521 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 522 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 523 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 524 525 // machine-specifics 526 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 527 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 528 529 // generic 530 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 531 virtual Phi* as_Phi() { return NULL; } 532 virtual Local* as_Local() { return NULL; } 533 virtual Constant* as_Constant() { return NULL; } 534 virtual AccessField* as_AccessField() { return NULL; } 535 virtual LoadField* as_LoadField() { return NULL; } 536 virtual StoreField* as_StoreField() { return NULL; } 537 virtual AccessArray* as_AccessArray() { return NULL; } 538 virtual ArrayLength* as_ArrayLength() { return NULL; } 539 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 540 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 541 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 542 virtual NegateOp* as_NegateOp() { return NULL; } 543 virtual Op2* as_Op2() { return NULL; } 544 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 545 virtual ShiftOp* as_ShiftOp() { return NULL; } 546 virtual LogicOp* as_LogicOp() { return NULL; } 547 virtual CompareOp* as_CompareOp() { return NULL; } 548 virtual IfOp* as_IfOp() { return 
NULL; } 549 virtual Convert* as_Convert() { return NULL; } 550 virtual NullCheck* as_NullCheck() { return NULL; } 551 virtual OsrEntry* as_OsrEntry() { return NULL; } 552 virtual StateSplit* as_StateSplit() { return NULL; } 553 virtual Invoke* as_Invoke() { return NULL; } 554 virtual NewInstance* as_NewInstance() { return NULL; } 555 virtual NewArray* as_NewArray() { return NULL; } 556 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 557 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 558 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 559 virtual TypeCheck* as_TypeCheck() { return NULL; } 560 virtual CheckCast* as_CheckCast() { return NULL; } 561 virtual InstanceOf* as_InstanceOf() { return NULL; } 562 virtual TypeCast* as_TypeCast() { return NULL; } 563 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 564 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 565 virtual MonitorExit* as_MonitorExit() { return NULL; } 566 virtual Intrinsic* as_Intrinsic() { return NULL; } 567 virtual BlockBegin* as_BlockBegin() { return NULL; } 568 virtual BlockEnd* as_BlockEnd() { return NULL; } 569 virtual Goto* as_Goto() { return NULL; } 570 virtual If* as_If() { return NULL; } 571 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 572 virtual TableSwitch* as_TableSwitch() { return NULL; } 573 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 574 virtual Return* as_Return() { return NULL; } 575 virtual Throw* as_Throw() { return NULL; } 576 virtual Base* as_Base() { return NULL; } 577 virtual RoundFP* as_RoundFP() { return NULL; } 578 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 579 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 580 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 581 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 582 583 #ifdef ASSERT 584 virtual Assert* as_Assert() { return NULL; } 585 #endif 586 587 virtual void visit(InstructionVisitor* v) = 0; 588 
589 virtual bool can_trap() const { return false; } 590 591 virtual void input_values_do(ValueVisitor* f) = 0; 592 virtual void state_values_do(ValueVisitor* f); 593 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 594 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 595 596 virtual ciType* exact_type() const; 597 virtual ciType* declared_type() const { return NULL; } 598 599 // hashing 600 virtual const char* name() const = 0; 601 HASHING1(Instruction, false, id()) // hashing disabled by default 602 603 // debugging 604 static void check_state(ValueStack* state) PRODUCT_RETURN; 605 void print() PRODUCT_RETURN; 606 void print_line() PRODUCT_RETURN; 607 void print(InstructionPrinter& ip) PRODUCT_RETURN; 608 }; 609 610 611 // The following macros are used to define base (i.e., non-leaf) 612 // and leaf instruction classes. They define class-name related 613 // generic functionality in one place. 614 615 #define BASE(class_name, super_class_name) \ 616 class class_name: public super_class_name { \ 617 public: \ 618 virtual class_name* as_##class_name() { return this; } \ 619 620 621 #define LEAF(class_name, super_class_name) \ 622 BASE(class_name, super_class_name) \ 623 public: \ 624 virtual const char* name() const { return #class_name; } \ 625 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 626 627 628 // Debugging support 629 630 631 #ifdef ASSERT 632 class AssertValues: public ValueVisitor { 633 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 634 }; 635 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 636 #else 637 #define ASSERT_VALUES 638 #endif // ASSERT 639 640 641 // A Phi is a phi function in the sense of SSA form. It stands for 642 // the value of a local variable at the beginning of a join block. 643 // A Phi consists of n operands, one for every incoming branch. 
644 645 LEAF(Phi, Instruction) 646 private: 647 int _pf_flags; // the flags of the phi function 648 int _index; // to value on operand stack (index < 0) or to local 649 public: 650 // creation 651 Phi(ValueType* type, BlockBegin* b, int index) 652 : Instruction(type->base()) 653 , _pf_flags(0) 654 , _index(index) 655 { 656 _block = b; 657 NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci())); 658 if (type->is_illegal()) { 659 make_illegal(); 660 } 661 } 662 663 // flags 664 enum Flag { 665 no_flag = 0, 666 visited = 1 << 0, 667 cannot_simplify = 1 << 1 668 }; 669 670 // accessors 671 bool is_local() const { return _index >= 0; } 672 bool is_on_stack() const { return !is_local(); } 673 int local_index() const { assert(is_local(), ""); return _index; } 674 int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); } 675 676 Value operand_at(int i) const; 677 int operand_count() const; 678 679 void set(Flag f) { _pf_flags |= f; } 680 void clear(Flag f) { _pf_flags &= ~f; } 681 bool is_set(Flag f) const { return (_pf_flags & f) != 0; } 682 683 // Invalidates phis corresponding to merges of locals of two different types 684 // (these should never be referenced, otherwise the bytecodes are illegal) 685 void make_illegal() { 686 set(cannot_simplify); 687 set_type(illegalType); 688 } 689 690 bool is_illegal() const { 691 return type()->is_illegal(); 692 } 693 694 // generic 695 virtual void input_values_do(ValueVisitor* f) { 696 } 697 }; 698 699 700 // A local is a placeholder for an incoming argument to a function call. 
701 LEAF(Local, Instruction) 702 private: 703 int _java_index; // the local index within the method to which the local belongs 704 ciType* _declared_type; 705 public: 706 // creation 707 Local(ciType* declared, ValueType* type, int index) 708 : Instruction(type) 709 , _java_index(index) 710 , _declared_type(declared) 711 { 712 NOT_PRODUCT(set_printable_bci(-1)); 713 } 714 715 // accessors 716 int java_index() const { return _java_index; } 717 718 virtual ciType* declared_type() const { return _declared_type; } 719 720 // generic 721 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 722 }; 723 724 725 LEAF(Constant, Instruction) 726 public: 727 // creation 728 Constant(ValueType* type): 729 Instruction(type, NULL, /*type_is_constant*/ true) 730 { 731 assert(type->is_constant(), "must be a constant"); 732 } 733 734 Constant(ValueType* type, ValueStack* state_before): 735 Instruction(type, state_before, /*type_is_constant*/ true) 736 { 737 assert(state_before != NULL, "only used for constants which need patching"); 738 assert(type->is_constant(), "must be a constant"); 739 // since it's patching it needs to be pinned 740 pin(); 741 } 742 743 // generic 744 virtual bool can_trap() const { return state_before() != NULL; } 745 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 746 747 virtual intx hash() const; 748 virtual bool is_equal(Value v) const; 749 750 virtual ciType* exact_type() const; 751 752 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 753 754 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 755 BlockBegin* compare(Instruction::Condition cond, Value right, 756 BlockBegin* true_sux, BlockBegin* false_sux) const { 757 switch (compare(cond, right)) { 758 case not_comparable: 759 return NULL; 760 case cond_false: 761 return false_sux; 762 case cond_true: 763 return true_sux; 764 default: 765 ShouldNotReachHere(); 766 return NULL; 767 } 768 } 769 }; 770 771 772 
// Common base of LoadField and StoreField: access to an instance or
// static field of an object.
BASE(AccessField, Instruction)
 private:
  Value       _obj;                  // receiver (or holder mirror for static fields)
  int         _offset;               // byte offset of the field within the holder
  ciField*    _field;                // the field being accessed
  NullCheck*  _explicit_null_check;  // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


// Load of an instance or static field.
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};


// Store to an instance or static field.
LEAF(StoreField, AccessField)
 private:
  Value _value;  // the value being stored

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};


// Common base of all instructions that access an array.
BASE(AccessArray, Instruction)
 private:
  Value _array;  // the array being accessed

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};


// Reads the length of an array.
LEAF(ArrayLength, AccessArray)
 private:
  NullCheck* _explicit_null_check;  // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See AccessField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


// Common base of LoadIndexed and StoreIndexed: access to an array element.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;     // the element index
  Value     _length;    // the array length (may be NULL once the range check is eliminated)
  BasicType _elt_type;  // the basic type of the array elements

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  void clear_length()                            { _length = NULL; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};


// Load of an array element.
LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck* _explicit_null_check;  // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See AccessField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};


// Store to an array element; object stores may need a write barrier
// and a dynamic store check, and can be profiled.
LEAF(StoreIndexed, AccessIndexed)
 private:
  Value     _value;            // the value being stored

  ciMethod* _profiled_method;  // method for MethodData* profiling (or NULL)
  int       _profiled_bci;     // bci for MethodData* profiling
 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0)
  {
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
1007 // creation 1008 NegateOp(Value x) : Instruction(x->type()->base()), _x(x) { 1009 ASSERT_VALUES 1010 } 1011 1012 // accessors 1013 Value x() const { return _x; } 1014 1015 // generic 1016 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); } 1017 }; 1018 1019 1020 BASE(Op2, Instruction) 1021 private: 1022 Bytecodes::Code _op; 1023 Value _x; 1024 Value _y; 1025 1026 public: 1027 // creation 1028 Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL) 1029 : Instruction(type, state_before) 1030 , _op(op) 1031 , _x(x) 1032 , _y(y) 1033 { 1034 ASSERT_VALUES 1035 } 1036 1037 // accessors 1038 Bytecodes::Code op() const { return _op; } 1039 Value x() const { return _x; } 1040 Value y() const { return _y; } 1041 1042 // manipulators 1043 void swap_operands() { 1044 assert(is_commutative(), "operation must be commutative"); 1045 Value t = _x; _x = _y; _y = t; 1046 } 1047 1048 // generic 1049 virtual bool is_commutative() const { return false; } 1050 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1051 }; 1052 1053 1054 LEAF(ArithmeticOp, Op2) 1055 public: 1056 // creation 1057 ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before) 1058 : Op2(x->type()->meet(y->type()), op, x, y, state_before) 1059 { 1060 set_flag(IsStrictfpFlag, is_strictfp); 1061 if (can_trap()) pin(); 1062 } 1063 1064 // accessors 1065 bool is_strictfp() const { return check_flag(IsStrictfpFlag); } 1066 1067 // generic 1068 virtual bool is_commutative() const; 1069 virtual bool can_trap() const; 1070 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1071 }; 1072 1073 1074 LEAF(ShiftOp, Op2) 1075 public: 1076 // creation 1077 ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {} 1078 1079 // generic 1080 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1081 }; 1082 1083 1084 LEAF(LogicOp, Op2) 1085 public: 1086 // creation 1087 
LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {} 1088 1089 // generic 1090 virtual bool is_commutative() const; 1091 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1092 }; 1093 1094 1095 LEAF(CompareOp, Op2) 1096 public: 1097 // creation 1098 CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before) 1099 : Op2(intType, op, x, y, state_before) 1100 {} 1101 1102 // generic 1103 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1104 }; 1105 1106 1107 LEAF(IfOp, Op2) 1108 private: 1109 Value _tval; 1110 Value _fval; 1111 1112 public: 1113 // creation 1114 IfOp(Value x, Condition cond, Value y, Value tval, Value fval) 1115 : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y) 1116 , _tval(tval) 1117 , _fval(fval) 1118 { 1119 ASSERT_VALUES 1120 assert(tval->type()->tag() == fval->type()->tag(), "types must match"); 1121 } 1122 1123 // accessors 1124 virtual bool is_commutative() const; 1125 Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; } 1126 Condition cond() const { return (Condition)Op2::op(); } 1127 Value tval() const { return _tval; } 1128 Value fval() const { return _fval; } 1129 1130 // generic 1131 virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); } 1132 }; 1133 1134 1135 LEAF(Convert, Instruction) 1136 private: 1137 Bytecodes::Code _op; 1138 Value _value; 1139 1140 public: 1141 // creation 1142 Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) { 1143 ASSERT_VALUES 1144 } 1145 1146 // accessors 1147 Bytecodes::Code op() const { return _op; } 1148 Value value() const { return _value; } 1149 1150 // generic 1151 virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); } 1152 HASHING2(Convert, true, op(), value()->subst()) 1153 }; 1154 1155 1156 LEAF(NullCheck, Instruction) 1157 private: 1158 Value _obj; 1159 1160 public: 1161 
// creation 1162 NullCheck(Value obj, ValueStack* state_before) 1163 : Instruction(obj->type()->base(), state_before) 1164 , _obj(obj) 1165 { 1166 ASSERT_VALUES 1167 set_can_trap(true); 1168 assert(_obj->type()->is_object(), "null check must be applied to objects only"); 1169 pin(Instruction::PinExplicitNullCheck); 1170 } 1171 1172 // accessors 1173 Value obj() const { return _obj; } 1174 1175 // setters 1176 void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); } 1177 1178 // generic 1179 virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ } 1180 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1181 HASHING1(NullCheck, true, obj()->subst()) 1182 }; 1183 1184 1185 // This node is supposed to cast the type of another node to a more precise 1186 // declared type. 1187 LEAF(TypeCast, Instruction) 1188 private: 1189 ciType* _declared_type; 1190 Value _obj; 1191 1192 public: 1193 // The type of this node is the same type as the object type (and it might be constant). 
1194 TypeCast(ciType* type, Value obj, ValueStack* state_before) 1195 : Instruction(obj->type(), state_before, obj->type()->is_constant()), 1196 _declared_type(type), 1197 _obj(obj) {} 1198 1199 // accessors 1200 ciType* declared_type() const { return _declared_type; } 1201 Value obj() const { return _obj; } 1202 1203 // generic 1204 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1205 }; 1206 1207 1208 BASE(StateSplit, Instruction) 1209 private: 1210 ValueStack* _state; 1211 1212 protected: 1213 static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block); 1214 1215 public: 1216 // creation 1217 StateSplit(ValueType* type, ValueStack* state_before = NULL) 1218 : Instruction(type, state_before) 1219 , _state(NULL) 1220 { 1221 pin(PinStateSplitConstructor); 1222 } 1223 1224 // accessors 1225 ValueStack* state() const { return _state; } 1226 IRScope* scope() const; // the state's scope 1227 1228 // manipulation 1229 void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; } 1230 1231 // generic 1232 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 1233 virtual void state_values_do(ValueVisitor* f); 1234 }; 1235 1236 1237 LEAF(Invoke, StateSplit) 1238 private: 1239 Bytecodes::Code _code; 1240 Value _recv; 1241 Values* _args; 1242 BasicTypeList* _signature; 1243 int _vtable_index; 1244 ciMethod* _target; 1245 1246 public: 1247 // creation 1248 Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args, 1249 int vtable_index, ciMethod* target, ValueStack* state_before); 1250 1251 // accessors 1252 Bytecodes::Code code() const { return _code; } 1253 Value receiver() const { return _recv; } 1254 bool has_receiver() const { return receiver() != NULL; } 1255 int number_of_arguments() const { return _args->length(); } 1256 Value argument_at(int i) const { return _args->at(i); } 1257 int vtable_index() const { return 
_vtable_index; } 1258 BasicTypeList* signature() const { return _signature; } 1259 ciMethod* target() const { return _target; } 1260 1261 ciType* declared_type() const; 1262 1263 // Returns false if target is not loaded 1264 bool target_is_final() const { return check_flag(TargetIsFinalFlag); } 1265 bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); } 1266 // Returns false if target is not loaded 1267 bool target_is_strictfp() const { return check_flag(TargetIsStrictfpFlag); } 1268 1269 // JSR 292 support 1270 bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; } 1271 bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); } 1272 1273 virtual bool needs_exception_state() const { return false; } 1274 1275 // generic 1276 virtual bool can_trap() const { return true; } 1277 virtual void input_values_do(ValueVisitor* f) { 1278 StateSplit::input_values_do(f); 1279 if (has_receiver()) f->visit(&_recv); 1280 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1281 } 1282 virtual void state_values_do(ValueVisitor *f); 1283 }; 1284 1285 1286 LEAF(NewInstance, StateSplit) 1287 private: 1288 ciInstanceKlass* _klass; 1289 bool _is_unresolved; 1290 1291 public: 1292 // creation 1293 NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved) 1294 : StateSplit(instanceType, state_before) 1295 , _klass(klass), _is_unresolved(is_unresolved) 1296 {} 1297 1298 // accessors 1299 ciInstanceKlass* klass() const { return _klass; } 1300 bool is_unresolved() const { return _is_unresolved; } 1301 1302 virtual bool needs_exception_state() const { return false; } 1303 1304 // generic 1305 virtual bool can_trap() const { return true; } 1306 ciType* exact_type() const; 1307 ciType* declared_type() const; 1308 }; 1309 1310 1311 BASE(NewArray, StateSplit) 1312 private: 1313 Value _length; 1314 1315 public: 1316 // creation 1317 NewArray(Value length, ValueStack* state_before) 1318 
: StateSplit(objectType, state_before) 1319 , _length(length) 1320 { 1321 // Do not ASSERT_VALUES since length is NULL for NewMultiArray 1322 } 1323 1324 // accessors 1325 Value length() const { return _length; } 1326 1327 virtual bool needs_exception_state() const { return false; } 1328 1329 ciType* exact_type() const { return NULL; } 1330 ciType* declared_type() const; 1331 1332 // generic 1333 virtual bool can_trap() const { return true; } 1334 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); } 1335 }; 1336 1337 1338 LEAF(NewTypeArray, NewArray) 1339 private: 1340 BasicType _elt_type; 1341 1342 public: 1343 // creation 1344 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before) 1345 : NewArray(length, state_before) 1346 , _elt_type(elt_type) 1347 {} 1348 1349 // accessors 1350 BasicType elt_type() const { return _elt_type; } 1351 ciType* exact_type() const; 1352 }; 1353 1354 1355 LEAF(NewObjectArray, NewArray) 1356 private: 1357 ciKlass* _klass; 1358 1359 public: 1360 // creation 1361 NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {} 1362 1363 // accessors 1364 ciKlass* klass() const { return _klass; } 1365 ciType* exact_type() const; 1366 }; 1367 1368 1369 LEAF(NewMultiArray, NewArray) 1370 private: 1371 ciKlass* _klass; 1372 Values* _dims; 1373 1374 public: 1375 // creation 1376 NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) { 1377 ASSERT_VALUES 1378 } 1379 1380 // accessors 1381 ciKlass* klass() const { return _klass; } 1382 Values* dims() const { return _dims; } 1383 int rank() const { return dims()->length(); } 1384 1385 // generic 1386 virtual void input_values_do(ValueVisitor* f) { 1387 // NOTE: we do not call NewArray::input_values_do since "length" 1388 // is meaningless for a multi-dimensional array; passing the 1389 // zeroth 
element down to NewArray as its length is a bad idea 1390 // since there will be a copy in the "dims" array which doesn't 1391 // get updated, and the value must not be traversed twice. Was bug 1392 // - kbr 4/10/2001 1393 StateSplit::input_values_do(f); 1394 for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i)); 1395 } 1396 }; 1397 1398 1399 BASE(TypeCheck, StateSplit) 1400 private: 1401 ciKlass* _klass; 1402 Value _obj; 1403 1404 ciMethod* _profiled_method; 1405 int _profiled_bci; 1406 1407 public: 1408 // creation 1409 TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before) 1410 : StateSplit(type, state_before), _klass(klass), _obj(obj), 1411 _profiled_method(NULL), _profiled_bci(0) { 1412 ASSERT_VALUES 1413 set_direct_compare(false); 1414 } 1415 1416 // accessors 1417 ciKlass* klass() const { return _klass; } 1418 Value obj() const { return _obj; } 1419 bool is_loaded() const { return klass() != NULL; } 1420 bool direct_compare() const { return check_flag(DirectCompareFlag); } 1421 1422 // manipulation 1423 void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); } 1424 1425 // generic 1426 virtual bool can_trap() const { return true; } 1427 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1428 1429 // Helpers for MethodData* profiling 1430 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1431 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1432 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1433 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1434 ciMethod* profiled_method() const { return _profiled_method; } 1435 int profiled_bci() const { return _profiled_bci; } 1436 }; 1437 1438 1439 LEAF(CheckCast, TypeCheck) 1440 public: 1441 // creation 1442 CheckCast(ciKlass* klass, Value obj, ValueStack* state_before) 1443 : TypeCheck(klass, obj, objectType, state_before) {} 1444 1445 void 
set_incompatible_class_change_check() { 1446 set_flag(ThrowIncompatibleClassChangeErrorFlag, true); 1447 } 1448 bool is_incompatible_class_change_check() const { 1449 return check_flag(ThrowIncompatibleClassChangeErrorFlag); 1450 } 1451 1452 ciType* declared_type() const; 1453 }; 1454 1455 1456 LEAF(InstanceOf, TypeCheck) 1457 public: 1458 // creation 1459 InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {} 1460 1461 virtual bool needs_exception_state() const { return false; } 1462 }; 1463 1464 1465 BASE(AccessMonitor, StateSplit) 1466 private: 1467 Value _obj; 1468 int _monitor_no; 1469 1470 public: 1471 // creation 1472 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL) 1473 : StateSplit(illegalType, state_before) 1474 , _obj(obj) 1475 , _monitor_no(monitor_no) 1476 { 1477 set_needs_null_check(true); 1478 ASSERT_VALUES 1479 } 1480 1481 // accessors 1482 Value obj() const { return _obj; } 1483 int monitor_no() const { return _monitor_no; } 1484 1485 // generic 1486 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1487 }; 1488 1489 1490 LEAF(MonitorEnter, AccessMonitor) 1491 public: 1492 // creation 1493 MonitorEnter(Value obj, int monitor_no, ValueStack* state_before) 1494 : AccessMonitor(obj, monitor_no, state_before) 1495 { 1496 ASSERT_VALUES 1497 } 1498 1499 // generic 1500 virtual bool can_trap() const { return true; } 1501 }; 1502 1503 1504 LEAF(MonitorExit, AccessMonitor) 1505 public: 1506 // creation 1507 MonitorExit(Value obj, int monitor_no) 1508 : AccessMonitor(obj, monitor_no, NULL) 1509 { 1510 ASSERT_VALUES 1511 } 1512 }; 1513 1514 1515 LEAF(Intrinsic, StateSplit) 1516 private: 1517 vmIntrinsics::ID _id; 1518 Values* _args; 1519 Value _recv; 1520 ArgsNonNullState _nonnull_state; 1521 1522 public: 1523 // preserves_state can be set to true for Intrinsics 1524 // which are guaranteed to preserve register state across any 
slow 1525 // cases; setting it to true does not mean that the Intrinsic can 1526 // not trap, only that if we continue execution in the same basic 1527 // block after the Intrinsic, all of the registers are intact. This 1528 // allows load elimination and common expression elimination to be 1529 // performed across the Intrinsic. The default value is false. 1530 Intrinsic(ValueType* type, 1531 vmIntrinsics::ID id, 1532 Values* args, 1533 bool has_receiver, 1534 ValueStack* state_before, 1535 bool preserves_state, 1536 bool cantrap = true) 1537 : StateSplit(type, state_before) 1538 , _id(id) 1539 , _args(args) 1540 , _recv(NULL) 1541 { 1542 assert(args != NULL, "args must exist"); 1543 ASSERT_VALUES 1544 set_flag(PreservesStateFlag, preserves_state); 1545 set_flag(CanTrapFlag, cantrap); 1546 if (has_receiver) { 1547 _recv = argument_at(0); 1548 } 1549 set_needs_null_check(has_receiver); 1550 1551 // some intrinsics can't trap, so don't force them to be pinned 1552 if (!can_trap()) { 1553 unpin(PinStateSplitConstructor); 1554 } 1555 } 1556 1557 // accessors 1558 vmIntrinsics::ID id() const { return _id; } 1559 int number_of_arguments() const { return _args->length(); } 1560 Value argument_at(int i) const { return _args->at(i); } 1561 1562 bool has_receiver() const { return (_recv != NULL); } 1563 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } 1564 bool preserves_state() const { return check_flag(PreservesStateFlag); } 1565 1566 bool arg_needs_null_check(int i) const { 1567 return _nonnull_state.arg_needs_null_check(i); 1568 } 1569 1570 void set_arg_needs_null_check(int i, bool check) { 1571 _nonnull_state.set_arg_needs_null_check(i, check); 1572 } 1573 1574 // generic 1575 virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1576 virtual void input_values_do(ValueVisitor* f) { 1577 StateSplit::input_values_do(f); 1578 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1579 } 1580 }; 1581 1582 
class LIR_List;

// A BlockBegin marks the start of a basic block; it carries the block's
// control-flow links (successors, predecessors, dominators), exception
// handler information, and the liveness bitmaps used by LinearScan.
LEAF(BlockBegin, StateSplit)
 private:
  int        _block_id;                          // the unique block id
  int        _bci;                               // start-bci of block
  int        _depth_first_number;                // number of this block in a depth-first ordering
  int        _linear_scan_number;                // number of this block in linear-scan ordering
  int        _dominator_depth;
  int        _loop_depth;                        // the loop nesting level of this block
  int        _loop_index;                        // number of the innermost loop of this block
  int        _flags;                             // the flags associated with this block

  // fields used by BlockListBuilder
  int        _total_preds;                       // number of predecessors found by BlockListBuilder
  BitMap     _stores_to_locals;                  // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList  _successors;                        // the successors of this block
  BlockList  _predecessors;                      // the predecessors of this block
  BlockList  _dominates;                         // list of blocks that are dominated by this block
  BlockBegin* _dominator;                        // the dominator of this block
  // SSA specific ends
  BlockEnd*  _end;                               // the last instruction of this block
  BlockList  _exception_handlers;                // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states;            // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int        _exception_handler_pco;             // if this block is the start of an exception handler,
                                                 // this records the PC offset in the assembly code of the
                                                 // first instruction in this block
  Label      _label;                             // the label associated with this block
  LIR_List*  _lir;                               // the low level intermediate representation for this block

  BitMap     _live_in;                           // set of live LIR_Opr registers at entry to this block
  BitMap     _live_out;                          // set of live LIR_Opr registers at exit from this block
  BitMap     _live_gen;                          // set of registers used before any redefinition in this block
  BitMap     _live_kill;                         // set of registers defined in this block

  BitMap     _fpu_register_usage;
  intArray*  _fpu_stack_state;                   // For x86 FPU code generation with UseLinearScan
  int        _first_lir_instruction_id;          // ID of first LIR instruction in this block
  int        _last_lir_instruction_id;           // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  // Blocks are arena-allocated; ids are assigned here (before the
  // constructor runs) so they are stable for the whole compilation.
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    ((BlockBegin*)res)->_id = c->get_next_id();
    ((BlockBegin*)res)->_block_id = c->get_next_block_id();
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  // NOTE(review): the initializer list is not in member-declaration order
  // (members are still initialized in declaration order; no initializer
  // depends on another, so this is only a -Wreorder-style nit).
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _loop_depth(0)
  , _flags(0)
  , _dominator_depth(-1)
  , _dominator(NULL)
  , _end(NULL)
  , _predecessors(2)
  , _successors(2)
  , _dominates(2)
  , _exception_handlers(1)
  , _exception_states(NULL)
  , _exception_handler_pco(-1)
  , _lir(NULL)
  , _loop_index(-1)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(NULL)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  , _total_preds(0)
  , _stores_to_locals()
  {
    _block = this;
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }

  // accessors
  int block_id() const                           { return _block_id; }
  int bci() const                                { return _bci; }
  BlockList* successors()                        { return &_successors; }
  BlockList* dominates()                         { return &_dominates; }
  BlockBegin* dominator() const                  { return _dominator; }
  int loop_depth() const                         { return _loop_depth; }
  int dominator_depth() const                    { return _dominator_depth; }
  int depth_first_number() const                 { return _depth_first_number; }
  int linear_scan_number() const                 { return _linear_scan_number; }
  BlockEnd* end() const                          { return _end; }
  Label* label()                                 { return &_label; }
  LIR_List* lir() const                          { return _lir; }
  int exception_handler_pco() const              { return _exception_handler_pco; }
  BitMap& live_in()                              { return _live_in; }
  BitMap& live_out()                             { return _live_out; }
  BitMap& live_gen()                             { return _live_gen; }
  BitMap& live_kill()                            { return _live_kill; }
  BitMap& fpu_register_usage()                   { return _fpu_register_usage; }
  intArray* fpu_stack_state() const              { return _fpu_stack_state; }
  int first_lir_instruction_id() const           { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const            { return _last_lir_instruction_id; }
  int total_preds() const                        { return _total_preds; }
  BitMap& stores_to_locals()                     { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom)            { _dominator = dom; }
  void set_loop_depth(int d)                     { _loop_depth = d; }
  void set_dominator_depth(int d)                { _dominator_depth = d; }
  void set_depth_first_number(int dfn)           { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn)           { _linear_scan_number = lsn; }
  void set_end(BlockEnd* end);
  void clear_end();
  void disconnect_from_graph();
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir)                    { _lir = lir; }
  void set_exception_handler_pco(int pco)        { _exception_handler_pco = pco; }
  void set_live_in  (BitMap map)                 { _live_in = map;   }
  void set_live_out (BitMap map)                 { _live_out = map;  }
  void set_live_gen (BitMap map)                 { _live_gen = map;  }
  void set_live_kill(BitMap map)                 { _live_kill = map; }
  void set_fpu_register_usage(BitMap map)        { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state)      { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id)      { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id)       { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1)          { _total_preds += n; }
  void init_stores_to_locals(int locals_count)   { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_successor(BlockBegin* sux);
  void remove_successor(BlockBegin* pred);
  bool is_successor(BlockBegin* sux) const       { return _successors.contains(sux); }

  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const    { return _predecessors.contains(pred); }
  int number_of_preds() const                    { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const               { return _predecessors[i]; }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int  number_of_exception_handlers() const      { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const  { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states()               { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const  { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // set when range checks in this block must not be eliminated
  };

  void set(Flag f)                               { _flags |= f; }
  void clear(Flag f)                             { _flags &= ~f; }
  bool is_set(Flag f) const                      { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder   (BlockClosure* closure);
  void iterate_postorder  (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                    { _loop_index = ix;        }
  int  loop_index() const                        { return _loop_index;      }

  // merging
  bool try_merge(ValueStack* state);             // try to merge states at block begin
  void merge(ValueStack* state)                  { bool b = try_merge(state); assert(b, "merge failed"); }

  // debugging
  void print_block()                             PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};


// Base class for all instructions that terminate a basic block and carry
// its successor list.
BASE(BlockEnd, StateSplit)
 private:
  BlockList* _sux;                               // the successor blocks

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const                { return _sux->at(i); }
  BlockBegin* default_sux() const                { return sux_at(number_of_sux() - 1); }
  BlockBegin** addr_sux_at(int i) const          { return _sux->adr_at(i); }
  int sux_index(BlockBegin* sux) const           { return _sux->find(sux); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};


// Unconditional jump to a single successor block.
LEAF(Goto, BlockEnd)
 public:
  enum Direction {
    none,            // Just a regular goto
    taken, not_taken // Goto produced from If
  };
 private:
  ciMethod*   _profiled_method;                  // method the profiling data belongs to
  int         _profiled_bci;                     // bci of the branch in that method
  Direction   _direction;                        // which If-branch this Goto came from, if any
 public:
  // creation
  Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _direction(none)
  , _profiled_method(NULL)
  , _profiled_bci(0) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  Goto(BlockBegin* sux, bool is_safepoint)
  : BlockEnd(illegalType, NULL, is_safepoint)
  , _direction(none)
  , _profiled_method(NULL)
  , _profiled_bci(0) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }
  Direction direction() const                    { return _direction; }

  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  void set_direction(Direction d)                { _direction = d; }
};

#ifdef ASSERT
// Debug-only instruction that asserts a relation between two values.
LEAF(Assert, Instruction)
 private:
  Value       _x;                                // left operand of the assertion
  Condition   _cond;                             // the condition being asserted
  Value       _y;                                // right operand of the assertion
  char        *_message;                         // text reported when the assertion fails

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  Assert(Value x, Condition cond, bool unordered_is_true, Value y);

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  const char *message() const                    { return _message; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
#endif

// A predicate (x cond y) guarding eliminated range checks; deoptimizes when
// the predicate fails (or unconditionally, for the always-fail form).
LEAF(RangeCheckPredicate, StateSplit)
 private:
  Value       _x;                                // left operand (NULL for the always-fail form)
  Condition   _cond;                             // the guarding condition
  Value       _y;                                // right operand (NULL for the always-fail form)

  void check_state();

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state)
  : StateSplit(illegalType)
  , _x(x)
  , _cond(cond)
  , _y(y)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    this->set_state(state);
    check_state();
  }

  // Always deoptimize
  // NOTE(review): this form leaves _cond uninitialized; callers appear to
  // rely on _x/_y being NULL to mean "always fail" — confirm before reading
  // cond() on such a node.
  RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
  {
    this->set_state(state);
    _x = _y = NULL;
    check_state();
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }

  void always_fail()                             { _x = _y = NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
  HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
};

// Conditional two-way branch on (x cond y); successor 0 is the true
// branch, successor 1 the false branch.
LEAF(If, BlockEnd)
 private:
  Value       _x;                                // left operand of the comparison
  Condition   _cond;                             // the branch condition
  Value       _y;                                // right operand of the comparison
  ciMethod*   _profiled_method;                  // method the profiling data belongs to
  int         _profiled_bci;                     // Canonicalizer may alter bci of If node
  bool        _swapped;                          // Is the order reversed with respect to the original If in the
                                                 // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }
  BlockBegin* usux() const                       { return sux_for(unordered_is_true()); }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                        { return _swapped; }

  // manipulation
  // exchange operands and mirror the condition; the branch is unchanged
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // exchange the two successors and negate the condition accordingly
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;       }
  void set_swapped(bool value)                   { _swapped = value;         }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


// A combined InstanceOf + If: branches on whether obj is (or is not) an
// instance of klass.
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;                               // the klass tested against
  Value    _obj;                                 // the object being tested
  bool     _test_is_instance;                    // jump if instance
  int      _instanceof_bci;                      // bci of the original InstanceOf

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* and instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  int instanceof_bci() const                     { return _instanceof_bci; }
  bool test_is_instance() const                  { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }

  // manipulation
  // exchange the successors and invert the sense of the test
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_obj); }
};


// Base class for tableswitch/lookupswitch; the last successor is the
// default target.
BASE(Switch, BlockEnd)
 private:
  Value _tag;                                    // the value being switched on

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                              { return _tag; }
  int length() const                             { return number_of_sux() - 1; } // excludes the default successor

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_tag); }
};


// tableswitch bytecode: dense key range [lo_key, hi_key].
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;                                   // the lowest key in the table

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + length() - 1; }
};


// lookupswitch bytecode: sparse, explicitly listed keys.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;                               // one key per non-default successor

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }
2128 2129 // accessors 2130 int key_at(int i) const { return _keys->at(i); } 2131 }; 2132 2133 2134 LEAF(Return, BlockEnd) 2135 private: 2136 Value _result; 2137 2138 public: 2139 // creation 2140 Return(Value result) : 2141 BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true), 2142 _result(result) {} 2143 2144 // accessors 2145 Value result() const { return _result; } 2146 bool has_result() const { return result() != NULL; } 2147 2148 // generic 2149 virtual void input_values_do(ValueVisitor* f) { 2150 BlockEnd::input_values_do(f); 2151 if (has_result()) f->visit(&_result); 2152 } 2153 }; 2154 2155 2156 LEAF(Throw, BlockEnd) 2157 private: 2158 Value _exception; 2159 2160 public: 2161 // creation 2162 Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) { 2163 ASSERT_VALUES 2164 } 2165 2166 // accessors 2167 Value exception() const { return _exception; } 2168 2169 // generic 2170 virtual bool can_trap() const { return true; } 2171 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); } 2172 }; 2173 2174 2175 LEAF(Base, BlockEnd) 2176 public: 2177 // creation 2178 Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) { 2179 assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged"); 2180 assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged"); 2181 BlockList* s = new BlockList(2); 2182 if (osr_entry != NULL) s->append(osr_entry); 2183 s->append(std_entry); // must be default sux! 2184 set_sux(s); 2185 } 2186 2187 // accessors 2188 BlockBegin* std_entry() const { return default_sux(); } 2189 BlockBegin* osr_entry() const { return number_of_sux() < 2 ? 
NULL : sux_at(0); } 2190 }; 2191 2192 2193 LEAF(OsrEntry, Instruction) 2194 public: 2195 // creation 2196 #ifdef _LP64 2197 OsrEntry() : Instruction(longType) { pin(); } 2198 #else 2199 OsrEntry() : Instruction(intType) { pin(); } 2200 #endif 2201 2202 // generic 2203 virtual void input_values_do(ValueVisitor* f) { } 2204 }; 2205 2206 2207 // Models the incoming exception at a catch site 2208 LEAF(ExceptionObject, Instruction) 2209 public: 2210 // creation 2211 ExceptionObject() : Instruction(objectType) { 2212 pin(); 2213 } 2214 2215 // generic 2216 virtual void input_values_do(ValueVisitor* f) { } 2217 }; 2218 2219 2220 // Models needed rounding for floating-point values on Intel. 2221 // Currently only used to represent rounding of double-precision 2222 // values stored into local variables, but could be used to model 2223 // intermediate rounding of single-precision values as well. 2224 LEAF(RoundFP, Instruction) 2225 private: 2226 Value _input; // floating-point value to be rounded 2227 2228 public: 2229 RoundFP(Value input) 2230 : Instruction(input->type()) // Note: should not be used for constants 2231 , _input(input) 2232 { 2233 ASSERT_VALUES 2234 } 2235 2236 // accessors 2237 Value input() const { return _input; } 2238 2239 // generic 2240 virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); } 2241 }; 2242 2243 2244 BASE(UnsafeOp, Instruction) 2245 private: 2246 BasicType _basic_type; // ValueType can not express byte-sized integers 2247 2248 protected: 2249 // creation 2250 UnsafeOp(BasicType basic_type, bool is_put) 2251 : Instruction(is_put ? voidType : as_ValueType(basic_type)) 2252 , _basic_type(basic_type) 2253 { 2254 //Note: Unsafe ops are not not guaranteed to throw NPE. 2255 // Convservatively, Unsafe operations must be pinned though we could be 2256 // looser about this if we wanted to.. 
2257 pin(); 2258 } 2259 2260 public: 2261 // accessors 2262 BasicType basic_type() { return _basic_type; } 2263 2264 // generic 2265 virtual void input_values_do(ValueVisitor* f) { } 2266 }; 2267 2268 2269 BASE(UnsafeRawOp, UnsafeOp) 2270 private: 2271 Value _base; // Base address (a Java long) 2272 Value _index; // Index if computed by optimizer; initialized to NULL 2273 int _log2_scale; // Scale factor: 0, 1, 2, or 3. 2274 // Indicates log2 of number of bytes (1, 2, 4, or 8) 2275 // to scale index by. 2276 2277 protected: 2278 UnsafeRawOp(BasicType basic_type, Value addr, bool is_put) 2279 : UnsafeOp(basic_type, is_put) 2280 , _base(addr) 2281 , _index(NULL) 2282 , _log2_scale(0) 2283 { 2284 // Can not use ASSERT_VALUES because index may be NULL 2285 assert(addr != NULL && addr->type()->is_long(), "just checking"); 2286 } 2287 2288 UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put) 2289 : UnsafeOp(basic_type, is_put) 2290 , _base(base) 2291 , _index(index) 2292 , _log2_scale(log2_scale) 2293 { 2294 } 2295 2296 public: 2297 // accessors 2298 Value base() { return _base; } 2299 Value index() { return _index; } 2300 bool has_index() { return (_index != NULL); } 2301 int log2_scale() { return _log2_scale; } 2302 2303 // setters 2304 void set_base (Value base) { _base = base; } 2305 void set_index(Value index) { _index = index; } 2306 void set_log2_scale(int log2_scale) { _log2_scale = log2_scale; } 2307 2308 // generic 2309 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2310 f->visit(&_base); 2311 if (has_index()) f->visit(&_index); } 2312 }; 2313 2314 2315 LEAF(UnsafeGetRaw, UnsafeRawOp) 2316 private: 2317 bool _may_be_unaligned, _is_wide; // For OSREntry 2318 2319 public: 2320 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false) 2321 : UnsafeRawOp(basic_type, addr, false) { 2322 _may_be_unaligned = may_be_unaligned; 2323 _is_wide = is_wide; 2324 } 2325 2326 
UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false) 2327 : UnsafeRawOp(basic_type, base, index, log2_scale, false) { 2328 _may_be_unaligned = may_be_unaligned; 2329 _is_wide = is_wide; 2330 } 2331 2332 bool may_be_unaligned() { return _may_be_unaligned; } 2333 bool is_wide() { return _is_wide; } 2334 }; 2335 2336 2337 LEAF(UnsafePutRaw, UnsafeRawOp) 2338 private: 2339 Value _value; // Value to be stored 2340 2341 public: 2342 UnsafePutRaw(BasicType basic_type, Value addr, Value value) 2343 : UnsafeRawOp(basic_type, addr, true) 2344 , _value(value) 2345 { 2346 assert(value != NULL, "just checking"); 2347 ASSERT_VALUES 2348 } 2349 2350 UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value) 2351 : UnsafeRawOp(basic_type, base, index, log2_scale, true) 2352 , _value(value) 2353 { 2354 assert(value != NULL, "just checking"); 2355 ASSERT_VALUES 2356 } 2357 2358 // accessors 2359 Value value() { return _value; } 2360 2361 // generic 2362 virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f); 2363 f->visit(&_value); } 2364 }; 2365 2366 2367 BASE(UnsafeObjectOp, UnsafeOp) 2368 private: 2369 Value _object; // Object to be fetched from or mutated 2370 Value _offset; // Offset within object 2371 bool _is_volatile; // true if volatile - dl/JSR166 2372 public: 2373 UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile) 2374 : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile) 2375 { 2376 } 2377 2378 // accessors 2379 Value object() { return _object; } 2380 Value offset() { return _offset; } 2381 bool is_volatile() { return _is_volatile; } 2382 // generic 2383 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2384 f->visit(&_object); 2385 f->visit(&_offset); } 2386 }; 2387 2388 2389 LEAF(UnsafeGetObject, UnsafeObjectOp) 2390 public: 2391 
UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile) 2392 : UnsafeObjectOp(basic_type, object, offset, false, is_volatile) 2393 { 2394 ASSERT_VALUES 2395 } 2396 }; 2397 2398 2399 LEAF(UnsafePutObject, UnsafeObjectOp) 2400 private: 2401 Value _value; // Value to be stored 2402 public: 2403 UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile) 2404 : UnsafeObjectOp(basic_type, object, offset, true, is_volatile) 2405 , _value(value) 2406 { 2407 ASSERT_VALUES 2408 } 2409 2410 // accessors 2411 Value value() { return _value; } 2412 2413 // generic 2414 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2415 f->visit(&_value); } 2416 }; 2417 2418 LEAF(UnsafeGetAndSetObject, UnsafeObjectOp) 2419 private: 2420 Value _value; // Value to be stored 2421 bool _is_add; 2422 public: 2423 UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add) 2424 : UnsafeObjectOp(basic_type, object, offset, false, false) 2425 , _value(value) 2426 , _is_add(is_add) 2427 { 2428 ASSERT_VALUES 2429 } 2430 2431 // accessors 2432 bool is_add() const { return _is_add; } 2433 Value value() { return _value; } 2434 2435 // generic 2436 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2437 f->visit(&_value); } 2438 }; 2439 2440 LEAF(ProfileCall, Instruction) 2441 private: 2442 ciMethod* _method; 2443 int _bci_of_invoke; 2444 ciMethod* _callee; // the method that is called at the given bci 2445 Value _recv; 2446 ciKlass* _known_holder; 2447 Values* _obj_args; // arguments for type profiling 2448 ArgsNonNullState _nonnull_state; // Do we know whether some arguments are never null? 
2449 bool _inlined; // Are we profiling a call that is inlined 2450 2451 public: 2452 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) 2453 : Instruction(voidType) 2454 , _method(method) 2455 , _bci_of_invoke(bci) 2456 , _callee(callee) 2457 , _recv(recv) 2458 , _known_holder(known_holder) 2459 , _obj_args(obj_args) 2460 , _inlined(inlined) 2461 { 2462 // The ProfileCall has side-effects and must occur precisely where located 2463 pin(); 2464 } 2465 2466 ciMethod* method() const { return _method; } 2467 int bci_of_invoke() const { return _bci_of_invoke; } 2468 ciMethod* callee() const { return _callee; } 2469 Value recv() const { return _recv; } 2470 ciKlass* known_holder() const { return _known_holder; } 2471 int nb_profiled_args() const { return _obj_args == NULL ? 0 : _obj_args->length(); } 2472 Value profiled_arg_at(int i) const { return _obj_args->at(i); } 2473 bool arg_needs_null_check(int i) const { 2474 return _nonnull_state.arg_needs_null_check(i); 2475 } 2476 bool inlined() const { return _inlined; } 2477 2478 void set_arg_needs_null_check(int i, bool check) { 2479 _nonnull_state.set_arg_needs_null_check(i, check); 2480 } 2481 2482 virtual void input_values_do(ValueVisitor* f) { 2483 if (_recv != NULL) { 2484 f->visit(&_recv); 2485 } 2486 for (int i = 0; i < nb_profiled_args(); i++) { 2487 f->visit(_obj_args->adr_at(i)); 2488 } 2489 } 2490 }; 2491 2492 LEAF(ProfileReturnType, Instruction) 2493 private: 2494 ciMethod* _method; 2495 ciMethod* _callee; 2496 int _bci_of_invoke; 2497 Value _ret; 2498 2499 public: 2500 ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret) 2501 : Instruction(voidType) 2502 , _method(method) 2503 , _callee(callee) 2504 , _bci_of_invoke(bci) 2505 , _ret(ret) 2506 { 2507 set_needs_null_check(true); 2508 // The ProfileType has side-effects and must occur precisely where located 2509 pin(); 2510 } 2511 2512 ciMethod* method() const { return 
_method; } 2513 ciMethod* callee() const { return _callee; } 2514 int bci_of_invoke() const { return _bci_of_invoke; } 2515 Value ret() const { return _ret; } 2516 2517 virtual void input_values_do(ValueVisitor* f) { 2518 if (_ret != NULL) { 2519 f->visit(&_ret); 2520 } 2521 } 2522 }; 2523 2524 // Call some C runtime function that doesn't safepoint, 2525 // optionally passing the current thread as the first argument. 2526 LEAF(RuntimeCall, Instruction) 2527 private: 2528 const char* _entry_name; 2529 address _entry; 2530 Values* _args; 2531 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2532 2533 public: 2534 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2535 : Instruction(type) 2536 , _entry(entry) 2537 , _args(args) 2538 , _entry_name(entry_name) 2539 , _pass_thread(pass_thread) { 2540 ASSERT_VALUES 2541 pin(); 2542 } 2543 2544 const char* entry_name() const { return _entry_name; } 2545 address entry() const { return _entry; } 2546 int number_of_arguments() const { return _args->length(); } 2547 Value argument_at(int i) const { return _args->at(i); } 2548 bool pass_thread() const { return _pass_thread; } 2549 2550 virtual void input_values_do(ValueVisitor* f) { 2551 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2552 } 2553 }; 2554 2555 // Use to trip invocation counter of an inlined method 2556 2557 LEAF(ProfileInvoke, Instruction) 2558 private: 2559 ciMethod* _inlinee; 2560 ValueStack* _state; 2561 2562 public: 2563 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2564 : Instruction(voidType) 2565 , _inlinee(inlinee) 2566 , _state(state) 2567 { 2568 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
2569 pin(); 2570 } 2571 2572 ciMethod* inlinee() { return _inlinee; } 2573 ValueStack* state() { return _state; } 2574 virtual void input_values_do(ValueVisitor*) {} 2575 virtual void state_values_do(ValueVisitor*); 2576 }; 2577 2578 LEAF(MemBar, Instruction) 2579 private: 2580 LIR_Code _code; 2581 2582 public: 2583 MemBar(LIR_Code code) 2584 : Instruction(voidType) 2585 , _code(code) 2586 { 2587 pin(); 2588 } 2589 2590 LIR_Code code() { return _code; } 2591 2592 virtual void input_values_do(ValueVisitor*) {} 2593 }; 2594 2595 class BlockPair: public CompilationResourceObj { 2596 private: 2597 BlockBegin* _from; 2598 BlockBegin* _to; 2599 public: 2600 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {} 2601 BlockBegin* from() const { return _from; } 2602 BlockBegin* to() const { return _to; } 2603 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; } 2604 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); } 2605 void set_to(BlockBegin* b) { _to = b; } 2606 void set_from(BlockBegin* b) { _from = b; } 2607 }; 2608 2609 2610 define_array(BlockPairArray, BlockPair*) 2611 define_stack(BlockPairList, BlockPairArray) 2612 2613 2614 inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); } 2615 inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); } 2616 inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); } 2617 2618 #undef ASSERT_VALUES 2619 2620 #endif // SHARE_VM_C1_C1_INSTRUCTION_HPP