1 /* 2 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
// Forward declarations for the instruction class hierarchy defined below.
class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class IfInstanceOf;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafeGetAndSetObject;
class UnsafePrefetch;
class UnsafePrefetchRead;
class UnsafePrefetchWrite;
class ProfileCall;
class ProfileInvoke;
class RuntimeCall;
class MemBar;
class RangeCheckPredicate;
// Assert nodes exist only in debug builds.
#ifdef ASSERT
class Assert;
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
define_array(ValueArray, Value)
define_stack(Values, ValueArray)

define_array(ValueStackArray, ValueStack*)
define_stack(ValueStackStack, ValueStackArray)

// BlockClosure is the base class for block traversal/iteration.
// Closure interface for CFG traversal: subclasses override block_do,
// which is invoked once per basic block (see BlockList below).
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction.
// The visitor receives the address of each Value slot, not just the
// Value itself.
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// Growable list of basic blocks with traversal helpers.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure to each block, in list order
  void iterate_backward(BlockClosure* closure);  // apply closure to each block, in reverse list order
  void blocks_do(void f(BlockBegin*));           // apply a plain function to each block
  void values_do(ValueVisitor* f);               // visit the values of all blocks in the list
  // No-op in PRODUCT builds (PRODUCT_RETURN).
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes themselves.
// Visitor over all concrete (leaf) instruction classes; the LEAF macro
// generates the matching visit() dispatch in each instruction class.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

// Combine up to four values into one intx hash by shift-and-xor.
#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// Note: name() participates in the hash so instructions of different
// classes never compare equal; f1..f3 are member expressions compared
// field-by-field in is_equal.
#define HASHING1(class_name, enabled, f1)        \
  virtual intx hash() const {                    \
    return (enabled) ? HASH2(name(), f1) : 0;    \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    return true;                                 \
  }                                              \


#define HASHING2(class_name, enabled, f1, f2)    \
  virtual intx hash() const {                    \
    return (enabled) ? HASH3(name(), f1, f2) : 0; \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    if (f2 != _v->f2) return false;              \
    return true;                                 \
  }                                              \


#define HASHING3(class_name, enabled, f1, f2, f3) \
  virtual intx hash() const {                     \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                               \
  virtual bool is_equal(Value v) const {          \
    if (!(enabled)  ) return false;               \
    class_name* _v = v->as_##class_name();        \
    if (_v == NULL  ) return false;               \
    if (f1 != _v->f1) return false;               \
    if (f2 != _v->f2) return false;               \
    if (f3 != _v->f3) return false;               \
    return true;                                  \
  }                                               \


// The mother of all instructions...
293 294 class Instruction: public CompilationResourceObj { 295 private: 296 int _id; // the unique instruction id 297 #ifndef PRODUCT 298 int _printable_bci; // the bci of the instruction for printing 299 #endif 300 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 301 int _pin_state; // set of PinReason describing the reason for pinning 302 ValueType* _type; // the instruction value type 303 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 304 Instruction* _subst; // the substitution instruction if any 305 LIR_Opr _operand; // LIR specific information 306 unsigned int _flags; // Flag bits 307 308 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 309 ValueStack* _exception_state; // Copy of state for exception handling 310 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 311 312 friend class UseCountComputer; 313 friend class BlockBegin; 314 315 void update_exception_state(ValueStack* state); 316 317 protected: 318 BlockBegin* _block; // Block that contains this instruction 319 320 void set_type(ValueType* type) { 321 assert(type != NULL, "type must exist"); 322 _type = type; 323 } 324 325 class ArgsNonNullState { 326 private: 327 int _nonnull_state; // mask identifying which args are nonnull 328 public: 329 ArgsNonNullState() 330 : _nonnull_state(AllBits) {} 331 332 bool arg_needs_null_check(int i) const { 333 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 334 return is_set_nth_bit(_nonnull_state, i); 335 } 336 return true; 337 } 338 339 void set_arg_needs_null_check(int i, bool check) { 340 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 341 if (check) { 342 _nonnull_state |= nth_bit(i); 343 } else { 344 _nonnull_state &= ~(nth_bit(i)); 345 } 346 } 347 } 348 }; 349 350 public: 351 void* operator new(size_t size) throw() { 352 Compilation* c 
= Compilation::current(); 353 void* res = c->arena()->Amalloc(size); 354 ((Instruction*)res)->_id = c->get_next_id(); 355 return res; 356 } 357 358 static const int no_bci = -99; 359 360 enum InstructionFlag { 361 NeedsNullCheckFlag = 0, 362 CanTrapFlag, 363 DirectCompareFlag, 364 IsEliminatedFlag, 365 IsSafepointFlag, 366 IsStaticFlag, 367 IsStrictfpFlag, 368 NeedsStoreCheckFlag, 369 NeedsWriteBarrierFlag, 370 PreservesStateFlag, 371 TargetIsFinalFlag, 372 TargetIsLoadedFlag, 373 TargetIsStrictfpFlag, 374 UnorderedIsTrueFlag, 375 NeedsPatchingFlag, 376 ThrowIncompatibleClassChangeErrorFlag, 377 ProfileMDOFlag, 378 IsLinkedInBlockFlag, 379 NeedsRangeCheckFlag, 380 InWorkListFlag, 381 DeoptimizeOnException, 382 InstructionLastFlag 383 }; 384 385 public: 386 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 387 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 388 389 // 'globally' used condition values 390 enum Condition { 391 eql, neq, lss, leq, gtr, geq, aeq, beq 392 }; 393 394 // Instructions may be pinned for many reasons and under certain conditions 395 // with enough knowledge it's possible to safely unpin them. 
396 enum PinReason { 397 PinUnknown = 1 << 0 398 , PinExplicitNullCheck = 1 << 3 399 , PinStackForStateSplit= 1 << 12 400 , PinStateSplitConstructor= 1 << 13 401 , PinGlobalValueNumbering= 1 << 14 402 }; 403 404 static Condition mirror(Condition cond); 405 static Condition negate(Condition cond); 406 407 // initialization 408 static int number_of_instructions() { 409 return Compilation::current()->number_of_instructions(); 410 } 411 412 // creation 413 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 414 : _use_count(0) 415 #ifndef PRODUCT 416 , _printable_bci(-99) 417 #endif 418 , _pin_state(0) 419 , _type(type) 420 , _next(NULL) 421 , _block(NULL) 422 , _subst(NULL) 423 , _flags(0) 424 , _operand(LIR_OprFact::illegalOpr) 425 , _state_before(state_before) 426 , _exception_handlers(NULL) 427 { 428 check_state(state_before); 429 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 430 update_exception_state(_state_before); 431 } 432 433 // accessors 434 int id() const { return _id; } 435 #ifndef PRODUCT 436 bool has_printable_bci() const { return _printable_bci != -99; } 437 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 438 void set_printable_bci(int bci) { _printable_bci = bci; } 439 #endif 440 int dominator_depth(); 441 int use_count() const { return _use_count; } 442 int pin_state() const { return _pin_state; } 443 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 444 ValueType* type() const { return _type; } 445 BlockBegin *block() const { return _block; } 446 Instruction* prev(); // use carefully, expensive operation 447 Instruction* next() const { return _next; } 448 bool has_subst() const { return _subst != NULL; } 449 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 450 LIR_Opr operand() const { return _operand; } 451 452 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 453 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 454 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 455 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 456 457 bool has_uses() const { return use_count() > 0; } 458 ValueStack* state_before() const { return _state_before; } 459 ValueStack* exception_state() const { return _exception_state; } 460 virtual bool needs_exception_state() const { return true; } 461 XHandlers* exception_handlers() const { return _exception_handlers; } 462 463 // manipulation 464 void pin(PinReason reason) { _pin_state |= reason; } 465 void pin() { _pin_state |= PinUnknown; } 466 // DANGEROUS: only used by EliminateStores 467 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 468 469 Instruction* set_next(Instruction* next) { 470 assert(next->has_printable_bci(), "_printable_bci should have been set"); 471 assert(next != NULL, "must not be NULL"); 472 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 473 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 474 475 BlockBegin *block = this->block(); 476 next->_block = block; 477 478 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 479 _next = next; 480 return next; 481 } 482 483 Instruction* set_next(Instruction* next, int bci) { 484 #ifndef PRODUCT 485 next->set_printable_bci(bci); 486 #endif 487 return set_next(next); 488 } 489 490 // when blocks are merged 491 void fixup_block_pointers() { 492 Instruction *cur = next()->next(); // next()'s block is set in set_next 493 while (cur && cur->_block != block()) { 494 cur->_block = block(); 495 cur = cur->next(); 496 } 497 } 498 499 Instruction *insert_after(Instruction *i) { 500 Instruction* n = 
_next; 501 set_next(i); 502 i->set_next(n); 503 return _next; 504 } 505 506 Instruction *insert_after_same_bci(Instruction *i) { 507 #ifndef PRODUCT 508 i->set_printable_bci(printable_bci()); 509 #endif 510 return insert_after(i); 511 } 512 513 void set_subst(Instruction* subst) { 514 assert(subst == NULL || 515 type()->base() == subst->type()->base() || 516 subst->type()->base() == illegalType, "type can't change"); 517 _subst = subst; 518 } 519 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 520 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 521 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 522 523 // machine-specifics 524 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 525 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 526 527 // generic 528 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 529 virtual Phi* as_Phi() { return NULL; } 530 virtual Local* as_Local() { return NULL; } 531 virtual Constant* as_Constant() { return NULL; } 532 virtual AccessField* as_AccessField() { return NULL; } 533 virtual LoadField* as_LoadField() { return NULL; } 534 virtual StoreField* as_StoreField() { return NULL; } 535 virtual AccessArray* as_AccessArray() { return NULL; } 536 virtual ArrayLength* as_ArrayLength() { return NULL; } 537 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 538 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 539 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 540 virtual NegateOp* as_NegateOp() { return NULL; } 541 virtual Op2* as_Op2() { return NULL; } 542 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 543 virtual ShiftOp* as_ShiftOp() { return NULL; } 544 virtual LogicOp* as_LogicOp() { return NULL; } 545 virtual CompareOp* as_CompareOp() { return NULL; } 546 virtual IfOp* as_IfOp() { return 
NULL; } 547 virtual Convert* as_Convert() { return NULL; } 548 virtual NullCheck* as_NullCheck() { return NULL; } 549 virtual OsrEntry* as_OsrEntry() { return NULL; } 550 virtual StateSplit* as_StateSplit() { return NULL; } 551 virtual Invoke* as_Invoke() { return NULL; } 552 virtual NewInstance* as_NewInstance() { return NULL; } 553 virtual NewArray* as_NewArray() { return NULL; } 554 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 555 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 556 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 557 virtual TypeCheck* as_TypeCheck() { return NULL; } 558 virtual CheckCast* as_CheckCast() { return NULL; } 559 virtual InstanceOf* as_InstanceOf() { return NULL; } 560 virtual TypeCast* as_TypeCast() { return NULL; } 561 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 562 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 563 virtual MonitorExit* as_MonitorExit() { return NULL; } 564 virtual Intrinsic* as_Intrinsic() { return NULL; } 565 virtual BlockBegin* as_BlockBegin() { return NULL; } 566 virtual BlockEnd* as_BlockEnd() { return NULL; } 567 virtual Goto* as_Goto() { return NULL; } 568 virtual If* as_If() { return NULL; } 569 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 570 virtual TableSwitch* as_TableSwitch() { return NULL; } 571 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 572 virtual Return* as_Return() { return NULL; } 573 virtual Throw* as_Throw() { return NULL; } 574 virtual Base* as_Base() { return NULL; } 575 virtual RoundFP* as_RoundFP() { return NULL; } 576 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 577 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 578 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 579 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 580 581 #ifdef ASSERT 582 virtual Assert* as_Assert() { return NULL; } 583 #endif 584 585 virtual void visit(InstructionVisitor* v) = 0; 586 
587 virtual bool can_trap() const { return false; } 588 589 virtual void input_values_do(ValueVisitor* f) = 0; 590 virtual void state_values_do(ValueVisitor* f); 591 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 592 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 593 594 virtual ciType* exact_type() const; 595 virtual ciType* declared_type() const { return NULL; } 596 597 // hashing 598 virtual const char* name() const = 0; 599 HASHING1(Instruction, false, id()) // hashing disabled by default 600 601 // debugging 602 static void check_state(ValueStack* state) PRODUCT_RETURN; 603 void print() PRODUCT_RETURN; 604 void print_line() PRODUCT_RETURN; 605 void print(InstructionPrinter& ip) PRODUCT_RETURN; 606 }; 607 608 609 // The following macros are used to define base (i.e., non-leaf) 610 // and leaf instruction classes. They define class-name related 611 // generic functionality in one place. 612 613 #define BASE(class_name, super_class_name) \ 614 class class_name: public super_class_name { \ 615 public: \ 616 virtual class_name* as_##class_name() { return this; } \ 617 618 619 #define LEAF(class_name, super_class_name) \ 620 BASE(class_name, super_class_name) \ 621 public: \ 622 virtual const char* name() const { return #class_name; } \ 623 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 624 625 626 // Debugging support 627 628 629 #ifdef ASSERT 630 class AssertValues: public ValueVisitor { 631 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 632 }; 633 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 634 #else 635 #define ASSERT_VALUES 636 #endif // ASSERT 637 638 639 // A Phi is a phi function in the sense of SSA form. It stands for 640 // the value of a local variable at the beginning of a join block. 641 // A Phi consists of n operands, one for every incoming branch. 
// _index encodes which slot is merged: locals use index >= 0, operand
// stack slots use index < 0 (decoded by stack_index()).
LEAF(Phi, Instruction)
 private:
  int         _pf_flags; // the flags of the phi function
  int         _index;    // to value on operand stack (index < 0) or to local
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  {
    _block = b;
    // b is itself an Instruction (BlockBegin), so reuse its bci for printing.
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    // Merging incompatible types yields an illegal phi that must never be used.
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  // One operand per incoming branch; defined elsewhere (not stored locally).
  Value operand_at(int i) const;
  int   operand_count() const;

  void  set(Flag f)               { _pf_flags |= f; }
  void  clear(Flag f)             { _pf_flags &= ~f; }
  bool  is_set(Flag f) const      { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // No input values stored in the node itself (operands are reached via
  // operand_at), so there is nothing to visit here.
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int      _java_index;  // the local index within the method to which the local belongs
  ciType*  _declared_type;
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    // Locals have no real bytecode position; use -1 for printing.
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }

  virtual ciType* declared_type() const          { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};


// A compile-time constant value; its ValueType carries the actual constant.
LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, NULL, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  // Variant for constants that require patching (unresolved at compile time);
  // state_before is kept for deoptimization and the node is pinned.
  Constant(ValueType* type, ValueStack* state_before):
    Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != NULL, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    // since it's patching it needs to be pinned
    pin();
  }

  // generic
  // Only patched constants (those carrying state_before) can trap.
  virtual bool can_trap() const                  { return state_before() != NULL; }
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }

  // value-numbering support; defined out of line
  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  // Constant-folds a conditional branch: returns the successor taken when
  // the comparison is decidable at compile time, NULL otherwise.
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return NULL;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }
};
BASE(AccessField, Instruction) 771 private: 772 Value _obj; 773 int _offset; 774 ciField* _field; 775 NullCheck* _explicit_null_check; // For explicit null check elimination 776 777 public: 778 // creation 779 AccessField(Value obj, int offset, ciField* field, bool is_static, 780 ValueStack* state_before, bool needs_patching) 781 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 782 , _obj(obj) 783 , _offset(offset) 784 , _field(field) 785 , _explicit_null_check(NULL) 786 { 787 set_needs_null_check(!is_static); 788 set_flag(IsStaticFlag, is_static); 789 set_flag(NeedsPatchingFlag, needs_patching); 790 ASSERT_VALUES 791 // pin of all instructions with memory access 792 pin(); 793 } 794 795 // accessors 796 Value obj() const { return _obj; } 797 int offset() const { return _offset; } 798 ciField* field() const { return _field; } 799 BasicType field_type() const { return _field->type()->basic_type(); } 800 bool is_static() const { return check_flag(IsStaticFlag); } 801 NullCheck* explicit_null_check() const { return _explicit_null_check; } 802 bool needs_patching() const { return check_flag(NeedsPatchingFlag); } 803 804 // Unresolved getstatic and putstatic can cause initialization. 805 // Technically it occurs at the Constant that materializes the base 806 // of the static fields but it's simpler to model it here. 807 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); } 808 809 // manipulation 810 811 // Under certain circumstances, if a previous NullCheck instruction 812 // proved the target object non-null, we can eliminate the explicit 813 // null check and do an implicit one, simply specifying the debug 814 // information from the NullCheck. This field should only be consulted 815 // if needs_null_check() is true. 
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


// Load of an instance or static field.
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic
  // cannot be eliminated if needs patching or if volatile
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())
};


// Store of a value into an instance or static field.
LEAF(StoreField, AccessField)
 private:
  Value _value;                                  // the value to be stored

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();  // instruction with side effect; must not be eliminated
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};


// Base class for all instructions that access an array.
BASE(AccessArray, Instruction)
 private:
  Value _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};


// Reads the length of an array.
LEAF(ArrayLength, AccessArray)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


// Base class for instructions that access an indexed array element.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;                              // the index into the array
  Value     _length;                             // the array length; may be NULL (see clear_length)
  BasicType _elt_type;                           // the basic type of the array elements

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  void clear_length()                            { _length = NULL; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};


// Load of an array element.
LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck* _explicit_null_check;               // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};


// Store of a value into an array element.
LEAF(StoreIndexed, AccessIndexed)
 private:
  Value _value;                                  // the value to be stored

  ciMethod* _profiled_method;                    // for MethodData* profiling
  int       _profiled_bci;                       // for MethodData* profiling
 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0)
  {
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();  // instruction with side effect; must not be eliminated
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; }
  int profiled_bci() const                       { return _profiled_bci; }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Arithmetic negation of a single value.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};


// Base class for all binary operations: bytecode _op applied to _x and _y.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};


LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool is_strictfp() const                       { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Selects _tval or _fval depending on the comparison of x and y.
LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  // the condition is stored in Op2's op field, so op() itself must not be used
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Conversion of a value to another type (bytecode _op, e.g. i2l).
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on _obj.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;
  Value   _obj;

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                  { return _declared_type; }
  Value obj() const                              { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


// Base class for all instructions that carry a full ValueStack state
// (set once via set_state).
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// A method invocation; _recv is NULL when there is no receiver.
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;                         // the invoke bytecode
  Value           _recv;                         // the receiver, if any
  Values*         _args;                         // the arguments
  BasicTypeList*  _signature;
  int             _vtable_index;
  ciMethod*       _target;

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return
    _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Allocation of a new instance of the given klass.
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
  : StateSplit(instanceType, state_before)
  , _klass(klass)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};


// Base class for all array allocations.
BASE(NewArray, StateSplit)
 private:
  Value _length;                                 // NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* exact_type() const                     { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};


// Allocation of an array with a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};


// Allocation of an array with object elements of the given klass.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};


// Allocation of a multi-dimensional array; the per-dimension sizes are in _dims.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;
  Values*  _dims;

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};


// Base class for type checks of _obj against _klass (checkcast/instanceof style).
BASE(TypeCheck, StateSplit)
 private:
  ciKlass* _klass;
  Value    _obj;

  ciMethod* _profiled_method;                    // for MethodData* profiling
  int       _profiled_bci;                       // for MethodData* profiling

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; }
  int profiled_bci() const                       { return _profiled_bci; }
};


LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
};


LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};


// Base class for monitorenter/monitorexit on _obj using monitor slot _monitor_no.
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;
  int   _monitor_no;

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};


LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const                  { return true; }
};


LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};


LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;
  Values*          _args;
  Value            _recv;
  ArgsNonNullState _nonnull_state;

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we
  // continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic. The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag, cantrap);
    if (has_receiver) {
      _recv = argument_at(0);  // the receiver is passed as the first argument
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap()) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                   { return check_flag(PreservesStateFlag); }

  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


class LIR_List;

// The first instruction of a basic block; holds the block's CFG links,
// flags, liveness sets and associated LIR.
LEAF(BlockBegin, StateSplit)
 private:
  int        _block_id;                          // the unique block id
  int        _bci;                               // start-bci of block
  int        _depth_first_number;                // number of this block in a depth-first ordering
  int        _linear_scan_number;                // number of this block in linear-scan ordering
  int        _dominator_depth;
  int        _loop_depth;                        // the loop nesting level of this block
  int        _loop_index;                        // number of the innermost loop of this block
  int        _flags;                             // the flags associated with this block

  // fields used by BlockListBuilder
  int        _total_preds;                       // number of predecessors found by BlockListBuilder
  BitMap     _stores_to_locals;                  // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList   _successors;                       // the successors of this block
  BlockList   _predecessors;                     // the predecessors of this block
  BlockList   _dominates;                        // list of blocks that are dominated by this block
  BlockBegin* _dominator;                        // the dominator of this block
  // SSA specific ends
  BlockEnd*  _end;                               // the last instruction of this block
  BlockList  _exception_handlers;                // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states;            // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int        _exception_handler_pco;             // if this block is the start of an exception handler,
                                                 // this records the PC offset in the assembly code of the
                                                 // first instruction in this block
  Label      _label;                             // the label associated with this block
  LIR_List*  _lir;                               // the low level intermediate representation for this block

  BitMap     _live_in;                           // set of live LIR_Opr registers at entry to this block
  BitMap     _live_out;                          // set of live LIR_Opr registers at exit from this block
  BitMap     _live_gen;                          // set of registers used before any redefinition in this block
  BitMap     _live_kill;                         // set of registers defined in this block

  BitMap     _fpu_register_usage;
  intArray*  _fpu_stack_state;                   // For x86 FPU code generation with UseLinearScan
  int        _first_lir_instruction_id;          // ID of first LIR instruction in this block
  int        _last_lir_instruction_id;           // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  // Blocks are arena-allocated; ids are assigned at allocation time so they
  // are valid before the constructor body runs.
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    ((BlockBegin*)res)->_id = c->get_next_id();
    ((BlockBegin*)res)->_block_id = c->get_next_block_id();
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  // NOTE(review): initializer order below does not match declaration order
  // (harmless, but triggers -Wreorder style warnings).
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _loop_depth(0)
  , _flags(0)
  , _dominator_depth(-1)
  , _dominator(NULL)
  , _end(NULL)
  , _predecessors(2)
  , _successors(2)
  , _dominates(2)
  , _exception_handlers(1)
  , _exception_states(NULL)
  , _exception_handler_pco(-1)
  , _lir(NULL)
  , _loop_index(-1)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(NULL)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  , _total_preds(0)
  , _stores_to_locals()
  {
    _block = this;
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }

  // accessors
  int block_id() const                           { return _block_id; }
  int bci() const                                { return _bci; }
  BlockList* successors()                        { return &_successors; }
  BlockList* dominates()                         { return &_dominates; }
  BlockBegin* dominator() const                  { return _dominator; }
  int loop_depth() const                         { return _loop_depth; }
  int dominator_depth() const                    { return _dominator_depth; }
  int depth_first_number() const                 { return _depth_first_number; }
  int linear_scan_number() const                 { return _linear_scan_number; }
  BlockEnd* end() const                          { return _end; }
  Label* label()                                 { return &_label; }
  LIR_List* lir() const                          { return _lir; }
  int exception_handler_pco() const              { return _exception_handler_pco; }
  BitMap& live_in()                              { return _live_in; }
  BitMap& live_out()                             { return _live_out; }
  BitMap& live_gen()                             { return _live_gen; }
  BitMap& live_kill()                            { return _live_kill; }
  BitMap& fpu_register_usage()                   { return _fpu_register_usage; }
  intArray* fpu_stack_state() const              { return _fpu_stack_state; }
  int first_lir_instruction_id() const           { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const            { return _last_lir_instruction_id; }
  int total_preds() const                        { return _total_preds; }
  BitMap& stores_to_locals()                     { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom)            { _dominator = dom; }
  void set_loop_depth(int d)                     { _loop_depth = d; }
  void set_dominator_depth(int d)                { _dominator_depth = d; }
  void set_depth_first_number(int dfn)           { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn)           { _linear_scan_number = lsn; }
  void set_end(BlockEnd* end);
  void clear_end();
  void disconnect_from_graph();
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir)                    { _lir = lir; }
  void set_exception_handler_pco(int pco)        { _exception_handler_pco = pco; }
  void set_live_in  (BitMap map)                 { _live_in = map; }
  void set_live_out (BitMap map)                 { _live_out = map; }
  void set_live_gen (BitMap map)                 { _live_gen = map; }
  void set_live_kill(BitMap map)                 { _live_kill = map; }
  void set_fpu_register_usage(BitMap map)        { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state)      { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id)      { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id)       { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1)          { _total_preds += n; }
  void init_stores_to_locals(int locals_count)   { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_successor(BlockBegin* sux);
  void remove_successor(BlockBegin* pred);
  bool is_successor(BlockBegin* sux) const       { return _successors.contains(sux); }

  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const    { return _predecessors.contains(pred); }
  int number_of_preds() const                    { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const               { return _predecessors[i]; }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int  number_of_exception_handlers() const      { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const  { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states()               { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const  { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // Should we try to eliminate range checks in this block
  };

  void set(Flag f)                               { _flags |= f; }
  void clear(Flag f)                             { _flags &= ~f; }
  bool is_set(Flag f) const                      { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder  (BlockClosure* closure);
  void iterate_postorder (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                    { _loop_index = ix; }
  int  loop_index() const                        { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state);             // try to merge states at block begin
  void merge(ValueStack* state)                  { bool b = try_merge(state); assert(b, "merge failed"); }

  // debugging
  void print_block()                             PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false)
PRODUCT_RETURN; 1795 }; 1796 1797 1798 BASE(BlockEnd, StateSplit) 1799 private: 1800 BlockList* _sux; 1801 1802 protected: 1803 BlockList* sux() const { return _sux; } 1804 1805 void set_sux(BlockList* sux) { 1806 #ifdef ASSERT 1807 assert(sux != NULL, "sux must exist"); 1808 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist"); 1809 #endif 1810 _sux = sux; 1811 } 1812 1813 public: 1814 // creation 1815 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1816 : StateSplit(type, state_before) 1817 , _sux(NULL) 1818 { 1819 set_flag(IsSafepointFlag, is_safepoint); 1820 } 1821 1822 // accessors 1823 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1824 // For compatibility with old code, for new code use block() 1825 BlockBegin* begin() const { return _block; } 1826 1827 // manipulation 1828 void set_begin(BlockBegin* begin); 1829 1830 // successors 1831 int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; } 1832 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1833 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1834 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1835 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1836 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1837 }; 1838 1839 1840 LEAF(Goto, BlockEnd) 1841 public: 1842 enum Direction { 1843 none, // Just a regular goto 1844 taken, not_taken // Goto produced from If 1845 }; 1846 private: 1847 ciMethod* _profiled_method; 1848 int _profiled_bci; 1849 Direction _direction; 1850 public: 1851 // creation 1852 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1853 : BlockEnd(illegalType, state_before, is_safepoint) 1854 , _direction(none) 1855 , _profiled_method(NULL) 1856 , _profiled_bci(0) { 1857 BlockList* s = new BlockList(1); 1858 s->append(sux); 1859 set_sux(s); 1860 } 1861 1862 Goto(BlockBegin* sux, bool 
is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1863 , _direction(none) 1864 , _profiled_method(NULL) 1865 , _profiled_bci(0) { 1866 BlockList* s = new BlockList(1); 1867 s->append(sux); 1868 set_sux(s); 1869 } 1870 1871 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1872 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1873 int profiled_bci() const { return _profiled_bci; } 1874 Direction direction() const { return _direction; } 1875 1876 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1877 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1878 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1879 void set_direction(Direction d) { _direction = d; } 1880 }; 1881 1882 #ifdef ASSERT 1883 LEAF(Assert, Instruction) 1884 private: 1885 Value _x; 1886 Condition _cond; 1887 Value _y; 1888 char *_message; 1889 1890 public: 1891 // creation 1892 // unordered_is_true is valid for float/double compares only 1893 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1894 1895 // accessors 1896 Value x() const { return _x; } 1897 Condition cond() const { return _cond; } 1898 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1899 Value y() const { return _y; } 1900 const char *message() const { return _message; } 1901 1902 // generic 1903 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1904 }; 1905 #endif 1906 1907 LEAF(RangeCheckPredicate, StateSplit) 1908 private: 1909 Value _x; 1910 Condition _cond; 1911 Value _y; 1912 1913 void check_state(); 1914 1915 public: 1916 // creation 1917 // unordered_is_true is valid for float/double compares only 1918 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 1919 , _x(x) 1920 , _cond(cond) 1921 , _y(y) 1922 { 1923 ASSERT_VALUES 1924 set_flag(UnorderedIsTrueFlag, 
unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    this->set_state(state);
    check_state();
  }

  // Always deoptimize: a predicate with no operands unconditionally
  // triggers deoptimization when reached.
  RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
  {
    this->set_state(state);
    _x = _y = NULL;
    check_state();
  }

  // accessors
  Value x() const                          { return _x; }
  Condition cond() const                   { return _cond; }
  bool unordered_is_true() const           { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                          { return _y; }

  // Drop both operands so the predicate always deoptimizes.
  void always_fail()                       { _x = _y = NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
  HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
};


// Two-way conditional branch: compares _x against _y using _cond and
// transfers control to the true or the false successor block.
LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci;   // Canonicalizer may alter bci of If node
  bool        _swapped;        // Is the order reversed with respect to the original If in the
                               // bytecode stream?

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    // successor 0 is the true branch, successor 1 the false branch
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                          { return _x; }
  Condition cond() const                   { return _cond; }
  bool unordered_is_true() const           { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                          { return _y; }
  BlockBegin* sux_for(bool is_true) const  { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                 { return sux_for(true); }
  BlockBegin* fsux() const                 { return sux_for(false); }
  // successor taken when a floating-point compare is unordered (NaN operand)
  BlockBegin* usux() const                 { return sux_for(unordered_is_true()); }
  bool should_profile() const              { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const        { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                 { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                  { return _swapped; }

  // manipulation

  // Exchange the operands and mirror the condition (x < y  <=>  y > x);
  // the successors are unaffected.
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // Exchange the successors and negate the condition (and the
  // unordered-is-true flag) so the overall branch semantics are preserved.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value)        { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci)             { _profiled_bci = bci;       }
  void set_swapped(bool value)               { _swapped = value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


// Fused InstanceOf + If: branches depending on whether _obj is an
// instance of _klass (see Note 1/Note 2 below).
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;
  Value    _obj;
  bool     _test_is_instance;  // jump if instance
  int      _instanceof_bci;

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* an instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                   { return _klass; }
  Value obj() const                        { return _obj; }
  int instanceof_bci() const               { return _instanceof_bci; }
  bool test_is_instance() const            { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const  { return sux_at(is_true ?
0 : 1); }
  BlockBegin* tsux() const                 { return sux_for(true); }
  BlockBegin* fsux() const                 { return sux_for(false); }

  // manipulation

  // Exchange the successors and invert the test so the overall
  // branch semantics are preserved.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); }
};


// Common base of TableSwitch and LookupSwitch: a multi-way branch on an
// integer tag value; the successor list includes the default target.
BASE(Switch, BlockEnd)
 private:
  Value _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                        { return _tag; }
  // number of cases, not counting the default successor
  int length() const                       { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
};


// Switch over a dense key range [lo_key(), hi_key()]; successor i
// handles key lo_key() + i.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                       { return _lo_key; }
  int hi_key() const                       { return _lo_key + length() - 1; }
};


// Switch over a sparse key set; successor i handles key_at(i).
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    // one key per non-default successor
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                  { return _keys->at(i); }
};


// Returns from the current method; _result is NULL for void methods.
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                     { return _result; }
  bool has_result() const                  { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Throws the given exception object; ends the block.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                  { return _exception; }

  // generic
  virtual bool can_trap() const            { return true; }
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// The synthetic start node of the CFG: branches to the standard entry
// and, when compiling for on-stack replacement, the OSR entry as well.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const { return default_sux(); }
  // when present, the OSR entry precedes the std entry in the successor list
  BlockBegin* osr_entry() const { return number_of_sux() < 2 ?
NULL : sux_at(0); }
};


// Models the entry point of an on-stack-replaced method; yields a
// pointer-sized value (longType on _LP64, intType otherwise).
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    // pinned: must remain at the handler entry
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;             // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type())      // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                      { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { f->visit(&_input); }
};


// Common base of all sun.misc.Unsafe operations; puts produce no value
// (voidType), gets produce a value of the given basic type.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                   { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Unsafe access to a raw memory address, addressed either by a single
// long address or by base + (index << log2_scale).
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;              // Base address (a Java long)
  Value _index;             // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;        // Scale factor: 0, 1, 2, or 3.
                            // Indicates log2 of number of bytes (1, 2, 4, or 8)
                            // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                             { return _base; }
  Value index()                            { return _index; }
  bool  has_index()                        { return (_index != NULL); }
  int   log2_scale()                       { return _log2_scale; }

  // setters
  void set_base (Value base)               { _base = base; }
  void set_index(Value index)              { _index = index; }
  void set_log2_scale(int log2_scale)      { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeOp::input_values_do(f);
                                                    f->visit(&_base);
                                                    if (has_index()) f->visit(&_index); }
};


// Raw memory load; may be flagged unaligned and/or wide (for OSREntry).
LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
  bool _may_be_unaligned, _is_wide;  // For OSREntry

 public:
  UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  // accessors
  bool may_be_unaligned()                  { return _may_be_unaligned; }
  bool is_wide()                           { return _is_wide; }
};


// Raw memory store of _value to the given address.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;             // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeRawOp::input_values_do(f);
                                                    f->visit(&_value); }
};


// Unsafe access addressed by an object plus an offset within it.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;            // Object to be fetched from or mutated
  Value _offset;            // Offset within object
  bool  _is_volatile;       // true if volatile - dl/JSR166

 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                           { return _object; }
  Value offset()                           { return _offset; }
  bool  is_volatile()                      { return _is_volatile; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeOp::input_values_do(f);
                                                    f->visit(&_object);
                                                    f->visit(&_offset); }
};


// Unsafe (optionally volatile) load from object + offset.
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:

  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


// Unsafe (optionally volatile) store of _value to object + offset.
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;             // Value to be stored

 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeObjectOp::input_values_do(f);
                                                    f->visit(&_value); }
};


// Atomic unsafe get-and-set (or get-and-add when _is_add) of _value
// at object + offset.
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;             // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                      { return _is_add; }
  Value value()                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeObjectOp::input_values_do(f);
                                                    f->visit(&_value); }
};


// Common base of the unsafe prefetch hints; produces no value (T_VOID).
BASE(UnsafePrefetch, UnsafeObjectOp)
 public:
  UnsafePrefetch(Value object, Value offset)
  : UnsafeObjectOp(T_VOID, object, offset, false, false)
  {
  }
};


// Prefetch hint for an anticipated read at object + offset.
LEAF(UnsafePrefetchRead, UnsafePrefetch)
 public:
  UnsafePrefetchRead(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};


// Prefetch hint for an anticipated write at object + offset.
LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};

LEAF(ProfileCall, Instruction) 2465 private: 2466 ciMethod* _method; 2467 int _bci_of_invoke; 2468 ciMethod* _callee; // the method that is called at the given bci 2469 Value _recv; 2470 ciKlass* _known_holder; 2471 Values* _obj_args; // arguments for type profiling 2472 ArgsNonNullState _nonnull_state; // Do we know whether some arguments are never null? 2473 bool _inlined; // Are we profiling a call that is inlined 2474 2475 public: 2476 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) 2477 : Instruction(voidType) 2478 , _method(method) 2479 , _bci_of_invoke(bci) 2480 , _callee(callee) 2481 , _recv(recv) 2482 , _known_holder(known_holder) 2483 , _obj_args(obj_args) 2484 , _inlined(inlined) 2485 { 2486 // The ProfileCall has side-effects and must occur precisely where located 2487 pin(); 2488 } 2489 2490 ciMethod* method() const { return _method; } 2491 int bci_of_invoke() const { return _bci_of_invoke; } 2492 ciMethod* callee() const { return _callee; } 2493 Value recv() const { return _recv; } 2494 ciKlass* known_holder() const { return _known_holder; } 2495 int nb_profiled_args() const { return _obj_args == NULL ? 0 : _obj_args->length(); } 2496 Value profiled_arg_at(int i) const { return _obj_args->at(i); } 2497 bool arg_needs_null_check(int i) const { 2498 return _nonnull_state.arg_needs_null_check(i); 2499 } 2500 bool inlined() const { return _inlined; } 2501 2502 void set_arg_needs_null_check(int i, bool check) { 2503 _nonnull_state.set_arg_needs_null_check(i, check); 2504 } 2505 2506 virtual void input_values_do(ValueVisitor* f) { 2507 if (_recv != NULL) f->visit(&_recv); 2508 for (int i = 0; i < nb_profiled_args(); i++) f->visit(_obj_args->adr_at(i)); 2509 } 2510 }; 2511 2512 // Call some C runtime function that doesn't safepoint, 2513 // optionally passing the current thread as the first argument. 
2514 LEAF(RuntimeCall, Instruction) 2515 private: 2516 const char* _entry_name; 2517 address _entry; 2518 Values* _args; 2519 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2520 2521 public: 2522 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2523 : Instruction(type) 2524 , _entry(entry) 2525 , _args(args) 2526 , _entry_name(entry_name) 2527 , _pass_thread(pass_thread) { 2528 ASSERT_VALUES 2529 pin(); 2530 } 2531 2532 const char* entry_name() const { return _entry_name; } 2533 address entry() const { return _entry; } 2534 int number_of_arguments() const { return _args->length(); } 2535 Value argument_at(int i) const { return _args->at(i); } 2536 bool pass_thread() const { return _pass_thread; } 2537 2538 virtual void input_values_do(ValueVisitor* f) { 2539 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2540 } 2541 }; 2542 2543 // Use to trip invocation counter of an inlined method 2544 2545 LEAF(ProfileInvoke, Instruction) 2546 private: 2547 ciMethod* _inlinee; 2548 ValueStack* _state; 2549 2550 public: 2551 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2552 : Instruction(voidType) 2553 , _inlinee(inlinee) 2554 , _state(state) 2555 { 2556 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
    pin();
  }

  // accessors
  ciMethod* inlinee()      { return _inlinee; }
  ValueStack* state()      { return _state; }

  // generic
  virtual void input_values_do(ValueVisitor*)   {}
  virtual void state_values_do(ValueVisitor*);  // defined out of line
};


// Explicit memory barrier; _code selects the barrier kind (a LIR_Code).
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    // pinned: the barrier must stay exactly where it was emitted
    pin();
  }

  // accessors
  LIR_Code code()          { return _code; }

  // generic
  virtual void input_values_do(ValueVisitor*)   {}
};


// A (from, to) edge in the control flow graph.
class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  BlockBegin* _to;
 public:
  BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
  BlockBegin* from() const { return _from; }
  BlockBegin* to() const   { return _to;   }
  bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
  bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); }
  void set_to(BlockBegin* b)   { _to = b; }
  void set_from(BlockBegin* b) { _from = b; }
};


define_array(BlockPairArray, BlockPair*)
define_stack(BlockPairList, BlockPairArray)


// Successor queries on BlockBegin are defined here (after BlockEnd) so
// they can assert consistency with the successor list of the block's
// end instruction.
inline int         BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const              { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch");          return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd");         _successors.append(sux); }

#undef ASSERT_VALUES

#endif // SHARE_VM_C1_C1_INSTRUCTION_HPP