1 /* 2 * Copyright (c) 1999, 2012, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
// Forward declarations for every class in the C1 HIR instruction
// hierarchy (leaf classes are concrete, the rest are abstract bases).
class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class IfInstanceOf;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafeGetAndSetObject;
class UnsafePrefetch;
class UnsafePrefetchRead;
class UnsafePrefetchWrite;
class ProfileCall;
class ProfileInvoke;
class RuntimeCall;
class MemBar;

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
define_array(ValueArray, Value)
define_stack(Values, ValueArray)

// Growable array/stack of ValueStack*, used for state bookkeeping.
define_array(ValueStackArray, ValueStack*)
define_stack(ValueStackStack, ValueStackArray)

// BlockClosure is the base class for block traversal/iteration.
// Abstract callback applied to each basic block during CFG traversal.
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// A growable list of basic blocks with iteration helpers; the traversal
// and printing member functions are defined out of line.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  void iterate_forward(BlockClosure* closure);
  void iterate_backward(BlockClosure* closure);
  void blocks_do(void f(BlockBegin*));
  void values_do(ValueVisitor* f);
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Type-based double dispatch for the instruction hierarchy: each concrete
// instruction's visit() calls back into the matching do_X here.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
};


// Hashing support
//
// Note: This hash functions affect the performance
//       of ValueMap - make changes carefully!

// Each HASHn folds n values together by shift-and-xor.
#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

#define HASHING1(class_name, enabled, f1)        \
  virtual intx hash() const {                    \
    return (enabled) ? HASH2(name(), f1) : 0;    \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    return true;                                 \
  }                                              \


#define HASHING2(class_name, enabled, f1, f2)    \
  virtual intx hash() const {                    \
    return (enabled) ? HASH3(name(), f1, f2) : 0; \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    if (f2 != _v->f2) return false;              \
    return true;                                 \
  }                                              \


#define HASHING3(class_name, enabled, f1, f2, f3) \
  virtual intx hash() const {                    \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    if (f2 != _v->f2) return false;              \
    if (f3 != _v->f3) return false;              \
    return true;                                 \
  }                                              \


// The mother of all instructions...
285 286 class Instruction: public CompilationResourceObj { 287 private: 288 int _id; // the unique instruction id 289 #ifndef PRODUCT 290 int _printable_bci; // the bci of the instruction for printing 291 #endif 292 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 293 int _pin_state; // set of PinReason describing the reason for pinning 294 ValueType* _type; // the instruction value type 295 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 296 Instruction* _subst; // the substitution instruction if any 297 LIR_Opr _operand; // LIR specific information 298 unsigned int _flags; // Flag bits 299 300 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 301 ValueStack* _exception_state; // Copy of state for exception handling 302 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 303 304 friend class UseCountComputer; 305 friend class BlockBegin; 306 307 void update_exception_state(ValueStack* state); 308 309 //protected: 310 public: 311 void set_type(ValueType* type) { 312 assert(type != NULL, "type must exist"); 313 _type = type; 314 } 315 316 public: 317 void* operator new(size_t size) { 318 Compilation* c = Compilation::current(); 319 void* res = c->arena()->Amalloc(size); 320 ((Instruction*)res)->_id = c->get_next_id(); 321 return res; 322 } 323 324 static const int no_bci = -99; 325 326 enum InstructionFlag { 327 NeedsNullCheckFlag = 0, 328 CanTrapFlag, 329 DirectCompareFlag, 330 IsEliminatedFlag, 331 IsSafepointFlag, 332 IsStaticFlag, 333 IsStrictfpFlag, 334 NeedsStoreCheckFlag, 335 NeedsWriteBarrierFlag, 336 PreservesStateFlag, 337 TargetIsFinalFlag, 338 TargetIsLoadedFlag, 339 TargetIsStrictfpFlag, 340 UnorderedIsTrueFlag, 341 NeedsPatchingFlag, 342 ThrowIncompatibleClassChangeErrorFlag, 343 ProfileMDOFlag, 344 IsLinkedInBlockFlag, 345 InstructionLastFlag 346 }; 347 348 
public: 349 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 350 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 351 352 // 'globally' used condition values 353 enum Condition { 354 eql, neq, lss, leq, gtr, geq 355 }; 356 357 // Instructions may be pinned for many reasons and under certain conditions 358 // with enough knowledge it's possible to safely unpin them. 359 enum PinReason { 360 PinUnknown = 1 << 0 361 , PinExplicitNullCheck = 1 << 3 362 , PinStackForStateSplit= 1 << 12 363 , PinStateSplitConstructor= 1 << 13 364 , PinGlobalValueNumbering= 1 << 14 365 }; 366 367 static Condition mirror(Condition cond); 368 static Condition negate(Condition cond); 369 370 // initialization 371 static int number_of_instructions() { 372 return Compilation::current()->number_of_instructions(); 373 } 374 375 // creation 376 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 377 : _use_count(0) 378 #ifndef PRODUCT 379 , _printable_bci(-99) 380 #endif 381 , _pin_state(0) 382 , _type(type) 383 , _next(NULL) 384 , _subst(NULL) 385 , _flags(0) 386 , _operand(LIR_OprFact::illegalOpr) 387 , _state_before(state_before) 388 , _exception_handlers(NULL) 389 { 390 check_state(state_before); 391 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 392 update_exception_state(_state_before); 393 } 394 395 // accessors 396 int id() const { return _id; } 397 #ifndef PRODUCT 398 bool has_printable_bci() const { return _printable_bci != -99; } 399 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 400 void set_printable_bci(int bci) { _printable_bci = bci; } 401 #endif 402 int use_count() const { return _use_count; } 403 int pin_state() const { return _pin_state; } 404 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 405 ValueType* type() 
const { return _type; } 406 Instruction* prev(BlockBegin* block); // use carefully, expensive operation 407 Instruction* next() const { return _next; } 408 bool has_subst() const { return _subst != NULL; } 409 Instruction* subst() { return _subst == NULL ? this : _subst->subst(); } 410 LIR_Opr operand() const { return _operand; } 411 412 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 413 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 414 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 415 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 416 417 bool has_uses() const { return use_count() > 0; } 418 ValueStack* state_before() const { return _state_before; } 419 ValueStack* exception_state() const { return _exception_state; } 420 virtual bool needs_exception_state() const { return true; } 421 XHandlers* exception_handlers() const { return _exception_handlers; } 422 423 // manipulation 424 void pin(PinReason reason) { _pin_state |= reason; } 425 void pin() { _pin_state |= PinUnknown; } 426 // DANGEROUS: only used by EliminateStores 427 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 428 429 Instruction* set_next(Instruction* next) { 430 assert(next->has_printable_bci(), "_printable_bci should have been set"); 431 assert(next != NULL, "must not be NULL"); 432 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 433 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 434 435 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 436 _next = next; 437 return next; 438 } 439 440 Instruction* set_next(Instruction* next, int bci) { 441 #ifndef PRODUCT 442 next->set_printable_bci(bci); 443 #endif 444 return set_next(next); 445 } 446 447 void set_subst(Instruction* subst) { 448 assert(subst == NULL || 449 type()->base() == subst->type()->base() || 450 
subst->type()->base() == illegalType, "type can't change"); 451 _subst = subst; 452 } 453 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 454 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 455 456 // machine-specifics 457 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 458 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 459 460 // generic 461 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 462 virtual Phi* as_Phi() { return NULL; } 463 virtual Local* as_Local() { return NULL; } 464 virtual Constant* as_Constant() { return NULL; } 465 virtual AccessField* as_AccessField() { return NULL; } 466 virtual LoadField* as_LoadField() { return NULL; } 467 virtual StoreField* as_StoreField() { return NULL; } 468 virtual AccessArray* as_AccessArray() { return NULL; } 469 virtual ArrayLength* as_ArrayLength() { return NULL; } 470 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 471 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 472 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 473 virtual NegateOp* as_NegateOp() { return NULL; } 474 virtual Op2* as_Op2() { return NULL; } 475 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 476 virtual ShiftOp* as_ShiftOp() { return NULL; } 477 virtual LogicOp* as_LogicOp() { return NULL; } 478 virtual CompareOp* as_CompareOp() { return NULL; } 479 virtual IfOp* as_IfOp() { return NULL; } 480 virtual Convert* as_Convert() { return NULL; } 481 virtual NullCheck* as_NullCheck() { return NULL; } 482 virtual OsrEntry* as_OsrEntry() { return NULL; } 483 virtual StateSplit* as_StateSplit() { return NULL; } 484 virtual Invoke* as_Invoke() { return NULL; } 485 virtual NewInstance* as_NewInstance() { return NULL; } 486 virtual NewArray* as_NewArray() { return NULL; } 487 virtual NewTypeArray* as_NewTypeArray() { return 
NULL; } 488 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 489 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 490 virtual TypeCheck* as_TypeCheck() { return NULL; } 491 virtual CheckCast* as_CheckCast() { return NULL; } 492 virtual InstanceOf* as_InstanceOf() { return NULL; } 493 virtual TypeCast* as_TypeCast() { return NULL; } 494 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 495 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 496 virtual MonitorExit* as_MonitorExit() { return NULL; } 497 virtual Intrinsic* as_Intrinsic() { return NULL; } 498 virtual BlockBegin* as_BlockBegin() { return NULL; } 499 virtual BlockEnd* as_BlockEnd() { return NULL; } 500 virtual Goto* as_Goto() { return NULL; } 501 virtual If* as_If() { return NULL; } 502 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 503 virtual TableSwitch* as_TableSwitch() { return NULL; } 504 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 505 virtual Return* as_Return() { return NULL; } 506 virtual Throw* as_Throw() { return NULL; } 507 virtual Base* as_Base() { return NULL; } 508 virtual RoundFP* as_RoundFP() { return NULL; } 509 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 510 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 511 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 512 513 virtual void visit(InstructionVisitor* v) = 0; 514 515 virtual bool can_trap() const { return false; } 516 517 virtual void input_values_do(ValueVisitor* f) = 0; 518 virtual void state_values_do(ValueVisitor* f); 519 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 520 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 521 522 virtual ciType* exact_type() const { return NULL; } 523 virtual ciType* declared_type() const { return NULL; } 524 525 // hashing 526 virtual const char* name() const = 0; 527 HASHING1(Instruction, false, id()) // hashing 
disabled by default 528 529 // debugging 530 static void check_state(ValueStack* state) PRODUCT_RETURN; 531 void print() PRODUCT_RETURN; 532 void print_line() PRODUCT_RETURN; 533 void print(InstructionPrinter& ip) PRODUCT_RETURN; 534 }; 535 536 537 // The following macros are used to define base (i.e., non-leaf) 538 // and leaf instruction classes. They define class-name related 539 // generic functionality in one place. 540 541 #define BASE(class_name, super_class_name) \ 542 class class_name: public super_class_name { \ 543 public: \ 544 virtual class_name* as_##class_name() { return this; } \ 545 546 547 #define LEAF(class_name, super_class_name) \ 548 BASE(class_name, super_class_name) \ 549 public: \ 550 virtual const char* name() const { return #class_name; } \ 551 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 552 553 554 // Debugging support 555 556 557 #ifdef ASSERT 558 class AssertValues: public ValueVisitor { 559 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 560 }; 561 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 562 #else 563 #define ASSERT_VALUES 564 #endif // ASSERT 565 566 567 // A Phi is a phi function in the sense of SSA form. It stands for 568 // the value of a local variable at the beginning of a join block. 569 // A Phi consists of n operands, one for every incoming branch. 
570 571 LEAF(Phi, Instruction) 572 private: 573 BlockBegin* _block; // the block to which the phi function belongs 574 int _pf_flags; // the flags of the phi function 575 int _index; // to value on operand stack (index < 0) or to local 576 public: 577 // creation 578 Phi(ValueType* type, BlockBegin* b, int index) 579 : Instruction(type->base()) 580 , _pf_flags(0) 581 , _block(b) 582 , _index(index) 583 { 584 NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci())); 585 if (type->is_illegal()) { 586 make_illegal(); 587 } 588 } 589 590 // flags 591 enum Flag { 592 no_flag = 0, 593 visited = 1 << 0, 594 cannot_simplify = 1 << 1 595 }; 596 597 // accessors 598 bool is_local() const { return _index >= 0; } 599 bool is_on_stack() const { return !is_local(); } 600 int local_index() const { assert(is_local(), ""); return _index; } 601 int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); } 602 603 Value operand_at(int i) const; 604 int operand_count() const; 605 606 BlockBegin* block() const { return _block; } 607 608 void set(Flag f) { _pf_flags |= f; } 609 void clear(Flag f) { _pf_flags &= ~f; } 610 bool is_set(Flag f) const { return (_pf_flags & f) != 0; } 611 612 // Invalidates phis corresponding to merges of locals of two different types 613 // (these should never be referenced, otherwise the bytecodes are illegal) 614 void make_illegal() { 615 set(cannot_simplify); 616 set_type(illegalType); 617 } 618 619 bool is_illegal() const { 620 return type()->is_illegal(); 621 } 622 623 // generic 624 virtual void input_values_do(ValueVisitor* f) { 625 } 626 }; 627 628 629 // A local is a placeholder for an incoming argument to a function call. 
LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  ciType*  _declared_type;                       // statically declared type of the incoming argument
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    // Locals have no real bytecode position; use -1 as the printable bci.
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }

  virtual ciType* declared_type() const          { return _declared_type; }
  virtual ciType* exact_type() const;

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};


// A compile-time constant value; constants that need runtime patching carry
// a state_before and are pinned.
LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, NULL, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  Constant(ValueType* type, ValueStack* state_before):
    Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != NULL, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    // since it's patching it needs to be pinned
    pin();
  }

  // can only trap when patching is required (i.e. when state_before was given)
  virtual bool can_trap() const                  { return state_before() != NULL; }
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  // Constant-folds a two-way branch: returns the successor taken if the
  // comparison is decidable at compile time, NULL otherwise.
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return NULL;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }
};
// Common base for field loads and stores; holds the receiver, the field
// offset and the resolved ciField.
BASE(AccessField, Instruction)
 private:
  Value       _obj;
  int         _offset;
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


// Reads a field of an object (or a static field).
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;
  ciType* exact_type() const;

  // generic
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};


// Writes a value into a field of an object (or a static field).
LEAF(StoreField, AccessField)
 private:
  Value _value;

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};


// Common base for all array accesses; holds the array value and always
// requires a null check.
BASE(AccessArray, Instruction)
 private:
  Value       _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const                            { return _array; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_array); }
};


// Reads the length of an array (result type is always int).
LEAF(ArrayLength, AccessArray)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


// Common base for indexed array accesses; holds index, (optional) length
// for range checking, and the element type.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;
  BasicType _elt_type;

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  {
    ASSERT_VALUES
  }

  // accessors
  Value index() const                            { return _index; }
  Value length() const                           { return _length; }
  BasicType elt_type() const                     { return _elt_type; }

  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  // _length may be NULL, so it is only visited when present.
  virtual void input_values_do(ValueVisitor* f)  { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); }
};


// Loads an element from an array.
LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _explicit_null_check(NULL) {}

  // accessors
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING2(LoadIndexed, true, array()->subst(), index()->subst())
};


// Stores a value into an array element; object stores additionally need
// store checks and write barriers, and support MDO profiling.
LEAF(StoreIndexed, AccessIndexed)
 private:
  Value       _value;

  ciMethod* _profiled_method;                    // method to profile store type into (MDO)
  int       _profiled_bci;                       // bci to profile at
 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
  : AccessIndexed(array, index, length, elt_type, state_before)
  , _value(value), _profiled_method(NULL), _profiled_bci(0)
  {
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method;   }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci;         }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method;     }
  int profiled_bci() const                       { return _profiled_bci;        }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Arithmetic negation of a single operand.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};


// Common base for all two-operand instructions; stores the bytecode op
// and the two operands.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};


// Binary arithmetic (add/sub/mul/div/rem); div/rem can trap and pin.
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool        is_strictfp() const                { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Shift of x by s; result type follows x.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Bitwise and/or/xor; commutativity is decided out of line.
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Three-way comparison (lcmp/fcmpl/fcmpg/dcmpl/dcmpg) yielding an int.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional expression: (x cond y) ? tval : fval. The condition code is
// stored in the Op2 op field; use cond(), not op().
LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  // op() is hidden on purpose: the stored code is a Condition, not a bytecode.
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (i2l, d2f, ...).
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on an object reference.
// NOTE(review): this class definition continues beyond the end of this chunk;
// only the visible prefix is reproduced here.
LEAF(NullCheck, Instruction)
 private:
  Value       _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
1095 set_can_trap(true); 1096 assert(_obj->type()->is_object(), "null check must be applied to objects only"); 1097 pin(Instruction::PinExplicitNullCheck); 1098 } 1099 1100 // accessors 1101 Value obj() const { return _obj; } 1102 1103 // setters 1104 void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); } 1105 1106 // generic 1107 virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ } 1108 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1109 HASHING1(NullCheck, true, obj()->subst()) 1110 }; 1111 1112 1113 // This node is supposed to cast the type of another node to a more precise 1114 // declared type. 1115 LEAF(TypeCast, Instruction) 1116 private: 1117 ciType* _declared_type; 1118 Value _obj; 1119 1120 public: 1121 // The type of this node is the same type as the object type (and it might be constant). 1122 TypeCast(ciType* type, Value obj, ValueStack* state_before) 1123 : Instruction(obj->type(), state_before, obj->type()->is_constant()), 1124 _declared_type(type), 1125 _obj(obj) {} 1126 1127 // accessors 1128 ciType* declared_type() const { return _declared_type; } 1129 Value obj() const { return _obj; } 1130 1131 // generic 1132 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1133 }; 1134 1135 1136 BASE(StateSplit, Instruction) 1137 private: 1138 ValueStack* _state; 1139 1140 protected: 1141 static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block); 1142 1143 public: 1144 // creation 1145 StateSplit(ValueType* type, ValueStack* state_before = NULL) 1146 : Instruction(type, state_before) 1147 , _state(NULL) 1148 { 1149 pin(PinStateSplitConstructor); 1150 } 1151 1152 // accessors 1153 ValueStack* state() const { return _state; } 1154 IRScope* scope() const; // the state's scope 1155 1156 // manipulation 1157 void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = 
state; } 1158 1159 // generic 1160 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 1161 virtual void state_values_do(ValueVisitor* f); 1162 }; 1163 1164 1165 LEAF(Invoke, StateSplit) 1166 private: 1167 Bytecodes::Code _code; 1168 Value _recv; 1169 Values* _args; 1170 BasicTypeList* _signature; 1171 int _vtable_index; 1172 ciMethod* _target; 1173 1174 public: 1175 // creation 1176 Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args, 1177 int vtable_index, ciMethod* target, ValueStack* state_before); 1178 1179 // accessors 1180 Bytecodes::Code code() const { return _code; } 1181 Value receiver() const { return _recv; } 1182 bool has_receiver() const { return receiver() != NULL; } 1183 int number_of_arguments() const { return _args->length(); } 1184 Value argument_at(int i) const { return _args->at(i); } 1185 int vtable_index() const { return _vtable_index; } 1186 BasicTypeList* signature() const { return _signature; } 1187 ciMethod* target() const { return _target; } 1188 1189 ciType* declared_type() const; 1190 1191 // Returns false if target is not loaded 1192 bool target_is_final() const { return check_flag(TargetIsFinalFlag); } 1193 bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); } 1194 // Returns false if target is not loaded 1195 bool target_is_strictfp() const { return check_flag(TargetIsStrictfpFlag); } 1196 1197 // JSR 292 support 1198 bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; } 1199 bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); } 1200 1201 virtual bool needs_exception_state() const { return false; } 1202 1203 // generic 1204 virtual bool can_trap() const { return true; } 1205 virtual void input_values_do(ValueVisitor* f) { 1206 StateSplit::input_values_do(f); 1207 if (has_receiver()) f->visit(&_recv); 1208 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1209 } 1210 virtual void 
state_values_do(ValueVisitor *f); 1211 }; 1212 1213 1214 LEAF(NewInstance, StateSplit) 1215 private: 1216 ciInstanceKlass* _klass; 1217 1218 public: 1219 // creation 1220 NewInstance(ciInstanceKlass* klass, ValueStack* state_before) 1221 : StateSplit(instanceType, state_before) 1222 , _klass(klass) 1223 {} 1224 1225 // accessors 1226 ciInstanceKlass* klass() const { return _klass; } 1227 1228 virtual bool needs_exception_state() const { return false; } 1229 1230 // generic 1231 virtual bool can_trap() const { return true; } 1232 ciType* exact_type() const; 1233 ciType* declared_type() const; 1234 }; 1235 1236 1237 BASE(NewArray, StateSplit) 1238 private: 1239 Value _length; 1240 1241 public: 1242 // creation 1243 NewArray(Value length, ValueStack* state_before) 1244 : StateSplit(objectType, state_before) 1245 , _length(length) 1246 { 1247 // Do not ASSERT_VALUES since length is NULL for NewMultiArray 1248 } 1249 1250 // accessors 1251 Value length() const { return _length; } 1252 1253 virtual bool needs_exception_state() const { return false; } 1254 1255 ciType* declared_type() const; 1256 1257 // generic 1258 virtual bool can_trap() const { return true; } 1259 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); } 1260 }; 1261 1262 1263 LEAF(NewTypeArray, NewArray) 1264 private: 1265 BasicType _elt_type; 1266 1267 public: 1268 // creation 1269 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before) 1270 : NewArray(length, state_before) 1271 , _elt_type(elt_type) 1272 {} 1273 1274 // accessors 1275 BasicType elt_type() const { return _elt_type; } 1276 ciType* exact_type() const; 1277 }; 1278 1279 1280 LEAF(NewObjectArray, NewArray) 1281 private: 1282 ciKlass* _klass; 1283 1284 public: 1285 // creation 1286 NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {} 1287 1288 // accessors 1289 ciKlass* klass() const { return _klass; } 1290 
ciType* exact_type() const; 1291 }; 1292 1293 1294 LEAF(NewMultiArray, NewArray) 1295 private: 1296 ciKlass* _klass; 1297 Values* _dims; 1298 1299 public: 1300 // creation 1301 NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) { 1302 ASSERT_VALUES 1303 } 1304 1305 // accessors 1306 ciKlass* klass() const { return _klass; } 1307 Values* dims() const { return _dims; } 1308 int rank() const { return dims()->length(); } 1309 1310 // generic 1311 virtual void input_values_do(ValueVisitor* f) { 1312 // NOTE: we do not call NewArray::input_values_do since "length" 1313 // is meaningless for a multi-dimensional array; passing the 1314 // zeroth element down to NewArray as its length is a bad idea 1315 // since there will be a copy in the "dims" array which doesn't 1316 // get updated, and the value must not be traversed twice. Was bug 1317 // - kbr 4/10/2001 1318 StateSplit::input_values_do(f); 1319 for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i)); 1320 } 1321 }; 1322 1323 1324 BASE(TypeCheck, StateSplit) 1325 private: 1326 ciKlass* _klass; 1327 Value _obj; 1328 1329 ciMethod* _profiled_method; 1330 int _profiled_bci; 1331 1332 public: 1333 // creation 1334 TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before) 1335 : StateSplit(type, state_before), _klass(klass), _obj(obj), 1336 _profiled_method(NULL), _profiled_bci(0) { 1337 ASSERT_VALUES 1338 set_direct_compare(false); 1339 } 1340 1341 // accessors 1342 ciKlass* klass() const { return _klass; } 1343 Value obj() const { return _obj; } 1344 bool is_loaded() const { return klass() != NULL; } 1345 bool direct_compare() const { return check_flag(DirectCompareFlag); } 1346 1347 // manipulation 1348 void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); } 1349 1350 // generic 1351 virtual bool can_trap() const { return true; } 1352 virtual void input_values_do(ValueVisitor* f) { 
StateSplit::input_values_do(f); f->visit(&_obj); } 1353 1354 // Helpers for MethodData* profiling 1355 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1356 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1357 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1358 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1359 ciMethod* profiled_method() const { return _profiled_method; } 1360 int profiled_bci() const { return _profiled_bci; } 1361 }; 1362 1363 1364 LEAF(CheckCast, TypeCheck) 1365 public: 1366 // creation 1367 CheckCast(ciKlass* klass, Value obj, ValueStack* state_before) 1368 : TypeCheck(klass, obj, objectType, state_before) {} 1369 1370 void set_incompatible_class_change_check() { 1371 set_flag(ThrowIncompatibleClassChangeErrorFlag, true); 1372 } 1373 bool is_incompatible_class_change_check() const { 1374 return check_flag(ThrowIncompatibleClassChangeErrorFlag); 1375 } 1376 1377 ciType* declared_type() const; 1378 ciType* exact_type() const; 1379 }; 1380 1381 1382 LEAF(InstanceOf, TypeCheck) 1383 public: 1384 // creation 1385 InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {} 1386 1387 virtual bool needs_exception_state() const { return false; } 1388 }; 1389 1390 1391 BASE(AccessMonitor, StateSplit) 1392 private: 1393 Value _obj; 1394 int _monitor_no; 1395 1396 public: 1397 // creation 1398 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL) 1399 : StateSplit(illegalType, state_before) 1400 , _obj(obj) 1401 , _monitor_no(monitor_no) 1402 { 1403 set_needs_null_check(true); 1404 ASSERT_VALUES 1405 } 1406 1407 // accessors 1408 Value obj() const { return _obj; } 1409 int monitor_no() const { return _monitor_no; } 1410 1411 // generic 1412 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1413 }; 1414 1415 1416 LEAF(MonitorEnter, AccessMonitor) 1417 
public: 1418 // creation 1419 MonitorEnter(Value obj, int monitor_no, ValueStack* state_before) 1420 : AccessMonitor(obj, monitor_no, state_before) 1421 { 1422 ASSERT_VALUES 1423 } 1424 1425 // generic 1426 virtual bool can_trap() const { return true; } 1427 }; 1428 1429 1430 LEAF(MonitorExit, AccessMonitor) 1431 public: 1432 // creation 1433 MonitorExit(Value obj, int monitor_no) 1434 : AccessMonitor(obj, monitor_no, NULL) 1435 { 1436 ASSERT_VALUES 1437 } 1438 }; 1439 1440 1441 LEAF(Intrinsic, StateSplit) 1442 private: 1443 vmIntrinsics::ID _id; 1444 Values* _args; 1445 Value _recv; 1446 int _nonnull_state; // mask identifying which args are nonnull 1447 1448 public: 1449 // preserves_state can be set to true for Intrinsics 1450 // which are guaranteed to preserve register state across any slow 1451 // cases; setting it to true does not mean that the Intrinsic can 1452 // not trap, only that if we continue execution in the same basic 1453 // block after the Intrinsic, all of the registers are intact. This 1454 // allows load elimination and common expression elimination to be 1455 // performed across the Intrinsic. The default value is false. 
1456 Intrinsic(ValueType* type, 1457 vmIntrinsics::ID id, 1458 Values* args, 1459 bool has_receiver, 1460 ValueStack* state_before, 1461 bool preserves_state, 1462 bool cantrap = true) 1463 : StateSplit(type, state_before) 1464 , _id(id) 1465 , _args(args) 1466 , _recv(NULL) 1467 , _nonnull_state(AllBits) 1468 { 1469 assert(args != NULL, "args must exist"); 1470 ASSERT_VALUES 1471 set_flag(PreservesStateFlag, preserves_state); 1472 set_flag(CanTrapFlag, cantrap); 1473 if (has_receiver) { 1474 _recv = argument_at(0); 1475 } 1476 set_needs_null_check(has_receiver); 1477 1478 // some intrinsics can't trap, so don't force them to be pinned 1479 if (!can_trap()) { 1480 unpin(PinStateSplitConstructor); 1481 } 1482 } 1483 1484 // accessors 1485 vmIntrinsics::ID id() const { return _id; } 1486 int number_of_arguments() const { return _args->length(); } 1487 Value argument_at(int i) const { return _args->at(i); } 1488 1489 bool has_receiver() const { return (_recv != NULL); } 1490 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } 1491 bool preserves_state() const { return check_flag(PreservesStateFlag); } 1492 1493 bool arg_needs_null_check(int i) { 1494 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 1495 return is_set_nth_bit(_nonnull_state, i); 1496 } 1497 return true; 1498 } 1499 1500 void set_arg_needs_null_check(int i, bool check) { 1501 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 1502 if (check) { 1503 _nonnull_state |= nth_bit(i); 1504 } else { 1505 _nonnull_state &= ~(nth_bit(i)); 1506 } 1507 } 1508 } 1509 1510 // generic 1511 virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1512 virtual void input_values_do(ValueVisitor* f) { 1513 StateSplit::input_values_do(f); 1514 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1515 } 1516 }; 1517 1518 1519 class LIR_List; 1520 1521 LEAF(BlockBegin, StateSplit) 1522 private: 1523 int _block_id; // the unique block id 
1524 int _bci; // start-bci of block 1525 int _depth_first_number; // number of this block in a depth-first ordering 1526 int _linear_scan_number; // number of this block in linear-scan ordering 1527 int _loop_depth; // the loop nesting level of this block 1528 int _loop_index; // number of the innermost loop of this block 1529 int _flags; // the flags associated with this block 1530 1531 // fields used by BlockListBuilder 1532 int _total_preds; // number of predecessors found by BlockListBuilder 1533 BitMap _stores_to_locals; // bit is set when a local variable is stored in the block 1534 1535 // SSA specific fields: (factor out later) 1536 BlockList _successors; // the successors of this block 1537 BlockList _predecessors; // the predecessors of this block 1538 BlockBegin* _dominator; // the dominator of this block 1539 // SSA specific ends 1540 BlockEnd* _end; // the last instruction of this block 1541 BlockList _exception_handlers; // the exception handlers potentially invoked by this block 1542 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler 1543 int _exception_handler_pco; // if this block is the start of an exception handler, 1544 // this records the PC offset in the assembly code of the 1545 // first instruction in this block 1546 Label _label; // the label associated with this block 1547 LIR_List* _lir; // the low level intermediate representation for this block 1548 1549 BitMap _live_in; // set of live LIR_Opr registers at entry to this block 1550 BitMap _live_out; // set of live LIR_Opr registers at exit from this block 1551 BitMap _live_gen; // set of registers used before any redefinition in this block 1552 BitMap _live_kill; // set of registers defined in this block 1553 1554 BitMap _fpu_register_usage; 1555 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan 1556 int _first_lir_instruction_id; // ID of first LIR instruction in this block 1557 int 
_last_lir_instruction_id; // ID of last LIR instruction in this block 1558 1559 void iterate_preorder (boolArray& mark, BlockClosure* closure); 1560 void iterate_postorder(boolArray& mark, BlockClosure* closure); 1561 1562 friend class SuxAndWeightAdjuster; 1563 1564 public: 1565 void* operator new(size_t size) { 1566 Compilation* c = Compilation::current(); 1567 void* res = c->arena()->Amalloc(size); 1568 ((BlockBegin*)res)->_id = c->get_next_id(); 1569 ((BlockBegin*)res)->_block_id = c->get_next_block_id(); 1570 return res; 1571 } 1572 1573 // initialization/counting 1574 static int number_of_blocks() { 1575 return Compilation::current()->number_of_blocks(); 1576 } 1577 1578 // creation 1579 BlockBegin(int bci) 1580 : StateSplit(illegalType) 1581 , _bci(bci) 1582 , _depth_first_number(-1) 1583 , _linear_scan_number(-1) 1584 , _loop_depth(0) 1585 , _flags(0) 1586 , _dominator(NULL) 1587 , _end(NULL) 1588 , _predecessors(2) 1589 , _successors(2) 1590 , _exception_handlers(1) 1591 , _exception_states(NULL) 1592 , _exception_handler_pco(-1) 1593 , _lir(NULL) 1594 , _loop_index(-1) 1595 , _live_in() 1596 , _live_out() 1597 , _live_gen() 1598 , _live_kill() 1599 , _fpu_register_usage() 1600 , _fpu_stack_state(NULL) 1601 , _first_lir_instruction_id(-1) 1602 , _last_lir_instruction_id(-1) 1603 , _total_preds(0) 1604 , _stores_to_locals() 1605 { 1606 #ifndef PRODUCT 1607 set_printable_bci(bci); 1608 #endif 1609 } 1610 1611 // accessors 1612 int block_id() const { return _block_id; } 1613 int bci() const { return _bci; } 1614 BlockList* successors() { return &_successors; } 1615 BlockBegin* dominator() const { return _dominator; } 1616 int loop_depth() const { return _loop_depth; } 1617 int depth_first_number() const { return _depth_first_number; } 1618 int linear_scan_number() const { return _linear_scan_number; } 1619 BlockEnd* end() const { return _end; } 1620 Label* label() { return &_label; } 1621 LIR_List* lir() const { return _lir; } 1622 int exception_handler_pco() 
const { return _exception_handler_pco; } 1623 BitMap& live_in() { return _live_in; } 1624 BitMap& live_out() { return _live_out; } 1625 BitMap& live_gen() { return _live_gen; } 1626 BitMap& live_kill() { return _live_kill; } 1627 BitMap& fpu_register_usage() { return _fpu_register_usage; } 1628 intArray* fpu_stack_state() const { return _fpu_stack_state; } 1629 int first_lir_instruction_id() const { return _first_lir_instruction_id; } 1630 int last_lir_instruction_id() const { return _last_lir_instruction_id; } 1631 int total_preds() const { return _total_preds; } 1632 BitMap& stores_to_locals() { return _stores_to_locals; } 1633 1634 // manipulation 1635 void set_dominator(BlockBegin* dom) { _dominator = dom; } 1636 void set_loop_depth(int d) { _loop_depth = d; } 1637 void set_depth_first_number(int dfn) { _depth_first_number = dfn; } 1638 void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; } 1639 void set_end(BlockEnd* end); 1640 void clear_end(); 1641 void disconnect_from_graph(); 1642 static void disconnect_edge(BlockBegin* from, BlockBegin* to); 1643 BlockBegin* insert_block_between(BlockBegin* sux); 1644 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1645 void set_lir(LIR_List* lir) { _lir = lir; } 1646 void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; } 1647 void set_live_in (BitMap map) { _live_in = map; } 1648 void set_live_out (BitMap map) { _live_out = map; } 1649 void set_live_gen (BitMap map) { _live_gen = map; } 1650 void set_live_kill (BitMap map) { _live_kill = map; } 1651 void set_fpu_register_usage(BitMap map) { _fpu_register_usage = map; } 1652 void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; } 1653 void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; } 1654 void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; } 1655 void increment_total_preds(int n = 1) { _total_preds += n; } 1656 void init_stores_to_locals(int locals_count) { 
_stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); } 1657 1658 // generic 1659 virtual void state_values_do(ValueVisitor* f); 1660 1661 // successors and predecessors 1662 int number_of_sux() const; 1663 BlockBegin* sux_at(int i) const; 1664 void add_successor(BlockBegin* sux); 1665 void remove_successor(BlockBegin* pred); 1666 bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); } 1667 1668 void add_predecessor(BlockBegin* pred); 1669 void remove_predecessor(BlockBegin* pred); 1670 bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); } 1671 int number_of_preds() const { return _predecessors.length(); } 1672 BlockBegin* pred_at(int i) const { return _predecessors[i]; } 1673 1674 // exception handlers potentially invoked by this block 1675 void add_exception_handler(BlockBegin* b); 1676 bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); } 1677 int number_of_exception_handlers() const { return _exception_handlers.length(); } 1678 BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); } 1679 1680 // states of the instructions that have an edge to this exception handler 1681 int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 
0 : _exception_states->length(); } 1682 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); } 1683 int add_exception_state(ValueStack* state); 1684 1685 // flags 1686 enum Flag { 1687 no_flag = 0, 1688 std_entry_flag = 1 << 0, 1689 osr_entry_flag = 1 << 1, 1690 exception_entry_flag = 1 << 2, 1691 subroutine_entry_flag = 1 << 3, 1692 backward_branch_target_flag = 1 << 4, 1693 is_on_work_list_flag = 1 << 5, 1694 was_visited_flag = 1 << 6, 1695 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand 1696 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split 1697 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan 1698 linear_scan_loop_end_flag = 1 << 10 // set during loop-detection for LinearScan 1699 }; 1700 1701 void set(Flag f) { _flags |= f; } 1702 void clear(Flag f) { _flags &= ~f; } 1703 bool is_set(Flag f) const { return (_flags & f) != 0; } 1704 bool is_entry_block() const { 1705 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag; 1706 return (_flags & entry_mask) != 0; 1707 } 1708 1709 // iteration 1710 void iterate_preorder (BlockClosure* closure); 1711 void iterate_postorder (BlockClosure* closure); 1712 1713 void block_values_do(ValueVisitor* f); 1714 1715 // loops 1716 void set_loop_index(int ix) { _loop_index = ix; } 1717 int loop_index() const { return _loop_index; } 1718 1719 // merging 1720 bool try_merge(ValueStack* state); // try to merge states at block begin 1721 void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); } 1722 1723 // debugging 1724 void print_block() PRODUCT_RETURN; 1725 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN; 1726 }; 1727 1728 1729 BASE(BlockEnd, StateSplit) 1730 private: 1731 BlockBegin* _begin; 1732 
BlockList* _sux; 1733 1734 protected: 1735 BlockList* sux() const { return _sux; } 1736 1737 void set_sux(BlockList* sux) { 1738 #ifdef ASSERT 1739 assert(sux != NULL, "sux must exist"); 1740 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist"); 1741 #endif 1742 _sux = sux; 1743 } 1744 1745 public: 1746 // creation 1747 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1748 : StateSplit(type, state_before) 1749 , _begin(NULL) 1750 , _sux(NULL) 1751 { 1752 set_flag(IsSafepointFlag, is_safepoint); 1753 } 1754 1755 // accessors 1756 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1757 BlockBegin* begin() const { return _begin; } 1758 1759 // manipulation 1760 void set_begin(BlockBegin* begin); 1761 1762 // successors 1763 int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; } 1764 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1765 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1766 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1767 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1768 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1769 }; 1770 1771 1772 LEAF(Goto, BlockEnd) 1773 public: 1774 enum Direction { 1775 none, // Just a regular goto 1776 taken, not_taken // Goto produced from If 1777 }; 1778 private: 1779 ciMethod* _profiled_method; 1780 int _profiled_bci; 1781 Direction _direction; 1782 public: 1783 // creation 1784 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1785 : BlockEnd(illegalType, state_before, is_safepoint) 1786 , _direction(none) 1787 , _profiled_method(NULL) 1788 , _profiled_bci(0) { 1789 BlockList* s = new BlockList(1); 1790 s->append(sux); 1791 set_sux(s); 1792 } 1793 1794 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1795 , _direction(none) 1796 , _profiled_method(NULL) 1797 , _profiled_bci(0) { 1798 
    // (tail of Goto) install the single successor
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  // branch-profiling accessors (meaningful only when should_profile() is set)
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }
  Direction direction() const                    { return _direction; }

  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  void set_direction(Direction d)                { _direction = d; }
};


// If compares _x <_cond> _y and branches to one of two successors:
// sux_at(0) is taken when the condition holds, sux_at(1) otherwise.
LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci;   // Canonicalizer may alter bci of If node
  bool        _swapped;        // Is the order reversed with respect to the original If in the
                               // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    // successor 0 = true branch, successor 1 = false branch
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                          { return _x; }
  Condition cond() const                   { return _cond; }
  bool unordered_is_true() const           { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                          { return _y; }
  BlockBegin* sux_for(bool is_true) const  { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                 { return sux_for(true); }
  BlockBegin* fsux() const                 { return sux_for(false); }
  BlockBegin* usux() const                 { return sux_for(unordered_is_true()); }
  bool should_profile() const              { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const        { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                 { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                  { return _swapped; }

  // manipulation
  // Exchange the two operands and mirror the condition so the overall
  // comparison keeps the same meaning (x < y  <==>  y > x).
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  // Exchange the two successors and negate the condition (the unordered
  // flag is flipped as well) so the overall branch keeps the same meaning.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value)        { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci)             { _profiled_bci = bci; }
  void set_swapped(bool value)               { _swapped = value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;
  Value    _obj;
  bool     _test_is_instance; // jump if instance
  int      _instanceof_bci;

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* an instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const                   { return _klass; }
  Value obj() const                        { return _obj; }
  int instanceof_bci() const               { return _instanceof_bci; }
  bool test_is_instance() const            { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const  { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                 { return sux_for(true); }
  BlockBegin* fsux() const                 { return sux_for(false); }

  // manipulation
  // Exchange the two successors and invert the test so the overall branch
  // keeps the same meaning.
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); }
};


// Common base of TableSwitch and LookupSwitch: dispatches on _tag.
BASE(Switch, BlockEnd)
 private:
  Value _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                        { return _tag; }
  // number of explicit (non-default) switch targets; the remaining
  // successor is the default target
  int length() const                       { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
};
// TableSwitch dispatches on a dense key range [lo_key(), hi_key()];
// successor i is the target for key lo_key() + i.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                       { return _lo_key; }
  int hi_key() const                       { return _lo_key + length() - 1; }
};


// LookupSwitch dispatches on a sparse, explicitly listed key set;
// key_at(i) is matched by successor i.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                  { return _keys->at(i); }
};


// Method return; _result is NULL for a void return, and the
// instruction's type is derived from the result's base type.
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                     { return _result; }
  bool has_result() const                  { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Throws _exception; always ends the block and can trap.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                  { return _exception; }

  // generic
  virtual bool can_trap() const            { return true; }
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// Artificial root of the CFG: its successors are the (optional) OSR
// entry block and the standard entry block.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const            { return default_sux(); }
  // NULL when there is no OSR entry; otherwise the first successor
  BlockBegin* osr_entry() const            { return number_of_sux() < 2 ? NULL : sux_at(0); }
};


// Models the OSR entry value (pointer-sized: longType on _LP64,
// intType otherwise); pinned.
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input; // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                      { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { f->visit(&_input); }
};


// Common base of all Unsafe operations; puts yield voidType,
// gets yield the value type corresponding to _basic_type.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type; // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                   { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Unsafe access to a raw address, optionally computed as
// base + (index << log2_scale).
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;       // Base address (a Java long)
  Value _index;      // Index if computed by optimizer; initialized to NULL
  int   _log2_scale; // Scale factor: 0, 1, 2, or 3.
                     // Indicates log2 of number of bytes (1, 2, 4, or 8)
                     // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                             { return _base; }
  Value index()                            { return _index; }
  bool  has_index()                        { return (_index != NULL); }
  int   log2_scale()                       { return _log2_scale; }

  // setters
  void set_base (Value base)               { _base = base; }
  void set_index(Value index)              { _index = index; }
  void set_log2_scale(int log2_scale)      { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeOp::input_values_do(f);
                                                    f->visit(&_base);
                                                    if (has_index()) f->visit(&_index); }
};


LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
  bool _may_be_unaligned, _is_wide; // For OSREntry

 public:
  UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  // accessors
  bool may_be_unaligned()                  { return _may_be_unaligned; }
  bool is_wide()                           { return _is_wide; }
};


LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value; // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeRawOp::input_values_do(f);
                                                    f->visit(&_value); }
};


// Unsafe access to a field of an object, addressed by object + offset.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;     // Object to be fetched from or mutated
  Value _offset;     // Offset within object
  bool  _is_volatile; // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                           { return _object; }
  Value offset()                           { return _offset; }
  bool  is_volatile()                      { return _is_volatile; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeOp::input_values_do(f);
                                                    f->visit(&_object);
                                                    f->visit(&_offset); }
};


LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value; // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeObjectOp::input_values_do(f);
                                                    f->visit(&_value); }
};


// Atomic get-and-set / get-and-add of a field (distinguished by _is_add).
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value; // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                      { return _is_add; }
  Value value()                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { UnsafeObjectOp::input_values_do(f);
                                                    f->visit(&_value); }
};


// Common base of the prefetch hints; produces no value (T_VOID).
BASE(UnsafePrefetch, UnsafeObjectOp)
 public:
  UnsafePrefetch(Value object, Value offset)
  : UnsafeObjectOp(T_VOID, object, offset, false, false)
  {
  }
};


LEAF(UnsafePrefetchRead, UnsafePrefetch)
 public:
  UnsafePrefetchRead(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};


LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
// Profiling instruction for a call site; produces no value (voidType)
// and is pinned because it has side effects.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod* _method;
  int       _bci_of_invoke;
  ciMethod* _callee;       // the method that is called at the given bci
  Value     _recv;         // receiver; may be NULL (guarded in input_values_do)
  ciKlass*  _known_holder;

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method()                       { return _method; }
  int bci_of_invoke()                      { return _bci_of_invoke; }
  ciMethod* callee()                       { return _callee; }
  Value recv()                             { return _recv; }
  ciKlass* known_holder()                  { return _known_holder; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { if (_recv != NULL) f->visit(&_recv); }
};


// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
2361 LEAF(RuntimeCall, Instruction) 2362 private: 2363 const char* _entry_name; 2364 address _entry; 2365 Values* _args; 2366 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2367 2368 public: 2369 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2370 : Instruction(type) 2371 , _entry(entry) 2372 , _args(args) 2373 , _entry_name(entry_name) 2374 , _pass_thread(pass_thread) { 2375 ASSERT_VALUES 2376 pin(); 2377 } 2378 2379 const char* entry_name() const { return _entry_name; } 2380 address entry() const { return _entry; } 2381 int number_of_arguments() const { return _args->length(); } 2382 Value argument_at(int i) const { return _args->at(i); } 2383 bool pass_thread() const { return _pass_thread; } 2384 2385 virtual void input_values_do(ValueVisitor* f) { 2386 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2387 } 2388 }; 2389 2390 // Use to trip invocation counter of an inlined method 2391 2392 LEAF(ProfileInvoke, Instruction) 2393 private: 2394 ciMethod* _inlinee; 2395 ValueStack* _state; 2396 2397 public: 2398 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2399 : Instruction(voidType) 2400 , _inlinee(inlinee) 2401 , _state(state) 2402 { 2403 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
    pin();
  }

  // accessors
  ciMethod* inlinee()                      { return _inlinee; }
  ValueStack* state()                      { return _state; }

  // generic
  virtual void input_values_do(ValueVisitor*)   {}
  virtual void state_values_do(ValueVisitor*);
};


// Emits a memory barrier identified by a LIR_Code; produces no value
// and is pinned so it stays exactly where it was placed.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code; // which barrier to emit

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  // accessors
  LIR_Code code()                          { return _code; }

  // generic
  virtual void input_values_do(ValueVisitor*)   {}
};


// A directed edge (from -> to) between two basic blocks.
class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  BlockBegin* _to;
 public:
  BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
  BlockBegin* from() const                 { return _from; }
  BlockBegin* to() const                   { return _to; }
  bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
  bool is_same(BlockPair* p) const         { return _from == p->from() && _to == p->to(); }
  void set_to(BlockBegin* b)               { _to = b; }
  void set_from(BlockBegin* b)             { _from = b; }
};


define_array(BlockPairArray, BlockPair*)
define_stack(BlockPairList, BlockPairArray)


// Successor queries on BlockBegin; each cross-checks the cached successor
// list against the attached BlockEnd (once _end is set) in debug builds.
inline int         BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const              { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); }

#undef ASSERT_VALUES

#endif // SHARE_VM_C1_C1_INSTRUCTION_HPP