1 /* 2 * Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_C1_C1_INSTRUCTION_HPP 26 #define SHARE_VM_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
// Forward declarations for every node in the instruction hierarchy, so the
// visitor, the as_*() downcasts, and the typedefs below can refer to them
// before their definitions appear.
class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class IfInstanceOf;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafePrefetch;
class UnsafePrefetchRead;
class UnsafePrefetchWrite;
class ProfileCall;
class ProfileInvoke;
class RuntimeCall;

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
define_array(ValueArray, Value)
define_stack(Values, ValueArray)

// Growable array/stack of ValueStack pointers (used e.g. for state handling).
define_array(ValueStackArray, ValueStack*)
define_stack(ValueStackStack, ValueStackArray)

// BlockClosure is the base class for block traversal/iteration.
// Abstract functor applied to each basic block during CFG traversal.
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // Receives a pointer to the Value slot so the visitor may replace it.
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
define_array(BlockBeginArray, BlockBegin*)
define_stack(_BlockList, BlockBeginArray)

// A growable list of basic blocks with traversal helpers.
class BlockList: public _BlockList {
 public:
  BlockList(): _BlockList() {}
  BlockList(const int size): _BlockList(size) {}
  BlockList(const int size, BlockBegin* init): _BlockList(size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure from first to last block
  void iterate_backward(BlockClosure* closure);  // apply closure from last to first block
  void blocks_do(void f(BlockBegin*));           // apply a plain function to every block
  void values_do(ValueVisitor* f);               // visit all values of all blocks
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
class InstructionVisitor: public StackObj {
 public:
  // One pure virtual per concrete Instruction class; Instruction::visit()
  // (generated by the LEAF macro) dispatches to the matching do_X.
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafePrefetchRead (UnsafePrefetchRead*  x) = 0;
  virtual void do_UnsafePrefetchWrite(UnsafePrefetchWrite* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
279 280 class Instruction: public CompilationResourceObj { 281 private: 282 int _id; // the unique instruction id 283 #ifndef PRODUCT 284 int _printable_bci; // the bci of the instruction for printing 285 #endif 286 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 287 int _pin_state; // set of PinReason describing the reason for pinning 288 ValueType* _type; // the instruction value type 289 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 290 Instruction* _subst; // the substitution instruction if any 291 LIR_Opr _operand; // LIR specific information 292 unsigned int _flags; // Flag bits 293 294 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 295 ValueStack* _exception_state; // Copy of state for exception handling 296 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 297 298 friend class UseCountComputer; 299 friend class BlockBegin; 300 301 void update_exception_state(ValueStack* state); 302 303 bool has_printable_bci() const { return NOT_PRODUCT(_printable_bci != -99) PRODUCT_ONLY(false); } 304 305 protected: 306 void set_type(ValueType* type) { 307 assert(type != NULL, "type must exist"); 308 _type = type; 309 } 310 311 public: 312 void* operator new(size_t size) { 313 Compilation* c = Compilation::current(); 314 void* res = c->arena()->Amalloc(size); 315 ((Instruction*)res)->_id = c->get_next_id(); 316 return res; 317 } 318 319 static const int no_bci = -99; 320 321 enum InstructionFlag { 322 NeedsNullCheckFlag = 0, 323 CanTrapFlag, 324 DirectCompareFlag, 325 IsEliminatedFlag, 326 IsInitializedFlag, 327 IsLoadedFlag, 328 IsSafepointFlag, 329 IsStaticFlag, 330 IsStrictfpFlag, 331 NeedsStoreCheckFlag, 332 NeedsWriteBarrierFlag, 333 PreservesStateFlag, 334 TargetIsFinalFlag, 335 TargetIsLoadedFlag, 336 TargetIsStrictfpFlag, 337 UnorderedIsTrueFlag, 338 
NeedsPatchingFlag, 339 ThrowIncompatibleClassChangeErrorFlag, 340 ProfileMDOFlag, 341 IsLinkedInBlockFlag, 342 InstructionLastFlag 343 }; 344 345 public: 346 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 347 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 348 349 // 'globally' used condition values 350 enum Condition { 351 eql, neq, lss, leq, gtr, geq 352 }; 353 354 // Instructions may be pinned for many reasons and under certain conditions 355 // with enough knowledge it's possible to safely unpin them. 356 enum PinReason { 357 PinUnknown = 1 << 0 358 , PinExplicitNullCheck = 1 << 3 359 , PinStackForStateSplit= 1 << 12 360 , PinStateSplitConstructor= 1 << 13 361 , PinGlobalValueNumbering= 1 << 14 362 }; 363 364 static Condition mirror(Condition cond); 365 static Condition negate(Condition cond); 366 367 // initialization 368 static int number_of_instructions() { 369 return Compilation::current()->number_of_instructions(); 370 } 371 372 // creation 373 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 374 : _use_count(0) 375 #ifndef PRODUCT 376 , _printable_bci(-99) 377 #endif 378 , _pin_state(0) 379 , _type(type) 380 , _next(NULL) 381 , _subst(NULL) 382 , _flags(0) 383 , _operand(LIR_OprFact::illegalOpr) 384 , _state_before(state_before) 385 , _exception_handlers(NULL) 386 { 387 check_state(state_before); 388 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 389 update_exception_state(_state_before); 390 } 391 392 // accessors 393 int id() const { return _id; } 394 #ifndef PRODUCT 395 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 396 void set_printable_bci(int bci) { NOT_PRODUCT(_printable_bci = bci;) } 397 #endif 398 int use_count() const { return _use_count; } 399 int pin_state() const { return _pin_state; } 400 bool 
is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 401 ValueType* type() const { return _type; } 402 Instruction* prev(BlockBegin* block); // use carefully, expensive operation 403 Instruction* next() const { return _next; } 404 bool has_subst() const { return _subst != NULL; } 405 Instruction* subst() { return _subst == NULL ? this : _subst->subst(); } 406 LIR_Opr operand() const { return _operand; } 407 408 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 409 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 410 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 411 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 412 413 bool has_uses() const { return use_count() > 0; } 414 ValueStack* state_before() const { return _state_before; } 415 ValueStack* exception_state() const { return _exception_state; } 416 virtual bool needs_exception_state() const { return true; } 417 XHandlers* exception_handlers() const { return _exception_handlers; } 418 419 // manipulation 420 void pin(PinReason reason) { _pin_state |= reason; } 421 void pin() { _pin_state |= PinUnknown; } 422 // DANGEROUS: only used by EliminateStores 423 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 424 425 Instruction* set_next(Instruction* next) { 426 assert(next->has_printable_bci(), "_printable_bci should have been set"); 427 assert(next != NULL, "must not be NULL"); 428 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 429 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 430 431 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 432 _next = next; 433 return next; 434 } 435 436 Instruction* set_next(Instruction* next, int bci) { 437 #ifndef PRODUCT 438 next->set_printable_bci(bci); 439 #endif 440 return set_next(next); 441 } 442 443 void set_subst(Instruction* subst) { 444 
assert(subst == NULL || 445 type()->base() == subst->type()->base() || 446 subst->type()->base() == illegalType, "type can't change"); 447 _subst = subst; 448 } 449 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 450 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 451 452 // machine-specifics 453 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 454 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 455 456 // generic 457 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 458 virtual Phi* as_Phi() { return NULL; } 459 virtual Local* as_Local() { return NULL; } 460 virtual Constant* as_Constant() { return NULL; } 461 virtual AccessField* as_AccessField() { return NULL; } 462 virtual LoadField* as_LoadField() { return NULL; } 463 virtual StoreField* as_StoreField() { return NULL; } 464 virtual AccessArray* as_AccessArray() { return NULL; } 465 virtual ArrayLength* as_ArrayLength() { return NULL; } 466 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 467 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 468 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 469 virtual NegateOp* as_NegateOp() { return NULL; } 470 virtual Op2* as_Op2() { return NULL; } 471 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 472 virtual ShiftOp* as_ShiftOp() { return NULL; } 473 virtual LogicOp* as_LogicOp() { return NULL; } 474 virtual CompareOp* as_CompareOp() { return NULL; } 475 virtual IfOp* as_IfOp() { return NULL; } 476 virtual Convert* as_Convert() { return NULL; } 477 virtual NullCheck* as_NullCheck() { return NULL; } 478 virtual OsrEntry* as_OsrEntry() { return NULL; } 479 virtual StateSplit* as_StateSplit() { return NULL; } 480 virtual Invoke* as_Invoke() { return NULL; } 481 virtual NewInstance* as_NewInstance() { return NULL; } 482 virtual NewArray* 
as_NewArray() { return NULL; } 483 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 484 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 485 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 486 virtual TypeCheck* as_TypeCheck() { return NULL; } 487 virtual CheckCast* as_CheckCast() { return NULL; } 488 virtual InstanceOf* as_InstanceOf() { return NULL; } 489 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 490 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 491 virtual MonitorExit* as_MonitorExit() { return NULL; } 492 virtual Intrinsic* as_Intrinsic() { return NULL; } 493 virtual BlockBegin* as_BlockBegin() { return NULL; } 494 virtual BlockEnd* as_BlockEnd() { return NULL; } 495 virtual Goto* as_Goto() { return NULL; } 496 virtual If* as_If() { return NULL; } 497 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 498 virtual TableSwitch* as_TableSwitch() { return NULL; } 499 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 500 virtual Return* as_Return() { return NULL; } 501 virtual Throw* as_Throw() { return NULL; } 502 virtual Base* as_Base() { return NULL; } 503 virtual RoundFP* as_RoundFP() { return NULL; } 504 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 505 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 506 507 virtual void visit(InstructionVisitor* v) = 0; 508 509 virtual bool can_trap() const { return false; } 510 511 virtual void input_values_do(ValueVisitor* f) = 0; 512 virtual void state_values_do(ValueVisitor* f); 513 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 514 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 515 516 virtual ciType* exact_type() const { return NULL; } 517 virtual ciType* declared_type() const { return NULL; } 518 519 // hashing 520 virtual const char* name() const = 0; 521 HASHING1(Instruction, false, id()) // hashing disabled by default 522 523 // 
debugging 524 static void check_state(ValueStack* state) PRODUCT_RETURN; 525 void print() PRODUCT_RETURN; 526 void print_line() PRODUCT_RETURN; 527 void print(InstructionPrinter& ip) PRODUCT_RETURN; 528 }; 529 530 531 // The following macros are used to define base (i.e., non-leaf) 532 // and leaf instruction classes. They define class-name related 533 // generic functionality in one place. 534 535 #define BASE(class_name, super_class_name) \ 536 class class_name: public super_class_name { \ 537 public: \ 538 virtual class_name* as_##class_name() { return this; } \ 539 540 541 #define LEAF(class_name, super_class_name) \ 542 BASE(class_name, super_class_name) \ 543 public: \ 544 virtual const char* name() const { return #class_name; } \ 545 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 546 547 548 // Debugging support 549 550 551 #ifdef ASSERT 552 class AssertValues: public ValueVisitor { 553 void visit(Value* x) { assert((*x) != NULL, "value must exist"); } 554 }; 555 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 556 #else 557 #define ASSERT_VALUES 558 #endif // ASSERT 559 560 561 // A Phi is a phi function in the sense of SSA form. It stands for 562 // the value of a local variable at the beginning of a join block. 563 // A Phi consists of n operands, one for every incoming branch. 
564 565 LEAF(Phi, Instruction) 566 private: 567 BlockBegin* _block; // the block to which the phi function belongs 568 int _pf_flags; // the flags of the phi function 569 int _index; // to value on operand stack (index < 0) or to local 570 public: 571 // creation 572 Phi(ValueType* type, BlockBegin* b, int index) 573 : Instruction(type->base()) 574 , _pf_flags(0) 575 , _block(b) 576 , _index(index) 577 { 578 if (type->is_illegal()) { 579 make_illegal(); 580 } 581 } 582 583 // flags 584 enum Flag { 585 no_flag = 0, 586 visited = 1 << 0, 587 cannot_simplify = 1 << 1 588 }; 589 590 // accessors 591 bool is_local() const { return _index >= 0; } 592 bool is_on_stack() const { return !is_local(); } 593 int local_index() const { assert(is_local(), ""); return _index; } 594 int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); } 595 596 Value operand_at(int i) const; 597 int operand_count() const; 598 599 BlockBegin* block() const { return _block; } 600 601 void set(Flag f) { _pf_flags |= f; } 602 void clear(Flag f) { _pf_flags &= ~f; } 603 bool is_set(Flag f) const { return (_pf_flags & f) != 0; } 604 605 // Invalidates phis corresponding to merges of locals of two different types 606 // (these should never be referenced, otherwise the bytecodes are illegal) 607 void make_illegal() { 608 set(cannot_simplify); 609 set_type(illegalType); 610 } 611 612 bool is_illegal() const { 613 return type()->is_illegal(); 614 } 615 616 // generic 617 virtual void input_values_do(ValueVisitor* f) { 618 } 619 }; 620 621 622 // A local is a placeholder for an incoming argument to a function call. 
623 LEAF(Local, Instruction) 624 private: 625 int _java_index; // the local index within the method to which the local belongs 626 public: 627 // creation 628 Local(ValueType* type, int index) 629 : Instruction(type) 630 , _java_index(index) 631 {} 632 633 // accessors 634 int java_index() const { return _java_index; } 635 636 // generic 637 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 638 }; 639 640 641 LEAF(Constant, Instruction) 642 public: 643 // creation 644 Constant(ValueType* type): 645 Instruction(type, NULL, true) 646 { 647 assert(type->is_constant(), "must be a constant"); 648 } 649 650 Constant(ValueType* type, ValueStack* state_before): 651 Instruction(type, state_before, true) 652 { 653 assert(state_before != NULL, "only used for constants which need patching"); 654 assert(type->is_constant(), "must be a constant"); 655 // since it's patching it needs to be pinned 656 pin(); 657 } 658 659 virtual bool can_trap() const { return state_before() != NULL; } 660 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 661 662 virtual intx hash() const; 663 virtual bool is_equal(Value v) const; 664 665 666 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 667 668 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 669 BlockBegin* compare(Instruction::Condition cond, Value right, 670 BlockBegin* true_sux, BlockBegin* false_sux) const { 671 switch (compare(cond, right)) { 672 case not_comparable: 673 return NULL; 674 case cond_false: 675 return false_sux; 676 case cond_true: 677 return true_sux; 678 default: 679 ShouldNotReachHere(); 680 return NULL; 681 } 682 } 683 }; 684 685 686 BASE(AccessField, Instruction) 687 private: 688 Value _obj; 689 int _offset; 690 ciField* _field; 691 NullCheck* _explicit_null_check; // For explicit null check elimination 692 693 public: 694 // creation 695 AccessField(Value obj, int offset, ciField* field, bool is_static, 696 ValueStack* state_before, 
bool is_loaded, bool is_initialized) 697 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 698 , _obj(obj) 699 , _offset(offset) 700 , _field(field) 701 , _explicit_null_check(NULL) 702 { 703 set_needs_null_check(!is_static); 704 set_flag(IsLoadedFlag, is_loaded); 705 set_flag(IsInitializedFlag, is_initialized); 706 set_flag(IsStaticFlag, is_static); 707 ASSERT_VALUES 708 if (!is_loaded || (PatchALot && !field->is_volatile())) { 709 // need to patch if the holder wasn't loaded or we're testing 710 // using PatchALot. Don't allow PatchALot for fields which are 711 // known to be volatile they aren't patchable. 712 set_flag(NeedsPatchingFlag, true); 713 } 714 // pin of all instructions with memory access 715 pin(); 716 } 717 718 // accessors 719 Value obj() const { return _obj; } 720 int offset() const { return _offset; } 721 ciField* field() const { return _field; } 722 BasicType field_type() const { return _field->type()->basic_type(); } 723 bool is_static() const { return check_flag(IsStaticFlag); } 724 bool is_loaded() const { return check_flag(IsLoadedFlag); } 725 bool is_initialized() const { return check_flag(IsInitializedFlag); } 726 NullCheck* explicit_null_check() const { return _explicit_null_check; } 727 bool needs_patching() const { return check_flag(NeedsPatchingFlag); } 728 729 // manipulation 730 731 // Under certain circumstances, if a previous NullCheck instruction 732 // proved the target object non-null, we can eliminate the explicit 733 // null check and do an implicit one, simply specifying the debug 734 // information from the NullCheck. This field should only be consulted 735 // if needs_null_check() is true. 
736 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 737 738 // generic 739 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 740 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 741 }; 742 743 744 LEAF(LoadField, AccessField) 745 public: 746 // creation 747 LoadField(Value obj, int offset, ciField* field, bool is_static, 748 ValueStack* state_before, bool is_loaded, bool is_initialized) 749 : AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized) 750 {} 751 752 ciType* declared_type() const; 753 ciType* exact_type() const; 754 755 // generic 756 HASHING2(LoadField, is_loaded() && !field()->is_volatile(), obj()->subst(), offset()) // cannot be eliminated if not yet loaded or if volatile 757 }; 758 759 760 LEAF(StoreField, AccessField) 761 private: 762 Value _value; 763 764 public: 765 // creation 766 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 767 ValueStack* state_before, bool is_loaded, bool is_initialized) 768 : AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized) 769 , _value(value) 770 { 771 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); 772 ASSERT_VALUES 773 pin(); 774 } 775 776 // accessors 777 Value value() const { return _value; } 778 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 779 780 // generic 781 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 782 }; 783 784 785 BASE(AccessArray, Instruction) 786 private: 787 Value _array; 788 789 public: 790 // creation 791 AccessArray(ValueType* type, Value array, ValueStack* state_before) 792 : Instruction(type, state_before) 793 , _array(array) 794 { 795 set_needs_null_check(true); 796 ASSERT_VALUES 797 pin(); // instruction with side effect (null exception or range check throwing) 798 } 799 800 Value array() const { return 
_array; } 801 802 // generic 803 virtual bool can_trap() const { return needs_null_check(); } 804 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 805 }; 806 807 808 LEAF(ArrayLength, AccessArray) 809 private: 810 NullCheck* _explicit_null_check; // For explicit null check elimination 811 812 public: 813 // creation 814 ArrayLength(Value array, ValueStack* state_before) 815 : AccessArray(intType, array, state_before) 816 , _explicit_null_check(NULL) {} 817 818 // accessors 819 NullCheck* explicit_null_check() const { return _explicit_null_check; } 820 821 // setters 822 // See LoadField::set_explicit_null_check for documentation 823 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 824 825 // generic 826 HASHING1(ArrayLength, true, array()->subst()) 827 }; 828 829 830 BASE(AccessIndexed, AccessArray) 831 private: 832 Value _index; 833 Value _length; 834 BasicType _elt_type; 835 836 public: 837 // creation 838 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before) 839 : AccessArray(as_ValueType(elt_type), array, state_before) 840 , _index(index) 841 , _length(length) 842 , _elt_type(elt_type) 843 { 844 ASSERT_VALUES 845 } 846 847 // accessors 848 Value index() const { return _index; } 849 Value length() const { return _length; } 850 BasicType elt_type() const { return _elt_type; } 851 852 // perform elimination of range checks involving constants 853 bool compute_needs_range_check(); 854 855 // generic 856 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); } 857 }; 858 859 860 LEAF(LoadIndexed, AccessIndexed) 861 private: 862 NullCheck* _explicit_null_check; // For explicit null check elimination 863 864 public: 865 // creation 866 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before) 867 : AccessIndexed(array, index, length, elt_type, 
state_before) 868 , _explicit_null_check(NULL) {} 869 870 // accessors 871 NullCheck* explicit_null_check() const { return _explicit_null_check; } 872 873 // setters 874 // See LoadField::set_explicit_null_check for documentation 875 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 876 877 ciType* exact_type() const; 878 ciType* declared_type() const; 879 880 // generic 881 HASHING2(LoadIndexed, true, array()->subst(), index()->subst()) 882 }; 883 884 885 LEAF(StoreIndexed, AccessIndexed) 886 private: 887 Value _value; 888 889 ciMethod* _profiled_method; 890 int _profiled_bci; 891 public: 892 // creation 893 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before) 894 : AccessIndexed(array, index, length, elt_type, state_before) 895 , _value(value), _profiled_method(NULL), _profiled_bci(0) 896 { 897 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); 898 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object())); 899 ASSERT_VALUES 900 pin(); 901 } 902 903 // accessors 904 Value value() const { return _value; } 905 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 906 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } 907 // Helpers for methodDataOop profiling 908 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 909 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 910 void set_profiled_bci(int bci) { _profiled_bci = bci; } 911 bool should_profile() const { return check_flag(ProfileMDOFlag); } 912 ciMethod* profiled_method() const { return _profiled_method; } 913 int profiled_bci() const { return _profiled_bci; } 914 // generic 915 virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); } 916 }; 917 918 919 LEAF(NegateOp, Instruction) 920 private: 921 Value _x; 922 923 public: 924 // creation 925 
NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); }
};


// Base class for binary operations: a bytecode opcode plus two operands.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value x() const { return _x; }
  Value y() const { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const { return false; }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); }
};


// Arithmetic operation; pinned if it can trap (see can_trap() in the .cpp).
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();
  }

  // accessors
  bool is_strictfp() const { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Shift operation; result type is the base type of the shifted value.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Bitwise logic operation; commutativity depends on the opcode (see .cpp).
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Comparison producing an int result (e.g. lcmp, fcmpl).
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional value selection: (x cond y) ? tval : fval.
// The condition code is stored in place of the Op2 bytecode.
LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  // the raw opcode accessor is meaningless here; use cond() instead
  Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const { return (Condition)Op2::op(); }
  Value tval() const { return _tval; }
  Value fval() const { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (e.g. i2l, d2i).
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value value() const { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on an object reference.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(),
state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const { return _obj; }

  // setters
  void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// Base class for instructions that carry a full ValueStack snapshot.
// StateSplit instructions are pinned on construction.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  // a state may be set only once
  void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// Method invocation; the receiver is NULL for static calls.
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;
  Value           _recv;
  Values*         _args;
  BasicTypeList*  _signature;
  int             _vtable_index;
  ciMethod*       _target;

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const { return _code; }
  Value receiver() const { return _recv; }
  bool
has_receiver() const { return receiver() != NULL; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const { return _args->at(i); }
  int vtable_index() const { return _vtable_index; }
  BasicTypeList* signature() const { return _signature; }
  ciMethod* target() const { return _target; }

  // Returns false if target is not loaded
  bool target_is_final() const { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Allocation of a class instance ('new').
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
  : StateSplit(instanceType, state_before)
  , _klass(klass)
  {}

  // accessors
  ciInstanceKlass* klass() const { return _klass; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  ciType* exact_type() const;
};


// Base class for array allocations; holds the array length.
BASE(NewArray, StateSplit)
 private:
  Value _length;

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const { return _length; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
};


// Allocation of an array of a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const { return _elt_type; }
  ciType* exact_type() const;
};


// Allocation of an array of object references.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const { return _klass; }
  ciType* exact_type() const;
};


// Allocation of a multi-dimensional array; dimensions are kept in _dims
// and the inherited length is NULL.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;
  Values*  _dims;

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Values* dims() const { return _dims; }
  int rank() const { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};


// Base class for checkcast/instanceof; supports methodDataOop profiling.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*  _klass;
  Value     _obj;

  ciMethod* _profiled_method;
  int       _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Value obj() const { return _obj; }
  bool is_loaded() const { return klass() != NULL; }
  bool direct_compare() const { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for methodDataOop profiling
  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }
  int profiled_bci() const { return _profiled_bci; }
};


// checkcast bytecode.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return
check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
  ciType* exact_type() const;
};


// instanceof bytecode; produces an int result and needs no exception state.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const { return false; }
};


// Base class for monitorenter/monitorexit; holds the locked object and
// its monitor slot number.
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;
  int   _monitor_no;

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const { return _obj; }
  int monitor_no() const { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
};


// monitorenter bytecode; can trap.
LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const { return true; }
};


// monitorexit bytecode.
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};


// Call to an intrinsified library method, identified by vmIntrinsics::ID.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;
  Values*          _args;
  Value            _recv;

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic. The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag, cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap()) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const { return _id; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const { return _args->at(i); }

  bool has_receiver() const { return (_recv != NULL); }
  Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const { return check_flag(PreservesStateFlag); }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


class LIR_List;

// Start of a basic block: holds the CFG edges, dominator information,
// exception handler links, the generated LIR and the liveness bitmaps
// used by linear scan register allocation.
LEAF(BlockBegin, StateSplit)
 private:
  int        _block_id;                          // the unique block id
  int        _bci;                               // start-bci of block
  int        _depth_first_number;                // number of this block in a depth-first ordering
  int        _linear_scan_number;                // number of this block in linear-scan ordering
  int        _loop_depth;                        // the loop nesting level of this block
  int        _loop_index;                        // number of the innermost loop of this block
  int        _flags;                             // the flags associated with this block

  // fields used by BlockListBuilder
  int        _total_preds;                       // number of predecessors found by BlockListBuilder
  BitMap     _stores_to_locals;                  // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList  _successors;                        // the successors of this block
  BlockList  _predecessors;                      // the predecessors of this block
  BlockBegin* _dominator;                        // the dominator of this block
  // SSA specific ends
  BlockEnd*  _end;                               // the last instruction of this block
  BlockList  _exception_handlers;                // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states;            // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int        _exception_handler_pco;             // if this block is the start of an exception handler,
                                                 // this records the PC offset in the assembly code of the
                                                 // first instruction in this block
  Label      _label;                             // the label associated with this block
  LIR_List*  _lir;                               // the low level intermediate representation for this block

  BitMap     _live_in;                           // set of live LIR_Opr registers at entry to this block
  BitMap     _live_out;                          // set of live LIR_Opr registers at exit from this block
  BitMap     _live_gen;                          // set of registers used before any redefinition in this block
  BitMap     _live_kill;                         // set of registers defined in this block

  BitMap     _fpu_register_usage;
  intArray*  _fpu_stack_state;                   // For x86 FPU code generation with UseLinearScan
  int        _first_lir_instruction_id;          // ID of first LIR instruction in this block
  int        _last_lir_instruction_id;           // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  void*
operator new(size_t size) { 1507 Compilation* c = Compilation::current(); 1508 void* res = c->arena()->Amalloc(size); 1509 ((BlockBegin*)res)->_id = c->get_next_id(); 1510 ((BlockBegin*)res)->_block_id = c->get_next_block_id(); 1511 return res; 1512 } 1513 1514 // initialization/counting 1515 static int number_of_blocks() { 1516 return Compilation::current()->number_of_blocks(); 1517 } 1518 1519 // creation 1520 BlockBegin(int bci) 1521 : StateSplit(illegalType) 1522 , _bci(bci) 1523 , _depth_first_number(-1) 1524 , _linear_scan_number(-1) 1525 , _loop_depth(0) 1526 , _flags(0) 1527 , _dominator(NULL) 1528 , _end(NULL) 1529 , _predecessors(2) 1530 , _successors(2) 1531 , _exception_handlers(1) 1532 , _exception_states(NULL) 1533 , _exception_handler_pco(-1) 1534 , _lir(NULL) 1535 , _loop_index(-1) 1536 , _live_in() 1537 , _live_out() 1538 , _live_gen() 1539 , _live_kill() 1540 , _fpu_register_usage() 1541 , _fpu_stack_state(NULL) 1542 , _first_lir_instruction_id(-1) 1543 , _last_lir_instruction_id(-1) 1544 , _total_preds(0) 1545 , _stores_to_locals() 1546 { 1547 #ifndef PRODUCT 1548 set_printable_bci(bci); 1549 #endif 1550 } 1551 1552 // accessors 1553 int block_id() const { return _block_id; } 1554 int bci() const { return _bci; } 1555 BlockList* successors() { return &_successors; } 1556 BlockBegin* dominator() const { return _dominator; } 1557 int loop_depth() const { return _loop_depth; } 1558 int depth_first_number() const { return _depth_first_number; } 1559 int linear_scan_number() const { return _linear_scan_number; } 1560 BlockEnd* end() const { return _end; } 1561 Label* label() { return &_label; } 1562 LIR_List* lir() const { return _lir; } 1563 int exception_handler_pco() const { return _exception_handler_pco; } 1564 BitMap& live_in() { return _live_in; } 1565 BitMap& live_out() { return _live_out; } 1566 BitMap& live_gen() { return _live_gen; } 1567 BitMap& live_kill() { return _live_kill; } 1568 BitMap& fpu_register_usage() { return 
_fpu_register_usage; }
  intArray* fpu_stack_state() const { return _fpu_stack_state; }
  int first_lir_instruction_id() const { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const { return _last_lir_instruction_id; }
  int total_preds() const { return _total_preds; }
  BitMap& stores_to_locals() { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom) { _dominator = dom; }
  void set_loop_depth(int d) { _loop_depth = d; }
  void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; }
  void set_end(BlockEnd* end);
  void disconnect_from_graph();
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir) { _lir = lir; }
  void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
  void set_live_in (BitMap map) { _live_in = map; }
  void set_live_out (BitMap map) { _live_out = map; }
  void set_live_gen (BitMap map) { _live_gen = map; }
  void set_live_kill (BitMap map) { _live_kill = map; }
  void set_fpu_register_usage(BitMap map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1) { _total_preds += n; }
  void init_stores_to_locals(int locals_count) { _stores_to_locals = BitMap(locals_count); _stores_to_locals.clear(); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_successor(BlockBegin* sux);
  void remove_successor(BlockBegin* pred);
  bool is_successor(BlockBegin* sux) const { return _successors.contains(sux); }

  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
  int number_of_preds() const { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const { return _predecessors[i]; }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int number_of_exception_handlers() const { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10  // set during loop-detection for LinearScan
  };

  void set(Flag f) { _flags |= f; }
  void clear(Flag f) { _flags &= ~f; }
  bool is_set(Flag f) const { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder (BlockClosure* closure);
  void iterate_postorder (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix) { _loop_index = ix; }
  int loop_index() const { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state);             // try to merge states at block begin
  void merge(ValueStack* state) { bool b = try_merge(state); assert(b, "merge failed"); }

  // debugging
  void print_block() PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};


// Base class for the last instruction of a block; owns the successor list.
BASE(BlockEnd, StateSplit)
 private:
  BlockBegin* _begin;
BlockList*  _sux;

 protected:
  BlockList* sux() const { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _begin(NULL)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const { return check_flag(IsSafepointFlag); }
  BlockBegin* begin() const { return _begin; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const { return _sux->at(i); }
  // by convention the default successor is the last one in the list
  BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); }
  BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); }
  int sux_index(BlockBegin* sux) const { return _sux->find(sux); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};


// Unconditional branch to a single successor.
LEAF(Goto, BlockEnd)
 public:
  enum Direction {
    none,             // Just a regular goto
    taken, not_taken  // Goto produced from If
  };
 private:
  ciMethod*   _profiled_method;
  int         _profiled_bci;
  Direction   _direction;
 public:
  // creation
  Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _direction(none)
  , _profiled_method(NULL)
  , _profiled_bci(0) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  Goto(BlockBegin* sux, bool is_safepoint)
  : BlockEnd(illegalType, NULL, is_safepoint)
  , _direction(none)
  , _profiled_method(NULL)
  , _profiled_bci(0) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const { return _profiled_bci; }
  Direction direction() const { return _direction; }

  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  void set_direction(Direction d) { _direction = d; }
};


// Two-way conditional branch: compares x cond y and branches to tsux/fsux.
LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci;  // Canonicalizer may alter bci of If node
  bool        _swapped;       // Is the order reversed with respect to the original If in the
                              // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(NULL)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const { return _x; }
  Condition cond() const { return _cond; }
  bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
  Value y() const { return _y; }
  // successor 0 is the true branch, successor 1 the false branch
  BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const { return sux_for(true); }
  BlockBegin* fsux() const { return sux_for(false); }
  BlockBegin* usux() const { return sux_for(unordered_is_true()); }
  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const { return _profiled_bci; }             // set for profiled branches and tiered
  bool is_swapped() const { return _swapped; }

  // manipulation
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _cond = negate(_cond);
    set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag));
  }

  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  void set_swapped(bool value) { _swapped = value; }
  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


// Combination of an InstanceOf and an If (see Note 2 below).
LEAF(IfInstanceOf, BlockEnd)
 private:
  ciKlass* _klass;
  Value    _obj;
  bool     _test_is_instance;  // jump if instance
  int      _instanceof_bci;

 public:
  IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux)
  : BlockEnd(illegalType, NULL, false) // temporary set to false
  , _klass(klass)
  , _obj(obj)
  , _test_is_instance(test_is_instance)
  , _instanceof_bci(instanceof_bci)
  {
    ASSERT_VALUES
    assert(instanceof_bci >= 0, "illegal bci");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  //
  // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an
  //         instance of klass; otherwise it tests if it is *not* an instance
  //         of klass.
  //
  // Note 2: IfInstanceOf instructions are created by combining an InstanceOf
  //         and an If instruction. The IfInstanceOf bci() corresponds to the
  //         bci that the If would have had; the (this->) instanceof_bci() is
  //         the bci of the original InstanceOf instruction.
  ciKlass* klass() const { return _klass; }
  Value obj() const { return _obj; }
  int instanceof_bci() const { return _instanceof_bci; }
  bool test_is_instance() const { return _test_is_instance; }
  BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const { return sux_for(true); }
  BlockBegin* fsux() const { return sux_for(false); }

  // manipulation
  void swap_sux() {
    assert(number_of_sux() == 2, "wrong number of successors");
    BlockList* s = sux();
    BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t);
    _test_is_instance = !_test_is_instance;
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); }
};


// Base class for tableswitch/lookupswitch; the default successor is the
// last element of the successor list.
BASE(Switch, BlockEnd)
 private:
  Value _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const { return _tag; }
  // number of case successors, excluding the default successor
  int length() const { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
};
// Switch over a dense key range [lo_key, hi_key]; one successor per key
// plus the default successor.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;  // key value of the first (lowest) table entry

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) {}

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + length() - 1; }
};


// Switch over a sparse, explicitly listed set of keys; keys->at(i)
// corresponds to successor i.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;  // one key per non-default successor

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};


// Method return; _result is NULL for a void return.
LEAF(Return, BlockEnd)
 private:
  Value _result;  // the returned value, or NULL for void

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Explicit throw of an exception object; always a potential trap site.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;  // the exception object being thrown

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// The start block's end; its successors are the standard entry and
// (optionally) the OSR entry of the method.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  // if present, the OSR entry was appended before the std entry, so it is sux 0
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ?
NULL : sux_at(0); }
};


// Entry point for on-stack-replacement; produces the OSR buffer pointer
// (pointer-sized: long on _LP64, int otherwise).
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    // pinned: must materialize exactly at the handler entry
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;  // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};


// Common base of all sun.misc.Unsafe memory operations.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;  // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))  // puts produce no value
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Unsafe access to a raw (base + scaled index) address held in a Java long.
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;                                   // Base address (a Java long)
  Value _index;                                  // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;                             // Scale factor: 0, 1, 2, or 3.
                                                 // Indicates log2 of number of bytes (1, 2, 4, or 8)
                                                 // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool  has_index()                              { return (_index != NULL); }
  int   log2_scale()                             { return _log2_scale; }

  // setters
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_base);
                                                   if (has_index()) f->visit(&_index); }
};


// Unsafe raw-memory load.
LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
  bool _may_be_unaligned, _is_wide;  // For OSREntry

 public:
  UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }
  UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  bool may_be_unaligned()                        { return _may_be_unaligned; }
  bool is_wide()                                 { return _is_wide; }
};


// Unsafe raw-memory store.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;  // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeRawOp::input_values_do(f);
                                                   f->visit(&_value); }
};


// Unsafe access relative to an object reference plus an offset.
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;       // Object to be fetched from or mutated
  Value _offset;       // Offset within object
  bool  _is_volatile;  // true if volatile - dl/JSR166

 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_object);
                                                   f->visit(&_offset); }
};


// Unsafe field/element load from an object.
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


// Unsafe field/element store into an object.
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;  // Value to be stored

 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
    , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};


// Common base of the prefetch variants; carries no value (T_VOID, non-put,
// non-volatile).
BASE(UnsafePrefetch, UnsafeObjectOp)
 public:
  UnsafePrefetch(Value object, Value offset)
  : UnsafeObjectOp(T_VOID, object, offset, false, false)
  {
  }
};


// Prefetch for a subsequent read.
LEAF(UnsafePrefetchRead, UnsafePrefetch)
 public:
  UnsafePrefetchRead(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};


// Prefetch for a subsequent write.
LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
  : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};

// Profiling point at a call site: records the profiled method, the bci of
// the invoke, the receiver, and (if known) the receiver's holder klass.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod* _method;         // method containing the profiled call
  int       _bci_of_invoke;  // bci of the invoke being profiled
  Value     _recv;           // receiver, may be NULL (see input_values_do)
  ciKlass*  _known_holder;   // statically known holder klass, if any

 public:
  ProfileCall(ciMethod* method, int bci, Value recv, ciKlass* known_holder)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _recv(recv)
    , _known_holder(known_holder)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()                             { return _method; }
  int bci_of_invoke()                            { return _bci_of_invoke; }
  Value recv()                                   { return _recv; }
  ciKlass* known_holder()                        {
return _known_holder; } 2270 2271 virtual void input_values_do(ValueVisitor* f) { if (_recv != NULL) f->visit(&_recv); } 2272 }; 2273 2274 2275 // Call some C runtime function that doesn't safepoint, 2276 // optionally passing the current thread as the first argument. 2277 LEAF(RuntimeCall, Instruction) 2278 private: 2279 const char* _entry_name; 2280 address _entry; 2281 Values* _args; 2282 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2283 2284 public: 2285 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2286 : Instruction(type) 2287 , _entry(entry) 2288 , _args(args) 2289 , _entry_name(entry_name) 2290 , _pass_thread(pass_thread) { 2291 ASSERT_VALUES 2292 pin(); 2293 } 2294 2295 const char* entry_name() const { return _entry_name; } 2296 address entry() const { return _entry; } 2297 int number_of_arguments() const { return _args->length(); } 2298 Value argument_at(int i) const { return _args->at(i); } 2299 bool pass_thread() const { return _pass_thread; } 2300 2301 virtual void input_values_do(ValueVisitor* f) { 2302 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2303 } 2304 }; 2305 2306 // Use to trip invocation counter of an inlined method 2307 2308 LEAF(ProfileInvoke, Instruction) 2309 private: 2310 ciMethod* _inlinee; 2311 ValueStack* _state; 2312 2313 public: 2314 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2315 : Instruction(voidType) 2316 , _inlinee(inlinee) 2317 , _state(state) 2318 { 2319 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
2320 pin(); 2321 } 2322 2323 ciMethod* inlinee() { return _inlinee; } 2324 ValueStack* state() { return _state; } 2325 virtual void input_values_do(ValueVisitor*) {} 2326 virtual void state_values_do(ValueVisitor*); 2327 }; 2328 2329 class BlockPair: public CompilationResourceObj { 2330 private: 2331 BlockBegin* _from; 2332 BlockBegin* _to; 2333 public: 2334 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {} 2335 BlockBegin* from() const { return _from; } 2336 BlockBegin* to() const { return _to; } 2337 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; } 2338 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); } 2339 void set_to(BlockBegin* b) { _to = b; } 2340 void set_from(BlockBegin* b) { _from = b; } 2341 }; 2342 2343 2344 define_array(BlockPairArray, BlockPair*) 2345 define_stack(BlockPairList, BlockPairArray) 2346 2347 2348 inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); } 2349 inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); } 2350 inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); } 2351 2352 #undef ASSERT_VALUES 2353 2354 #endif // SHARE_VM_C1_C1_INSTRUCTION_HPP