305 LIR_Opr _operand; // LIR specific information
306 unsigned int _flags; // Flag bits
307
308 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
309 ValueStack* _exception_state; // Copy of state for exception handling
310 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
311
312 friend class UseCountComputer;
313 friend class BlockBegin;
314
315 void update_exception_state(ValueStack* state);
316
317 protected:
318 BlockBegin* _block; // Block that contains this instruction
319
// Replaces this instruction's value type. A type must always be
// present; callers may never clear it to NULL.
void set_type(ValueType* type) {
  assert(type != NULL, "type must exist");
  _type = type;
}
324
325 public:
// All Instructions are allocated in the current Compilation's arena
// (no individual delete; the arena frees everything at once) and are
// stamped with a fresh, unique id at allocation time.
// NOTE(review): the id is written before the constructor runs —
// presumably no constructor reinitializes _id; confirm against the
// Instruction constructors.
void* operator new(size_t size) throw() {
  Compilation* c = Compilation::current();
  void* res = c->arena()->Amalloc(size);
  ((Instruction*)res)->_id = c->get_next_id();
  return res;
}
332
333 static const int no_bci = -99;
334
335 enum InstructionFlag {
336 NeedsNullCheckFlag = 0,
337 CanTrapFlag,
338 DirectCompareFlag,
339 IsEliminatedFlag,
340 IsSafepointFlag,
341 IsStaticFlag,
342 IsStrictfpFlag,
343 NeedsStoreCheckFlag,
344 NeedsWriteBarrierFlag,
549 virtual Base* as_Base() { return NULL; }
550 virtual RoundFP* as_RoundFP() { return NULL; }
551 virtual ExceptionObject* as_ExceptionObject() { return NULL; }
552 virtual UnsafeOp* as_UnsafeOp() { return NULL; }
553 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; }
554 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; }
555
556 #ifdef ASSERT
557 virtual Assert* as_Assert() { return NULL; }
558 #endif
559
560 virtual void visit(InstructionVisitor* v) = 0;
561
562 virtual bool can_trap() const { return false; }
563
564 virtual void input_values_do(ValueVisitor* f) = 0;
565 virtual void state_values_do(ValueVisitor* f);
566 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
567 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
568
569 virtual ciType* exact_type() const { return NULL; }
570 virtual ciType* declared_type() const { return NULL; }
571
572 // hashing
573 virtual const char* name() const = 0;
574 HASHING1(Instruction, false, id()) // hashing disabled by default
575
576 // debugging
577 static void check_state(ValueStack* state) PRODUCT_RETURN;
578 void print() PRODUCT_RETURN;
579 void print_line() PRODUCT_RETURN;
580 void print(InstructionPrinter& ip) PRODUCT_RETURN;
581 };
582
583
584 // The following macros are used to define base (i.e., non-leaf)
585 // and leaf instruction classes. They define class-name related
586 // generic functionality in one place.
587
588 #define BASE(class_name, super_class_name) \
589 class class_name: public super_class_name { \
672
673 // A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int     _java_index;     // the local index within the method to which the local belongs
  ciType* _declared_type;  // statically declared type of the incoming argument

 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    // Locals have no bytecode of their own; use -1 as printable bci.
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                          { return _java_index; }

  virtual ciType* declared_type() const           { return _declared_type; }
  virtual ciType* exact_type() const;

  // generic
  // A Local has no input values to visit.
  virtual void input_values_do(ValueVisitor* f)   { /* no values */ }
};
697
698
699 LEAF(Constant, Instruction)
700 public:
701 // creation
702 Constant(ValueType* type):
703 Instruction(type, NULL, /*type_is_constant*/ true)
704 {
705 assert(type->is_constant(), "must be a constant");
706 }
707
708 Constant(ValueType* type, ValueStack* state_before):
709 Instruction(type, state_before, /*type_is_constant*/ true)
710 {
711 assert(state_before != NULL, "only used for constants which need patching");
712 assert(type->is_constant(), "must be a constant");
789 // null check and do an implicit one, simply specifying the debug
790 // information from the NullCheck. This field should only be consulted
791 // if needs_null_check() is true.
792 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }
793
794 // generic
795 virtual bool can_trap() const { return needs_null_check() || needs_patching(); }
796 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
797 };
798
799
// Load of an instance or static field; all state lives in AccessField.
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;
  ciType* exact_type() const;

  // generic
  // Value-numbering: two loads are equivalent when object and offset match.
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};
814
815
816 LEAF(StoreField, AccessField)
817 private:
818 Value _value;
819
820 public:
821 // creation
822 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
823 ValueStack* state_before, bool needs_patching)
824 : AccessField(obj, offset, field, is_static, state_before, needs_patching)
825 , _value(value)
826 {
827 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
828 ASSERT_VALUES
829 pin();
1282
1283
// Base class for array allocation instructions (see NewTypeArray below;
// for NewMultiArray the single _length field is unused and stays NULL).
BASE(NewArray, StateSplit)
 private:
  Value _length;  // requested array length; NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
    : StateSplit(objectType, state_before)
    , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                          { return _length; }

  virtual bool needs_exception_state() const    { return false; }

  ciType* declared_type() const;

  // generic
  // Always treated as potentially trapping — presumably because the
  // allocation itself can raise an exception at runtime.
  virtual bool can_trap() const                 { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
};
1308
1309
1310 LEAF(NewTypeArray, NewArray)
1311 private:
1312 BasicType _elt_type;
1313
1314 public:
1315 // creation
1316 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
1317 : NewArray(length, state_before)
1318 , _elt_type(elt_type)
1319 {}
1320
1321 // accessors
1405 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1406 ciMethod* profiled_method() const { return _profiled_method; }
1407 int profiled_bci() const { return _profiled_bci; }
1408 };
1409
1410
// Checked cast of obj to klass; produces an object-typed result.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  // Marks this cast as an incompatible-class-change check
  // (ThrowIncompatibleClassChangeErrorFlag), altering which error the
  // backend raises on failure.
  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
  ciType* exact_type() const;
};
1427
1428
// instanceof test of obj against klass; result is an int value.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  // No exception state needed — unlike CheckCast this test does not throw.
  virtual bool needs_exception_state() const { return false; }
};
1436
1437
1438 BASE(AccessMonitor, StateSplit)
1439 private:
1440 Value _obj;
1441 int _monitor_no;
1442
1443 public:
1444 // creation
1445 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
1473 virtual bool can_trap() const { return true; }
1474 };
1475
1476
// Releases the monitor with the given number on obj; no state_before
// is recorded (NULL is passed to AccessMonitor).
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
    : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};
1486
1487
1488 LEAF(Intrinsic, StateSplit)
1489 private:
1490 vmIntrinsics::ID _id;
1491 Values* _args;
1492 Value _recv;
1493 int _nonnull_state; // mask identifying which args are nonnull
1494
1495 public:
1496 // preserves_state can be set to true for Intrinsics
1497 // which are guaranteed to preserve register state across any slow
1498 // cases; setting it to true does not mean that the Intrinsic can
1499 // not trap, only that if we continue execution in the same basic
1500 // block after the Intrinsic, all of the registers are intact. This
1501 // allows load elimination and common expression elimination to be
1502 // performed across the Intrinsic. The default value is false.
1503 Intrinsic(ValueType* type,
1504 vmIntrinsics::ID id,
1505 Values* args,
1506 bool has_receiver,
1507 ValueStack* state_before,
1508 bool preserves_state,
1509 bool cantrap = true)
1510 : StateSplit(type, state_before)
1511 , _id(id)
1512 , _args(args)
1513 , _recv(NULL)
1514 , _nonnull_state(AllBits)
1515 {
1516 assert(args != NULL, "args must exist");
1517 ASSERT_VALUES
1518 set_flag(PreservesStateFlag, preserves_state);
1519 set_flag(CanTrapFlag, cantrap);
1520 if (has_receiver) {
1521 _recv = argument_at(0);
1522 }
1523 set_needs_null_check(has_receiver);
1524
1525 // some intrinsics can't trap, so don't force them to be pinned
1526 if (!can_trap()) {
1527 unpin(PinStateSplitConstructor);
1528 }
1529 }
1530
1531 // accessors
1532 vmIntrinsics::ID id() const { return _id; }
1533 int number_of_arguments() const { return _args->length(); }
1534 Value argument_at(int i) const { return _args->at(i); }
1535
1536 bool has_receiver() const { return (_recv != NULL); }
1537 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; }
1538 bool preserves_state() const { return check_flag(PreservesStateFlag); }
1539
1540 bool arg_needs_null_check(int i) {
1541 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
1542 return is_set_nth_bit(_nonnull_state, i);
1543 }
1544 return true;
1545 }
1546
1547 void set_arg_needs_null_check(int i, bool check) {
1548 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
1549 if (check) {
1550 _nonnull_state |= nth_bit(i);
1551 } else {
1552 _nonnull_state &= ~(nth_bit(i));
1553 }
1554 }
1555 }
1556
1557 // generic
1558 virtual bool can_trap() const { return check_flag(CanTrapFlag); }
1559 virtual void input_values_do(ValueVisitor* f) {
1560 StateSplit::input_values_do(f);
1561 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
1562 }
1563 };
1564
1565
1566 class LIR_List;
1567
1568 LEAF(BlockBegin, StateSplit)
1569 private:
1570 int _block_id; // the unique block id
1571 int _bci; // start-bci of block
1572 int _depth_first_number; // number of this block in a depth-first ordering
1573 int _linear_scan_number; // number of this block in linear-scan ordering
1574 int _dominator_depth;
2438 }
2439 };
2440
2441
// Prefetch of object+offset in anticipation of a write.
LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
    : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
2450
2451 LEAF(ProfileCall, Instruction)
2452 private:
2453 ciMethod* _method;
2454 int _bci_of_invoke;
2455 ciMethod* _callee; // the method that is called at the given bci
2456 Value _recv;
2457 ciKlass* _known_holder;
2458
2459 public:
2460 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder)
2461 : Instruction(voidType)
2462 , _method(method)
2463 , _bci_of_invoke(bci)
2464 , _callee(callee)
2465 , _recv(recv)
2466 , _known_holder(known_holder)
2467 {
2468 // The ProfileCall has side-effects and must occur precisely where located
2469 pin();
2470 }
2471
2472 ciMethod* method() { return _method; }
2473 int bci_of_invoke() { return _bci_of_invoke; }
2474 ciMethod* callee() { return _callee; }
2475 Value recv() { return _recv; }
2476 ciKlass* known_holder() { return _known_holder; }
2477
2478 virtual void input_values_do(ValueVisitor* f) { if (_recv != NULL) f->visit(&_recv); }
2479 };
2480
2481
2482 // Call some C runtime function that doesn't safepoint,
2483 // optionally passing the current thread as the first argument.
2484 LEAF(RuntimeCall, Instruction)
2485 private:
2486 const char* _entry_name;
2487 address _entry;
2488 Values* _args;
2489 bool _pass_thread; // Pass the JavaThread* as an implicit first argument
2490
2491 public:
2492 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
2493 : Instruction(type)
2494 , _entry(entry)
2495 , _args(args)
2496 , _entry_name(entry_name)
2497 , _pass_thread(pass_thread) {
2498 ASSERT_VALUES
2499 pin();
2500 }
|
305 LIR_Opr _operand; // LIR specific information
306 unsigned int _flags; // Flag bits
307
308 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
309 ValueStack* _exception_state; // Copy of state for exception handling
310 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
311
312 friend class UseCountComputer;
313 friend class BlockBegin;
314
315 void update_exception_state(ValueStack* state);
316
317 protected:
318 BlockBegin* _block; // Block that contains this instruction
319
// Replaces this instruction's value type. A type must always be
// present; callers may never clear it to NULL.
void set_type(ValueType* type) {
  assert(type != NULL, "type must exist");
  _type = type;
}
324
// Tracks, per argument index, whether an explicit null check is still
// required. State is a bit mask held in a single int, so only the first
// sizeof(int) * BitsPerByte argument positions are tracked; any index
// outside that range conservatively reports that a check is needed.
class ArgsNonNullState {
 private:
  int _nonnull_state;  // mask identifying which args are nonnull

 public:
  // All bits set: initially every argument needs a null check.
  ArgsNonNullState()
    : _nonnull_state(AllBits) {}

  // Whether argument i needs a null check. Out-of-range (or negative)
  // indices conservatively return true.
  bool arg_needs_null_check(int i) const {
    if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
      return is_set_nth_bit(_nonnull_state, i);
    }
    return true;
  }

  // Record whether argument i needs a null check; silently ignored for
  // indices outside the tracked bit range.
  void set_arg_needs_null_check(int i, bool check) {
    if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
      if (check) {
        _nonnull_state |= nth_bit(i);
      } else {
        _nonnull_state &= ~(nth_bit(i));
      }
    }
  }
};
349
350 public:
// All Instructions are allocated in the current Compilation's arena
// (no individual delete; the arena frees everything at once) and are
// stamped with a fresh, unique id at allocation time.
// NOTE(review): the id is written before the constructor runs —
// presumably no constructor reinitializes _id; confirm against the
// Instruction constructors.
void* operator new(size_t size) throw() {
  Compilation* c = Compilation::current();
  void* res = c->arena()->Amalloc(size);
  ((Instruction*)res)->_id = c->get_next_id();
  return res;
}
357
358 static const int no_bci = -99;
359
360 enum InstructionFlag {
361 NeedsNullCheckFlag = 0,
362 CanTrapFlag,
363 DirectCompareFlag,
364 IsEliminatedFlag,
365 IsSafepointFlag,
366 IsStaticFlag,
367 IsStrictfpFlag,
368 NeedsStoreCheckFlag,
369 NeedsWriteBarrierFlag,
574 virtual Base* as_Base() { return NULL; }
575 virtual RoundFP* as_RoundFP() { return NULL; }
576 virtual ExceptionObject* as_ExceptionObject() { return NULL; }
577 virtual UnsafeOp* as_UnsafeOp() { return NULL; }
578 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; }
579 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; }
580
581 #ifdef ASSERT
582 virtual Assert* as_Assert() { return NULL; }
583 #endif
584
585 virtual void visit(InstructionVisitor* v) = 0;
586
587 virtual bool can_trap() const { return false; }
588
589 virtual void input_values_do(ValueVisitor* f) = 0;
590 virtual void state_values_do(ValueVisitor* f);
591 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
592 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
593
594 virtual ciType* exact_type() const;
595 virtual ciType* declared_type() const { return NULL; }
596
597 // hashing
598 virtual const char* name() const = 0;
599 HASHING1(Instruction, false, id()) // hashing disabled by default
600
601 // debugging
602 static void check_state(ValueStack* state) PRODUCT_RETURN;
603 void print() PRODUCT_RETURN;
604 void print_line() PRODUCT_RETURN;
605 void print(InstructionPrinter& ip) PRODUCT_RETURN;
606 };
607
608
609 // The following macros are used to define base (i.e., non-leaf)
610 // and leaf instruction classes. They define class-name related
611 // generic functionality in one place.
612
613 #define BASE(class_name, super_class_name) \
614 class class_name: public super_class_name { \
697
698 // A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int     _java_index;     // the local index within the method to which the local belongs
  ciType* _declared_type;  // statically declared type of the incoming argument

 public:
  // creation
  Local(ciType* declared, ValueType* type, int index)
    : Instruction(type)
    , _java_index(index)
    , _declared_type(declared)
  {
    // Locals have no bytecode of their own; use -1 as printable bci.
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                          { return _java_index; }

  virtual ciType* declared_type() const           { return _declared_type; }

  // generic
  // A Local has no input values to visit.
  virtual void input_values_do(ValueVisitor* f)   { /* no values */ }
};
721
722
723 LEAF(Constant, Instruction)
724 public:
725 // creation
726 Constant(ValueType* type):
727 Instruction(type, NULL, /*type_is_constant*/ true)
728 {
729 assert(type->is_constant(), "must be a constant");
730 }
731
732 Constant(ValueType* type, ValueStack* state_before):
733 Instruction(type, state_before, /*type_is_constant*/ true)
734 {
735 assert(state_before != NULL, "only used for constants which need patching");
736 assert(type->is_constant(), "must be a constant");
813 // null check and do an implicit one, simply specifying the debug
814 // information from the NullCheck. This field should only be consulted
815 // if needs_null_check() is true.
816 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }
817
818 // generic
819 virtual bool can_trap() const { return needs_null_check() || needs_patching(); }
820 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
821 };
822
823
// Load of an instance or static field; all state lives in AccessField.
LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic
  // Value-numbering: two loads are equivalent when object and offset match.
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile
};
837
838
839 LEAF(StoreField, AccessField)
840 private:
841 Value _value;
842
843 public:
844 // creation
845 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
846 ValueStack* state_before, bool needs_patching)
847 : AccessField(obj, offset, field, is_static, state_before, needs_patching)
848 , _value(value)
849 {
850 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
851 ASSERT_VALUES
852 pin();
1305
1306
// Base class for array allocation instructions (see NewTypeArray below;
// for NewMultiArray the single _length field is unused and stays NULL).
BASE(NewArray, StateSplit)
 private:
  Value _length;  // requested array length; NULL for NewMultiArray

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
    : StateSplit(objectType, state_before)
    , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                          { return _length; }

  virtual bool needs_exception_state() const    { return false; }

  // No exact type at this level; subclasses refine declared_type().
  ciType* exact_type() const                    { return NULL; }
  ciType* declared_type() const;

  // generic
  // Always treated as potentially trapping — presumably because the
  // allocation itself can raise an exception at runtime.
  virtual bool can_trap() const                 { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
};
1332
1333
1334 LEAF(NewTypeArray, NewArray)
1335 private:
1336 BasicType _elt_type;
1337
1338 public:
1339 // creation
1340 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
1341 : NewArray(length, state_before)
1342 , _elt_type(elt_type)
1343 {}
1344
1345 // accessors
1429 bool should_profile() const { return check_flag(ProfileMDOFlag); }
1430 ciMethod* profiled_method() const { return _profiled_method; }
1431 int profiled_bci() const { return _profiled_bci; }
1432 };
1433
1434
// Checked cast of obj to klass; produces an object-typed result.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  // Marks this cast as an incompatible-class-change check
  // (ThrowIncompatibleClassChangeErrorFlag), altering which error the
  // backend raises on failure.
  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }

  ciType* declared_type() const;
};
1450
1451
// instanceof test of obj against klass; result is an int value.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  // No exception state needed — unlike CheckCast this test does not throw.
  virtual bool needs_exception_state() const { return false; }
};
1459
1460
1461 BASE(AccessMonitor, StateSplit)
1462 private:
1463 Value _obj;
1464 int _monitor_no;
1465
1466 public:
1467 // creation
1468 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
1496 virtual bool can_trap() const { return true; }
1497 };
1498
1499
// Releases the monitor with the given number on obj; no state_before
// is recorded (NULL is passed to AccessMonitor).
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
    : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};
1509
1510
// A call to a JDK intrinsic, expanded inline by the compiler.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;              // which intrinsic this is
  Values*          _args;            // argument list (never NULL)
  Value            _recv;            // receiver, or NULL if there is none
  ArgsNonNullState _nonnull_state;   // per-argument null-check tracking (see ArgsNonNullState)

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag,        cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap()) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const       { return _id; }
  int number_of_arguments() const   { return _args->length(); }
  Value argument_at(int i) const    { return _args->at(i); }

  bool has_receiver() const         { return (_recv != NULL); }
  Value receiver() const            { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const      { return check_flag(PreservesStateFlag); }

  // Null-check bookkeeping is delegated to the shared ArgsNonNullState.
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const     { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};
1577
1578
1579 class LIR_List;
1580
1581 LEAF(BlockBegin, StateSplit)
1582 private:
1583 int _block_id; // the unique block id
1584 int _bci; // start-bci of block
1585 int _depth_first_number; // number of this block in a depth-first ordering
1586 int _linear_scan_number; // number of this block in linear-scan ordering
1587 int _dominator_depth;
2451 }
2452 };
2453
2454
// Prefetch of object+offset in anticipation of a write.
LEAF(UnsafePrefetchWrite, UnsafePrefetch)
 public:
  UnsafePrefetchWrite(Value object, Value offset)
    : UnsafePrefetch(object, offset)
  {
    ASSERT_VALUES
  }
};
2463
// Records receiver-type (and, optionally, argument-type) profile
// information for a call site.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;         // the method containing the call site
  int              _bci_of_invoke;  // bci of the invoke bytecode
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;           // receiver value, or NULL if none is profiled
  ciKlass*         _known_holder;   // statically known holder klass, if any
  Values*          _obj_args;       // arguments for type profiling (may be NULL)
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method() const          { return _method; }
  int bci_of_invoke() const         { return _bci_of_invoke; }
  ciMethod* callee() const          { return _callee; }
  Value recv() const                { return _recv; }
  ciKlass* known_holder() const     { return _known_holder; }
  // Number of arguments whose types are profiled (0 when _obj_args is NULL).
  int nb_profiled_args() const      { return _obj_args == NULL ? 0 : _obj_args->length(); }
  // Precondition: i < nb_profiled_args() (no NULL check here).
  Value profiled_arg_at(int i) const { return _obj_args->at(i); }
  // Null-check bookkeeping is delegated to the shared ArgsNonNullState.
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined() const              { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // Inputs: the (optional) receiver plus every profiled argument.
  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != NULL) f->visit(&_recv);
    for (int i = 0; i < nb_profiled_args(); i++) f->visit(_obj_args->adr_at(i));
  }
};
2511
2512 // Call some C runtime function that doesn't safepoint,
2513 // optionally passing the current thread as the first argument.
2514 LEAF(RuntimeCall, Instruction)
2515 private:
2516 const char* _entry_name;
2517 address _entry;
2518 Values* _args;
2519 bool _pass_thread; // Pass the JavaThread* as an implicit first argument
2520
2521 public:
2522 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
2523 : Instruction(type)
2524 , _entry(entry)
2525 , _args(args)
2526 , _entry_name(entry_name)
2527 , _pass_thread(pass_thread) {
2528 ASSERT_VALUES
2529 pin();
2530 }
|