  }
  // This is the base version which is given an alias index.
  // Return the new StoreXNode.
  Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                        int adr_idx,
                        MemNode::MemOrd,
                        bool require_atomic_access = false,
                        bool unaligned = false,
                        bool mismatched = false,
                        bool unsafe = false);
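
  // Usage sketch (illustrative, not from this file; 'adr', 'val', and
  // 'adr_type' are assumed locals): a raw store with an explicit alias index
  // might be issued as
  //   int idx = C->get_alias_index(adr_type);
  //   Node* st = store_to_memory(control(), adr, val, T_INT, idx,
  //                              MemNode::unordered);
  // 'control()' and 'C' are GraphKit/Compile members available to callers.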

  // Perform decorated accesses

  Node* access_store_at(Node* obj,   // containing obj
                        Node* adr,   // actual address to store val at
                        const TypePtr* adr_type,
                        Node* val,
                        const Type* val_type,
                        BasicType bt,
                        DecoratorSet decorators,
                        bool deoptimize_on_exception = false,
                        bool safe_for_replace = true);
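
  // Usage sketch (illustrative; the decorator names come from HotSpot's
  // accessDecorators, the locals are assumed): a plain on-heap field store
  // might be written as
  //   access_store_at(obj, adr, adr_type, val, val_type, T_OBJECT,
  //                   IN_HEAP | MO_UNORDERED);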

  Node* access_load_at(Node* obj,   // containing obj
                       Node* adr,   // actual address to load val at
                       const TypePtr* adr_type,
                       const Type* val_type,
                       BasicType bt,
                       DecoratorSet decorators);
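
  // Usage sketch (illustrative, same assumed locals): the matching decorated
  // load is
  //   Node* v = access_load_at(obj, adr, adr_type, val_type, T_OBJECT,
  //                            IN_HEAP | MO_UNORDERED);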

  Node* access_load(Node* adr,   // actual address to load val at
                    const Type* val_type,
                    BasicType bt,
                    DecoratorSet decorators);

  Node* access_atomic_cmpxchg_val_at(Node* obj,
                                     Node* adr,
                                     const TypePtr* adr_type,
                                     int alias_idx,
                                     Node* expected_val,
                                     Node* new_val,
                                     const Type* value_type,
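  // Sketch (hedged: the trailing parameters of this declaration are not shown
  // above and are assumed to follow the BasicType/DecoratorSet pattern of the
  // accessors before it). A CAS returning the witnessed value might look like
  //   Node* old = access_atomic_cmpxchg_val_at(obj, adr, adr_type, alias_idx,
  //                                            expected, new_val, value_type,
  //                                            T_OBJECT, IN_HEAP | MO_SEQ_CST);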

  int next_monitor();
  Node* insert_mem_bar(int opcode, Node* precedent = NULL);
  Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = NULL);
  // Optional 'precedent' is appended as an extra edge, to force ordering.
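
  // Usage sketch (illustrative): a release barrier before a volatile store is
  // typically inserted as
  //   insert_mem_bar(Op_MemBarRelease);
  // and an acquire barrier after a volatile load as
  //   insert_mem_bar(Op_MemBarAcquire);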
  FastLockNode* shared_lock(Node* obj);
  void shared_unlock(Node* box, Node* obj);
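
  // Sketch (hedged; mirrors the parser's monitor handling, with the box/obj
  // accessors assumed from SafePointNode): a synchronized region pairs these
  // up, roughly
  //   FastLockNode* flock = shared_lock(obj);                              // monitorenter
  //   ...
  //   shared_unlock(map()->peek_monitor_box(), map()->peek_monitor_obj()); // monitorexit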

  // helper functions for the fast path/slow path idioms
  Node* fast_and_slow(Node* in, const Type* result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc* slow_call_type, Node* slow_arg, Klass* ex_klass, Node* slow_result);

  // Generate an instance-of idiom. Used by both the instance-of bytecode
  // and the reflective instance-of call.
  Node* gen_instanceof(Node* subobj, Node* superkls, bool safe_for_replace = false);
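
  // Usage sketch (illustrative; 'obj' and 'klass' are assumed locals, with
  // 'klass' a ciKlass*): the instanceof bytecode lowers roughly to
  //   Node* superklass = makecon(TypeKlassPtr::make(klass));
  //   Node* hit = gen_instanceof(obj, superklass, true);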

  // Generate a check-cast idiom. Used by both the check-cast bytecode
  // and the array-store bytecode.
  Node* gen_checkcast(Node* subobj, Node* superkls, Node* *failure_control = NULL, bool never_null = false);
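
  // Usage sketch (illustrative): the checkcast bytecode lowers roughly to
  //   Node* cast = gen_checkcast(obj, superklass);
  // Leaving 'failure_control' at its NULL default lets the kit handle the
  // failing path itself rather than branching to a caller-supplied target
  // (assumption based on the default argument).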

  Node* is_always_locked(Node* obj);
  void gen_value_type_guard(Node* obj, int nargs = 0);
  void gen_value_array_null_guard(Node* ary, Node* val, int nargs);
  Node* load_lh_array_tag(Node* kls);
  Node* gen_lh_array_test(Node* kls, unsigned int lh_value);

  Node* gen_subtype_check(Node* subklass, Node* superklass) {
    MergeMemNode* mem = merged_memory();
    Node* ctrl = control();
    Node* n = Phase::gen_subtype_check(subklass, superklass, &ctrl, mem, &_gvn);
    set_control(ctrl);
    return n;
  }
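
  // Note: this wrapper delegates to Phase::gen_subtype_check and re-installs
  // the (possibly updated) control so the kit's state stays consistent. Per
  // the usual GraphKit contract (an assumption here), control() is left on the
  // passing path and the returned node is the not-a-subtype control path.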

  // Exact type check used for predicted calls and casts.
  // Rewrites (*casted_receiver) to be cast to the stronger type.
  // (Caller is responsible for doing replace_in_map.)
  Node* type_check_receiver(Node* receiver, ciKlass* klass, float prob,
                            Node* *casted_receiver);
  Node* type_check(Node* recv_klass, const TypeKlassPtr* tklass, float prob);
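
  // Usage sketch (illustrative, following the predicted-call pattern;
  // 'predicted_klass' and 'prob' are assumed locals):
  //   Node* casted = NULL;
  //   Node* slow_ctl = type_check_receiver(receiver, predicted_klass, prob, &casted);
  //   // fast path: use 'casted' (now exactly typed) and call replace_in_map();
  //   // 'slow_ctl' is the control path where the prediction failed.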

  // Inexact type check used for predicted calls.
  Node* subtype_check_receiver(Node* receiver, ciKlass* klass,