  Node* access_atomic_xchg_at(Node* ctl,
                              Node* obj,
                              Node* adr,
                              const TypePtr* adr_type,
                              int alias_idx,
                              Node* new_val,
                              const Type* value_type,
                              BasicType bt,
                              DecoratorSet decorators);

  Node* access_atomic_add_at(Node* ctl,
                             Node* obj,
                             Node* adr,
                             const TypePtr* adr_type,
                             int alias_idx,
                             Node* new_val,
                             const Type* value_type,
                             BasicType bt,
                             DecoratorSet decorators);

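  // Illustrative sketch (not part of the original header): a GraphKit-derived caller,
  // e.g. an unsafe intrinsic, might emit an atomic exchange along these lines, where
  // 'obj', 'adr', 'adr_type', 'alias_idx', 'new_val', 'value_type' and 'decorators'
  // are placeholders the caller has already built:
  //   Node* old_val = access_atomic_xchg_at(control(), obj, adr, adr_type, alias_idx,
  //                                         new_val, value_type, T_OBJECT, decorators);
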
  void access_clone(Node* ctl, Node* src, Node* dst, Node* size, bool is_array);

  // Return addressing for an array element.
  Node* array_element_address(Node* ary, Node* idx, BasicType elembt,
                              // Optional constraint on the array size:
                              const TypeInt* sizetype = NULL,
                              // Optional control dependency (for example, on range check)
                              Node* ctrl = NULL);

  // Return a load of array element at idx.
  Node* load_array_element(Node* ctl, Node* ary, Node* idx, const TypeAryPtr* arytype);

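  // Illustrative sketch (not part of the original header): a typical array read, with
  // 'ary' and 'idx' as placeholder nodes for an int[] array and its index:
  //   const TypeAryPtr* arytype = _gvn.type(ary)->is_aryptr();
  //   Node* val = load_array_element(control(), ary, idx, arytype);
  // or, when only the address is needed (e.g. for a store):
  //   Node* adr = array_element_address(ary, idx, T_INT, arytype->size(), control());
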
  //---------------- Dtrace support --------------------
  void make_dtrace_method_entry_exit(ciMethod* method, bool is_entry);
  void make_dtrace_method_entry(ciMethod* method) {
    make_dtrace_method_entry_exit(method, true);
  }
  void make_dtrace_method_exit(ciMethod* method) {
    make_dtrace_method_entry_exit(method, false);
  }

  // ...

  // Optional 'precedent' is appended as an extra edge, to force ordering.
  Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = NULL);
  FastLockNode* shared_lock(Node* obj);
  void shared_unlock(Node* box, Node* obj);

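  // Illustrative sketch (not part of the original header): a synchronized region is
  // typically bracketed with these helpers; 'obj' is a placeholder for the lock object,
  // and the unlock side reuses the monitor pushed onto the JVM state by shared_lock:
  //   shared_lock(obj);
  //   // ... emit the code guarded by the monitor ...
  //   shared_unlock(map()->peek_monitor_box(), map()->peek_monitor_obj());
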
  // helper functions for the fast path/slow path idioms
  Node* fast_and_slow(Node* in, const Type* result_type, Node* null_result,
                      IfNode* fast_test, Node* fast_result,
                      address slow_call, const TypeFunc* slow_call_type, Node* slow_arg,
                      Klass* ex_klass, Node* slow_result);

  // Generate an instance-of idiom. Used by both the instance-of bytecode
  // and the reflective instance-of call.
  Node* gen_instanceof(Node* subobj, Node* superkls, bool safe_for_replace = false);

  // Generate a check-cast idiom. Used by both the check-cast bytecode
  // and the array-store bytecode.
  Node* gen_checkcast(Node* subobj, Node* superkls,
                      Node** failure_control = NULL);

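  // Illustrative sketch (not part of the original header): both helpers take the oop to
  // test and a klass node; 'obj' and 'tk' (a ciKlass*) are placeholders from the caller:
  //   Node* kls = makecon(TypeKlassPtr::make(tk));
  //   Node* hit = gen_instanceof(obj, kls);   // yields 0/1
  //   Node* cst = gen_checkcast(obj, kls);    // yields obj cast to tk, trapping/throwing otherwise
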
  Node* is_always_locked(Node* obj);
  Node* gen_value_type_test(Node* kls);
  void gen_value_type_guard(Node* obj, int nargs = 0);
  void gen_value_type_array_guard(Node* ary, Node* obj, Node* elem_klass = NULL);
  void gen_flattened_array_guard(Node* ary, int nargs = 0);
  Node* gen_lh_array_test(Node* kls, unsigned int lh_value);

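  // Generate the subtype check idiom: on return, control() is set to the path on which
  // the check succeeds, and the returned node is the not-subtype control path.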
  Node* gen_subtype_check(Node* subklass, Node* superklass) {
    MergeMemNode* mem = merged_memory();
    Node* ctrl = control();
    Node* n = Phase::gen_subtype_check(subklass, superklass, &ctrl, mem, &_gvn);
    set_control(ctrl);
    return n;
  }

  // Exact type check used for predicted calls and casts.
  // Rewrites (*casted_receiver) to be cast to the stronger type.
  // (Caller is responsible for doing replace_in_map.)
  Node* type_check_receiver(Node* receiver, ciKlass* klass, float prob,
                            Node** casted_receiver);
  Node* type_check(Node* recv_klass, const TypeKlassPtr* tklass, float prob);

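  // Illustrative sketch (not part of the original header): a predicted call site might
  // guard on the profiled receiver type roughly like this, with 'receiver', 'klass' and
  // 'prob' supplied by the caller:
  //   Node* casted_receiver = receiver;
  //   Node* slow_ctl = type_check_receiver(receiver, klass, prob, &casted_receiver);
  //   // fast path: continue with casted_receiver (caller does replace_in_map);
  //   // slow path: control taken from slow_ctl when the exact type check fails
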
  // implementation of object creation
  Node* set_output_for_allocation(AllocateNode* alloc,