// --- Fragment: interior of an Unsafe get/put intrinsic (LibraryCallKit) ---
// NOTE(review): this hunk starts and ends mid-function; the comments below
// describe only what is visible here. Shenandoah-patched variant.
2678 // by oopDesc::field_base.
2679 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2680 "fieldOffset must be byte-scaled");
2681 // 32-bit machines ignore the high half!
2682 offset = ConvL2X(offset);
2683 adr = make_unsafe_address(base, offset, is_store);
2684 heap_base_oop = base;
2685 val = is_store ? argument(4) : NULL;
2686 } else {
// Raw-address form: no base oop, the long argument is the machine address.
2687 Node* ptr = argument(1); // type: long
2688 ptr = ConvL2X(ptr); // adjust Java long to machine word
2689 adr = make_unsafe_address(NULL, ptr, is_store);
2690 val = is_store ? argument(3) : NULL;
2691 }
2692
2693 // Can base be NULL? Otherwise, always on-heap access.
2694 bool can_access_non_heap = TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop));
2695
2696 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2697
// Shenandoah: when the address is an AddP whose Base and Address inputs
// coincide, strip any read barrier off the base and check whether it is a
// constant java.lang.Class mirror with the offset lying at or beyond the
// mirror's instance-field area. If so, refine adr_type with the constant
// base so the access gets a precise alias class. Presumably this recovers
// static-field alias precision that the barrier node would otherwise hide
// -- TODO confirm against the Shenandoah barrier design notes.
2698 if (UseShenandoahGC && adr->is_AddP() &&
2699 adr->in(AddPNode::Base) == adr->in(AddPNode::Address)) {
2700 Node* base = ShenandoahBarrierNode::skip_through_barrier(adr->in(AddPNode::Base));
2701 const TypeInstPtr* base_t = _gvn.type(base)->isa_instptr();
2702 if (base_t != NULL &&
2703 base_t->const_oop() != NULL &&
2704 base_t->klass() == ciEnv::current()->Class_klass() &&
2705 adr_type->is_instptr()->offset() >= base_t->klass()->as_instance_klass()->size_helper() * wordSize) {
2706 adr_type = base_t->add_offset(adr_type->is_instptr()->offset());
2707 }
2708 }
2709
2710 // Try to categorize the address.
2711 Compile::AliasType* alias_type = C->alias_type(adr_type);
2712 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2713
// Unsafe access aimed at klass metadata or the array-length slot is rejected;
// the caller falls back to the non-intrinsic path.
2714 if (alias_type->adr_type() == TypeInstPtr::KLASS ||
2715 alias_type->adr_type() == TypeAryPtr::RANGE) {
2716 return false; // not supported
2717 }
2718
// Detect a type-mismatched access (declared field type vs. accessor type).
2719 bool mismatched = false;
2720 BasicType bt = alias_type->basic_type();
2721 if (bt != T_ILLEGAL) {
2722 assert(alias_type->adr_type()->is_oopptr(), "should be on-heap access");
2723 if (bt == T_BYTE && adr_type->isa_aryptr()) {
2724 // Alias type doesn't differentiate between byte[] and boolean[]).
2725 // Use address type to get the element type.
2726 bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
2727 }
2728 if (bt == T_ARRAY || bt == T_NARROWOOP) {
2729 // accessing an array field with getObject is not a mismatch
// --- Fragment: tail of the SHA/DigestBase intrinsic predicate helper ---
// NOTE(review): the function's head (and the lookup of klass_SHA) is outside
// this hunk; comments describe only the visible logic.
7059 if ((klass_SHA == NULL) || !klass_SHA->is_loaded()) {
7060 // if none of SHA/SHA2/SHA5 is loaded, we never take the intrinsic fast path
// Disable the intrinsic path: return the incoming control edge to the
// caller while parking this kit's control at top().
7061 Node* ctrl = control();
7062 set_control(top()); // no intrinsic path
7063 return ctrl;
7064 }
7065 ciInstanceKlass* instklass_SHA = klass_SHA->as_instance_klass();
7066
// Emit 'digestBaseObj instanceof <SHA klass>' and guard on it NOT holding
// (instanceof result != 1). The guard's false-path projection is returned
// so the caller can route non-SHA receivers to the slow path.
7067 Node* instofSHA = gen_instanceof(digestBaseObj, makecon(TypeKlassPtr::make(instklass_SHA)));
7068 Node* cmp_instof = _gvn.transform(new (C) CmpINode(instofSHA, intcon(1)));
7069 Node* bool_instof = _gvn.transform(new (C) BoolNode(cmp_instof, BoolTest::ne));
7070 Node* instof_false = generate_guard(bool_instof, NULL, PROB_MIN);
7071
7072 return instof_false; // even if it is NULL
7073 }
7074
// Intrinsic for a boolean-profiling helper (presumably
// MethodHandleImpl.profileBoolean -- confirm against the intrinsic table).
// NOTE(review): this hunk ends mid-function; only the constant-folding
// prologue is visible here.
7075 bool LibraryCallKit::inline_profileBoolean() {
7076 Node* counts = argument(1);
7077 const TypeAryPtr* ary = NULL;
7078 ciArray* aobj = NULL;
// Shenandoah sanity check: a read barrier on 'counts' must not hide a
// constant -- if the barrier-stripped node is a constant, the node itself
// must already be one, otherwise the fold below would be missed.
7079 assert(!(ShenandoahBarrierNode::skip_through_barrier(counts)->is_Con() && !counts->is_Con()), "barrier prevents optimization");
// Constant-fold only when the profile array is a known constant int[2].
7080 if (counts->is_Con()
7081 && (ary = counts->bottom_type()->isa_aryptr()) != NULL
7082 && (aobj = ary->const_oop()->as_array()) != NULL
7083 && (aobj->length() == 2)) {
7084 // Profile is int[2] where [0] and [1] correspond to false and true value occurrences respectively.
7085 jint false_cnt = aobj->element_value(0).as_int();
7086 jint true_cnt = aobj->element_value(1).as_int();
7087
// Record the observed counts in the compilation log, if one is attached.
7088 if (C->log() != NULL) {
7089 C->log()->elem("observe source='profileBoolean' false='%d' true='%d'",
7090 false_cnt, true_cnt);
7091 }
7092
// Profile says this site never executed: emit a deopt that re-interprets
// if it ever runs, and report the intrinsic as handled.
7093 if (false_cnt + true_cnt == 0) {
7094 // According to profile, never executed.
7095 uncommon_trap_exact(Deoptimization::Reason_intrinsic,
7096 Deoptimization::Action_reinterpret);
7097 return true;
7098 }
7099
|
// --- Fragment: interior of an Unsafe get/put intrinsic (LibraryCallKit) ---
// NOTE(review): this hunk starts and ends mid-function; the comments below
// describe only what is visible here. Baseline (non-Shenandoah) variant of
// the hunk earlier in this listing.
2678 // by oopDesc::field_base.
2679 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2680 "fieldOffset must be byte-scaled");
2681 // 32-bit machines ignore the high half!
2682 offset = ConvL2X(offset);
2683 adr = make_unsafe_address(base, offset, is_store);
2684 heap_base_oop = base;
2685 val = is_store ? argument(4) : NULL;
2686 } else {
// Raw-address form: no base oop, the long argument is the machine address.
2687 Node* ptr = argument(1); // type: long
2688 ptr = ConvL2X(ptr); // adjust Java long to machine word
2689 adr = make_unsafe_address(NULL, ptr, is_store);
2690 val = is_store ? argument(3) : NULL;
2691 }
2692
2693 // Can base be NULL? Otherwise, always on-heap access.
2694 bool can_access_non_heap = TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop));
2695
2696 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2697
2698 // Try to categorize the address.
2699 Compile::AliasType* alias_type = C->alias_type(adr_type);
2700 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2701
// Unsafe access aimed at klass metadata or the array-length slot is rejected;
// the caller falls back to the non-intrinsic path.
2702 if (alias_type->adr_type() == TypeInstPtr::KLASS ||
2703 alias_type->adr_type() == TypeAryPtr::RANGE) {
2704 return false; // not supported
2705 }
2706
// Detect a type-mismatched access (declared field type vs. accessor type).
2707 bool mismatched = false;
2708 BasicType bt = alias_type->basic_type();
2709 if (bt != T_ILLEGAL) {
2710 assert(alias_type->adr_type()->is_oopptr(), "should be on-heap access");
2711 if (bt == T_BYTE && adr_type->isa_aryptr()) {
2712 // Alias type doesn't differentiate between byte[] and boolean[]).
2713 // Use address type to get the element type.
2714 bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
2715 }
2716 if (bt == T_ARRAY || bt == T_NARROWOOP) {
2717 // accessing an array field with getObject is not a mismatch
// --- Fragment: tail of the SHA/DigestBase intrinsic predicate helper ---
// NOTE(review): the function's head (and the lookup of klass_SHA) is outside
// this hunk; comments describe only the visible logic.
7047 if ((klass_SHA == NULL) || !klass_SHA->is_loaded()) {
7048 // if none of SHA/SHA2/SHA5 is loaded, we never take the intrinsic fast path
// Disable the intrinsic path: return the incoming control edge to the
// caller while parking this kit's control at top().
7049 Node* ctrl = control();
7050 set_control(top()); // no intrinsic path
7051 return ctrl;
7052 }
7053 ciInstanceKlass* instklass_SHA = klass_SHA->as_instance_klass();
7054
// Emit 'digestBaseObj instanceof <SHA klass>' and guard on it NOT holding
// (instanceof result != 1). The guard's false-path projection is returned
// so the caller can route non-SHA receivers to the slow path.
7055 Node* instofSHA = gen_instanceof(digestBaseObj, makecon(TypeKlassPtr::make(instklass_SHA)));
7056 Node* cmp_instof = _gvn.transform(new (C) CmpINode(instofSHA, intcon(1)));
7057 Node* bool_instof = _gvn.transform(new (C) BoolNode(cmp_instof, BoolTest::ne));
7058 Node* instof_false = generate_guard(bool_instof, NULL, PROB_MIN);
7059
7060 return instof_false; // even if it is NULL
7061 }
7062
// Intrinsic for a boolean-profiling helper (presumably
// MethodHandleImpl.profileBoolean -- confirm against the intrinsic table).
// NOTE(review): this hunk ends mid-function; only the constant-folding
// prologue is visible here.
7063 bool LibraryCallKit::inline_profileBoolean() {
7064 Node* counts = argument(1);
7065 const TypeAryPtr* ary = NULL;
7066 ciArray* aobj = NULL;
// Constant-fold only when the profile array is a known constant int[2].
7067 if (counts->is_Con()
7068 && (ary = counts->bottom_type()->isa_aryptr()) != NULL
7069 && (aobj = ary->const_oop()->as_array()) != NULL
7070 && (aobj->length() == 2)) {
7071 // Profile is int[2] where [0] and [1] correspond to false and true value occurrences respectively.
7072 jint false_cnt = aobj->element_value(0).as_int();
7073 jint true_cnt = aobj->element_value(1).as_int();
7074
// Record the observed counts in the compilation log, if one is attached.
7075 if (C->log() != NULL) {
7076 C->log()->elem("observe source='profileBoolean' false='%d' true='%d'",
7077 false_cnt, true_cnt);
7078 }
7079
// Profile says this site never executed: emit a deopt that re-interprets
// if it ever runs, and report the intrinsic as handled.
7080 if (false_cnt + true_cnt == 0) {
7081 // According to profile, never executed.
7082 uncommon_trap_exact(Deoptimization::Reason_intrinsic,
7083 Deoptimization::Action_reinterpret);
7084 return true;
7085 }
7086
7086
|