21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "compiler/compileLog.hpp"
27 #include "gc/shared/barrierSet.hpp"
28 #include "gc/shared/c2/barrierSetC2.hpp"
29 #include "memory/allocation.inline.hpp"
30 #include "opto/addnode.hpp"
31 #include "opto/callnode.hpp"
32 #include "opto/cfgnode.hpp"
33 #include "opto/loopnode.hpp"
34 #include "opto/matcher.hpp"
35 #include "opto/movenode.hpp"
36 #include "opto/mulnode.hpp"
37 #include "opto/opcodes.hpp"
38 #include "opto/phaseX.hpp"
39 #include "opto/subnode.hpp"
40 #include "runtime/sharedRuntime.hpp"
41 #include "utilities/macros.hpp"
42 #if INCLUDE_SHENANDOAHGC
43 #include "gc/shenandoah/c2/shenandoahSupport.hpp"
44 #endif
45
46 // Portions of code courtesy of Clifford Click
47
48 // Optimization - Graph Style
49
50 #include "math.h"
51
52 //=============================================================================
53 //------------------------------Identity---------------------------------------
54 // If right input is a constant 0, return the left input.
55 Node* SubNode::Identity(PhaseGVN* phase) {
56 assert(in(1) != this, "Must already have called Value");
57 assert(in(2) != this, "Must already have called Value");
58
59 // Remove double negation
60 const Type *zero = add_id();
61 if( phase->type( in(1) )->higher_equal( zero ) &&
62 in(2)->Opcode() == Opcode() &&
63 phase->type( in(2)->in(1) )->higher_equal( zero ) ) {
64 return in(2)->in(2);
924
925 // x.getClass() == int.class can never be true (for all primitive types)
926 // Return a ConP(NULL) node for this case.
927 if (mirror_type->is_classless()) {
928 return phase->makecon(TypePtr::NULL_PTR);
929 }
930
931 // return the ConP(Foo.klass)
932 assert(mirror_type->is_klass(), "mirror_type should represent a Klass*");
933 return phase->makecon(TypeKlassPtr::make(mirror_type->as_klass()));
934 }
935
936 //------------------------------Ideal------------------------------------------
937 // Normalize comparisons between Java mirror loads to compare the klass instead.
938 //
939 // Also check for the case of comparing an unknown klass loaded from the primary
940 // super-type array vs a known klass with no subtypes. This amounts to
941 // checking to see if an unknown klass subtypes a known klass with no subtypes;
942 // this only happens on an exact match. We can shorten this test by 1 load.
943 Node *CmpPNode::Ideal( PhaseGVN *phase, bool can_reshape ) {
944 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
945 #if INCLUDE_SHENANDOAHGC
946 if (UseShenandoahGC) {
947 Node* in1 = in(1);
948 Node* in2 = in(2);
949 if (in1->bottom_type() == TypePtr::NULL_PTR) {
950 in2 = bs->step_over_gc_barrier(in2);
951 }
952 if (in2->bottom_type() == TypePtr::NULL_PTR) {
953 in1 = bs->step_over_gc_barrier(in1);
954 }
955 PhaseIterGVN* igvn = phase->is_IterGVN();
956 if (in1 != in(1)) {
957 if (igvn != NULL) {
958 set_req_X(1, in1, igvn);
959 } else {
960 set_req(1, in1);
961 }
962 assert(in2 == in(2), "only one change");
963 return this;
964 }
965 if (in2 != in(2)) {
966 if (igvn != NULL) {
967 set_req_X(2, in2, igvn);
968 } else {
969 set_req(2, in2);
970 }
971 return this;
972 }
973 }
974 #endif
975
976 // Normalize comparisons between Java mirrors into comparisons of the low-
977 // level klass, where a dependent load could be shortened.
978 //
979 // The new pattern has a nice effect of matching the same pattern used in the
980 // fast path of instanceof/checkcast/Class.isInstance(), which allows
981 // redundant exact type check be optimized away by GVN.
982 // For example, in
983 // if (x.getClass() == Foo.class) {
984 // Foo foo = (Foo) x;
985 // // ... use a ...
986 // }
987 // a CmpPNode could be shared between if_acmpne and checkcast
988 {
989 Node* k1 = isa_java_mirror_load(phase, in(1));
990 Node* k2 = isa_java_mirror_load(phase, in(2));
991 Node* conk2 = isa_const_java_mirror(phase, in(2));
992
993 if (k1 && (k2 || conk2)) {
994 Node* lhs = k1;
|
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "compiler/compileLog.hpp"
27 #include "gc/shared/barrierSet.hpp"
28 #include "gc/shared/c2/barrierSetC2.hpp"
29 #include "memory/allocation.inline.hpp"
30 #include "opto/addnode.hpp"
31 #include "opto/callnode.hpp"
32 #include "opto/cfgnode.hpp"
33 #include "opto/loopnode.hpp"
34 #include "opto/matcher.hpp"
35 #include "opto/movenode.hpp"
36 #include "opto/mulnode.hpp"
37 #include "opto/opcodes.hpp"
38 #include "opto/phaseX.hpp"
39 #include "opto/subnode.hpp"
40 #include "runtime/sharedRuntime.hpp"
41
42 // Portions of code courtesy of Clifford Click
43
44 // Optimization - Graph Style
45
46 #include "math.h"
47
48 //=============================================================================
49 //------------------------------Identity---------------------------------------
50 // If right input is a constant 0, return the left input.
51 Node* SubNode::Identity(PhaseGVN* phase) {
52 assert(in(1) != this, "Must already have called Value");
53 assert(in(2) != this, "Must already have called Value");
54
55 // Remove double negation
56 const Type *zero = add_id();
57 if( phase->type( in(1) )->higher_equal( zero ) &&
58 in(2)->Opcode() == Opcode() &&
59 phase->type( in(2)->in(1) )->higher_equal( zero ) ) {
60 return in(2)->in(2);
920
921 // x.getClass() == int.class can never be true (for all primitive types)
922 // Return a ConP(NULL) node for this case.
923 if (mirror_type->is_classless()) {
924 return phase->makecon(TypePtr::NULL_PTR);
925 }
926
927 // return the ConP(Foo.klass)
928 assert(mirror_type->is_klass(), "mirror_type should represent a Klass*");
929 return phase->makecon(TypeKlassPtr::make(mirror_type->as_klass()));
930 }
931
932 //------------------------------Ideal------------------------------------------
933 // Normalize comparisons between Java mirror loads to compare the klass instead.
934 //
935 // Also check for the case of comparing an unknown klass loaded from the primary
936 // super-type array vs a known klass with no subtypes. This amounts to
937 // checking to see if an unknown klass subtypes a known klass with no subtypes;
938 // this only happens on an exact match. We can shorten this test by 1 load.
939 Node *CmpPNode::Ideal( PhaseGVN *phase, bool can_reshape ) {
940 Node *ideal = BarrierSet::barrier_set()->barrier_set_c2()->ideal_node(phase, this, can_reshape);
941 if (ideal != NULL) {
942 return ideal;
943 }
944
945 // Normalize comparisons between Java mirrors into comparisons of the low-
946 // level klass, where a dependent load could be shortened.
947 //
948 // The new pattern has a nice effect of matching the same pattern used in the
949 // fast path of instanceof/checkcast/Class.isInstance(), which allows
950 // redundant exact type check be optimized away by GVN.
951 // For example, in
952 // if (x.getClass() == Foo.class) {
953 // Foo foo = (Foo) x;
954 // // ... use a ...
955 // }
956 // a CmpPNode could be shared between if_acmpne and checkcast
957 {
958 Node* k1 = isa_java_mirror_load(phase, in(1));
959 Node* k2 = isa_java_mirror_load(phase, in(2));
960 Node* conk2 = isa_const_java_mirror(phase, in(2));
961
962 if (k1 && (k2 || conk2)) {
963 Node* lhs = k1;
|