31 #include "code/exceptionHandlerTable.hpp"
32 #include "compiler/compilerOracle.hpp"
33 #include "compiler/compileBroker.hpp"
34 #include "libadt/dict.hpp"
35 #include "libadt/vectset.hpp"
36 #include "jfr/jfrEvents.hpp"
37 #include "memory/resourceArea.hpp"
38 #include "oops/methodData.hpp"
39 #include "opto/idealGraphPrinter.hpp"
40 #include "opto/phasetype.hpp"
41 #include "opto/phase.hpp"
42 #include "opto/regmask.hpp"
43 #include "runtime/deoptimization.hpp"
44 #include "runtime/timerTrace.hpp"
45 #include "runtime/vmThread.hpp"
46 #include "utilities/ticks.hpp"
47
48 class AddPNode;
49 class Block;
50 class Bundle;
51 class C2Compiler;
52 class CallGenerator;
53 class CloneMap;
54 class ConnectionGraph;
55 class InlineTree;
56 class Int_Array;
57 class LoadBarrierNode;
58 class Matcher;
59 class MachConstantNode;
60 class MachConstantBaseNode;
61 class MachNode;
62 class MachOper;
63 class MachSafePointNode;
64 class Node;
65 class Node_Array;
66 class Node_Notes;
67 class NodeCloneInfo;
68 class OptoReg;
69 class PhaseCFG;
70 class PhaseGVN;
71 class PhaseIterGVN;
72 class PhaseRegAlloc;
73 class PhaseCCP;
74 class PhaseCCP_DCE;
75 class RootNode;
76 class relocInfo;
77 class Scope;
78 class StartNode;
79 class SafePointNode;
80 class JVMState;
81 class Type;
82 class TypeData;
83 class TypeInt;
84 class TypePtr;
85 class TypeOopPtr;
86 class TypeFunc;
87 class Unique_Node_List;
88 class nmethod;
89 class WarmCallInfo;
90 class Node_Stack;
91 struct Final_Reshape_Counts;
92
// Mode selector for the loop optimization passes. The explicit values
// match the original implicit enumerator numbering (0..6).
enum LoopOptsMode {
  LoopOptsDefault              = 0,  // run the normal set of loop optimizations
  LoopOptsNone                 = 1,  // perform no loop optimizations
  LoopOptsShenandoahExpand     = 2,  // Shenandoah-specific expansion pass
  LoopOptsShenandoahPostExpand = 3,  // Shenandoah-specific pass after expansion
  LoopOptsSkipSplitIf          = 4,  // loop opts, but skip the split-if transform
  LoopOptsVerify               = 5,  // verification-only run
  LoopOptsLastRound            = 6   // final round of loop optimizations
};
102
103 typedef unsigned int node_idx_t; // Unsigned type used for node indices.
104 class NodeCloneInfo {
105 private:
106 uint64_t _idx_clone_orig;
363 const bool _do_escape_analysis; // Do escape analysis.
364 const bool _eliminate_boxing; // Do boxing elimination.
365 ciMethod* _method; // The method being compiled.
366 int _entry_bci; // entry bci for osr methods.
367 const TypeFunc* _tf; // My kind of signature
368 InlineTree* _ilt; // Ditto (temporary).
369 address _stub_function; // VM entry for stub being compiled, or NULL
370 const char* _stub_name; // Name of stub or adapter being compiled, or NULL
371 address _stub_entry_point; // Compile code entry for generated stub, or NULL
372
373 // Control of this compilation.
374 int _max_inline_size; // Max inline size for this compilation
375 int _freq_inline_size; // Max hot method inline size for this compilation
376 int _fixed_slots; // count of frame slots not allocated by the register
377 // allocator i.e. locks, original deopt pc, etc.
378 uintx _max_node_limit; // Max unique node count during a single compilation.
379 // For deopt
380 int _orig_pc_slot;
381 int _orig_pc_slot_offset_in_bytes;
382
383 int _major_progress; // Count of something big happening
384 bool _inlining_progress; // progress doing incremental inlining?
385 bool _inlining_incrementally;// Are we doing incremental inlining (post parse)
386 bool _do_cleanup; // Cleanup is needed before proceeding with incremental inlining
387 bool _has_loops; // True if the method _may_ have some loops
388 bool _has_split_ifs; // True if the method _may_ have some split-if
389 bool _has_unsafe_access; // True if the method _may_ produce faults in unsafe loads or stores.
390 bool _has_stringbuilder; // True StringBuffers or StringBuilders are allocated
391 bool _has_boxed_value; // True if a boxed object is allocated
392 bool _has_reserved_stack_access; // True if the method or an inlined method is annotated with ReservedStackAccess
393 uint _max_vector_size; // Maximum size of generated vectors
394 bool _clear_upper_avx; // Clear upper bits of ymm registers using vzeroupper
395 uint _trap_hist[trapHistLength]; // Cumulative traps
396 bool _trap_can_recompile; // Have we emitted a recompiling trap?
397 uint _decompile_count; // Cumulative decompilation counts.
398 bool _do_inlining; // True if we intend to do inlining
399 bool _do_scheduling; // True if we intend to do scheduling
400 bool _do_freq_based_layout; // True if we intend to do frequency based block layout
401 bool _do_count_invocations; // True if we generate code to count invocations
402 bool _do_method_data_update; // True if we generate code to update MethodData*s
413 #endif
414 bool _has_irreducible_loop; // Found irreducible loops
415 // JSR 292
416 bool _has_method_handle_invokes; // True if this method has MethodHandle invokes.
417 RTMState _rtm_state; // State of Restricted Transactional Memory usage
418 int _loop_opts_cnt; // loop opts round
419
420 // Compilation environment.
421 Arena _comp_arena; // Arena with lifetime equivalent to Compile
422 void* _barrier_set_state; // Potential GC barrier state for Compile
423 ciEnv* _env; // CI interface
424 DirectiveSet* _directive; // Compiler directive
425 CompileLog* _log; // from CompilerThread
426 const char* _failure_reason; // for record_failure/failing pattern
427 GrowableArray<CallGenerator*>* _intrinsics; // List of intrinsics.
428 GrowableArray<Node*>* _macro_nodes; // List of nodes which need to be expanded before matching.
429 GrowableArray<Node*>* _predicate_opaqs; // List of Opaque1 nodes for the loop predicates.
430 GrowableArray<Node*>* _expensive_nodes; // List of nodes that are expensive to compute and that we'd better not let the GVN freely common
431 GrowableArray<Node*>* _range_check_casts; // List of CastII nodes with a range check dependency
432 GrowableArray<Node*>* _opaque4_nodes; // List of Opaque4 nodes that have a default value
433 ConnectionGraph* _congraph;
434 #ifndef PRODUCT
435 IdealGraphPrinter* _printer;
436 #endif
437
438
439 // Node management
440 uint _unique; // Counter for unique Node indices
441 VectorSet _dead_node_list; // Set of dead nodes
442 uint _dead_node_count; // Number of dead nodes; VectorSet::Size() is O(N).
443 // So use this to keep count and make the call O(1).
444 DEBUG_ONLY( Unique_Node_List* _modified_nodes; ) // List of nodes which inputs were modified
445
446 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
447 Arena _node_arena; // Arena for new-space Nodes
448 Arena _old_arena; // Arena for old-space Nodes, lifetime during xform
449 RootNode* _root; // Unique root of compilation, or NULL after bail-out.
450 Node* _top; // Unique top node. (Reset by various phases.)
451
452 Node* _immutable_memory; // Initial memory state
// Trivial accessors: each reads or writes the correspondingly named field.
698 bool do_vector_loop() const { return _do_vector_loop; }
699 void set_do_vector_loop(bool z) { _do_vector_loop = z; }
700 bool use_cmove() const { return _use_cmove; }
701 void set_use_cmove(bool z) { _use_cmove = z; }
702 bool age_code() const { return _age_code; }
703 void set_age_code(bool z) { _age_code = z; }
704 int AliasLevel() const { return _AliasLevel; }
705 bool print_assembly() const { return _print_assembly; }
706 void set_print_assembly(bool z) { _print_assembly = z; }
707 bool print_inlining() const { return _print_inlining; }
708 void set_print_inlining(bool z) { _print_inlining = z; }
709 bool print_intrinsics() const { return _print_intrinsics; }
710 void set_print_intrinsics(bool z) { _print_intrinsics = z; }
711 RTMState rtm_state() const { return _rtm_state; }
712 void set_rtm_state(RTMState s) { _rtm_state = s; }
713 bool use_rtm() const { return (_rtm_state & NoRTM) == 0; } // RTM usable unless the NoRTM bit is set in the state
714 bool profile_rtm() const { return _rtm_state == ProfileRTM; }
715 uint max_node_limit() const { return (uint)_max_node_limit; } // _max_node_limit is uintx; narrowed to uint for callers
716 void set_max_node_limit(uint n) { _max_node_limit = n; }
717
718 // check the CompilerOracle for special behaviours for this compile
719 bool method_has_option(const char * option) {
720 return method() != NULL && method()->has_option(option);
721 }
722
// Debug/diagnostic accessors; compiled out of PRODUCT builds.
723 #ifndef PRODUCT
724 bool trace_opto_output() const { return _trace_opto_output; }
725 bool parsed_irreducible_loop() const { return _parsed_irreducible_loop; }
726 void set_parsed_irreducible_loop(bool z) { _parsed_irreducible_loop = z; }
727 int _in_dump_cnt; // Required for dumping ir nodes.
728 #endif
729 bool has_irreducible_loop() const { return _has_irreducible_loop; }
730 void set_has_irreducible_loop(bool z) { _has_irreducible_loop = z; }
731
732 // JSR 292
733 bool has_method_handle_invokes() const { return _has_method_handle_invokes; }
734 void set_has_method_handle_invokes(bool z) { _has_method_handle_invokes = z; }
735
736 Ticks _latest_stage_start_counter; // Timestamp of the latest stage start; presumably used for phase/event timing — TODO confirm against users
737
819 void remove_range_check_cast(Node* n) {
820 if (_range_check_casts->contains(n)) {
821 _range_check_casts->remove(n);
822 }
823 }
// Indexed access into the list of range-check dependent CastII nodes.
824 Node* range_check_cast_node(int idx) const { return _range_check_casts->at(idx); }
825 int range_check_cast_count() const { return _range_check_casts->length(); }
826 // Remove all range check dependent CastIINodes.
827 void remove_range_check_casts(PhaseIterGVN &igvn);
828
829 void add_opaque4_node(Node* n);
830 void remove_opaque4_node(Node* n) {
831 if (_opaque4_nodes->contains(n)) {
832 _opaque4_nodes->remove(n);
833 }
834 }
// Indexed access into the list of Opaque4 nodes.
835 Node* opaque4_node(int idx) const { return _opaque4_nodes->at(idx); }
836 int opaque4_count() const { return _opaque4_nodes->length(); }
837 void remove_opaque4_nodes(PhaseIterGVN &igvn);
838
839 // remove the opaque nodes that protect the predicates so that the unused checks and
840 // uncommon traps will be eliminated from the graph.
841 void cleanup_loop_predicates(PhaseIterGVN &igvn);
842 bool is_predicate_opaq(Node * n) { // True iff n is a tracked loop-predicate Opaque1 node
843 return _predicate_opaqs->contains(n);
844 }
845
846 // Are there candidate expensive nodes for optimization?
847 bool should_optimize_expensive_nodes(PhaseIterGVN &igvn);
848 // Check whether n1 and n2 are similar
849 static int cmp_expensive_nodes(Node* n1, Node* n2);
850 // Sort expensive nodes to locate similar expensive nodes
851 void sort_expensive_nodes();
852
853 // Compilation environment.
854 Arena* comp_arena() { return &_comp_arena; }
855 ciEnv* env() const { return _env; }
856 CompileLog* log() const { return _log; }
857 bool failing() const { return _env->failing() || _failure_reason != NULL; } // Failed if either the CI env or this Compile recorded a failure
858 const char* failure_reason() const { return (_env->failing()) ? _env->failure_reason() : _failure_reason; } // Env failure reason takes precedence over the locally recorded one
1350 // Dump formatted assembly
1351 void dump_asm(int *pcs = NULL, uint pc_limit = 0) PRODUCT_RETURN;
1352 void dump_pc(int *pcs, int pc_limit, Node *n);
1353
1354 // Verify ADLC assumptions during startup
1355 static void adlc_verification() PRODUCT_RETURN;
1356
1357 // Definitions of pd methods
1358 static void pd_compiler2_init();
1359
1360 // Static parse-time type checking logic for gen_subtype_check:
1361 enum { SSC_always_false, SSC_always_true, SSC_easy_test, SSC_full_test };
1362 int static_subtype_check(ciKlass* superk, ciKlass* subk);
1363
1364 static Node* conv_I2X_index(PhaseGVN* phase, Node* offset, const TypeInt* sizetype,
1365 // Optional control dependency (for example, on range check)
1366 Node* ctrl = NULL);
1367
1368 // Convert integer value to a narrowed long type dependent on ctrl (for example, a range check)
1369 static Node* constrained_convI2L(PhaseGVN* phase, Node* value, const TypeInt* itype, Node* ctrl);
1370
1371 // Auxiliary method for randomized fuzzing/stressing
1372 static bool randomized_select(int count);
1373
1374 // supporting clone_map
1375 CloneMap& clone_map();
1376 void set_clone_map(Dict* d);
1377
1378 bool is_compiling_clinit_for(ciKlass* k);
1379 };
1380
1381 #endif // SHARE_OPTO_COMPILE_HPP
|
31 #include "code/exceptionHandlerTable.hpp"
32 #include "compiler/compilerOracle.hpp"
33 #include "compiler/compileBroker.hpp"
34 #include "libadt/dict.hpp"
35 #include "libadt/vectset.hpp"
36 #include "jfr/jfrEvents.hpp"
37 #include "memory/resourceArea.hpp"
38 #include "oops/methodData.hpp"
39 #include "opto/idealGraphPrinter.hpp"
40 #include "opto/phasetype.hpp"
41 #include "opto/phase.hpp"
42 #include "opto/regmask.hpp"
43 #include "runtime/deoptimization.hpp"
44 #include "runtime/timerTrace.hpp"
45 #include "runtime/vmThread.hpp"
46 #include "utilities/ticks.hpp"
47
48 class AddPNode;
49 class Block;
50 class Bundle;
51 class CallNode;
52 class C2Compiler;
53 class CallGenerator;
54 class CloneMap;
55 class ConnectionGraph;
56 class InlineTree;
57 class Int_Array;
58 class LoadBarrierNode;
59 class Matcher;
60 class MachConstantNode;
61 class MachConstantBaseNode;
62 class MachNode;
63 class MachOper;
64 class MachSafePointNode;
65 class Node;
66 class Node_Array;
67 class Node_Notes;
68 class NodeCloneInfo;
69 class OptoReg;
70 class PhaseCFG;
71 class PhaseGVN;
72 class PhaseIterGVN;
73 class PhaseRegAlloc;
74 class PhaseCCP;
75 class PhaseCCP_DCE;
76 class RootNode;
77 class relocInfo;
78 class Scope;
79 class StartNode;
80 class SafePointNode;
81 class JVMState;
82 class Type;
83 class TypeData;
84 class TypeInt;
85 class TypePtr;
86 class TypeOopPtr;
87 class TypeFunc;
88 class ValueTypeBaseNode;
89 class Unique_Node_List;
90 class nmethod;
91 class WarmCallInfo;
92 class Node_Stack;
93 struct Final_Reshape_Counts;
94
// Mode selector for the loop optimization passes. The explicit values
// match the original implicit enumerator numbering (0..6).
enum LoopOptsMode {
  LoopOptsDefault              = 0,  // run the normal set of loop optimizations
  LoopOptsNone                 = 1,  // perform no loop optimizations
  LoopOptsShenandoahExpand     = 2,  // Shenandoah-specific expansion pass
  LoopOptsShenandoahPostExpand = 3,  // Shenandoah-specific pass after expansion
  LoopOptsSkipSplitIf          = 4,  // loop opts, but skip the split-if transform
  LoopOptsVerify               = 5,  // verification-only run
  LoopOptsLastRound            = 6   // final round of loop optimizations
};
104
105 typedef unsigned int node_idx_t; // Unsigned type used for node indices.
106 class NodeCloneInfo {
107 private:
108 uint64_t _idx_clone_orig;
365 const bool _do_escape_analysis; // Do escape analysis.
366 const bool _eliminate_boxing; // Do boxing elimination.
367 ciMethod* _method; // The method being compiled.
368 int _entry_bci; // entry bci for osr methods.
369 const TypeFunc* _tf; // My kind of signature
370 InlineTree* _ilt; // Ditto (temporary).
371 address _stub_function; // VM entry for stub being compiled, or NULL
372 const char* _stub_name; // Name of stub or adapter being compiled, or NULL
373 address _stub_entry_point; // Compile code entry for generated stub, or NULL
374
375 // Control of this compilation.
376 int _max_inline_size; // Max inline size for this compilation
377 int _freq_inline_size; // Max hot method inline size for this compilation
378 int _fixed_slots; // count of frame slots not allocated by the register
379 // allocator i.e. locks, original deopt pc, etc.
380 uintx _max_node_limit; // Max unique node count during a single compilation.
381 // For deopt
382 int _orig_pc_slot;
383 int _orig_pc_slot_offset_in_bytes;
384
385 // For value type calling convention
386 int _sp_inc_slot;
387 int _sp_inc_slot_offset_in_bytes;
388
389 int _major_progress; // Count of something big happening
390 bool _inlining_progress; // progress doing incremental inlining?
391 bool _inlining_incrementally;// Are we doing incremental inlining (post parse)
392 bool _do_cleanup; // Cleanup is needed before proceeding with incremental inlining
393 bool _has_loops; // True if the method _may_ have some loops
394 bool _has_split_ifs; // True if the method _may_ have some split-if
395 bool _has_unsafe_access; // True if the method _may_ produce faults in unsafe loads or stores.
396 bool _has_stringbuilder; // True StringBuffers or StringBuilders are allocated
397 bool _has_boxed_value; // True if a boxed object is allocated
398 bool _has_reserved_stack_access; // True if the method or an inlined method is annotated with ReservedStackAccess
399 uint _max_vector_size; // Maximum size of generated vectors
400 bool _clear_upper_avx; // Clear upper bits of ymm registers using vzeroupper
401 uint _trap_hist[trapHistLength]; // Cumulative traps
402 bool _trap_can_recompile; // Have we emitted a recompiling trap?
403 uint _decompile_count; // Cumulative decompilation counts.
404 bool _do_inlining; // True if we intend to do inlining
405 bool _do_scheduling; // True if we intend to do scheduling
406 bool _do_freq_based_layout; // True if we intend to do frequency based block layout
407 bool _do_count_invocations; // True if we generate code to count invocations
408 bool _do_method_data_update; // True if we generate code to update MethodData*s
419 #endif
420 bool _has_irreducible_loop; // Found irreducible loops
421 // JSR 292
422 bool _has_method_handle_invokes; // True if this method has MethodHandle invokes.
423 RTMState _rtm_state; // State of Restricted Transactional Memory usage
424 int _loop_opts_cnt; // loop opts round
425
426 // Compilation environment.
427 Arena _comp_arena; // Arena with lifetime equivalent to Compile
428 void* _barrier_set_state; // Potential GC barrier state for Compile
429 ciEnv* _env; // CI interface
430 DirectiveSet* _directive; // Compiler directive
431 CompileLog* _log; // from CompilerThread
432 const char* _failure_reason; // for record_failure/failing pattern
433 GrowableArray<CallGenerator*>* _intrinsics; // List of intrinsics.
434 GrowableArray<Node*>* _macro_nodes; // List of nodes which need to be expanded before matching.
435 GrowableArray<Node*>* _predicate_opaqs; // List of Opaque1 nodes for the loop predicates.
436 GrowableArray<Node*>* _expensive_nodes; // List of nodes that are expensive to compute and that we'd better not let the GVN freely common
437 GrowableArray<Node*>* _range_check_casts; // List of CastII nodes with a range check dependency
438 GrowableArray<Node*>* _opaque4_nodes; // List of Opaque4 nodes that have a default value
439 Unique_Node_List* _value_type_nodes; // List of ValueType nodes
440 ConnectionGraph* _congraph;
441 #ifndef PRODUCT
442 IdealGraphPrinter* _printer;
443 #endif
444
445
446 // Node management
447 uint _unique; // Counter for unique Node indices
448 VectorSet _dead_node_list; // Set of dead nodes
449 uint _dead_node_count; // Number of dead nodes; VectorSet::Size() is O(N).
450 // So use this to keep count and make the call O(1).
451 DEBUG_ONLY( Unique_Node_List* _modified_nodes; ) // List of nodes which inputs were modified
452
453 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
454 Arena _node_arena; // Arena for new-space Nodes
455 Arena _old_arena; // Arena for old-space Nodes, lifetime during xform
456 RootNode* _root; // Unique root of compilation, or NULL after bail-out.
457 Node* _top; // Unique top node. (Reset by various phases.)
458
459 Node* _immutable_memory; // Initial memory state
// Trivial accessors: each reads or writes the correspondingly named field.
705 bool do_vector_loop() const { return _do_vector_loop; }
706 void set_do_vector_loop(bool z) { _do_vector_loop = z; }
707 bool use_cmove() const { return _use_cmove; }
708 void set_use_cmove(bool z) { _use_cmove = z; }
709 bool age_code() const { return _age_code; }
710 void set_age_code(bool z) { _age_code = z; }
711 int AliasLevel() const { return _AliasLevel; }
712 bool print_assembly() const { return _print_assembly; }
713 void set_print_assembly(bool z) { _print_assembly = z; }
714 bool print_inlining() const { return _print_inlining; }
715 void set_print_inlining(bool z) { _print_inlining = z; }
716 bool print_intrinsics() const { return _print_intrinsics; }
717 void set_print_intrinsics(bool z) { _print_intrinsics = z; }
718 RTMState rtm_state() const { return _rtm_state; }
719 void set_rtm_state(RTMState s) { _rtm_state = s; }
720 bool use_rtm() const { return (_rtm_state & NoRTM) == 0; } // RTM usable unless the NoRTM bit is set in the state
721 bool profile_rtm() const { return _rtm_state == ProfileRTM; }
722 uint max_node_limit() const { return (uint)_max_node_limit; } // _max_node_limit is uintx; narrowed to uint for callers
723 void set_max_node_limit(uint n) { _max_node_limit = n; }
724
725 // Support for scalarized value type calling convention
726 bool has_scalarized_args() const { return _method != NULL && _method->has_scalarized_args(); }
727 bool needs_stack_repair() const { return _method != NULL && _method->get_Method()->needs_stack_repair(); } // NOTE(review): assumes get_Method() is non-NULL whenever _method is — confirm
728 int sp_inc_offset() const { return _sp_inc_slot_offset_in_bytes; } // Frame offset (bytes) of the SP-increment slot; see _sp_inc_slot_offset_in_bytes
729
730 // check the CompilerOracle for special behaviours for this compile
731 bool method_has_option(const char * option) {
732 return method() != NULL && method()->has_option(option);
733 }
734
// Debug/diagnostic accessors; compiled out of PRODUCT builds.
735 #ifndef PRODUCT
736 bool trace_opto_output() const { return _trace_opto_output; }
737 bool parsed_irreducible_loop() const { return _parsed_irreducible_loop; }
738 void set_parsed_irreducible_loop(bool z) { _parsed_irreducible_loop = z; }
739 int _in_dump_cnt; // Required for dumping ir nodes.
740 #endif
741 bool has_irreducible_loop() const { return _has_irreducible_loop; }
742 void set_has_irreducible_loop(bool z) { _has_irreducible_loop = z; }
743
744 // JSR 292
745 bool has_method_handle_invokes() const { return _has_method_handle_invokes; }
746 void set_has_method_handle_invokes(bool z) { _has_method_handle_invokes = z; }
747
748 Ticks _latest_stage_start_counter; // Timestamp of the latest stage start; presumably used for phase/event timing — TODO confirm against users
749
831 void remove_range_check_cast(Node* n) {
832 if (_range_check_casts->contains(n)) {
833 _range_check_casts->remove(n);
834 }
835 }
836 Node* range_check_cast_node(int idx) const { return _range_check_casts->at(idx); }
837 int range_check_cast_count() const { return _range_check_casts->length(); }
838 // Remove all range check dependent CastIINodes.
839 void remove_range_check_casts(PhaseIterGVN &igvn);
840
841 void add_opaque4_node(Node* n);
842 void remove_opaque4_node(Node* n) {
843 if (_opaque4_nodes->contains(n)) {
844 _opaque4_nodes->remove(n);
845 }
846 }
847 Node* opaque4_node(int idx) const { return _opaque4_nodes->at(idx); }
848 int opaque4_count() const { return _opaque4_nodes->length(); }
849 void remove_opaque4_nodes(PhaseIterGVN &igvn);
850
851 // Keep track of value type nodes for later processing
852 void add_value_type(Node* n);
853 void remove_value_type(Node* n);
854 void process_value_types(PhaseIterGVN &igvn);
855 bool can_add_value_type() const { return _value_type_nodes != NULL; } // Tracking only possible once _value_type_nodes has been allocated
856
857 // remove the opaque nodes that protect the predicates so that the unused checks and
858 // uncommon traps will be eliminated from the graph.
859 void cleanup_loop_predicates(PhaseIterGVN &igvn);
860 bool is_predicate_opaq(Node * n) { // True iff n is a tracked loop-predicate Opaque1 node
861 return _predicate_opaqs->contains(n);
862 }
863
864 // Are there candidate expensive nodes for optimization?
865 bool should_optimize_expensive_nodes(PhaseIterGVN &igvn);
866 // Check whether n1 and n2 are similar
867 static int cmp_expensive_nodes(Node* n1, Node* n2);
868 // Sort expensive nodes to locate similar expensive nodes
869 void sort_expensive_nodes();
870
871 // Compilation environment.
872 Arena* comp_arena() { return &_comp_arena; }
873 ciEnv* env() const { return _env; }
874 CompileLog* log() const { return _log; }
875 bool failing() const { return _env->failing() || _failure_reason != NULL; } // Failed if either the CI env or this Compile recorded a failure
876 const char* failure_reason() const { return (_env->failing()) ? _env->failure_reason() : _failure_reason; } // Env failure reason takes precedence over the locally recorded one
1368 // Dump formatted assembly
1369 void dump_asm(int *pcs = NULL, uint pc_limit = 0) PRODUCT_RETURN;
1370 void dump_pc(int *pcs, int pc_limit, Node *n);
1371
1372 // Verify ADLC assumptions during startup
1373 static void adlc_verification() PRODUCT_RETURN;
1374
1375 // Definitions of pd methods
1376 static void pd_compiler2_init();
1377
1378 // Static parse-time type checking logic for gen_subtype_check:
1379 enum { SSC_always_false, SSC_always_true, SSC_easy_test, SSC_full_test };
1380 int static_subtype_check(ciKlass* superk, ciKlass* subk);
1381
1382 static Node* conv_I2X_index(PhaseGVN* phase, Node* offset, const TypeInt* sizetype,
1383 // Optional control dependency (for example, on range check)
1384 Node* ctrl = NULL);
1385
1386 // Convert integer value to a narrowed long type dependent on ctrl (for example, a range check)
1387 static Node* constrained_convI2L(PhaseGVN* phase, Node* value, const TypeInt* itype, Node* ctrl);
1388
1389 Node* optimize_acmp(PhaseGVN* phase, Node* a, Node* b);
1390
1391 // Auxiliary method for randomized fuzzing/stressing
1392 static bool randomized_select(int count);
1393
1394 // supporting clone_map
1395 CloneMap& clone_map();
1396 void set_clone_map(Dict* d);
1397
1398 bool is_compiling_clinit_for(ciKlass* k);
1399 };
1400
1401 #endif // SHARE_OPTO_COMPILE_HPP
|