366 int _entry_bci; // entry bci for osr methods.
367 const TypeFunc* _tf; // My kind of signature
368 InlineTree* _ilt; // Ditto (temporary).
369 address _stub_function; // VM entry for stub being compiled, or NULL
370 const char* _stub_name; // Name of stub or adapter being compiled, or NULL
371 address _stub_entry_point; // Compile code entry for generated stub, or NULL
372
373 // Control of this compilation.
374 int _max_inline_size; // Max inline size for this compilation
375 int _freq_inline_size; // Max hot method inline size for this compilation
376 int _fixed_slots; // count of frame slots not allocated by the register
377 // allocator i.e. locks, original deopt pc, etc.
378 uintx _max_node_limit; // Max unique node count during a single compilation.
379 // For deopt
// Frame slot reserved for the original PC, and its byte offset within the
// frame — presumably filled in during code generation; TODO confirm where set.
380 int _orig_pc_slot;
381 int _orig_pc_slot_offset_in_bytes;
382
// NOTE(review): _major_progress is an int used as a saturating-ish count
// (incremented by set_major_progress(), zeroed by clear_major_progress());
// it is deliberately not a bool.
383 int _major_progress; // Count of something big happening
384 bool _inlining_progress; // progress doing incremental inlining?
385 bool _inlining_incrementally;// Are we doing incremental inlining (post parse)
386 bool _has_loops; // True if the method _may_ have some loops
387 bool _has_split_ifs; // True if the method _may_ have some split-if
388 bool _has_unsafe_access; // True if the method _may_ produce faults in unsafe loads or stores.
389 bool _has_stringbuilder; // True StringBuffers or StringBuilders are allocated
390 bool _has_boxed_value; // True if a boxed object is allocated
391 bool _has_reserved_stack_access; // True if the method or an inlined method is annotated with ReservedStackAccess
392 uint _max_vector_size; // Maximum size of generated vectors
393 bool _clear_upper_avx; // Clear upper bits of ymm registers using vzeroupper
394 uint _trap_hist[trapHistLength]; // Cumulative traps
395 bool _trap_can_recompile; // Have we emitted a recompiling trap?
396 uint _decompile_count; // Cumulative decompilation counts.
397 bool _do_inlining; // True if we intend to do inlining
398 bool _do_scheduling; // True if we intend to do scheduling
399 bool _do_freq_based_layout; // True if we intend to do frequency based block layout
400 bool _do_count_invocations; // True if we generate code to count invocations
401 bool _do_method_data_update; // True if we generate code to update MethodData*s
402 bool _do_vector_loop; // True if allowed to execute loop in parallel iterations
403 bool _use_cmove; // True if CMove should be used without profitability analysis
404 bool _age_code; // True if we need to profile code age (decrement the aging counter)
405 int _AliasLevel; // Locally-adjusted version of AliasLevel flag.
636 // Other fixed compilation parameters.
// Read-only accessors for the fixed parameters above. init_tf() enforces
// set-once semantics for _tf via its assert.
637 ciMethod* method() const { return _method; }
638 int entry_bci() const { return _entry_bci; }
// OSR compilations are identified by an entry bci other than InvocationEntryBci.
639 bool is_osr_compilation() const { return _entry_bci != InvocationEntryBci; }
640 bool is_method_compilation() const { return (_method != NULL && !_method->flags().is_native()); }
641 const TypeFunc* tf() const { assert(_tf!=NULL, ""); return _tf; }
642 void init_tf(const TypeFunc* tf) { assert(_tf==NULL, ""); _tf = tf; }
643 InlineTree* ilt() const { return _ilt; }
644 address stub_function() const { return _stub_function; }
645 const char* stub_name() const { return _stub_name; }
646 address stub_entry_point() const { return _stub_entry_point; }
647
648 // Control of this compilation.
// Accessors/mutators for per-compilation control state.
// inlining_progress() and inlining_incrementally() return bool to match
// their bool backing fields (_inlining_progress, _inlining_incrementally);
// they were previously declared int and relied on an implicit bool->int
// conversion (harmless but inconsistent with the setters' bool parameter).
649 int fixed_slots() const { assert(_fixed_slots >= 0, ""); return _fixed_slots; }
650 void set_fixed_slots(int n) { _fixed_slots = n; }
651 int major_progress() const { return _major_progress; }
652 void set_inlining_progress(bool z) { _inlining_progress = z; }
653 bool inlining_progress() const { return _inlining_progress; }
654 void set_inlining_incrementally(bool z) { _inlining_incrementally = z; }
655 bool inlining_incrementally() const { return _inlining_incrementally; }
656 void set_major_progress() { _major_progress++; }
657 void clear_major_progress() { _major_progress = 0; }
658 int max_inline_size() const { return _max_inline_size; }
659 void set_freq_inline_size(int n) { _freq_inline_size = n; }
660 int freq_inline_size() const { return _freq_inline_size; }
661 void set_max_inline_size(int n) { _max_inline_size = n; }
// Accessors/mutators for the _has_* / vector-size properties recorded
// during parsing and optimization.
662 bool has_loops() const { return _has_loops; }
663 void set_has_loops(bool z) { _has_loops = z; }
664 bool has_split_ifs() const { return _has_split_ifs; }
665 void set_has_split_ifs(bool z) { _has_split_ifs = z; }
666 bool has_unsafe_access() const { return _has_unsafe_access; }
667 void set_has_unsafe_access(bool z) { _has_unsafe_access = z; }
668 bool has_stringbuilder() const { return _has_stringbuilder; }
669 void set_has_stringbuilder(bool z) { _has_stringbuilder = z; }
670 bool has_boxed_value() const { return _has_boxed_value; }
671 void set_has_boxed_value(bool z) { _has_boxed_value = z; }
672 bool has_reserved_stack_access() const { return _has_reserved_stack_access; }
673 void set_has_reserved_stack_access(bool z) { _has_reserved_stack_access = z; }
674 uint max_vector_size() const { return _max_vector_size; }
675 void set_max_vector_size(uint s) { _max_vector_size = s; }
1058 _late_inlines.insert_before(0, cg);
1059 }
1060
// Queue a call generator for late (post-parse) inlining of string
// concatenation (StringBuilder/StringBuffer) calls.
1061 void add_string_late_inline(CallGenerator* cg) {
1062 _string_late_inlines.push(cg);
1063 }
1064
// Queue a call generator for late inlining of boxing/unboxing calls.
1065 void add_boxing_late_inline(CallGenerator* cg) {
1066 _boxing_late_inlines.push(cg);
1067 }
1068
// Drop queued late inlines whose call nodes are no longer in the useful set.
1069 void remove_useless_late_inlines(GrowableArray<CallGenerator*>* inlines, Unique_Node_List &useful);
1070
1071 void process_print_inlining();
1072 void dump_print_inlining();
1073
// Returns true when this compilation has exhausted its node budget for
// inlining. Before incremental inlining starts, the unique node count is
// checked against NodeCountInliningCutoff; during incremental inlining the
// live node count is checked against LiveNodeCountInliningCutoff.
1074 bool over_inlining_cutoff() const {
1075 if (!inlining_incrementally()) {
1076 return unique() > (uint)NodeCountInliningCutoff;
1077 } else {
// Give some room for the incremental inlining algorithm to "breathe"
// and avoid thrashing when the live node count is close to the limit:
// live_nodes() isn't accurate during inlining until the dead node
// elimination step happens (see Compile::inline_incrementally), so
// allow a 10% overshoot before cutting inlining off.
1078 return live_nodes() > (uint)LiveNodeCountInliningCutoff * 11 / 10;
1079 }
1080 }
1081
// Counter of method-handle call sites still awaiting late inlining; the
// decrement asserts it never goes below zero.
1082 void inc_number_of_mh_late_inlines() { _number_of_mh_late_inlines++; }
1083 void dec_number_of_mh_late_inlines() { assert(_number_of_mh_late_inlines > 0, "_number_of_mh_late_inlines < 0 !"); _number_of_mh_late_inlines--; }
1084 bool has_mh_late_inlines() const { return _number_of_mh_late_inlines > 0; }
1085
// Driver entry points for incremental inlining and the specialized
// string/boxing late-inline passes (defined in compile.cpp).
1086 void inline_incrementally_one(PhaseIterGVN& igvn);
1087 void inline_incrementally(PhaseIterGVN& igvn);
1088 void inline_string_calls(bool parse_time);
1089 void inline_boxing_calls(PhaseIterGVN& igvn);
1090 bool optimize_loops(PhaseIterGVN& igvn, LoopOptsMode mode);
1091 void remove_root_to_sfpts_edges(PhaseIterGVN& igvn);
1092
1093 // Matching, CFG layout, allocation, code generation
1094 PhaseCFG* cfg() { return _cfg; }
1095 bool select_24_bit_instr() const { return _select_24_bit_instr; }
1096 bool in_24_bit_fp_mode() const { return _in_24_bit_fp_mode; }
1097 bool has_java_calls() const { return _java_calls > 0; }
1098 int java_calls() const { return _java_calls; }
1099 int inner_loops() const { return _inner_loops; }
1100 Matcher* matcher() { return _matcher; }
1101 PhaseRegAlloc* regalloc() { return _regalloc; }
1102 int frame_slots() const { return _frame_slots; }
1103 int frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'
// Each frame slot is one 32-bit word, hence the LogBytesPerInt shift.
1104 int frame_size_in_bytes() const { return _frame_slots << LogBytesPerInt; }
1105 RegMask& FIRST_STACK_mask() { return _FIRST_STACK_mask; }
1106 Arena* indexSet_arena() { return _indexSet_arena; }
|
366 int _entry_bci; // entry bci for osr methods.
367 const TypeFunc* _tf; // My kind of signature
368 InlineTree* _ilt; // Ditto (temporary).
369 address _stub_function; // VM entry for stub being compiled, or NULL
370 const char* _stub_name; // Name of stub or adapter being compiled, or NULL
371 address _stub_entry_point; // Compile code entry for generated stub, or NULL
372
373 // Control of this compilation.
374 int _max_inline_size; // Max inline size for this compilation
375 int _freq_inline_size; // Max hot method inline size for this compilation
376 int _fixed_slots; // count of frame slots not allocated by the register
377 // allocator i.e. locks, original deopt pc, etc.
378 uintx _max_node_limit; // Max unique node count during a single compilation.
379 // For deopt
// Frame slot reserved for the original PC, and its byte offset within the
// frame — presumably filled in during code generation; TODO confirm where set.
380 int _orig_pc_slot;
381 int _orig_pc_slot_offset_in_bytes;
382
// NOTE(review): _major_progress is an int used as a count (incremented by
// set_major_progress(), zeroed by clear_major_progress()), not a bool.
383 int _major_progress; // Count of something big happening
384 bool _inlining_progress; // progress doing incremental inlining?
385 bool _inlining_incrementally;// Are we doing incremental inlining (post parse)
386 bool _do_cleanup; // Cleanup is needed before proceeding with incremental inlining
387 bool _has_loops; // True if the method _may_ have some loops
388 bool _has_split_ifs; // True if the method _may_ have some split-if
389 bool _has_unsafe_access; // True if the method _may_ produce faults in unsafe loads or stores.
390 bool _has_stringbuilder; // True StringBuffers or StringBuilders are allocated
391 bool _has_boxed_value; // True if a boxed object is allocated
392 bool _has_reserved_stack_access; // True if the method or an inlined method is annotated with ReservedStackAccess
393 uint _max_vector_size; // Maximum size of generated vectors
394 bool _clear_upper_avx; // Clear upper bits of ymm registers using vzeroupper
395 uint _trap_hist[trapHistLength]; // Cumulative traps
396 bool _trap_can_recompile; // Have we emitted a recompiling trap?
397 uint _decompile_count; // Cumulative decompilation counts.
398 bool _do_inlining; // True if we intend to do inlining
399 bool _do_scheduling; // True if we intend to do scheduling
400 bool _do_freq_based_layout; // True if we intend to do frequency based block layout
401 bool _do_count_invocations; // True if we generate code to count invocations
402 bool _do_method_data_update; // True if we generate code to update MethodData*s
403 bool _do_vector_loop; // True if allowed to execute loop in parallel iterations
404 bool _use_cmove; // True if CMove should be used without profitability analysis
405 bool _age_code; // True if we need to profile code age (decrement the aging counter)
406 int _AliasLevel; // Locally-adjusted version of AliasLevel flag.
637 // Other fixed compilation parameters.
// Read-only accessors for the fixed parameters above. init_tf() enforces
// set-once semantics for _tf via its assert.
638 ciMethod* method() const { return _method; }
639 int entry_bci() const { return _entry_bci; }
// OSR compilations are identified by an entry bci other than InvocationEntryBci.
640 bool is_osr_compilation() const { return _entry_bci != InvocationEntryBci; }
641 bool is_method_compilation() const { return (_method != NULL && !_method->flags().is_native()); }
642 const TypeFunc* tf() const { assert(_tf!=NULL, ""); return _tf; }
643 void init_tf(const TypeFunc* tf) { assert(_tf==NULL, ""); _tf = tf; }
644 InlineTree* ilt() const { return _ilt; }
645 address stub_function() const { return _stub_function; }
646 const char* stub_name() const { return _stub_name; }
647 address stub_entry_point() const { return _stub_entry_point; }
648
649 // Control of this compilation.
// Accessors/mutators for per-compilation control state.
// inlining_progress(), inlining_incrementally() and do_cleanup() return
// bool to match their bool backing fields; they were previously declared
// int and relied on an implicit bool->int conversion (harmless but
// inconsistent with the setters' bool parameter).
650 int fixed_slots() const { assert(_fixed_slots >= 0, ""); return _fixed_slots; }
651 void set_fixed_slots(int n) { _fixed_slots = n; }
652 int major_progress() const { return _major_progress; }
653 void set_inlining_progress(bool z) { _inlining_progress = z; }
654 bool inlining_progress() const { return _inlining_progress; }
655 void set_inlining_incrementally(bool z) { _inlining_incrementally = z; }
656 bool inlining_incrementally() const { return _inlining_incrementally; }
657 void set_do_cleanup(bool z) { _do_cleanup = z; }
658 bool do_cleanup() const { return _do_cleanup; }
659 void set_major_progress() { _major_progress++; }
660 void clear_major_progress() { _major_progress = 0; }
661 int max_inline_size() const { return _max_inline_size; }
662 void set_freq_inline_size(int n) { _freq_inline_size = n; }
663 int freq_inline_size() const { return _freq_inline_size; }
664 void set_max_inline_size(int n) { _max_inline_size = n; }
// Accessors/mutators for the _has_* / vector-size properties recorded
// during parsing and optimization.
665 bool has_loops() const { return _has_loops; }
666 void set_has_loops(bool z) { _has_loops = z; }
667 bool has_split_ifs() const { return _has_split_ifs; }
668 void set_has_split_ifs(bool z) { _has_split_ifs = z; }
669 bool has_unsafe_access() const { return _has_unsafe_access; }
670 void set_has_unsafe_access(bool z) { _has_unsafe_access = z; }
671 bool has_stringbuilder() const { return _has_stringbuilder; }
672 void set_has_stringbuilder(bool z) { _has_stringbuilder = z; }
673 bool has_boxed_value() const { return _has_boxed_value; }
674 void set_has_boxed_value(bool z) { _has_boxed_value = z; }
675 bool has_reserved_stack_access() const { return _has_reserved_stack_access; }
676 void set_has_reserved_stack_access(bool z) { _has_reserved_stack_access = z; }
677 uint max_vector_size() const { return _max_vector_size; }
678 void set_max_vector_size(uint s) { _max_vector_size = s; }
1061 _late_inlines.insert_before(0, cg);
1062 }
1063
// Queue a call generator for late (post-parse) inlining of string
// concatenation (StringBuilder/StringBuffer) calls.
1064 void add_string_late_inline(CallGenerator* cg) {
1065 _string_late_inlines.push(cg);
1066 }
1067
// Queue a call generator for late inlining of boxing/unboxing calls.
1068 void add_boxing_late_inline(CallGenerator* cg) {
1069 _boxing_late_inlines.push(cg);
1070 }
1071
// Drop queued late inlines whose call nodes are no longer in the useful set.
1072 void remove_useless_late_inlines(GrowableArray<CallGenerator*>* inlines, Unique_Node_List &useful);
1073
1074 void process_print_inlining();
1075 void dump_print_inlining();
1076
// Returns true when this compilation has exhausted its node budget for
// inlining: unique node count vs NodeCountInliningCutoff before incremental
// inlining starts, live node count (with 10% headroom) vs
// LiveNodeCountInliningCutoff during it.
1077 bool over_inlining_cutoff() const {
1078 if (!inlining_incrementally()) {
1079 return unique() > (uint)NodeCountInliningCutoff;
1080 } else {
1081 // Give some room for incremental inlining algorithm to "breathe"
1082 // and avoid thrashing when live node count is close to the limit.
1083 // Keep in mind that live_nodes() isn't accurate during inlining until
1084 // dead node elimination step happens (see Compile::inline_incrementally).
1085 return live_nodes() > (uint)LiveNodeCountInliningCutoff * 11 / 10;
1086 }
1087 }
1088
// Counter of method-handle call sites still awaiting late inlining; the
// decrement asserts it never goes below zero.
1089 void inc_number_of_mh_late_inlines() { _number_of_mh_late_inlines++; }
1090 void dec_number_of_mh_late_inlines() { assert(_number_of_mh_late_inlines > 0, "_number_of_mh_late_inlines < 0 !"); _number_of_mh_late_inlines--; }
1091 bool has_mh_late_inlines() const { return _number_of_mh_late_inlines > 0; }
1092
// Driver entry points for incremental inlining (defined in compile.cpp).
// inline_incrementally_one() returns whether it made progress — TODO confirm
// exact contract against compile.cpp; inline_incrementally_cleanup() runs
// the cleanup pass flagged by _do_cleanup and returns the (possibly new) igvn.
1093 bool inline_incrementally_one();
1094 PhaseIterGVN& inline_incrementally_cleanup(PhaseIterGVN& igvn);
1095 void inline_incrementally(PhaseIterGVN& igvn);
1096 void inline_string_calls(bool parse_time);
1097 void inline_boxing_calls(PhaseIterGVN& igvn);
1098 bool optimize_loops(PhaseIterGVN& igvn, LoopOptsMode mode);
1099 void remove_root_to_sfpts_edges(PhaseIterGVN& igvn);
1100
1101 // Matching, CFG layout, allocation, code generation
1102 PhaseCFG* cfg() { return _cfg; }
1103 bool select_24_bit_instr() const { return _select_24_bit_instr; }
1104 bool in_24_bit_fp_mode() const { return _in_24_bit_fp_mode; }
1105 bool has_java_calls() const { return _java_calls > 0; }
1106 int java_calls() const { return _java_calls; }
1107 int inner_loops() const { return _inner_loops; }
1108 Matcher* matcher() { return _matcher; }
1109 PhaseRegAlloc* regalloc() { return _regalloc; }
1110 int frame_slots() const { return _frame_slots; }
1111 int frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'
// Each frame slot is one 32-bit word, hence the LogBytesPerInt shift.
1112 int frame_size_in_bytes() const { return _frame_slots << LogBytesPerInt; }
1113 RegMask& FIRST_STACK_mask() { return _FIRST_STACK_mask; }
1114 Arena* indexSet_arena() { return _indexSet_arena; }
|