367 const TypeFunc* _tf; // My kind of signature
368 InlineTree* _ilt; // Ditto (temporary).
369 address _stub_function; // VM entry for stub being compiled, or NULL
370 const char* _stub_name; // Name of stub or adapter being compiled, or NULL
371 address _stub_entry_point; // Compile code entry for generated stub, or NULL
372
373 // Control of this compilation.
374 int _num_loop_opts; // Number of iterations for doing loop optimizations
375 int _max_inline_size; // Max inline size for this compilation
376 int _freq_inline_size; // Max hot method inline size for this compilation
377 int _fixed_slots; // count of frame slots not allocated by the register
378 // allocator i.e. locks, original deopt pc, etc.
379 uintx _max_node_limit; // Max unique node count during a single compilation.
380 // For deopt
381 int _orig_pc_slot; // frame slot reserved for the original pc at deoptimization
382 int _orig_pc_slot_offset_in_bytes; // byte offset of that slot within the frame
383
384 int _major_progress; // Count of something big happening
385 bool _inlining_progress; // progress doing incremental inlining?
386 bool _inlining_incrementally;// Are we doing incremental inlining (post parse)
387 bool _has_loops; // True if the method _may_ have some loops
388 bool _has_split_ifs; // True if the method _may_ have some split-if
389 bool _has_unsafe_access; // True if the method _may_ produce faults in unsafe loads or stores.
390 bool _has_stringbuilder; // True if StringBuffers or StringBuilders are allocated
391 bool _has_boxed_value; // True if a boxed object is allocated
392 bool _has_reserved_stack_access; // True if the method or an inlined method is annotated with ReservedStackAccess
393 uint _max_vector_size; // Maximum size of generated vectors
394 bool _clear_upper_avx; // Clear upper bits of ymm registers using vzeroupper
395 uint _trap_hist[trapHistLength]; // Cumulative traps
396 bool _trap_can_recompile; // Have we emitted a recompiling trap?
397 uint _decompile_count; // Cumulative decompilation counts.
398 bool _do_inlining; // True if we intend to do inlining
399 bool _do_scheduling; // True if we intend to do scheduling
400 bool _do_freq_based_layout; // True if we intend to do frequency based block layout
401 bool _do_count_invocations; // True if we generate code to count invocations
402 bool _do_method_data_update; // True if we generate code to update MethodData*s
403 bool _do_vector_loop; // True if allowed to execute loop in parallel iterations
404 bool _use_cmove; // True if CMove should be used without profitability analysis
405 bool _age_code; // True if we need to profile code age (decrement the aging counter)
406 int _AliasLevel; // Locally-adjusted version of AliasLevel flag.
478 CloneMap _clone_map; // used for recording history of cloned nodes
479 void* _type_hwm; // Last allocation (see Type::operator new/delete)
480 size_t _type_last_size; // Last allocation size (see Type::operator new/delete)
481 ciMethod* _last_tf_m; // Cache for TypeFunc::make:
482 const TypeFunc* _last_tf; //   last input method and the TypeFunc built for it
483 AliasType** _alias_types; // List of alias types seen so far.
484 int _num_alias_types; // Logical length of _alias_types
485 int _max_alias_types; // Physical length of _alias_types
486 AliasCacheEntry _alias_cache[AliasCacheSize]; // Gets aliases w/o data structure walking
487
488 // Parsing, optimization
489 PhaseGVN* _initial_gvn; // Results of parse-time PhaseGVN
490 Unique_Node_List* _for_igvn; // Initial work-list for next round of Iterative GVN
491 WarmCallInfo* _warm_calls; // Sorted work-list for heat-based inlining.
492
493 GrowableArray<CallGenerator*> _late_inlines; // List of CallGenerators to be revisited after
494 // main parsing has finished.
495 GrowableArray<CallGenerator*> _string_late_inlines; // same but for string operations
496
497 GrowableArray<CallGenerator*> _boxing_late_inlines; // same but for boxing operations
498
499 int _late_inlines_pos; // Where in the queue should the next late inlining candidate go (emulate depth first inlining)
500 uint _number_of_mh_late_inlines; // number of method handle late inlinings still pending
501
502
503 // Inlining may not happen in parse order which would make
504 // PrintInlining output confusing. Keep track of PrintInlining
505 // pieces in order.
506 class PrintInliningBuffer : public ResourceObj {
507 private:
508 CallGenerator* _cg;
509 stringStream* _ss;
510
511 public:
512 PrintInliningBuffer()
513 : _cg(NULL) { _ss = new stringStream(); }
514
515 stringStream* ss() const { return _ss; }
516 CallGenerator* cg() const { return _cg; }
517 void set_cg(CallGenerator* cg) { _cg = cg; }
637 // Other fixed compilation parameters.
638 ciMethod* method() const { return _method; }
639 int entry_bci() const { return _entry_bci; }
640 bool is_osr_compilation() const { return _entry_bci != InvocationEntryBci; } // OSR compiles enter at a real bci, not InvocationEntryBci
641 bool is_method_compilation() const { return (_method != NULL && !_method->flags().is_native()); }
642 const TypeFunc* tf() const { assert(_tf!=NULL, ""); return _tf; }
643 void init_tf(const TypeFunc* tf) { assert(_tf==NULL, ""); _tf = tf; } // may only be set once
644 InlineTree* ilt() const { return _ilt; }
645 address stub_function() const { return _stub_function; }
646 const char* stub_name() const { return _stub_name; }
647 address stub_entry_point() const { return _stub_entry_point; }
648
649 // Control of this compilation.
650 int fixed_slots() const { assert(_fixed_slots >= 0, ""); return _fixed_slots; }
651 void set_fixed_slots(int n) { _fixed_slots = n; }
652 int major_progress() const { return _major_progress; }
653 void set_inlining_progress(bool z) { _inlining_progress = z; }
654 bool inlining_progress() const { return _inlining_progress; } // return bool: field is bool (was declared int, forcing implicit bool->int)
655 void set_inlining_incrementally(bool z) { _inlining_incrementally = z; }
656 bool inlining_incrementally() const { return _inlining_incrementally; } // return bool for the same reason
657 void set_major_progress() { _major_progress++; }
658 void clear_major_progress() { _major_progress = 0; }
659 int num_loop_opts() const { return _num_loop_opts; }
660 void set_num_loop_opts(int n) { _num_loop_opts = n; }
661 int max_inline_size() const { return _max_inline_size; }
662 void set_freq_inline_size(int n) { _freq_inline_size = n; }
663 int freq_inline_size() const { return _freq_inline_size; }
664 void set_max_inline_size(int n) { _max_inline_size = n; }
665 bool has_loops() const { return _has_loops; }
666 void set_has_loops(bool z) { _has_loops = z; }
667 bool has_split_ifs() const { return _has_split_ifs; }
668 void set_has_split_ifs(bool z) { _has_split_ifs = z; }
669 bool has_unsafe_access() const { return _has_unsafe_access; }
670 void set_has_unsafe_access(bool z) { _has_unsafe_access = z; }
671 bool has_stringbuilder() const { return _has_stringbuilder; }
672 void set_has_stringbuilder(bool z) { _has_stringbuilder = z; }
673 bool has_boxed_value() const { return _has_boxed_value; }
674 void set_has_boxed_value(bool z) { _has_boxed_value = z; }
675 bool has_reserved_stack_access() const { return _has_reserved_stack_access; }
676 void set_has_reserved_stack_access(bool z) { _has_reserved_stack_access = z; }
712 void set_rtm_state(RTMState s) { _rtm_state = s; }
713 bool use_rtm() const { return (_rtm_state & NoRTM) == 0; } // RTM usable unless the NoRTM bit is set
714 bool profile_rtm() const { return _rtm_state == ProfileRTM; }
715 uint max_node_limit() const { return (uint)_max_node_limit; }
716 void set_max_node_limit(uint n) { _max_node_limit = n; }
717
718 // check the CompilerOracle for special behaviours for this compile
719 bool method_has_option(const char * option) {
720 ciMethod* m = method(); return m != NULL && m->has_option(option);
721 }
722
723 #ifndef PRODUCT
724 bool trace_opto_output() const { return _trace_opto_output; }
725 bool parsed_irreducible_loop() const { return _parsed_irreducible_loop; }
726 void set_parsed_irreducible_loop(bool z) { _parsed_irreducible_loop = z; }
727 int _in_dump_cnt; // Required for dumping ir nodes. Not built in PRODUCT.
728 #endif
729 bool has_irreducible_loop() const { return _has_irreducible_loop; }
730 void set_has_irreducible_loop(bool z) { _has_irreducible_loop = z; }
731
732 // JSR 292
733 bool has_method_handle_invokes() const { return _has_method_handle_invokes; }
734 void set_has_method_handle_invokes(bool z) { _has_method_handle_invokes = z; }
735
736 Ticks _latest_stage_start_counter; // stamped in begin_method(); read as a phase start time by print_method()
737
738 void begin_method() {
739 #ifndef PRODUCT
740 if (_printer && _printer->should_print(1)) {
741 _printer->begin_method();
742 }
743 #endif
744 C->_latest_stage_start_counter.stamp(); // mark the start of the phase that follows
745 }
746
747 void print_method(CompilerPhaseType cpt, int level = 1) {
748 EventCompilerPhase event;
749 if (event.should_commit()) {
750 event.set_starttime(C->_latest_stage_start_counter);
751 event.set_phase((u1) cpt);
1058 WarmCallInfo* pop_warm_call();
1059
1060 // Record this CallGenerator for inlining at the end of parsing.
1061 void add_late_inline(CallGenerator* cg) {
1062 _late_inlines.insert_before(_late_inlines_pos, cg); // insert at the cursor to emulate depth-first inlining order
1063 _late_inlines_pos++;
1064 }
1065
1066 void prepend_late_inline(CallGenerator* cg) { // goes to the front of the queue
1067 _late_inlines.insert_before(0, cg);
1068 }
1069
1070 void add_string_late_inline(CallGenerator* cg) { // string-operation candidates
1071 _string_late_inlines.push(cg);
1072 }
1073
1074 void add_boxing_late_inline(CallGenerator* cg) { // boxing-operation candidates
1075 _boxing_late_inlines.push(cg);
1076 }
1077
1078 void remove_useless_late_inlines(GrowableArray<CallGenerator*>* inlines, Unique_Node_List &useful);
1079
1080 void process_print_inlining();
1081 void dump_print_inlining();
1082
1083 bool over_inlining_cutoff() const {
1084 // Incremental inlining is budgeted by live nodes; otherwise the
1085 // total unique-node count is measured against its own cutoff.
1086 return inlining_incrementally()
1087 ? live_nodes() > (uint)LiveNodeCountInliningCutoff
1088 : unique() > (uint)NodeCountInliningCutoff;
1089 }
1090
1091 void inc_number_of_mh_late_inlines() { _number_of_mh_late_inlines++; }
1092 void dec_number_of_mh_late_inlines() { assert(_number_of_mh_late_inlines > 0, "_number_of_mh_late_inlines < 0 !"); _number_of_mh_late_inlines--; }
1093 bool has_mh_late_inlines() const { return _number_of_mh_late_inlines > 0; }
1094
1095 void inline_incrementally_one(PhaseIterGVN& igvn);
1096 void inline_incrementally(PhaseIterGVN& igvn);
1097 void inline_string_calls(bool parse_time);
1098 void inline_boxing_calls(PhaseIterGVN& igvn);
1099 bool optimize_loops(int& loop_opts_cnt, PhaseIterGVN& igvn, LoopOptsMode mode);
1100
1101 // Matching, CFG layout, allocation, code generation
1102 PhaseCFG* cfg() { return _cfg; }
1103 bool select_24_bit_instr() const { return _select_24_bit_instr; }
1104 bool in_24_bit_fp_mode() const { return _in_24_bit_fp_mode; }
1105 bool has_java_calls() const { return _java_calls > 0; }
1106 int java_calls() const { return _java_calls; }
1107 int inner_loops() const { return _inner_loops; }
1108 Matcher* matcher() { return _matcher; }
1109 PhaseRegAlloc* regalloc() { return _regalloc; }
1110 int frame_slots() const { return _frame_slots; }
1111 int frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'
1112 int frame_size_in_bytes() const { return _frame_slots << LogBytesPerInt; } // each slot is one int wide
1113 RegMask& FIRST_STACK_mask() { return _FIRST_STACK_mask; }
1114 Arena* indexSet_arena() { return _indexSet_arena; }
1115 void* indexSet_free_block_list() { return _indexSet_free_block_list; }
1116 uint node_bundling_limit() { return _node_bundling_limit; }
1117 Bundle* node_bundling_base() { return _node_bundling_base; }
1118 void set_node_bundling_limit(uint n) { _node_bundling_limit = n; }
|
367 const TypeFunc* _tf; // My kind of signature
368 InlineTree* _ilt; // Ditto (temporary).
369 address _stub_function; // VM entry for stub being compiled, or NULL
370 const char* _stub_name; // Name of stub or adapter being compiled, or NULL
371 address _stub_entry_point; // Compile code entry for generated stub, or NULL
372
373 // Control of this compilation.
374 int _num_loop_opts; // Number of iterations for doing loop optimizations
375 int _max_inline_size; // Max inline size for this compilation
376 int _freq_inline_size; // Max hot method inline size for this compilation
377 int _fixed_slots; // count of frame slots not allocated by the register
378 // allocator i.e. locks, original deopt pc, etc.
379 uintx _max_node_limit; // Max unique node count during a single compilation.
380 // For deopt
381 int _orig_pc_slot; // frame slot reserved for the original pc at deoptimization
382 int _orig_pc_slot_offset_in_bytes; // byte offset of that slot within the frame
383
384 int _major_progress; // Count of something big happening
385 bool _inlining_progress; // progress doing incremental inlining?
386 bool _inlining_incrementally;// Are we doing incremental inlining (post parse)
387 bool _in_object_equals; // NOTE(review): undocumented — presumably set while handling an Object.equals() call (see _object_equals_late_inlines); confirm
388 bool _has_loops; // True if the method _may_ have some loops
389 bool _has_split_ifs; // True if the method _may_ have some split-if
390 bool _has_unsafe_access; // True if the method _may_ produce faults in unsafe loads or stores.
391 bool _has_stringbuilder; // True if StringBuffers or StringBuilders are allocated
392 bool _has_boxed_value; // True if a boxed object is allocated
393 bool _has_reserved_stack_access; // True if the method or an inlined method is annotated with ReservedStackAccess
394 uint _max_vector_size; // Maximum size of generated vectors
395 bool _clear_upper_avx; // Clear upper bits of ymm registers using vzeroupper
396 uint _trap_hist[trapHistLength]; // Cumulative traps
397 bool _trap_can_recompile; // Have we emitted a recompiling trap?
398 uint _decompile_count; // Cumulative decompilation counts.
399 bool _do_inlining; // True if we intend to do inlining
400 bool _do_scheduling; // True if we intend to do scheduling
401 bool _do_freq_based_layout; // True if we intend to do frequency based block layout
402 bool _do_count_invocations; // True if we generate code to count invocations
403 bool _do_method_data_update; // True if we generate code to update MethodData*s
404 bool _do_vector_loop; // True if allowed to execute loop in parallel iterations
405 bool _use_cmove; // True if CMove should be used without profitability analysis
406 bool _age_code; // True if we need to profile code age (decrement the aging counter)
407 int _AliasLevel; // Locally-adjusted version of AliasLevel flag.
479 CloneMap _clone_map; // used for recording history of cloned nodes
480 void* _type_hwm; // Last allocation (see Type::operator new/delete)
481 size_t _type_last_size; // Last allocation size (see Type::operator new/delete)
482 ciMethod* _last_tf_m; // Cache for TypeFunc::make:
483 const TypeFunc* _last_tf; //   last input method and the TypeFunc built for it
484 AliasType** _alias_types; // List of alias types seen so far.
485 int _num_alias_types; // Logical length of _alias_types
486 int _max_alias_types; // Physical length of _alias_types
487 AliasCacheEntry _alias_cache[AliasCacheSize]; // Gets aliases w/o data structure walking
488
489 // Parsing, optimization
490 PhaseGVN* _initial_gvn; // Results of parse-time PhaseGVN
491 Unique_Node_List* _for_igvn; // Initial work-list for next round of Iterative GVN
492 WarmCallInfo* _warm_calls; // Sorted work-list for heat-based inlining.
493
494 GrowableArray<CallGenerator*> _late_inlines; // List of CallGenerators to be revisited after
495 // main parsing has finished.
496 GrowableArray<CallGenerator*> _string_late_inlines; // same but for string operations
497
498 GrowableArray<CallGenerator*> _boxing_late_inlines; // same but for boxing operations
499 GrowableArray<CallGenerator*> _object_equals_late_inlines; // same but for Object.equals()
500
501 int _late_inlines_pos; // Where in the queue should the next late inlining candidate go (emulate depth first inlining)
502 uint _number_of_mh_late_inlines; // number of method handle late inlinings still pending
503
504
505 // Inlining may not happen in parse order which would make
506 // PrintInlining output confusing. Keep track of PrintInlining
507 // pieces in order.
508 class PrintInliningBuffer : public ResourceObj {
509 private:
510 CallGenerator* _cg;
511 stringStream* _ss;
512
513 public:
514 PrintInliningBuffer()
515 : _cg(NULL) { _ss = new stringStream(); }
516
517 stringStream* ss() const { return _ss; }
518 CallGenerator* cg() const { return _cg; }
519 void set_cg(CallGenerator* cg) { _cg = cg; }
639 // Other fixed compilation parameters.
640 ciMethod* method() const { return _method; }
641 int entry_bci() const { return _entry_bci; }
642 bool is_osr_compilation() const { return _entry_bci != InvocationEntryBci; } // OSR compiles enter at a real bci, not InvocationEntryBci
643 bool is_method_compilation() const { return (_method != NULL && !_method->flags().is_native()); }
644 const TypeFunc* tf() const { assert(_tf!=NULL, ""); return _tf; }
645 void init_tf(const TypeFunc* tf) { assert(_tf==NULL, ""); _tf = tf; } // may only be set once
646 InlineTree* ilt() const { return _ilt; }
647 address stub_function() const { return _stub_function; }
648 const char* stub_name() const { return _stub_name; }
649 address stub_entry_point() const { return _stub_entry_point; }
650
651 // Control of this compilation.
652 int fixed_slots() const { assert(_fixed_slots >= 0, ""); return _fixed_slots; }
653 void set_fixed_slots(int n) { _fixed_slots = n; }
654 int major_progress() const { return _major_progress; }
655 void set_inlining_progress(bool z) { _inlining_progress = z; }
656 bool inlining_progress() const { return _inlining_progress; } // return bool: field is bool (was declared int, forcing implicit bool->int)
657 void set_inlining_incrementally(bool z) { _inlining_incrementally = z; }
658 bool inlining_incrementally() const { return _inlining_incrementally; } // return bool for the same reason
659 void set_in_object_equals(bool z) { _in_object_equals = z; }
660 bool in_object_equals() const { return _in_object_equals; } // return bool for the same reason
661 void set_major_progress() { _major_progress++; }
662 void clear_major_progress() { _major_progress = 0; }
663 int num_loop_opts() const { return _num_loop_opts; }
664 void set_num_loop_opts(int n) { _num_loop_opts = n; }
665 int max_inline_size() const { return _max_inline_size; }
666 void set_freq_inline_size(int n) { _freq_inline_size = n; }
667 int freq_inline_size() const { return _freq_inline_size; }
668 void set_max_inline_size(int n) { _max_inline_size = n; }
669 bool has_loops() const { return _has_loops; }
670 void set_has_loops(bool z) { _has_loops = z; }
671 bool has_split_ifs() const { return _has_split_ifs; }
672 void set_has_split_ifs(bool z) { _has_split_ifs = z; }
673 bool has_unsafe_access() const { return _has_unsafe_access; }
674 void set_has_unsafe_access(bool z) { _has_unsafe_access = z; }
675 bool has_stringbuilder() const { return _has_stringbuilder; }
676 void set_has_stringbuilder(bool z) { _has_stringbuilder = z; }
677 bool has_boxed_value() const { return _has_boxed_value; }
678 void set_has_boxed_value(bool z) { _has_boxed_value = z; }
679 bool has_reserved_stack_access() const { return _has_reserved_stack_access; }
680 void set_has_reserved_stack_access(bool z) { _has_reserved_stack_access = z; }
716 void set_rtm_state(RTMState s) { _rtm_state = s; }
717 bool use_rtm() const { return (_rtm_state & NoRTM) == 0; } // RTM usable unless the NoRTM bit is set
718 bool profile_rtm() const { return _rtm_state == ProfileRTM; }
719 uint max_node_limit() const { return (uint)_max_node_limit; }
720 void set_max_node_limit(uint n) { _max_node_limit = n; }
721
722 // check the CompilerOracle for special behaviours for this compile
723 bool method_has_option(const char * option) {
724 ciMethod* m = method(); return m != NULL && m->has_option(option);
725 }
726
727 #ifndef PRODUCT
728 bool trace_opto_output() const { return _trace_opto_output; }
729 bool parsed_irreducible_loop() const { return _parsed_irreducible_loop; }
730 void set_parsed_irreducible_loop(bool z) { _parsed_irreducible_loop = z; }
731 int _in_dump_cnt; // Required for dumping ir nodes. Not built in PRODUCT.
732 #endif
733 bool has_irreducible_loop() const { return _has_irreducible_loop; }
734 void set_has_irreducible_loop(bool z) { _has_irreducible_loop = z; }
735
736 void trace_type_profile(ciMethod *method, int depth, int bci, ciMethod *prof_method, ciKlass *prof_klass, int site_count, int receiver_count); // diagnostic output for receiver-type profiles (defined elsewhere)
737
738 // JSR 292
739 bool has_method_handle_invokes() const { return _has_method_handle_invokes; }
740 void set_has_method_handle_invokes(bool z) { _has_method_handle_invokes = z; }
741
742 Ticks _latest_stage_start_counter; // stamped in begin_method(); read as a phase start time by print_method()
743
744 void begin_method() {
745 #ifndef PRODUCT
746 if (_printer && _printer->should_print(1)) {
747 _printer->begin_method();
748 }
749 #endif
750 C->_latest_stage_start_counter.stamp(); // mark the start of the phase that follows
751 }
752
753 void print_method(CompilerPhaseType cpt, int level = 1) {
754 EventCompilerPhase event;
755 if (event.should_commit()) {
756 event.set_starttime(C->_latest_stage_start_counter);
757 event.set_phase((u1) cpt);
1064 WarmCallInfo* pop_warm_call();
1065
1066 // Record this CallGenerator for inlining at the end of parsing.
1067 void add_late_inline(CallGenerator* cg) {
1068 _late_inlines.insert_before(_late_inlines_pos, cg); // insert at the cursor to emulate depth-first inlining order
1069 _late_inlines_pos++;
1070 }
1071
1072 void prepend_late_inline(CallGenerator* cg) { // goes to the front of the queue
1073 _late_inlines.insert_before(0, cg);
1074 }
1075
1076 void add_string_late_inline(CallGenerator* cg) { // string-operation candidates
1077 _string_late_inlines.push(cg);
1078 }
1079
1080 void add_boxing_late_inline(CallGenerator* cg) { // boxing-operation candidates
1081 _boxing_late_inlines.push(cg);
1082 }
1083
1084 void add_object_equals_late_inline(CallGenerator* cg) { // Object.equals() candidates
1085 _object_equals_late_inlines.push(cg);
1086 }
1087
1088 void remove_useless_late_inlines(GrowableArray<CallGenerator*>* inlines, Unique_Node_List &useful);
1089
1090 void process_print_inlining();
1091 void dump_print_inlining();
1092
1093 bool over_inlining_cutoff() const {
1094 // Incremental inlining is budgeted by live nodes; otherwise the
1095 // total unique-node count is measured against its own cutoff.
1096 return inlining_incrementally()
1097 ? live_nodes() > (uint)LiveNodeCountInliningCutoff
1098 : unique() > (uint)NodeCountInliningCutoff;
1099 }
1100
1101 void inc_number_of_mh_late_inlines() { _number_of_mh_late_inlines++; }
1102 void dec_number_of_mh_late_inlines() { assert(_number_of_mh_late_inlines > 0, "_number_of_mh_late_inlines < 0 !"); _number_of_mh_late_inlines--; }
1103 bool has_mh_late_inlines() const { return _number_of_mh_late_inlines > 0; }
1104
1105 void inline_incrementally_one(PhaseIterGVN& igvn);
1106 void inline_incrementally(PhaseIterGVN& igvn);
1107 void inline_string_calls(bool parse_time);
1108 void inline_boxing_calls(PhaseIterGVN& igvn);
1109 void inline_object_equals_calls(PhaseIterGVN& igvn);
1110 bool optimize_loops(int& loop_opts_cnt, PhaseIterGVN& igvn, LoopOptsMode mode);
1111
1112 // Matching, CFG layout, allocation, code generation
1113 PhaseCFG* cfg() { return _cfg; }
1114 bool select_24_bit_instr() const { return _select_24_bit_instr; }
1115 bool in_24_bit_fp_mode() const { return _in_24_bit_fp_mode; }
1116 bool has_java_calls() const { return _java_calls > 0; }
1117 int java_calls() const { return _java_calls; }
1118 int inner_loops() const { return _inner_loops; }
1119 Matcher* matcher() { return _matcher; }
1120 PhaseRegAlloc* regalloc() { return _regalloc; }
1121 int frame_slots() const { return _frame_slots; }
1122 int frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'
1123 int frame_size_in_bytes() const { return _frame_slots << LogBytesPerInt; } // each slot is one int wide
1124 RegMask& FIRST_STACK_mask() { return _FIRST_STACK_mask; }
1125 Arena* indexSet_arena() { return _indexSet_arena; }
1126 void* indexSet_free_block_list() { return _indexSet_free_block_list; }
1127 uint node_bundling_limit() { return _node_bundling_limit; }
1128 Bundle* node_bundling_base() { return _node_bundling_base; }
1129 void set_node_bundling_limit(uint n) { _node_bundling_limit = n; }
|