374 bool _do_count_invocations; // True if we generate code to count invocations
375 bool _do_method_data_update; // True if we generate code to update MethodData*s
376 bool _do_vector_loop; // True if allowed to execute loop in parallel iterations
377 bool _age_code; // True if we need to profile code age (decrement the aging counter)
378 int _AliasLevel; // Locally-adjusted version of AliasLevel flag.
379 bool _print_assembly; // True if we should dump assembly code for this compilation
380 bool _print_inlining; // True if we should print inlining for this compilation
381 bool _print_intrinsics; // True if we should print intrinsics for this compilation
382 #ifndef PRODUCT
383 bool _trace_opto_output; // True if we should trace opto output (mirrored by trace_opto_output())
384 bool _parsed_irreducible_loop; // True if ciTypeFlow detected irreducible loops during parsing
385 #endif // !PRODUCT
386 bool _has_irreducible_loop; // Found irreducible loops
387 // JSR 292
388 bool _has_method_handle_invokes; // True if this method has MethodHandle invokes.
389 RTMState _rtm_state; // State of Restricted Transactional Memory usage
390
391 // Compilation environment.
392 Arena _comp_arena; // Arena with lifetime equivalent to Compile
393 ciEnv* _env; // CI interface
394 CompileLog* _log; // from CompilerThread
395 const char* _failure_reason; // for record_failure/failing pattern
396 GrowableArray<CallGenerator*>* _intrinsics; // List of intrinsics.
397 GrowableArray<Node*>* _macro_nodes; // List of nodes which need to be expanded before matching.
398 GrowableArray<Node*>* _predicate_opaqs; // List of Opaque1 nodes for the loop predicates.
399 GrowableArray<Node*>* _expensive_nodes; // List of nodes that are expensive to compute and that we'd better not let the GVN freely common
400 ConnectionGraph* _congraph; // Connection graph (set via set_congraph(); presumably built by escape analysis — confirm)
401 #ifndef PRODUCT
402 IdealGraphPrinter* _printer; // Dumps the ideal graph in begin_method()/print_method()/end_method(); non-product only
403 #endif // !PRODUCT
404
405
406 // Node management
407 uint _unique; // Counter for unique Node indices
408 VectorSet _dead_node_list; // Set of dead nodes
409 uint _dead_node_count; // Number of dead nodes; VectorSet::Size() is O(N).
410 // So use this to keep count and make the call O(1).
411 DEBUG_ONLY( Unique_Node_List* _modified_nodes; ) // List of nodes whose inputs were modified
412
413 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
561 int _first_block_size; // Size of unvalidated entry point code / OSR poison code
562 ExceptionHandlerTable _handler_table; // Table of native-code exception handlers
563 ImplicitExceptionTable _inc_table; // Table of implicit null checks in native code
564 OopMapSet* _oop_map_set; // Table of oop maps (one for each safepoint location)
565 static int _CompiledZap_count; // counter compared against CompileZap[First/Last]
566 BufferBlob* _scratch_buffer_blob; // For temporary code buffers.
567 relocInfo* _scratch_locs_memory; // For temporary code buffers.
568 int _scratch_const_size; // For temporary code buffers.
569 bool _in_scratch_emit_size; // true when in scratch_emit_size.
570
571 public:
572 // Accessors
573
574 // The Compile instance currently active in this (compiler) thread.
575 static Compile* current() {
576 return (Compile*) ciEnv::current()->compiler_data();
577 }
578
579 // ID for this compilation. Useful for setting breakpoints in the debugger.
580 int compile_id() const { return _compile_id; }
581
582 // Does this compilation allow instructions to subsume loads? User
583 // instructions that subsume a load may result in an unschedulable
584 // instruction sequence.
585 bool subsume_loads() const { return _subsume_loads; }
586 /** Do escape analysis. */
587 bool do_escape_analysis() const { return _do_escape_analysis; }
588 /** Do boxing elimination. */
589 bool eliminate_boxing() const { return _eliminate_boxing; }
590 /** Do aggressive boxing elimination. */
591 bool aggressive_unboxing() const { return _eliminate_boxing && AggressiveUnboxing; }
592 bool save_argument_registers() const { return _save_argument_registers; }
593
594
595 // Other fixed compilation parameters.
596 ciMethod* method() const { return _method; }
597 int entry_bci() const { return _entry_bci; }
598 bool is_osr_compilation() const { return _entry_bci != InvocationEntryBci; } // OSR compiles enter at a bytecode other than InvocationEntryBci
599 bool is_method_compilation() const { return (_method != NULL && !_method->flags().is_native()); }
600 const TypeFunc* tf() const { assert(_tf!=NULL, ""); return _tf; }
654 bool age_code() const { return _age_code; }
655 void set_age_code(bool z) { _age_code = z; }
656 int AliasLevel() const { return _AliasLevel; }
657 bool print_assembly() const { return _print_assembly; }
658 void set_print_assembly(bool z) { _print_assembly = z; }
659 bool print_inlining() const { return _print_inlining; }
660 void set_print_inlining(bool z) { _print_inlining = z; }
661 bool print_intrinsics() const { return _print_intrinsics; }
662 void set_print_intrinsics(bool z) { _print_intrinsics = z; }
663 RTMState rtm_state() const { return _rtm_state; }
664 void set_rtm_state(RTMState s) { _rtm_state = s; }
665 bool use_rtm() const { return (_rtm_state & NoRTM) == 0; } // RTM is usable unless the NoRTM bit is set
666 bool profile_rtm() const { return _rtm_state == ProfileRTM; }
667 uint max_node_limit() const { return (uint)_max_node_limit; }
668 void set_max_node_limit(uint n) { _max_node_limit = n; }
669
670 // Query the CompilerOracle: does this compile's method carry the given option?
671 bool method_has_option(const char * option) {
672 return (method() == NULL) ? false : method()->has_option(option);
673 }
674 template<typename T>
675 bool method_has_option_value(const char * option, T& value) {
676 return (method() == NULL) ? false : method()->has_option_value(option, value);
677 }
678 #ifndef PRODUCT
679 bool trace_opto_output() const { return _trace_opto_output; }
680 bool parsed_irreducible_loop() const { return _parsed_irreducible_loop; }
681 void set_parsed_irreducible_loop(bool z) { _parsed_irreducible_loop = z; }
682 int _in_dump_cnt; // Required for dumping ir nodes.
683 #endif // !PRODUCT
684 bool has_irreducible_loop() const { return _has_irreducible_loop; }
685 void set_has_irreducible_loop(bool z) { _has_irreducible_loop = z; }
686
687 // JSR 292
688 bool has_method_handle_invokes() const { return _has_method_handle_invokes; }
689 void set_has_method_handle_invokes(bool z) { _has_method_handle_invokes = z; }
690
691 Ticks _latest_stage_start_counter; // Stamped at phase boundaries; used as the JFR event start time
692 // Notify the debug graph printer (non-product) and stamp the phase timer at the start of a compile.
693 void begin_method() {
694 #ifndef PRODUCT
695 if (_printer && _printer->should_print(_method)) {
696 _printer->begin_method(this);
697 }
698 #endif
699 C->_latest_stage_start_counter.stamp();
700 }
701 // Report a finished phase: emit a JFR CompilerPhase event, dump the ideal graph (non-product), restamp the timer.
702 void print_method(CompilerPhaseType cpt, int level = 1) {
703 EventCompilerPhase event;
704 if (event.should_commit()) {
705 event.set_starttime(C->_latest_stage_start_counter);
706 event.set_phase((u1) cpt);
707 event.set_compileID(C->_compile_id);
708 event.set_phaseLevel(level);
709 event.commit();
710 }
711
712
713 #ifndef PRODUCT
714 if (_printer && _printer->should_print(_method)) {
715 _printer->print_method(this, CompilerPhaseTypeHelper::to_string(cpt), level);
716 }
717 #endif
718 C->_latest_stage_start_counter.stamp();
719 }
720 // Emit the terminal JFR phase event (PHASE_END) and close out the graph printer.
721 void end_method(int level = 1) {
722 EventCompilerPhase event;
723 if (event.should_commit()) {
724 event.set_starttime(C->_latest_stage_start_counter);
725 event.set_phase((u1) PHASE_END);
726 event.set_compileID(C->_compile_id);
727 event.set_phaseLevel(level);
728 event.commit();
729 }
730 #ifndef PRODUCT
731 if (_printer && _printer->should_print(_method)) {
732 _printer->end_method();
733 }
734 #endif
735 }
736 // Accessors for the macro-expansion, loop-predicate and expensive-node worklists.
737 int macro_count() const { return _macro_nodes->length(); }
738 int predicate_count() const { return _predicate_opaqs->length();}
739 int expensive_count() const { return _expensive_nodes->length(); }
740 Node* macro_node(int idx) const { return _macro_nodes->at(idx); }
741 Node* predicate_opaque1_node(int idx) const { return _predicate_opaqs->at(idx);}
742 Node* expensive_node(int idx) const { return _expensive_nodes->at(idx); }
743 ConnectionGraph* congraph() { return _congraph;}
744 void set_congraph(ConnectionGraph* congraph) { _congraph = congraph;}
745 void add_macro_node(Node * n) {
746 //assert(n->is_macro(), "must be a macro node");
747 assert(!_macro_nodes->contains(n), " duplicate entry in expand list");
748 _macro_nodes->append(n);
749 }
750 void remove_macro_node(Node * n) {
751 // this function may be called twice for a node so check
1090 void set_scratch_locs_memory(relocInfo* b) { _scratch_locs_memory = b; }
1091
1092 // emit to scratch blob, report resulting size
1093 uint scratch_emit_size(const Node* n);
1094 void set_in_scratch_emit_size(bool x) { _in_scratch_emit_size = x; }
1095 bool in_scratch_emit_size() const { return _in_scratch_emit_size; }
1096 // Sizing limits for the temporary (scratch) code buffer.
1097 enum ScratchBufferBlob {
1098 MAX_inst_size = 1024,
1099 MAX_locs_size = 128, // number of relocInfo elements
1100 MAX_const_size = 128,
1101 MAX_stubs_size = 128
1102 };
1103
1104 // Major entry point. Given a Scope, compile the associated method.
1105 // For normal compilations, entry_bci is InvocationEntryBci. For on stack
1106 // replacement, entry_bci indicates the bytecode for which to compile a
1107 // continuation.
1108 Compile(ciEnv* ci_env, C2Compiler* compiler, ciMethod* target,
1109 int entry_bci, bool subsume_loads, bool do_escape_analysis,
1110 bool eliminate_boxing);
1111
1112 // Second major entry point. From the TypeFunc signature, generate code
1113 // to pass arguments from the Java calling convention to the C calling
1114 // convention.
1115 Compile(ciEnv* ci_env, const TypeFunc *(*gen)(),
1116 address stub_function, const char *stub_name,
1117 int is_fancy_jump, bool pass_tls,
1118 bool save_arg_registers, bool return_pc);
1119
1120 // From the TypeFunc signature, generate code to pass arguments
1121 // from Compiled calling convention to Interpreter's calling convention
1122 void Generate_Compiled_To_Interpreter_Graph(const TypeFunc *tf, address interpreter_entry);
1123
1124 // From the TypeFunc signature, generate code to pass arguments
1125 // from Interpreter's calling convention to Compiler's calling convention
1126 void Generate_Interpreter_To_Compiled_Graph(const TypeFunc *tf);
1127
1128 // Are we compiling a method?
1129 bool has_method() { return method() != NULL; }
1130
1131 // Maybe print some information about this compile.
1132 void print_compile_messages();
1133
1134 // Final graph reshaping, a post-pass after the regular optimizer is done.
1135 bool final_graph_reshaping();
1136
1137 // returns true if adr is completely contained in the given alias category
1138 bool must_alias(const TypePtr* adr, int alias_idx);
|
374 bool _do_count_invocations; // True if we generate code to count invocations
375 bool _do_method_data_update; // True if we generate code to update MethodData*s
376 bool _do_vector_loop; // True if allowed to execute loop in parallel iterations
377 bool _age_code; // True if we need to profile code age (decrement the aging counter)
378 int _AliasLevel; // Locally-adjusted version of AliasLevel flag.
379 bool _print_assembly; // True if we should dump assembly code for this compilation
380 bool _print_inlining; // True if we should print inlining for this compilation
381 bool _print_intrinsics; // True if we should print intrinsics for this compilation
382 #ifndef PRODUCT
383 bool _trace_opto_output; // True if we should trace opto output (mirrored by trace_opto_output())
384 bool _parsed_irreducible_loop; // True if ciTypeFlow detected irreducible loops during parsing
385 #endif // !PRODUCT
386 bool _has_irreducible_loop; // Found irreducible loops
387 // JSR 292
388 bool _has_method_handle_invokes; // True if this method has MethodHandle invokes.
389 RTMState _rtm_state; // State of Restricted Transactional Memory usage
390
391 // Compilation environment.
392 Arena _comp_arena; // Arena with lifetime equivalent to Compile
393 ciEnv* _env; // CI interface
394 DirectiveSet* _dirset; // Compiler directives in effect for this compilation
395 CompileLog* _log; // from CompilerThread
396 const char* _failure_reason; // for record_failure/failing pattern
397 GrowableArray<CallGenerator*>* _intrinsics; // List of intrinsics.
398 GrowableArray<Node*>* _macro_nodes; // List of nodes which need to be expanded before matching.
399 GrowableArray<Node*>* _predicate_opaqs; // List of Opaque1 nodes for the loop predicates.
400 GrowableArray<Node*>* _expensive_nodes; // List of nodes that are expensive to compute and that we'd better not let the GVN freely common
401 ConnectionGraph* _congraph; // Connection graph (set via set_congraph(); presumably built by escape analysis — confirm)
402 #ifndef PRODUCT
403 IdealGraphPrinter* _printer; // Dumps the ideal graph in begin_method()/print_method()/end_method(); non-product only
404 #endif // !PRODUCT
405
406
407 // Node management
408 uint _unique; // Counter for unique Node indices
409 VectorSet _dead_node_list; // Set of dead nodes
410 uint _dead_node_count; // Number of dead nodes; VectorSet::Size() is O(N).
411 // So use this to keep count and make the call O(1).
412 DEBUG_ONLY( Unique_Node_List* _modified_nodes; ) // List of nodes whose inputs were modified
413
414 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
562 int _first_block_size; // Size of unvalidated entry point code / OSR poison code
563 ExceptionHandlerTable _handler_table; // Table of native-code exception handlers
564 ImplicitExceptionTable _inc_table; // Table of implicit null checks in native code
565 OopMapSet* _oop_map_set; // Table of oop maps (one for each safepoint location)
566 static int _CompiledZap_count; // counter compared against CompileZap[First/Last]
567 BufferBlob* _scratch_buffer_blob; // For temporary code buffers.
568 relocInfo* _scratch_locs_memory; // For temporary code buffers.
569 int _scratch_const_size; // For temporary code buffers.
570 bool _in_scratch_emit_size; // true when in scratch_emit_size.
571
572 public:
573 // Accessors
574
575 // The Compile instance currently active in this (compiler) thread.
576 static Compile* current() {
577 return (Compile*) ciEnv::current()->compiler_data();
578 }
579
580 // ID for this compilation. Useful for setting breakpoints in the debugger.
581 int compile_id() const { return _compile_id; }
582 DirectiveSet* dirset() const { return _dirset; } // Compiler directives in effect for this compilation
583
584 // Does this compilation allow instructions to subsume loads? User
585 // instructions that subsume a load may result in an unschedulable
586 // instruction sequence.
587 bool subsume_loads() const { return _subsume_loads; }
588 /** Do escape analysis. */
589 bool do_escape_analysis() const { return _do_escape_analysis; }
590 /** Do boxing elimination. */
591 bool eliminate_boxing() const { return _eliminate_boxing; }
592 /** Do aggressive boxing elimination. */
593 bool aggressive_unboxing() const { return _eliminate_boxing && AggressiveUnboxing; }
594 bool save_argument_registers() const { return _save_argument_registers; }
595
596
597 // Other fixed compilation parameters.
598 ciMethod* method() const { return _method; }
599 int entry_bci() const { return _entry_bci; }
600 bool is_osr_compilation() const { return _entry_bci != InvocationEntryBci; } // OSR compiles enter at a bytecode other than InvocationEntryBci
601 bool is_method_compilation() const { return (_method != NULL && !_method->flags().is_native()); }
602 const TypeFunc* tf() const { assert(_tf!=NULL, ""); return _tf; }
656 bool age_code() const { return _age_code; }
657 void set_age_code(bool z) { _age_code = z; }
658 int AliasLevel() const { return _AliasLevel; }
659 bool print_assembly() const { return _print_assembly; }
660 void set_print_assembly(bool z) { _print_assembly = z; }
661 bool print_inlining() const { return _print_inlining; }
662 void set_print_inlining(bool z) { _print_inlining = z; }
663 bool print_intrinsics() const { return _print_intrinsics; }
664 void set_print_intrinsics(bool z) { _print_intrinsics = z; }
665 RTMState rtm_state() const { return _rtm_state; }
666 void set_rtm_state(RTMState s) { _rtm_state = s; }
667 bool use_rtm() const { return (_rtm_state & NoRTM) == 0; } // RTM is usable unless the NoRTM bit is set
668 bool profile_rtm() const { return _rtm_state == ProfileRTM; }
669 uint max_node_limit() const { return (uint)_max_node_limit; }
670 void set_max_node_limit(uint n) { _max_node_limit = n; }
671
672 // Query the CompilerOracle: does this compile's method carry the given option?
673 bool method_has_option(const char * option) {
674 return (method() == NULL) ? false : method()->has_option(option);
675 }
676
677 #ifndef PRODUCT
678 bool trace_opto_output() const { return _trace_opto_output; }
679 bool parsed_irreducible_loop() const { return _parsed_irreducible_loop; }
680 void set_parsed_irreducible_loop(bool z) { _parsed_irreducible_loop = z; }
681 int _in_dump_cnt; // Required for dumping ir nodes.
682 #endif // !PRODUCT
683 bool has_irreducible_loop() const { return _has_irreducible_loop; }
684 void set_has_irreducible_loop(bool z) { _has_irreducible_loop = z; }
685
686 // JSR 292
687 bool has_method_handle_invokes() const { return _has_method_handle_invokes; }
688 void set_has_method_handle_invokes(bool z) { _has_method_handle_invokes = z; }
689
690 Ticks _latest_stage_start_counter; // Stamped at phase boundaries; used as the JFR event start time
691 // Notify the debug graph printer (non-product) and stamp the phase timer at the start of a compile.
692 void begin_method() {
693 #ifndef PRODUCT
694 if (_printer && _printer->should_print(1)) {
695 _printer->begin_method(this);
696 }
697 #endif
698 C->_latest_stage_start_counter.stamp();
699 }
700 // Report a finished phase: emit a JFR CompilerPhase event, dump the ideal graph (non-product), restamp the timer.
701 void print_method(CompilerPhaseType cpt, int level = 1) {
702 EventCompilerPhase event;
703 if (event.should_commit()) {
704 event.set_starttime(C->_latest_stage_start_counter);
705 event.set_phase((u1) cpt);
706 event.set_compileID(C->_compile_id);
707 event.set_phaseLevel(level);
708 event.commit();
709 }
710
711
712 #ifndef PRODUCT
713 if (_printer && _printer->should_print(level)) {
714 _printer->print_method(this, CompilerPhaseTypeHelper::to_string(cpt), level);
715 }
716 #endif
717 C->_latest_stage_start_counter.stamp();
718 }
719 // Emit the terminal JFR phase event (PHASE_END) and close out the graph printer.
720 void end_method(int level = 1) {
721 EventCompilerPhase event;
722 if (event.should_commit()) {
723 event.set_starttime(C->_latest_stage_start_counter);
724 event.set_phase((u1) PHASE_END);
725 event.set_compileID(C->_compile_id);
726 event.set_phaseLevel(level);
727 event.commit();
728 }
729 #ifndef PRODUCT
730 if (_printer && _printer->should_print(level)) {
731 _printer->end_method();
732 }
733 #endif
734 }
735 // Accessors for the macro-expansion, loop-predicate and expensive-node worklists.
736 int macro_count() const { return _macro_nodes->length(); }
737 int predicate_count() const { return _predicate_opaqs->length();}
738 int expensive_count() const { return _expensive_nodes->length(); }
739 Node* macro_node(int idx) const { return _macro_nodes->at(idx); }
740 Node* predicate_opaque1_node(int idx) const { return _predicate_opaqs->at(idx);}
741 Node* expensive_node(int idx) const { return _expensive_nodes->at(idx); }
742 ConnectionGraph* congraph() { return _congraph;}
743 void set_congraph(ConnectionGraph* congraph) { _congraph = congraph;}
744 void add_macro_node(Node * n) {
745 //assert(n->is_macro(), "must be a macro node");
746 assert(!_macro_nodes->contains(n), " duplicate entry in expand list");
747 _macro_nodes->append(n);
748 }
749 void remove_macro_node(Node * n) {
750 // this function may be called twice for a node so check
1089 void set_scratch_locs_memory(relocInfo* b) { _scratch_locs_memory = b; }
1090
1091 // emit to scratch blob, report resulting size
1092 uint scratch_emit_size(const Node* n);
1093 void set_in_scratch_emit_size(bool x) { _in_scratch_emit_size = x; }
1094 bool in_scratch_emit_size() const { return _in_scratch_emit_size; }
1095 // Sizing limits for the temporary (scratch) code buffer.
1096 enum ScratchBufferBlob {
1097 MAX_inst_size = 1024,
1098 MAX_locs_size = 128, // number of relocInfo elements
1099 MAX_const_size = 128,
1100 MAX_stubs_size = 128
1101 };
1102
1103 // Major entry point. Given a Scope, compile the associated method.
1104 // For normal compilations, entry_bci is InvocationEntryBci. For on stack
1105 // replacement, entry_bci indicates the bytecode for which to compile a
1106 // continuation.
1107 Compile(ciEnv* ci_env, C2Compiler* compiler, ciMethod* target,
1108 int entry_bci, bool subsume_loads, bool do_escape_analysis,
1109 bool eliminate_boxing, DirectiveSet* dirset);
1110
1111 // Second major entry point. From the TypeFunc signature, generate code
1112 // to pass arguments from the Java calling convention to the C calling
1113 // convention.
1114 Compile(ciEnv* ci_env, const TypeFunc *(*gen)(),
1115 address stub_function, const char *stub_name,
1116 int is_fancy_jump, bool pass_tls,
1117 bool save_arg_registers, bool return_pc, DirectiveSet* dirset);
1118
1119 // From the TypeFunc signature, generate code to pass arguments
1120 // from Compiled calling convention to Interpreter's calling convention
1121 void Generate_Compiled_To_Interpreter_Graph(const TypeFunc *tf, address interpreter_entry);
1122
1123 // From the TypeFunc signature, generate code to pass arguments
1124 // from Interpreter's calling convention to Compiler's calling convention
1125 void Generate_Interpreter_To_Compiled_Graph(const TypeFunc *tf);
1126
1127 // Are we compiling a method?
1128 bool has_method() { return method() != NULL; }
1129
1130 // Maybe print some information about this compile.
1131 void print_compile_messages();
1132
1133 // Final graph reshaping, a post-pass after the regular optimizer is done.
1134 bool final_graph_reshaping();
1135
1136 // returns true if adr is completely contained in the given alias category
1137 bool must_alias(const TypePtr* adr, int alias_idx);
|