593 _subsume_loads(subsume_loads),
594 _do_escape_analysis(do_escape_analysis),
595 _failure_reason(NULL),
596 _code_buffer("Compile::Fill_buffer"),
597 _orig_pc_slot(0),
598 _orig_pc_slot_offset_in_bytes(0),
599 _has_method_handle_invokes(false),
600 _mach_constant_base_node(NULL),
601 _node_bundling_limit(0),
602 _node_bundling_base(NULL),
603 _java_calls(0),
604 _inner_loops(0),
605 _scratch_const_size(-1),
606 _in_scratch_emit_size(false),
607 _dead_node_list(comp_arena()),
608 _dead_node_count(0),
609 #ifndef PRODUCT
610 _trace_opto_output(TraceOptoOutput || method()->has_option("TraceOptoOutput")),
611 _printer(IdealGraphPrinter::printer()),
612 #endif
613 _congraph(NULL) {
614 C = this;
615
616 CompileWrapper cw(this);
617 #ifndef PRODUCT
618 if (TimeCompiler2) {
619 tty->print(" ");
620 target->holder()->name()->print();
621 tty->print(".");
622 target->print_short_name();
623 tty->print(" ");
624 }
625 TraceTime t1("Total compilation time", &_t_totalCompilation, TimeCompiler, TimeCompiler2);
626 TraceTime t2(NULL, &_t_methodCompilation, TimeCompiler, false);
627 bool print_opto_assembly = PrintOptoAssembly || _method->has_option("PrintOptoAssembly");
628 if (!print_opto_assembly) {
629 bool print_assembly = (PrintAssembly || _method->should_print_assembly());
630 if (print_assembly && !Disassembler::can_decode()) {
631 tty->print_cr("PrintAssembly request changed to PrintOptoAssembly");
632 print_opto_assembly = true;
633 }
649
650 if (UseOldInlining || PrintCompilation NOT_PRODUCT( || PrintOpto) )
651 _ilt = InlineTree::build_inline_tree_root();
652 else
653 _ilt = NULL;
654
655 // Even if NO memory addresses are used, MergeMem nodes must have at least 1 slice
656 assert(num_alias_types() >= AliasIdxRaw, "");
657
658 #define MINIMUM_NODE_HASH 1023
659 // Node list that Iterative GVN will start with
660 Unique_Node_List for_igvn(comp_arena());
661 set_for_igvn(&for_igvn);
662
663 // GVN that will be run immediately on new nodes
664 uint estimated_size = method()->code_size()*4+64;
665 estimated_size = (estimated_size < MINIMUM_NODE_HASH ? MINIMUM_NODE_HASH : estimated_size);
666 PhaseGVN gvn(node_arena(), estimated_size);
667 set_initial_gvn(&gvn);
668
669 { // Scope for timing the parser
670 TracePhase t3("parse", &_t_parser, true);
671
672 // Put top into the hash table ASAP.
673 initial_gvn()->transform_no_reclaim(top());
674
675 // Set up tf(), start(), and find a CallGenerator.
676 CallGenerator* cg = NULL;
677 if (is_osr_compilation()) {
678 const TypeTuple *domain = StartOSRNode::osr_domain();
679 const TypeTuple *range = TypeTuple::make_range(method()->signature());
680 init_tf(TypeFunc::make(domain, range));
681 StartNode* s = new (this) StartOSRNode(root(), domain);
682 initial_gvn()->set_type_bottom(s);
683 init_start(s);
684 cg = CallGenerator::for_osr(method(), entry_bci());
685 } else {
686 // Normal case.
687 init_tf(TypeFunc::make(method()));
688 StartNode* s = new (this) StartNode(root(), tf()->domain());
737 // remove useless nodes to make the usage analysis simpler
738 ResourceMark rm;
739 PhaseRemoveUseless pru(initial_gvn(), &for_igvn);
740 }
741
742 {
743 ResourceMark rm;
744 print_method("Before StringOpts", 3);
745 PhaseStringOpts pso(initial_gvn(), &for_igvn);
746 print_method("After StringOpts", 3);
747 }
748
749 // now inline anything that we skipped the first time around
750 while (_late_inlines.length() > 0) {
751 CallGenerator* cg = _late_inlines.pop();
752 cg->do_late_inline();
753 if (failing()) return;
754 }
755 }
756 assert(_late_inlines.length() == 0, "should have been processed");
757
758 print_method("Before RemoveUseless", 3);
759
760 // Remove clutter produced by parsing.
761 if (!failing()) {
762 ResourceMark rm;
763 PhaseRemoveUseless pru(initial_gvn(), &for_igvn);
764 }
765 }
766
767 // Note: Large methods are capped off in do_one_bytecode().
768 if (failing()) return;
769
770 // After parsing, node notes are no longer automagic.
771 // They must be propagated by register_new_node_with_optimizer(),
772 // clone(), or the like.
773 set_default_node_notes(NULL);
774
775 for (;;) {
776 int successes = Inline_Warm();
882 _for_igvn(NULL),
883 _warm_calls(NULL),
884 _orig_pc_slot(0),
885 _orig_pc_slot_offset_in_bytes(0),
886 _subsume_loads(true),
887 _do_escape_analysis(false),
888 _failure_reason(NULL),
889 _code_buffer("Compile::Fill_buffer"),
890 _has_method_handle_invokes(false),
891 _mach_constant_base_node(NULL),
892 _node_bundling_limit(0),
893 _node_bundling_base(NULL),
894 _java_calls(0),
895 _inner_loops(0),
896 #ifndef PRODUCT
897 _trace_opto_output(TraceOptoOutput),
898 _printer(NULL),
899 #endif
900 _dead_node_list(comp_arena()),
901 _dead_node_count(0),
902 _congraph(NULL) {
903 C = this;
904
905 #ifndef PRODUCT
906 TraceTime t1(NULL, &_t_totalCompilation, TimeCompiler, false);
907 TraceTime t2(NULL, &_t_stubCompilation, TimeCompiler, false);
908 set_print_assembly(PrintFrameConverterAssembly);
909 set_parsed_irreducible_loop(false);
910 #endif
911 CompileWrapper cw(this);
912 Init(/*AliasLevel=*/ 0);
913 init_tf((*generator)());
914
915 {
916 // The following is a dummy for the sake of GraphKit::gen_stub
917 Unique_Node_List for_igvn(comp_arena());
918 set_for_igvn(&for_igvn); // not used, but some GraphKit guys push on this
919 PhaseGVN gvn(Thread::current()->resource_area(),255);
920 set_initial_gvn(&gvn); // not significant, but GraphKit guys use it pervasively
921 gvn.transform_no_reclaim(top());
922
// Patch the entries of a constant-table-resident jump table: each slot,
// emitted earlier with a placeholder value, is rewritten to the resolved
// code address of the corresponding label, with a relocation recorded so
// the entry stays valid if the generated code is later moved.
void Compile::ConstantTable::fill_jump_table(CodeBuffer& cb, MachConstantNode* n, GrowableArray<Label*> labels) const {
  // If called from Compile::scratch_emit_size do nothing.
  if (Compile::current()->in_scratch_emit_size()) return;

  // One label per out-edge of the MachConstantNode, one slot per label.
  assert(labels.is_nonempty(), "must be");
  assert((uint) labels.length() == n->outcnt(), err_msg_res("must be equal: %d == %d", labels.length(), n->outcnt()));

  // Since MachConstantNode::constant_offset() also contains
  // table_base_offset() we need to subtract the table_base_offset()
  // to get the plain offset into the constant table.
  int offset = n->constant_offset() - table_base_offset();

  MacroAssembler _masm(&cb);
  address* jump_table_base = (address*) (_masm.code()->consts()->start() + offset);

  for (uint i = 0; i < n->outcnt(); i++) {
    address* constant_addr = &jump_table_base[i];
    // The slot must still hold its placeholder: the node pointer adjusted
    // by the slot index (written when the table was first emitted).
    assert(*constant_addr == (((address) n) + i), err_msg_res("all jump-table entries must contain adjusted node pointer: " INTPTR_FORMAT " == " INTPTR_FORMAT, *constant_addr, (((address) n) + i)));
    // Resolve the label to a concrete target address and register an
    // internal-word relocation for this constant-section word.
    *constant_addr = cb.consts()->target(*labels.at(i), (address) constant_addr);
    cb.consts()->relocate((address) constant_addr, relocInfo::internal_word_type);
  }
}
|
593 _subsume_loads(subsume_loads),
594 _do_escape_analysis(do_escape_analysis),
595 _failure_reason(NULL),
596 _code_buffer("Compile::Fill_buffer"),
597 _orig_pc_slot(0),
598 _orig_pc_slot_offset_in_bytes(0),
599 _has_method_handle_invokes(false),
600 _mach_constant_base_node(NULL),
601 _node_bundling_limit(0),
602 _node_bundling_base(NULL),
603 _java_calls(0),
604 _inner_loops(0),
605 _scratch_const_size(-1),
606 _in_scratch_emit_size(false),
607 _dead_node_list(comp_arena()),
608 _dead_node_count(0),
609 #ifndef PRODUCT
610 _trace_opto_output(TraceOptoOutput || method()->has_option("TraceOptoOutput")),
611 _printer(IdealGraphPrinter::printer()),
612 #endif
613 _congraph(NULL),
614 _print_inlining_list(NULL),
615 _print_inlining(0) {
616 C = this;
617
618 CompileWrapper cw(this);
619 #ifndef PRODUCT
620 if (TimeCompiler2) {
621 tty->print(" ");
622 target->holder()->name()->print();
623 tty->print(".");
624 target->print_short_name();
625 tty->print(" ");
626 }
627 TraceTime t1("Total compilation time", &_t_totalCompilation, TimeCompiler, TimeCompiler2);
628 TraceTime t2(NULL, &_t_methodCompilation, TimeCompiler, false);
629 bool print_opto_assembly = PrintOptoAssembly || _method->has_option("PrintOptoAssembly");
630 if (!print_opto_assembly) {
631 bool print_assembly = (PrintAssembly || _method->should_print_assembly());
632 if (print_assembly && !Disassembler::can_decode()) {
633 tty->print_cr("PrintAssembly request changed to PrintOptoAssembly");
634 print_opto_assembly = true;
635 }
651
652 if (UseOldInlining || PrintCompilation NOT_PRODUCT( || PrintOpto) )
653 _ilt = InlineTree::build_inline_tree_root();
654 else
655 _ilt = NULL;
656
657 // Even if NO memory addresses are used, MergeMem nodes must have at least 1 slice
658 assert(num_alias_types() >= AliasIdxRaw, "");
659
660 #define MINIMUM_NODE_HASH 1023
661 // Node list that Iterative GVN will start with
662 Unique_Node_List for_igvn(comp_arena());
663 set_for_igvn(&for_igvn);
664
665 // GVN that will be run immediately on new nodes
666 uint estimated_size = method()->code_size()*4+64;
667 estimated_size = (estimated_size < MINIMUM_NODE_HASH ? MINIMUM_NODE_HASH : estimated_size);
668 PhaseGVN gvn(node_arena(), estimated_size);
669 set_initial_gvn(&gvn);
670
671 if (PrintInlining) {
672 _print_inlining_list = new (comp_arena())GrowableArray<PrintInliningEvent>(comp_arena(), 1, 1, PrintInliningEvent());
673 }
674 { // Scope for timing the parser
675 TracePhase t3("parse", &_t_parser, true);
676
677 // Put top into the hash table ASAP.
678 initial_gvn()->transform_no_reclaim(top());
679
680 // Set up tf(), start(), and find a CallGenerator.
681 CallGenerator* cg = NULL;
682 if (is_osr_compilation()) {
683 const TypeTuple *domain = StartOSRNode::osr_domain();
684 const TypeTuple *range = TypeTuple::make_range(method()->signature());
685 init_tf(TypeFunc::make(domain, range));
686 StartNode* s = new (this) StartOSRNode(root(), domain);
687 initial_gvn()->set_type_bottom(s);
688 init_start(s);
689 cg = CallGenerator::for_osr(method(), entry_bci());
690 } else {
691 // Normal case.
692 init_tf(TypeFunc::make(method()));
693 StartNode* s = new (this) StartNode(root(), tf()->domain());
742 // remove useless nodes to make the usage analysis simpler
743 ResourceMark rm;
744 PhaseRemoveUseless pru(initial_gvn(), &for_igvn);
745 }
746
747 {
748 ResourceMark rm;
749 print_method("Before StringOpts", 3);
750 PhaseStringOpts pso(initial_gvn(), &for_igvn);
751 print_method("After StringOpts", 3);
752 }
753
754 // now inline anything that we skipped the first time around
755 while (_late_inlines.length() > 0) {
756 CallGenerator* cg = _late_inlines.pop();
757 cg->do_late_inline();
758 if (failing()) return;
759 }
760 }
761 assert(_late_inlines.length() == 0, "should have been processed");
762 dump_inlining();
763
764 print_method("Before RemoveUseless", 3);
765
766 // Remove clutter produced by parsing.
767 if (!failing()) {
768 ResourceMark rm;
769 PhaseRemoveUseless pru(initial_gvn(), &for_igvn);
770 }
771 }
772
773 // Note: Large methods are capped off in do_one_bytecode().
774 if (failing()) return;
775
776 // After parsing, node notes are no longer automagic.
777 // They must be propagated by register_new_node_with_optimizer(),
778 // clone(), or the like.
779 set_default_node_notes(NULL);
780
781 for (;;) {
782 int successes = Inline_Warm();
888 _for_igvn(NULL),
889 _warm_calls(NULL),
890 _orig_pc_slot(0),
891 _orig_pc_slot_offset_in_bytes(0),
892 _subsume_loads(true),
893 _do_escape_analysis(false),
894 _failure_reason(NULL),
895 _code_buffer("Compile::Fill_buffer"),
896 _has_method_handle_invokes(false),
897 _mach_constant_base_node(NULL),
898 _node_bundling_limit(0),
899 _node_bundling_base(NULL),
900 _java_calls(0),
901 _inner_loops(0),
902 #ifndef PRODUCT
903 _trace_opto_output(TraceOptoOutput),
904 _printer(NULL),
905 #endif
906 _dead_node_list(comp_arena()),
907 _dead_node_count(0),
908 _congraph(NULL),
909 _print_inlining_list(NULL),
910 _print_inlining(0) {
911 C = this;
912
913 #ifndef PRODUCT
914 TraceTime t1(NULL, &_t_totalCompilation, TimeCompiler, false);
915 TraceTime t2(NULL, &_t_stubCompilation, TimeCompiler, false);
916 set_print_assembly(PrintFrameConverterAssembly);
917 set_parsed_irreducible_loop(false);
918 #endif
919 CompileWrapper cw(this);
920 Init(/*AliasLevel=*/ 0);
921 init_tf((*generator)());
922
923 {
924 // The following is a dummy for the sake of GraphKit::gen_stub
925 Unique_Node_List for_igvn(comp_arena());
926 set_for_igvn(&for_igvn); // not used, but some GraphKit guys push on this
927 PhaseGVN gvn(Thread::current()->resource_area(),255);
928 set_initial_gvn(&gvn); // not significant, but GraphKit guys use it pervasively
929 gvn.transform_no_reclaim(top());
930
// Fill in a jump table that lives in the constant table section.
// At emission time each table slot was seeded with a recognizable
// placeholder; here every slot is overwritten with the code address the
// matching label resolved to, and a relocation is recorded so relocating
// the code keeps the table consistent.
void Compile::ConstantTable::fill_jump_table(CodeBuffer& cb, MachConstantNode* n, GrowableArray<Label*> labels) const {
  // If called from Compile::scratch_emit_size do nothing.
  if (Compile::current()->in_scratch_emit_size()) return;

  // Sanity: exactly one label per out-edge of the node.
  assert(labels.is_nonempty(), "must be");
  assert((uint) labels.length() == n->outcnt(), err_msg_res("must be equal: %d == %d", labels.length(), n->outcnt()));

  // Since MachConstantNode::constant_offset() also contains
  // table_base_offset() we need to subtract the table_base_offset()
  // to get the plain offset into the constant table.
  int offset = n->constant_offset() - table_base_offset();

  MacroAssembler _masm(&cb);
  address* jump_table_base = (address*) (_masm.code()->consts()->start() + offset);

  for (uint i = 0; i < n->outcnt(); i++) {
    address* constant_addr = &jump_table_base[i];
    // Verify the placeholder (node pointer + slot index) is still in place
    // before patching, guarding against patching the wrong slots.
    assert(*constant_addr == (((address) n) + i), err_msg_res("all jump-table entries must contain adjusted node pointer: " INTPTR_FORMAT " == " INTPTR_FORMAT, *constant_addr, (((address) n) + i)));
    // Write the resolved target and relocate the constant-section word.
    *constant_addr = cb.consts()->target(*labels.at(i), (address) constant_addr);
    cb.consts()->relocate((address) constant_addr, relocInfo::internal_word_type);
  }
}
3362
3363 void Compile::dump_inlining() {
3364 if (PrintInlining) {
3365 for (int i = 0; i < _print_inlining_list->length(); i++) {
3366 tty->print(_print_inlining_list->at(i).ss()->as_string());
3367 }
3368 }
3369 }
|