407 stringStream* _ss;                            // allocated in the constructor; accumulates this call site's inlining printout
408
409 public:
410 PrintInliningBuffer()
411 : _cg(NULL) { _ss = new stringStream(); }     // starts with no CallGenerator and a fresh, empty stream
412                                               // NOTE(review): no destructor visible in this excerpt — _ss is presumably released elsewhere (or arena-managed); confirm
413 stringStream* ss() const { return _ss; }      // stream that inlining messages are appended to
414 CallGenerator* cg() const { return _cg; }     // call generator this buffer was tagged with (NULL until set_cg)
415 void set_cg(CallGenerator* cg) { _cg = cg; }  // tag/replace the call generator associated with this buffer
416 };
417
418 GrowableArray<PrintInliningBuffer>* _print_inlining_list;  // ordered list of per-call-site inlining printout buffers
419 int _print_inlining_idx;                                   // index of the buffer that print_inlining_stream() currently writes to
420
421 // Only keep nodes in the expensive node list that need to be optimized
422 void cleanup_expensive_nodes(PhaseIterGVN &igvn);
423 // Use for sorting expensive nodes to bring similar nodes together
424 static int cmp_expensive_nodes(Node** n1, Node** n2);
425 // Expensive nodes list already sorted?
426 bool expensive_nodes_sorted() const;
427
428 // Are we within a PreserveJVMState block?
429 int _preserve_jvm_state;
430
431 public:
432
433 // Stream of the inlining-printout buffer currently selected by _print_inlining_idx.
434 outputStream* print_inlining_stream() const {
435 return _print_inlining_list->adr_at(_print_inlining_idx)->ss();
436 }
436
437 // Tag the current printout buffer with cg (its output will be revisited later,
438 // e.g. for a delayed/late inline) and open a fresh buffer right after it, so
439 // subsequent inlining messages go to the new buffer instead.
440 // No-op unless inlining printing is enabled (_print_inlining; declared outside this excerpt).
441 void print_inlining_skip(CallGenerator* cg) {
442 if (_print_inlining) {
443 _print_inlining_list->adr_at(_print_inlining_idx)->set_cg(cg);  // remember which cg owns the buffer being skipped
444 _print_inlining_idx++;                                          // advance past it...
445 _print_inlining_list->insert_before(_print_inlining_idx, PrintInliningBuffer());  // ...and write into a fresh buffer from now on
446 }
447 }
444
445 void print_inlining_insert(CallGenerator* cg) {
446 if (_print_inlining) {
807 _last_tf = tf;
808 }
809
810 // --- Alias analysis accessors ---
811 AliasType* alias_type(int idx) { assert(idx < num_alias_types(), "oob"); return _alias_types[idx]; }  // bounds-asserted table lookup
812 AliasType* alias_type(const TypePtr* adr_type, ciField* field = NULL) { return find_alias_type(adr_type, false, field); }  // look up the alias class for an address type (field narrows it — semantics in find_alias_type, not shown here)
813 bool have_alias_type(const TypePtr* adr_type);
814 AliasType* alias_type(ciField* field);
815
816 // Convenience projections of an AliasType entry:
817 int get_alias_index(const TypePtr* at) { return alias_type(at)->index(); }
818 const TypePtr* get_adr_type(uint aidx) { return alias_type(aidx)->adr_type(); }
819 int get_general_index(uint aidx) { return alias_type(aidx)->general_index(); }
820
821 // Building nodes
822 void rethrow_exceptions(JVMState* jvms);
823 void return_values(JVMState* jvms);
824 JVMState* build_start_state(StartNode* start, const TypeFunc* tf);
825
826 // Decide how to build a call.
827 // The profile factor is a discount to apply to this site's interp. profile.
828 CallGenerator* call_generator(ciMethod* call_method, int vtable_index, bool call_does_dispatch,
829 JVMState* jvms, bool allow_inline, float profile_factor, bool allow_intrinsics = true,
830 bool delayed_forbidden = false);
831 // True when either the string-concat or the boxing heuristic wants this
832 // call's inlining postponed to a later pass.
833 bool should_delay_inlining(ciMethod* call_method, JVMState* jvms) {
834 return should_delay_string_inlining(call_method, jvms) ||
835 should_delay_boxing_inlining(call_method, jvms);
836 }
837 bool should_delay_string_inlining(ciMethod* call_method, JVMState* jvms);
838 bool should_delay_boxing_inlining(ciMethod* call_method, JVMState* jvms);
839
840 // Helper functions to identify inlining potential at call-site
841 ciMethod* optimize_virtual_call(ciMethod* caller, int bci, ciInstanceKlass* klass,
842 ciMethod* callee, const TypeOopPtr* receiver_type,
843 bool is_virtual,
844 bool &call_does_dispatch, int &vtable_index);  // outputs: whether the call still dispatches, and the vtable slot if so
845 ciMethod* optimize_inlining(ciMethod* caller, int bci, ciInstanceKlass* klass,
846 ciMethod* callee, const TypeOopPtr* receiver_type);
847
848 // Report if there were too many traps at a current method and bci.
849 // Report if a trap was recorded, and/or PerMethodTrapLimit was exceeded.
850 // If there is no MDO at all, report no trap unless told to assume it.
851 bool too_many_traps(ciMethod* method, int bci, Deoptimization::DeoptReason reason);
852 // This version, unspecific to a particular bci, asks if
|
407 stringStream* _ss;                            // allocated in the constructor; accumulates this call site's inlining printout
408
409 public:
410 PrintInliningBuffer()
411 : _cg(NULL) { _ss = new stringStream(); }     // starts with no CallGenerator and a fresh, empty stream
412                                               // NOTE(review): no destructor visible in this excerpt — _ss is presumably released elsewhere (or arena-managed); confirm
413 stringStream* ss() const { return _ss; }      // stream that inlining messages are appended to
414 CallGenerator* cg() const { return _cg; }     // call generator this buffer was tagged with (NULL until set_cg)
415 void set_cg(CallGenerator* cg) { _cg = cg; }  // tag/replace the call generator associated with this buffer
416 };
417
418 GrowableArray<PrintInliningBuffer>* _print_inlining_list;  // ordered list of per-call-site inlining printout buffers
419 int _print_inlining_idx;                                   // index of the buffer that print_inlining_stream() currently writes to
420
421 // Only keep nodes in the expensive node list that need to be optimized
422 void cleanup_expensive_nodes(PhaseIterGVN &igvn);
423 // Use for sorting expensive nodes to bring similar nodes together
424 static int cmp_expensive_nodes(Node** n1, Node** n2);
425 // Expensive nodes list already sorted?
426 bool expensive_nodes_sorted() const;
427 // Remove the speculative part of types and clean up the graph
428 void remove_speculative_types(PhaseIterGVN &igvn);
429
430 // Are we within a PreserveJVMState block?
431 int _preserve_jvm_state;
432
433 public:
434
435 // Stream of the inlining-printout buffer currently selected by _print_inlining_idx.
436 outputStream* print_inlining_stream() const {
437 return _print_inlining_list->adr_at(_print_inlining_idx)->ss();
438 }
438
439 // Tag the current printout buffer with cg (its output will be revisited later,
440 // e.g. for a delayed/late inline) and open a fresh buffer right after it, so
441 // subsequent inlining messages go to the new buffer instead.
442 // No-op unless inlining printing is enabled (_print_inlining; declared outside this excerpt).
443 void print_inlining_skip(CallGenerator* cg) {
444 if (_print_inlining) {
445 _print_inlining_list->adr_at(_print_inlining_idx)->set_cg(cg);  // remember which cg owns the buffer being skipped
446 _print_inlining_idx++;                                          // advance past it...
447 _print_inlining_list->insert_before(_print_inlining_idx, PrintInliningBuffer());  // ...and write into a fresh buffer from now on
448 }
449 }
446
447 void print_inlining_insert(CallGenerator* cg) {
448 if (_print_inlining) {
809 _last_tf = tf;
810 }
811
812 // --- Alias analysis accessors ---
813 AliasType* alias_type(int idx) { assert(idx < num_alias_types(), "oob"); return _alias_types[idx]; }  // bounds-asserted table lookup
814 AliasType* alias_type(const TypePtr* adr_type, ciField* field = NULL) { return find_alias_type(adr_type, false, field); }  // look up the alias class for an address type (field narrows it — semantics in find_alias_type, not shown here)
815 bool have_alias_type(const TypePtr* adr_type);
816 AliasType* alias_type(ciField* field);
817
818 // Convenience projections of an AliasType entry:
819 int get_alias_index(const TypePtr* at) { return alias_type(at)->index(); }
820 const TypePtr* get_adr_type(uint aidx) { return alias_type(aidx)->adr_type(); }
821 int get_general_index(uint aidx) { return alias_type(aidx)->general_index(); }
822
823 // Building nodes
824 void rethrow_exceptions(JVMState* jvms);
825 void return_values(JVMState* jvms);
826 JVMState* build_start_state(StartNode* start, const TypeFunc* tf);
827
828 // Decide how to build a call.
829 // The profile factor is a discount to apply to this site's interp. profile.
830 // spec_rcvr_type: presumably a speculatively-known receiver klass (NULL when none) — confirm against callers.
831 CallGenerator* call_generator(ciMethod* call_method, int vtable_index, bool call_does_dispatch,
832 JVMState* jvms, bool allow_inline, float profile_factor, ciKlass* spec_rcvr_type = NULL,
833 bool allow_intrinsics = true, bool delayed_forbidden = false);
834 // True when either the string-concat or the boxing heuristic wants this
835 // call's inlining postponed to a later pass.
836 bool should_delay_inlining(ciMethod* call_method, JVMState* jvms) {
837 return should_delay_string_inlining(call_method, jvms) ||
838 should_delay_boxing_inlining(call_method, jvms);
839 }
840 bool should_delay_string_inlining(ciMethod* call_method, JVMState* jvms);
841 bool should_delay_boxing_inlining(ciMethod* call_method, JVMState* jvms);
842
843 // Helper functions to identify inlining potential at call-site
844 ciMethod* optimize_virtual_call(ciMethod* caller, int bci, ciInstanceKlass* klass,
845 ciMethod* callee, const TypeOopPtr* receiver_type,
846 bool is_virtual,
847 bool &call_does_dispatch, int &vtable_index);  // outputs: whether the call still dispatches, and the vtable slot if so
848 ciMethod* optimize_inlining(ciMethod* caller, int bci, ciInstanceKlass* klass,
849 ciMethod* callee, const TypeOopPtr* receiver_type);
850
851 // Report if there were too many traps at a current method and bci.
852 // Report if a trap was recorded, and/or PerMethodTrapLimit was exceeded.
853 // If there is no MDO at all, report no trap unless told to assume it.
854 bool too_many_traps(ciMethod* method, int bci, Deoptimization::DeoptReason reason);
855 // This version, unspecific to a particular bci, asks if
|