src/share/vm/opto/bytecodeInfo.cpp
*** old/src/share/vm/opto/bytecodeInfo.cpp	Thu Oct  3 15:09:27 2013
--- new/src/share/vm/opto/bytecodeInfo.cpp	Thu Oct  3 15:09:27 2013

*** 195,204 ****
--- 195,205 ----
  // negative filter: should callee NOT be inlined?
  bool InlineTree::should_not_inline(ciMethod *callee_method,
                                     ciMethod* caller_method,
+                                    JVMState* jvms,
                                     WarmCallInfo* wci_result) {
    const char* fail_msg = NULL;

    // First check all inlining restrictions which are required for correctness
*** 224,234 ****
--- 225,235 ----
    }

    // don't inline exception code unless the top method belongs to an
    // exception class
    if (callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
!     ciMethod* top_method = caller_jvms() ? caller_jvms()->of_depth(1)->method() : method();
!     ciMethod* top_method = jvms->caller() != NULL ? jvms->caller()->of_depth(1)->method() : method();
      if (!top_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
        wci_result->set_profit(wci_result->profit() * 0.1);
      }
    }
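For orientation: the old heuristic consulted the InlineTree's cached caller_jvms(), while the new code walks up from the jvms passed in for the call being considered, and of_depth(1) resolves the outermost (root) frame of that chain. The snippet below is a minimal standalone sketch of that "walk to the root caller" lookup under invented names (CallFrame, root_of, method_name); it is not HotSpot code or API.

    #include <cassert>
    #include <cstdio>

    // Hypothetical stand-in for a JVMState-like caller chain (illustration only).
    struct CallFrame {
      const char*      method_name;  // stand-in for jvms->method()
      const CallFrame* caller;       // stand-in for jvms->caller(), NULL at the root
    };

    // Walk to the outermost frame, i.e. what of_depth(1) would resolve to.
    const CallFrame* root_of(const CallFrame* frame) {
      assert(frame != NULL);
      while (frame->caller != NULL) {
        frame = frame->caller;
      }
      return frame;
    }

    int main() {
      CallFrame top    = { "Test::run",           NULL };  // root frame (depth 1)
      CallFrame mid    = { "Helper::work",        &top };
      CallFrame callee = { "MyException::<init>", &mid };

      // Mirrors: jvms->caller() != NULL ? jvms->caller()->of_depth(1)->method() : method()
      const CallFrame* top_method = callee.caller != NULL ? root_of(callee.caller) : &callee;
      std::printf("top method: %s\n", top_method->method_name);
      return 0;
    }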
*** 326,336 ****
--- 327,337 ----
  //-----------------------------try_to_inline-----------------------------------
  // return true if ok
  // Relocated from "InliningClosure::try_to_inline"
  bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
!                                int caller_bci, ciCallProfile& profile,
!                                int caller_bci, JVMState* jvms, ciCallProfile& profile,
                                 WarmCallInfo* wci_result, bool& should_delay) {

    // Old algorithm had funny accumulating BC-size counters
    if (UseOldInlining && ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
*** 344,354 ****
--- 345,355 ----
    if (!should_inline(callee_method, caller_method, caller_bci, profile, wci_result)) {
      return false;
    }

!   if (should_not_inline(callee_method, caller_method, wci_result)) {
!   if (should_not_inline(callee_method, caller_method, jvms, wci_result)) {
      return false;
    }

    if (InlineAccessors && callee_method->is_accessor()) {
      // accessor methods are not subject to any of the following limits.
*** 395,422 ****
--- 396,433 ----
        should_delay = true;
      }
    }

    // detect direct and indirect recursive inlining
-   if (!callee_method->is_compiled_lambda_form()) {
      // count the current method and the callee
-     int inline_level = (method() == callee_method) ? 1 : 0;
!     if (inline_level > MaxRecursiveInlineLevel) {
        set_msg("recursively inlining too deep");
!       return false;
!   if (!callee_method->is_compiled_lambda_form()) {
+     if (method() == callee_method) {
!       inline_level++;
+     }
    }
    // count callers of current method and callee
    JVMState* jvms = caller_jvms();
!   while (jvms != NULL && jvms->has_method()) {
!     if (jvms->method() == callee_method) {
+   for (JVMState* j = jvms->caller(); j != NULL && j->has_method(); j = j->caller()) {
!     if (j->method() == callee_method) {
!       if (callee_method->is_compiled_lambda_form()) {
+         // Since compiled lambda forms are heavily reused we allow recursive inlining.  If it is truly
+         // a recursion (using the same "receiver") we limit inlining otherwise we can easily blow the
+         // compiler stack.
+         Node* callee_argument0 = jvms->map()->argument(jvms, 0)->uncast();
+         Node* caller_argument0 = j->map()->argument(j, 0)->uncast();
+         if (caller_argument0 == callee_argument0) {
+           inline_level++;
+         }
+       } else {
          inline_level++;
          if (inline_level > MaxRecursiveInlineLevel) {
            set_msg("recursively inlining too deep");
            return false;
          }
        }
        jvms = jvms->caller();
+     }
+     if (inline_level > MaxRecursiveInlineLevel) {
+       set_msg("recursive inlining is too deep");
+       return false;
      }
    }

    int size = callee_method->code_size_for_inlining();
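The reworked check counts how many frames in the caller chain already contain the callee. For ordinary methods every match raises the inline level; for compiled lambda forms, which are shared by many unrelated call sites, a match only counts when the receiver (argument 0, with casts stripped) is the same node, so genuine self-recursion is still capped without forbidding reuse. The sketch below models that policy over a plain linked list of frames; Frame, receiver_id, too_much_recursion and kMaxRecursiveInlineLevel are invented stand-ins for the JVMState/Node machinery, not the HotSpot API.

    #include <cstdio>

    // Invented stand-ins for the JVMState/Node machinery (illustration only).
    struct Frame {
      const void*  method;       // identity of the frame's method
      const void*  receiver_id;  // identity of argument(0) with casts stripped
      const Frame* caller;       // enclosing frame, NULL at the root
    };

    const int kMaxRecursiveInlineLevel = 1;  // stand-in for MaxRecursiveInlineLevel

    // Returns true when inlining `callee` at `call_site` would nest too deeply.
    bool too_much_recursion(const void* callee, bool callee_is_lambda_form,
                            const void* current_method, const Frame* call_site) {
      int inline_level = 0;
      // Count the current method itself, but only for ordinary methods;
      // reused lambda forms are matched by receiver in the loop below.
      if (!callee_is_lambda_form && current_method == callee) {
        inline_level++;
      }
      // Count caller frames that already contain the callee.
      for (const Frame* j = call_site->caller; j != NULL; j = j->caller) {
        if (j->method != callee) {
          continue;
        }
        if (callee_is_lambda_form) {
          // A shared lambda form only counts as recursion when it is applied
          // to the very same receiver as the call site being inlined.
          if (j->receiver_id == call_site->receiver_id) {
            inline_level++;
          }
        } else {
          inline_level++;
        }
      }
      return inline_level > kMaxRecursiveInlineLevel;
    }

    int main() {
      int mA, mB, recv1, recv2;
      Frame root = { &mA, &recv1, NULL  };
      Frame mid  = { &mB, &recv1, &root };
      Frame site = { &mB, &recv2, &mid  };  // call site that considers inlining mB again

      std::printf("ordinary method:                  %s\n",
                  too_much_recursion(&mB, false, &mB, &site) ? "reject" : "inline");
      std::printf("lambda form, different receivers: %s\n",
                  too_much_recursion(&mB, true,  &mB, &site) ? "reject" : "inline");
      return 0;
    }

In this toy example the ordinary-method rules reject the nested call, while the lambda-form variant with distinct receivers is still allowed, which is the behaviour the patch is after.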
*** 534,544 ****
--- 545,555 ----
    // Check if inlining policy says no.
    WarmCallInfo wci = *(initial_wci);
    bool success = try_to_inline(callee_method, caller_method, caller_bci,
!                                profile, &wci, should_delay);
!                                jvms, profile, &wci, should_delay);

  #ifndef PRODUCT
    if (UseOldInlining && InlineWarmCalls && (PrintOpto || C->print_inlining())) {
      bool cold = wci.is_cold();
