
src/hotspot/share/c1/c1_GraphBuilder.cpp

rev 59189 : imported patch hotspot


2064   // jsr/ret pairs which are not associated with finally clauses and
2065   // do not have exception handlers in the containing method, and are
2066   // therefore not caught early enough to abort the inlining without
2067   // corrupting the graph. (We currently bail out with a non-empty
2068   // stack at a ret in these situations.)
2069   CHECK_BAILOUT();
2070 
2071   // inlining not successful => standard invoke
2072   ValueType* result_type = as_ValueType(declared_signature->return_type());
2073   ValueStack* state_before = copy_state_exhandling();
2074 
2075   // The bytecode (code) might change in this method, so we check it very late.
2076   const bool has_receiver =
2077     code == Bytecodes::_invokespecial   ||
2078     code == Bytecodes::_invokevirtual   ||
2079     code == Bytecodes::_invokeinterface;
2080   Values* args = state()->pop_arguments(target->arg_size_no_receiver() + patching_appendix_arg);
2081   Value recv = has_receiver ? apop() : NULL;
2082   int vtable_index = Method::invalid_vtable_index;
2083 
2084 #ifdef SPARC
2085   // Currently only supported on SPARC.
2086   // The UseInlineCaches flag only controls dispatch to invokevirtuals for
2087   // loaded classes that we weren't able to statically bind.
2088   if (!UseInlineCaches && target->is_loaded() && code == Bytecodes::_invokevirtual
2089       && !target->can_be_statically_bound()) {
2090     // Find a vtable index if one is available
2091     // For arrays, callee_holder is Object. Resolving the call with
2092     // Object would allow an illegal call to finalize() on an
2093     // array. We use holder instead: illegal calls to finalize() won't
2094     // be compiled as vtable calls (IC call resolution will catch the
2095     // illegal call) and the few legal calls on array types won't be
2096     // either.
2097     vtable_index = target->resolve_vtable_index(calling_klass, holder);
2098   }
2099 #endif
2100 
2101   // A null check is required here (when there is a receiver) in any of the following cases:
2102   // - invokespecial: always needs a null check.
2103   // - invokevirtual, when the target is final and loaded: calls to final targets will be optimized
2104   //   and require null checking. If the target is loaded, a null check is emitted here.
2105   //   If the target isn't loaded, the null check must happen after the call resolution. We achieve that
2106   //   by using the target method's unverified entry point (see CompiledIC::compute_monomorphic_entry).
2107   //   (The JVM specification requires that a LinkageError be thrown before an NPE. Resolving an unloaded
2108   //   target may fail, so the null check can't be placed before the resolution.)
2109   // - A call that will be profiled. (But we can't add a null check when the target is unloaded, for the
2110   //   same reason as above, so calls with a receiver to unloaded targets can't be profiled.)
2111   //
2112   // A normal invokevirtual will perform the null check during the lookup.
2113 
2114   bool need_null_check = (code == Bytecodes::_invokespecial) ||
2115       (target->is_loaded() && (target->is_final_method() || (is_profiling() && profile_calls())));
2116 
2117   if (need_null_check) {
2118     if (recv != NULL) {
2119       null_check(recv);
2120     }
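
The SPARC-only block above (old lines 2084-2099, dropped in the new revision below) resolves a vtable index up front so that the backend can dispatch through the receiver's vtable instead of an inline cache. As a rough illustration of that dispatch shape, here is a minimal C++ sketch; Vtable, Object, and invoke_virtual are hypothetical stand-ins, not the real HotSpot data structures:

#include <cstdio>

// Hypothetical stand-ins for the real VM types; this only illustrates
// the dispatch shape, not HotSpot's actual vtable layout.
typedef void (*MethodEntry)(void* receiver);

struct Vtable {
  MethodEntry entries[2];   // one slot per virtual method
};

struct Object {
  const Vtable* vtable;     // every receiver carries its klass's vtable
};

static void base_print(void*)    { std::puts("Base::print");    }
static void derived_print(void*) { std::puts("Derived::print"); }

// Once a vtable_index is known at compile time, the call site reduces
// to an indexed load plus an indirect call -- no inline cache needed.
static void invoke_virtual(Object* recv, int vtable_index) {
  recv->vtable->entries[vtable_index](recv);
}

int main() {
  const Vtable base_vt    = {{ base_print,    nullptr }};
  const Vtable derived_vt = {{ derived_print, nullptr }};
  Object a = { &base_vt };
  Object b = { &derived_vt };
  invoke_virtual(&a, 0);   // prints Base::print
  invoke_virtual(&b, 0);   // prints Derived::print
  return 0;
}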




2064   // jsr/ret pairs which are not associated with finally clauses and
2065   // do not have exception handlers in the containing method, and are
2066   // therefore not caught early enough to abort the inlining without
2067   // corrupting the graph. (We currently bail out with a non-empty
2068   // stack at a ret in these situations.)
2069   CHECK_BAILOUT();
2070 
2071   // inlining not successful => standard invoke
2072   ValueType* result_type = as_ValueType(declared_signature->return_type());
2073   ValueStack* state_before = copy_state_exhandling();
2074 
2075   // The bytecode (code) might change in this method, so we check it very late.
2076   const bool has_receiver =
2077     code == Bytecodes::_invokespecial   ||
2078     code == Bytecodes::_invokevirtual   ||
2079     code == Bytecodes::_invokeinterface;
2080   Values* args = state()->pop_arguments(target->arg_size_no_receiver() + patching_appendix_arg);
2081   Value recv = has_receiver ? apop() : NULL;
2082   int vtable_index = Method::invalid_vtable_index;
2083 

















2084   // A null check is required here (when there is a receiver) in any of the following cases:
2085   // - invokespecial: always needs a null check.
2086   // - invokevirtual, when the target is final and loaded: calls to final targets will be optimized
2087   //   and require null checking. If the target is loaded, a null check is emitted here.
2088   //   If the target isn't loaded, the null check must happen after the call resolution. We achieve that
2089   //   by using the target method's unverified entry point (see CompiledIC::compute_monomorphic_entry).
2090   //   (The JVM specification requires that a LinkageError be thrown before an NPE. Resolving an unloaded
2091   //   target may fail, so the null check can't be placed before the resolution.)
2092   // - A call that will be profiled. (But we can't add a null check when the target is unloaded, for the
2093   //   same reason as above, so calls with a receiver to unloaded targets can't be profiled.)
2094   //
2095   // A normal invokevirtual will perform the null check during the lookup.
2096 
2097   bool need_null_check = (code == Bytecodes::_invokespecial) ||
2098       (target->is_loaded() && (target->is_final_method() || (is_profiling() && profile_calls())));
2099 
2100   if (need_null_check) {
2101     if (recv != NULL) {
2102       null_check(recv);
2103     }
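
At (new) lines 2080-2081, pop_arguments() pops the arguments that sit above the receiver on the expression stack, and apop() then pops the receiver itself. A minimal sketch of that ordering, with a plain std::vector standing in for C1's ValueStack (all names below are illustrative, not the real API):

#include <cassert>
#include <string>
#include <vector>

// Toy expression stack; the real ValueStack holds Value* entries.
static std::vector<std::string> stack = { "recv", "arg0", "arg1" };

// Pop the topmost n entries (the arguments), preserving their order.
static std::vector<std::string> pop_arguments(int n) {
  std::vector<std::string> args(stack.end() - n, stack.end());
  stack.resize(stack.size() - n);
  return args;
}

// Pop a single object entry -- here, the receiver under the arguments.
static std::string apop() {
  std::string top = stack.back();
  stack.pop_back();
  return top;
}

int main() {
  // The arguments were pushed last, so they are popped first...
  std::vector<std::string> args = pop_arguments(2);
  // ...leaving the receiver on top for apop().
  std::string recv = apop();
  assert(args[0] == "arg0" && args[1] == "arg1");
  assert(recv == "recv");
  return 0;
}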


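The need_null_check predicate at (new) lines 2097-2098 distills the long comment above it. Here is a hedged restatement as a standalone function, with plain bools standing in for the real bytecode and method queries (is_loaded(), is_final_method(), is_profiling() && profile_calls()):

#include <cassert>

// Assumed simplification of the predicate at (new) lines 2097-2098;
// the bool parameters replace the real bytecode and method queries.
static bool need_null_check(bool is_invokespecial,
                            bool target_loaded,
                            bool target_final,
                            bool profiling_calls) {
  // invokespecial always checks; otherwise an explicit check is only
  // possible (and needed) for loaded targets that are final or profiled.
  return is_invokespecial ||
         (target_loaded && (target_final || profiling_calls));
}

int main() {
  assert( need_null_check(true,  false, false, false));  // invokespecial
  assert( need_null_check(false, true,  true,  false));  // loaded final target
  assert( need_null_check(false, true,  false, true));   // profiled call
  assert(!need_null_check(false, false, false, true));   // unloaded: no early check
  assert(!need_null_check(false, true,  false, false));  // plain invokevirtual
  return 0;
}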