< prev index next >

src/hotspot/share/c1/c1_GraphBuilder.cpp

Print this page
rev 59383 : [mq]: final


2065   // jsr/ret pairs which are not associated with finally clauses and
2066   // do not have exception handlers in the containing method, and are
2067   // therefore not caught early enough to abort the inlining without
2068   // corrupting the graph. (We currently bail out with a non-empty
2069   // stack at a ret in these situations.)
2070   CHECK_BAILOUT();
2071 
2072   // inlining not successful => standard invoke
2073   ValueType* result_type = as_ValueType(declared_signature->return_type());
2074   ValueStack* state_before = copy_state_exhandling();
2075 
2076   // The bytecode (code) might change in this method so we are checking this very late.
2077   const bool has_receiver =
2078     code == Bytecodes::_invokespecial   ||
2079     code == Bytecodes::_invokevirtual   ||
2080     code == Bytecodes::_invokeinterface;
2081   Values* args = state()->pop_arguments(target->arg_size_no_receiver() + patching_appendix_arg);
2082   Value recv = has_receiver ? apop() : NULL;
2083   int vtable_index = Method::invalid_vtable_index;
2084 
2085 #ifdef SPARC
2086   // Currently only supported on Sparc.
2087   // The UseInlineCaches only controls dispatch to invokevirtuals for
2088   // loaded classes which we weren't able to statically bind.
2089   if (!UseInlineCaches && target->is_loaded() && code == Bytecodes::_invokevirtual
2090       && !target->can_be_statically_bound()) {
2091     // Find a vtable index if one is available
2092     // For arrays, callee_holder is Object. Resolving the call with
2093     // Object would allow an illegal call to finalize() on an
2094     // array. We use holder instead: illegal calls to finalize() won't
2095     // be compiled as vtable calls (IC call resolution will catch the
2096     // illegal call) and the few legal calls on array types won't be
2097     // either.
2098     vtable_index = target->resolve_vtable_index(calling_klass, holder);
2099   }
2100 #endif
2101 
2102   // A null check is required here (when there is a receiver) for any of the following cases
2103   // - invokespecial, always need a null check.
2104   // - invokevirtual, when the target is final and loaded. Calls to final targets will become optimized
2105   //   and require null checking. If the target is loaded a null check is emitted here.
2106   //   If the target isn't loaded the null check must happen after the call resolution. We achieve that
2107   //   by using the target method's unverified entry point (see CompiledIC::compute_monomorphic_entry).
2108   //   (The JVM specification requires that LinkageError must be thrown before a NPE. An unloaded target may
2109   //   potentially fail, and can't have the null check before the resolution.)
2110   // - A call that will be profiled. (But we can't add a null check when the target is unloaded, for the same
2111   //   reason as above, so calls with a receiver to unloaded targets can't be profiled.)
2112   //
2113   // Normal invokevirtual will perform the null check during lookup
2114 
2115   bool need_null_check = (code == Bytecodes::_invokespecial) ||
2116       (target->is_loaded() && (target->is_final_method() || (is_profiling() && profile_calls())));
2117 
2118   if (need_null_check) {
2119     if (recv != NULL) {
2120       null_check(recv);
2121     }




2065   // jsr/ret pairs which are not associated with finally clauses and
2066   // do not have exception handlers in the containing method, and are
2067   // therefore not caught early enough to abort the inlining without
2068   // corrupting the graph. (We currently bail out with a non-empty
2069   // stack at a ret in these situations.)
2070   CHECK_BAILOUT();
2071 
2072   // inlining not successful => standard invoke
2073   ValueType* result_type = as_ValueType(declared_signature->return_type());
2074   ValueStack* state_before = copy_state_exhandling();
2075 
2076   // The bytecode (code) might change in this method so we are checking this very late.
2077   const bool has_receiver =
2078     code == Bytecodes::_invokespecial   ||
2079     code == Bytecodes::_invokevirtual   ||
2080     code == Bytecodes::_invokeinterface;
2081   Values* args = state()->pop_arguments(target->arg_size_no_receiver() + patching_appendix_arg);
2082   Value recv = has_receiver ? apop() : NULL;
2083   int vtable_index = Method::invalid_vtable_index;
2084 

















2085   // A null check is required here (when there is a receiver) for any of the following cases
2086   // - invokespecial, always need a null check.
2087   // - invokevirtual, when the target is final and loaded. Calls to final targets will become optimized
2088   //   and require null checking. If the target is loaded a null check is emitted here.
2089   //   If the target isn't loaded the null check must happen after the call resolution. We achieve that
2090   //   by using the target method's unverified entry point (see CompiledIC::compute_monomorphic_entry).
2091   //   (The JVM specification requires that LinkageError must be thrown before a NPE. An unloaded target may
2092   //   potentially fail, and can't have the null check before the resolution.)
2093   // - A call that will be profiled. (But we can't add a null check when the target is unloaded, for the same
2094   //   reason as above, so calls with a receiver to unloaded targets can't be profiled.)
2095   //
2096   // Normal invokevirtual will perform the null check during lookup
2097 
2098   bool need_null_check = (code == Bytecodes::_invokespecial) ||
2099       (target->is_loaded() && (target->is_final_method() || (is_profiling() && profile_calls())));
2100 
2101   if (need_null_check) {
2102     if (recv != NULL) {
2103       null_check(recv);
2104     }


< prev index next >