84 int size = Method::size(access_flags.is_native());
85 return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags);
86 }
87
Method::Method(ConstMethod* xconst, AccessFlags access_flags) {
  // No safepoint may occur while this Method* is only partially
  // initialized (the verifier below asserts that in debug builds).
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  // No intrinsic assigned yet; flags all start cleared.
  set_intrinsic_id(vmIntrinsics::_none);
  set_force_inline(false);
  set_hidden(false);
  set_dont_inline(false);
  set_has_injected_profile(false);
  set_method_data(NULL);
  clear_method_counters();
  // Deliberately invalid vtable index until the method is linked into a klass.
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(NULL); // sets i2i entry and from_int
  set_adapter_entry(NULL);
  // No lock needed: the object is not yet visible to other threads.
  clear_code(false /* don't need a lock */); // from_c/from_i get set to c2i/i2i

  // Natives additionally get an entry stub and a signature handler;
  // clear_native_function() installs the unsatisfied-link-error stub.
  if (access_flags.is_native()) {
    clear_native_function();
    set_signature_handler(NULL);
  }

  // Debug-only invocation statistics start at zero.
  NOT_PRODUCT(set_compiled_invocation_count(0);)
}
113
114 // Release Method*. The nmethod will be gone when we get here because
115 // we've walked the code cache.
116 void Method::deallocate_contents(ClassLoaderData* loader_data) {
117 MetadataFactory::free_metadata(loader_data, constMethod());
118 set_constMethod(NULL);
119 #if INCLUDE_JVMCI
120 if (method_data()) {
121 FailedSpeculation::free_failed_speculations(method_data()->get_failed_speculations_address());
122 }
123 #endif
124 MetadataFactory::free_metadata(loader_data, method_data());
792 CompiledMethod* nm = code(); // Put it into local variable to guard against concurrent updates
793 if (nm != NULL) {
794 nm->make_not_entrant();
795 }
796 }
797
798
799 bool Method::has_native_function() const {
800 if (is_method_handle_intrinsic())
801 return false; // special-cased in SharedRuntime::generate_native_wrapper
802 address func = native_function();
803 return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
804 }
805
806
807 void Method::clear_native_function() {
808 // Note: is_method_handle_intrinsic() is allowed here.
809 set_native_function(
810 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
811 !native_bind_event_is_interesting);
812 clear_code();
813 }
814
815 address Method::critical_native_function() {
816 methodHandle mh(this);
817 return NativeLookup::lookup_critical_entry(mh);
818 }
819
820
821 void Method::set_signature_handler(address handler) {
822 address* signature_handler = signature_handler_addr();
823 *signature_handler = handler;
824 }
825
826
827 void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
828 if (PrintCompilation && report) {
829 ttyLocker ttyl;
830 tty->print("made not %scompilable on ", is_osr ? "OSR " : "");
831 if (comp_level == CompLevel_all) {
832 tty->print("all levels ");
915 return is_not_c2_osr_compilable();
916 return false;
917 }
918
919 void Method::set_not_osr_compilable(int comp_level, bool report, const char* reason) {
920 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
921 if (comp_level == CompLevel_all) {
922 set_not_c1_osr_compilable();
923 set_not_c2_osr_compilable();
924 } else {
925 if (is_c1_compile(comp_level))
926 set_not_c1_osr_compilable();
927 if (is_c2_compile(comp_level))
928 set_not_c2_osr_compilable();
929 }
930 CompilationPolicy::policy()->disable_compilation(this);
931 assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");
932 }
933
// Revert to using the interpreter and clear out the nmethod.
// 'acquire_lock' is false only when the caller already guarantees
// exclusion (e.g. the constructor, where the Method* is not yet
// published); MutexLocker treats a NULL mutex as "do not lock".
void Method::clear_code(bool acquire_lock /* = true */) {
  MutexLocker pl(acquire_lock ? Patching_lock : NULL, Mutex::_no_safepoint_check_flag);
  // this may be NULL if c2i adapters have not been made yet
  // Only should happen at allocate time.
  if (adapter() == NULL) {
    _from_compiled_entry = NULL;
  } else {
    _from_compiled_entry = adapter()->get_c2i_entry();
  }
  // The storestore barriers keep the entry-point rewrites ordered before
  // the nmethod reference is dropped, so racing readers never observe
  // _code cleared while an entry still points into the old code.
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  OrderAccess::storestore();
  _code = NULL;
}
949
950 #if INCLUDE_CDS
951 // Called by class data sharing to remove any entry points (which are not shared)
952 void Method::unlink_method() {
953 _code = NULL;
954
955 assert(DumpSharedSpaces, "dump time only");
956 // Set the values to what they should be at run time. Note that
957 // this Method can no longer be executed during dump time.
958 _i2i_entry = Interpreter::entry_for_cds_method(this);
959 _from_interpreted_entry = _i2i_entry;
960
961 if (is_native()) {
962 *native_function_addr() = NULL;
963 set_signature_handler(NULL);
964 }
965 NOT_PRODUCT(set_compiled_invocation_count(0);)
966
967 CDSAdapterHandlerEntry* cds_adapter = (CDSAdapterHandlerEntry*)adapter();
968 constMethod()->set_adapter_trampoline(cds_adapter->get_adapter_trampoline());
969 _from_compiled_entry = cds_adapter->get_c2i_entry_trampoline();
1149 // This function is called after potential safepoints so that nmethod
1150 // or adapter that it points to is still live and valid.
1151 // This function must not hit a safepoint!
1152 address Method::verified_code_entry() {
1153 debug_only(NoSafepointVerifier nsv;)
1154 assert(_from_compiled_entry != NULL, "must be set");
1155 return _from_compiled_entry;
1156 }
1157
1158 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
1159 // (could be racing a deopt).
1160 // Not inline to avoid circular ref.
1161 bool Method::check_code() const {
1162 // cached in a register or local. There's a race on the value of the field.
1163 CompiledMethod *code = OrderAccess::load_acquire(&_code);
1164 return code == NULL || (code->method() == NULL) || (code->method() == (Method*)this && !code->is_osr_method());
1165 }
1166
1167 // Install compiled code. Instantly it can execute.
1168 void Method::set_code(const methodHandle& mh, CompiledMethod *code) {
1169 MutexLocker pl(Patching_lock, Mutex::_no_safepoint_check_flag);
1170 assert( code, "use clear_code to remove code" );
1171 assert( mh->check_code(), "" );
1172
1173 guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
1174
1175 // These writes must happen in this order, because the interpreter will
1176 // directly jump to from_interpreted_entry which jumps to an i2c adapter
1177 // which jumps to _from_compiled_entry.
1178 mh->_code = code; // Assign before allowing compiled code to exec
1179
1180 int comp_level = code->comp_level();
1181 // In theory there could be a race here. In practice it is unlikely
1182 // and not worth worrying about.
1183 if (comp_level > mh->highest_comp_level()) {
1184 mh->set_highest_comp_level(comp_level);
1185 }
1186
1187 OrderAccess::storestore();
1188 mh->_from_compiled_entry = code->verified_entry_point();
1189 OrderAccess::storestore();
|
84 int size = Method::size(access_flags.is_native());
85 return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags);
86 }
87
Method::Method(ConstMethod* xconst, AccessFlags access_flags) {
  // No safepoint may occur while this Method* is only partially
  // initialized (the verifier below asserts that in debug builds).
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  // No intrinsic assigned yet; flags all start cleared.
  set_intrinsic_id(vmIntrinsics::_none);
  set_force_inline(false);
  set_hidden(false);
  set_dont_inline(false);
  set_has_injected_profile(false);
  set_method_data(NULL);
  clear_method_counters();
  // Deliberately invalid vtable index until the method is linked into a klass.
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(NULL); // sets i2i entry and from_int
  set_adapter_entry(NULL);
  // Qualified call: the object is not yet published, no locking involved.
  Method::clear_code(); // from_c/from_i get set to c2i/i2i

  // Natives additionally get an entry stub and a signature handler;
  // clear_native_function() installs the unsatisfied-link-error stub.
  if (access_flags.is_native()) {
    clear_native_function();
    set_signature_handler(NULL);
  }

  // Debug-only invocation statistics start at zero.
  NOT_PRODUCT(set_compiled_invocation_count(0);)
}
113
114 // Release Method*. The nmethod will be gone when we get here because
115 // we've walked the code cache.
116 void Method::deallocate_contents(ClassLoaderData* loader_data) {
117 MetadataFactory::free_metadata(loader_data, constMethod());
118 set_constMethod(NULL);
119 #if INCLUDE_JVMCI
120 if (method_data()) {
121 FailedSpeculation::free_failed_speculations(method_data()->get_failed_speculations_address());
122 }
123 #endif
124 MetadataFactory::free_metadata(loader_data, method_data());
792 CompiledMethod* nm = code(); // Put it into local variable to guard against concurrent updates
793 if (nm != NULL) {
794 nm->make_not_entrant();
795 }
796 }
797
798
799 bool Method::has_native_function() const {
800 if (is_method_handle_intrinsic())
801 return false; // special-cased in SharedRuntime::generate_native_wrapper
802 address func = native_function();
803 return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
804 }
805
806
807 void Method::clear_native_function() {
808 // Note: is_method_handle_intrinsic() is allowed here.
809 set_native_function(
810 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
811 !native_bind_event_is_interesting);
812 Method::unlink_code(this);
813 }
814
815 address Method::critical_native_function() {
816 methodHandle mh(this);
817 return NativeLookup::lookup_critical_entry(mh);
818 }
819
820
821 void Method::set_signature_handler(address handler) {
822 address* signature_handler = signature_handler_addr();
823 *signature_handler = handler;
824 }
825
826
827 void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
828 if (PrintCompilation && report) {
829 ttyLocker ttyl;
830 tty->print("made not %scompilable on ", is_osr ? "OSR " : "");
831 if (comp_level == CompLevel_all) {
832 tty->print("all levels ");
915 return is_not_c2_osr_compilable();
916 return false;
917 }
918
919 void Method::set_not_osr_compilable(int comp_level, bool report, const char* reason) {
920 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
921 if (comp_level == CompLevel_all) {
922 set_not_c1_osr_compilable();
923 set_not_c2_osr_compilable();
924 } else {
925 if (is_c1_compile(comp_level))
926 set_not_c1_osr_compilable();
927 if (is_c2_compile(comp_level))
928 set_not_c2_osr_compilable();
929 }
930 CompilationPolicy::policy()->disable_compilation(this);
931 assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");
932 }
933
// Revert to using the interpreter and clear out the nmethod.
// NOTE(review): this version does no locking itself; callers such as
// unlink_code() take CompiledMethod_lock first — confirm all other
// call sites do the same or are otherwise exclusive (e.g. constructor).
void Method::clear_code() {
  // this may be NULL if c2i adapters have not been made yet
  // Only should happen at allocate time.
  if (adapter() == NULL) {
    _from_compiled_entry = NULL;
  } else {
    _from_compiled_entry = adapter()->get_c2i_entry();
  }
  // The storestore barriers keep the entry-point rewrites ordered before
  // the nmethod reference is dropped, so racing readers never observe
  // _code cleared while an entry still points into the old code.
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  OrderAccess::storestore();
  _code = NULL;
}
948
949 void Method::unlink_code(Method *method, CompiledMethod *compare) {
950 if (method == NULL) {
951 return;
952 }
953 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
954 if (method->code() == compare ||
955 method->from_compiled_entry() == compare->verified_entry_point()) {
956 method->clear_code();
957 }
958 }
959
960 void Method::unlink_code(Method *method) {
961 if (method != NULL) {
962 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
963 method->clear_code();
964 }
965 }
966
967 #if INCLUDE_CDS
968 // Called by class data sharing to remove any entry points (which are not shared)
969 void Method::unlink_method() {
970 _code = NULL;
971
972 assert(DumpSharedSpaces, "dump time only");
973 // Set the values to what they should be at run time. Note that
974 // this Method can no longer be executed during dump time.
975 _i2i_entry = Interpreter::entry_for_cds_method(this);
976 _from_interpreted_entry = _i2i_entry;
977
978 if (is_native()) {
979 *native_function_addr() = NULL;
980 set_signature_handler(NULL);
981 }
982 NOT_PRODUCT(set_compiled_invocation_count(0);)
983
984 CDSAdapterHandlerEntry* cds_adapter = (CDSAdapterHandlerEntry*)adapter();
985 constMethod()->set_adapter_trampoline(cds_adapter->get_adapter_trampoline());
986 _from_compiled_entry = cds_adapter->get_c2i_entry_trampoline();
1166 // This function is called after potential safepoints so that nmethod
1167 // or adapter that it points to is still live and valid.
1168 // This function must not hit a safepoint!
1169 address Method::verified_code_entry() {
1170 debug_only(NoSafepointVerifier nsv;)
1171 assert(_from_compiled_entry != NULL, "must be set");
1172 return _from_compiled_entry;
1173 }
1174
1175 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
1176 // (could be racing a deopt).
1177 // Not inline to avoid circular ref.
1178 bool Method::check_code() const {
1179 // cached in a register or local. There's a race on the value of the field.
1180 CompiledMethod *code = OrderAccess::load_acquire(&_code);
1181 return code == NULL || (code->method() == NULL) || (code->method() == (Method*)this && !code->is_osr_method());
1182 }
1183
1184 // Install compiled code. Instantly it can execute.
1185 void Method::set_code(const methodHandle& mh, CompiledMethod *code) {
1186 MutexLocker pl(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
1187 assert( code, "use clear_code to remove code" );
1188 assert( mh->check_code(), "" );
1189
1190 guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
1191
1192 // These writes must happen in this order, because the interpreter will
1193 // directly jump to from_interpreted_entry which jumps to an i2c adapter
1194 // which jumps to _from_compiled_entry.
1195 mh->_code = code; // Assign before allowing compiled code to exec
1196
1197 int comp_level = code->comp_level();
1198 // In theory there could be a race here. In practice it is unlikely
1199 // and not worth worrying about.
1200 if (comp_level > mh->highest_comp_level()) {
1201 mh->set_highest_comp_level(comp_level);
1202 }
1203
1204 OrderAccess::storestore();
1205 mh->_from_compiled_entry = code->verified_entry_point();
1206 OrderAccess::storestore();
|