527 return; // return the exception (which is cleared)
528 }
529
530 method->set_method_data(method_data);
531 if (PrintMethodData && (Verbose || WizardMode)) {
532 ResourceMark rm(THREAD);
533 tty->print("build_interpreter_method_data for ");
534 method->print_name(tty);
535 tty->cr();
536 // At the end of the run, the MDO, full of data, will be dumped.
537 }
538 }
539 }
540
541 MethodCounters* Method::build_method_counters(Method* m, TRAPS) {
542 // Do not profile the method if metaspace has hit an OOM previously
543 if (ClassLoaderDataGraph::has_metaspace_oom()) {
544 return NULL;
545 }
546
547 methodHandle mh(m);
548 MethodCounters* counters = MethodCounters::allocate(mh, THREAD);
549 if (HAS_PENDING_EXCEPTION) {
550 CompileBroker::log_metaspace_failure();
551 ClassLoaderDataGraph::set_metaspace_oom(true);
552 return NULL; // return the exception (which is cleared)
553 }
554 if (!mh->init_method_counters(counters)) {
555 MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
556 }
557
558 if (LogTouchedMethods) {
559 mh->log_touched(CHECK_NULL);
560 }
561
562 return mh->method_counters();
563 }
564
565 bool Method::init_method_counters(MethodCounters* counters) {
566 // Try to install a pointer to MethodCounters, return true on success.
567 return Atomic::replace_if_null(counters, &_method_counters);
609 int size = code_size();
610 // Check if size match
611 if (size == 0 || size % 5 != 0) return false;
612 address cb = code_base();
613 int last = size - 1;
614 if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
615 // Does not call superclass default constructor
616 return false;
617 }
618 // Check optional sequence
619 for (int i = 4; i < last; i += 5) {
620 if (cb[i] != Bytecodes::_aload_0) return false;
621 if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
622 if (cb[i+2] != Bytecodes::_putfield) return false;
623 }
624 return true;
625 }
626
627
628 bool Method::compute_has_loops_flag() {
629 BytecodeStream bcs(this);
630 Bytecodes::Code bc;
631
632 while ((bc = bcs.next()) >= 0) {
633 switch( bc ) {
634 case Bytecodes::_ifeq:
635 case Bytecodes::_ifnull:
636 case Bytecodes::_iflt:
637 case Bytecodes::_ifle:
638 case Bytecodes::_ifne:
639 case Bytecodes::_ifnonnull:
640 case Bytecodes::_ifgt:
641 case Bytecodes::_ifge:
642 case Bytecodes::_if_icmpeq:
643 case Bytecodes::_if_icmpne:
644 case Bytecodes::_if_icmplt:
645 case Bytecodes::_if_icmpgt:
646 case Bytecodes::_if_icmple:
647 case Bytecodes::_if_icmpge:
648 case Bytecodes::_if_acmpeq:
649 case Bytecodes::_if_acmpne:
967 return false;
968 }
969
970 // call this when compiler finds that this method is not compilable
971 void Method::set_not_compilable(const char* reason, int comp_level, bool report) {
972 if (is_always_compilable()) {
973 // Don't mark a method which should be always compilable
974 return;
975 }
976 print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);
977 if (comp_level == CompLevel_all) {
978 set_not_c1_compilable();
979 set_not_c2_compilable();
980 } else {
981 if (is_c1_compile(comp_level))
982 set_not_c1_compilable();
983 if (is_c2_compile(comp_level))
984 set_not_c2_compilable();
985 }
986 CompilationPolicy::policy()->disable_compilation(this);
987 assert(!CompilationPolicy::can_be_compiled(this, comp_level), "sanity check");
988 }
989
990 bool Method::is_not_osr_compilable(int comp_level) const {
991 if (is_not_compilable(comp_level))
992 return true;
993 if (comp_level == CompLevel_any)
994 return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();
995 if (is_c1_compile(comp_level))
996 return is_not_c1_osr_compilable();
997 if (is_c2_compile(comp_level))
998 return is_not_c2_osr_compilable();
999 return false;
1000 }
1001
1002 void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
1003 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
1004 if (comp_level == CompLevel_all) {
1005 set_not_c1_osr_compilable();
1006 set_not_c2_osr_compilable();
1007 } else {
1008 if (is_c1_compile(comp_level))
1009 set_not_c1_osr_compilable();
1010 if (is_c2_compile(comp_level))
1011 set_not_c2_osr_compilable();
1012 }
1013 CompilationPolicy::policy()->disable_compilation(this);
1014 assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");
1015 }
1016
// Revert to using the interpreter and clear out the nmethod
void Method::clear_code() {
  // this may be NULL if c2i adapters have not been made yet
  // Only should happen at allocate time.
  if (adapter() == NULL) {
    _from_compiled_entry = NULL;
  } else {
    _from_compiled_entry = adapter()->get_c2i_entry();
  }
  // The storestore barriers order these publication stores: a concurrent
  // reader that observes a later store also sees the earlier ones.
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  OrderAccess::storestore();
  _code = NULL;
}
1031
// Clear the compiled code if it refers to 'compare', either directly via
// _code or via the currently published compiled entry point.
void Method::unlink_code(CompiledMethod *compare) {
  // Take CompiledMethod_lock only if this thread does not already hold it.
  MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
  // We need to check if either the _code or _from_compiled_code_entry_point
  // through it.
  if (code() == compare ||
      from_compiled_entry() == compare->verified_entry_point()) {
    clear_code();
  }
}
1045
1046 void Method::unlink_code() {
1047 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
1048 clear_code();
1049 }
1050
1051 #if INCLUDE_CDS
1052 // Called by class data sharing to remove any entry points (which are not shared)
1053 void Method::unlink_method() {
1054 _code = NULL;
1055
1056 Arguments::assert_is_dumping_archive();
1057 // Set the values to what they should be at run time. Note that
1058 // this Method can no longer be executed during dump time.
1059 _i2i_entry = Interpreter::entry_for_cds_method(this);
1060 _from_interpreted_entry = _i2i_entry;
1061
1062 if (DynamicDumpSharedSpaces) {
1063 assert(_from_compiled_entry != NULL, "sanity");
1064 } else {
1065 // TODO: Simplify the adapter trampoline allocation for static archiving.
1066 // Remove the use of CDSAdapterHandlerEntry.
1067 CDSAdapterHandlerEntry* cds_adapter = (CDSAdapterHandlerEntry*)adapter();
1068 constMethod()->set_adapter_trampoline(cds_adapter->get_adapter_trampoline());
1069 _from_compiled_entry = cds_adapter->get_c2i_entry_trampoline();
1070 assert(*((int*)_from_compiled_entry) == 0,
1071 "must be NULL during dump time, to be initialized at run time");
1072 }
1073
1074 if (is_native()) {
1075 *native_function_addr() = NULL;
1076 set_signature_handler(NULL);
1077 }
1078 NOT_PRODUCT(set_compiled_invocation_count(0);)
1079
1551 memcpy(newm->checked_exceptions_start(),
1552 m->checked_exceptions_start(),
1553 checked_exceptions_len * sizeof(CheckedExceptionElement));
1554 }
1555 // Copy exception table
1556 if (exception_table_len > 0) {
1557 memcpy(newm->exception_table_start(),
1558 m->exception_table_start(),
1559 exception_table_len * sizeof(ExceptionTableElement));
1560 }
1561 // Copy local variable number table
1562 if (localvariable_len > 0) {
1563 memcpy(newm->localvariable_table_start(),
1564 m->localvariable_table_start(),
1565 localvariable_len * sizeof(LocalVariableTableElement));
1566 }
1567 // Copy stackmap table
1568 if (m->has_stackmap_table()) {
1569 int code_attribute_length = m->stackmap_data()->length();
1570 Array<u1>* stackmap_data =
1571 MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_NULL);
1572 memcpy((void*)stackmap_data->adr_at(0),
1573 (void*)m->stackmap_data()->adr_at(0), code_attribute_length);
1574 newm->set_stackmap_data(stackmap_data);
1575 }
1576
1577 // copy annotations over to new method
1578 newcm->copy_annotations_from(loader_data, cm, CHECK_NULL);
1579 return newm;
1580 }
1581
1582 vmSymbols::SID Method::klass_id_for_intrinsics(const Klass* holder) {
1583 // if loader is not the default loader (i.e., != NULL), we can't know the intrinsics
1584 // because we are not loading from core libraries
1585 // exception: the AES intrinsics come from lib/ext/sunjce_provider.jar
1586 // which does not use the class default class loader so we check for its loader here
1587 const InstanceKlass* ik = InstanceKlass::cast(holder);
1588 if ((ik->class_loader() != NULL) && !SystemDictionary::is_platform_class_loader(ik->class_loader())) {
1589 return vmSymbols::NO_SID; // regardless of name, no intrinsics here
1590 }
1591
1592 // see if the klass name is well-known:
1593 Symbol* klass_name = ik->name();
1594 return vmSymbols::find_sid(klass_name);
1595 }
1596
1597 void Method::init_intrinsic_id() {
1598 assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");
2197 ClassLoaderData* cld = loader_data;
2198
2199 if (!SafepointSynchronize::is_at_safepoint()) {
2200 // Have to add jmethod_ids() to class loader data thread-safely.
2201 // Also have to add the method to the list safely, which the cld lock
2202 // protects as well.
2203 MutexLocker ml(cld->metaspace_lock(), Mutex::_no_safepoint_check_flag);
2204 if (cld->jmethod_ids() == NULL) {
2205 cld->set_jmethod_ids(new JNIMethodBlock());
2206 }
2207 // jmethodID is a pointer to Method*
2208 return (jmethodID)cld->jmethod_ids()->add_method(m);
2209 } else {
2210 // At safepoint, we are single threaded and can set this.
2211 if (cld->jmethod_ids() == NULL) {
2212 cld->set_jmethod_ids(new JNIMethodBlock());
2213 }
2214 // jmethodID is a pointer to Method*
2215 return (jmethodID)cld->jmethod_ids()->add_method(m);
2216 }
2217 }
2218
2219 // Mark a jmethodID as free. This is called when there is a data race in
2220 // InstanceKlass while creating the jmethodID cache.
2221 void Method::destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID m) {
2222 ClassLoaderData* cld = loader_data;
2223 Method** ptr = (Method**)m;
2224 assert(cld->jmethod_ids() != NULL, "should have method handles");
2225 cld->jmethod_ids()->destroy_method(ptr);
2226 }
2227
2228 void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
2229 // Can't assert the method_holder is the same because the new method has the
2230 // scratch method holder.
2231 assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
2232 == new_method->method_holder()->class_loader() ||
2233 new_method->method_holder()->class_loader() == NULL, // allow Unsafe substitution
2234 "changing to a different class loader");
2235 // Just change the method in place, jmethodID pointer doesn't change.
2236 *((Method**)jmid) = new_method;
|
527 return; // return the exception (which is cleared)
528 }
529
530 method->set_method_data(method_data);
531 if (PrintMethodData && (Verbose || WizardMode)) {
532 ResourceMark rm(THREAD);
533 tty->print("build_interpreter_method_data for ");
534 method->print_name(tty);
535 tty->cr();
536 // At the end of the run, the MDO, full of data, will be dumped.
537 }
538 }
539 }
540
541 MethodCounters* Method::build_method_counters(Method* m, TRAPS) {
542 // Do not profile the method if metaspace has hit an OOM previously
543 if (ClassLoaderDataGraph::has_metaspace_oom()) {
544 return NULL;
545 }
546
547 methodHandle mh(THREAD, m);
548 MethodCounters* counters = MethodCounters::allocate(mh, THREAD);
549 if (HAS_PENDING_EXCEPTION) {
550 CompileBroker::log_metaspace_failure();
551 ClassLoaderDataGraph::set_metaspace_oom(true);
552 return NULL; // return the exception (which is cleared)
553 }
554 if (!mh->init_method_counters(counters)) {
555 MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
556 }
557
558 if (LogTouchedMethods) {
559 mh->log_touched(CHECK_NULL);
560 }
561
562 return mh->method_counters();
563 }
564
565 bool Method::init_method_counters(MethodCounters* counters) {
566 // Try to install a pointer to MethodCounters, return true on success.
567 return Atomic::replace_if_null(counters, &_method_counters);
609 int size = code_size();
610 // Check if size match
611 if (size == 0 || size % 5 != 0) return false;
612 address cb = code_base();
613 int last = size - 1;
614 if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
615 // Does not call superclass default constructor
616 return false;
617 }
618 // Check optional sequence
619 for (int i = 4; i < last; i += 5) {
620 if (cb[i] != Bytecodes::_aload_0) return false;
621 if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
622 if (cb[i+2] != Bytecodes::_putfield) return false;
623 }
624 return true;
625 }
626
627
628 bool Method::compute_has_loops_flag() {
629 BytecodeStream bcs(methodHandle(Thread::current(), this));
630 Bytecodes::Code bc;
631
632 while ((bc = bcs.next()) >= 0) {
633 switch( bc ) {
634 case Bytecodes::_ifeq:
635 case Bytecodes::_ifnull:
636 case Bytecodes::_iflt:
637 case Bytecodes::_ifle:
638 case Bytecodes::_ifne:
639 case Bytecodes::_ifnonnull:
640 case Bytecodes::_ifgt:
641 case Bytecodes::_ifge:
642 case Bytecodes::_if_icmpeq:
643 case Bytecodes::_if_icmpne:
644 case Bytecodes::_if_icmplt:
645 case Bytecodes::_if_icmpgt:
646 case Bytecodes::_if_icmple:
647 case Bytecodes::_if_icmpge:
648 case Bytecodes::_if_acmpeq:
649 case Bytecodes::_if_acmpne:
967 return false;
968 }
969
970 // call this when compiler finds that this method is not compilable
971 void Method::set_not_compilable(const char* reason, int comp_level, bool report) {
972 if (is_always_compilable()) {
973 // Don't mark a method which should be always compilable
974 return;
975 }
976 print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);
977 if (comp_level == CompLevel_all) {
978 set_not_c1_compilable();
979 set_not_c2_compilable();
980 } else {
981 if (is_c1_compile(comp_level))
982 set_not_c1_compilable();
983 if (is_c2_compile(comp_level))
984 set_not_c2_compilable();
985 }
986 CompilationPolicy::policy()->disable_compilation(this);
987 assert(!CompilationPolicy::can_be_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
988 }
989
990 bool Method::is_not_osr_compilable(int comp_level) const {
991 if (is_not_compilable(comp_level))
992 return true;
993 if (comp_level == CompLevel_any)
994 return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();
995 if (is_c1_compile(comp_level))
996 return is_not_c1_osr_compilable();
997 if (is_c2_compile(comp_level))
998 return is_not_c2_osr_compilable();
999 return false;
1000 }
1001
1002 void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
1003 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
1004 if (comp_level == CompLevel_all) {
1005 set_not_c1_osr_compilable();
1006 set_not_c2_osr_compilable();
1007 } else {
1008 if (is_c1_compile(comp_level))
1009 set_not_c1_osr_compilable();
1010 if (is_c2_compile(comp_level))
1011 set_not_c2_osr_compilable();
1012 }
1013 CompilationPolicy::policy()->disable_compilation(this);
1014 assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
1015 }
1016
// Revert to using the interpreter and clear out the nmethod
void Method::clear_code() {
  // this may be NULL if c2i adapters have not been made yet
  // Only should happen at allocate time.
  if (adapter() == NULL) {
    _from_compiled_entry = NULL;
  } else {
    _from_compiled_entry = adapter()->get_c2i_entry();
  }
  // The storestore barriers order these publication stores: a concurrent
  // reader that observes a later store also sees the earlier ones.
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  OrderAccess::storestore();
  _code = NULL;
}
1031
// Clear the compiled code if it refers to 'compare', either directly via
// _code or via the currently published compiled entry point.
void Method::unlink_code(CompiledMethod *compare) {
  // Take CompiledMethod_lock only if this thread does not already hold it.
  MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
  // We need to check if either the _code or _from_compiled_code_entry_point
  // through it.
  if (code() == compare ||
      from_compiled_entry() == compare->verified_entry_point()) {
    clear_code();
  }
}
1045
1046 void Method::unlink_code() {
1047 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
1048 clear_code();
1049 }
1050
1051 #if INCLUDE_CDS
1052 // Called by class data sharing to remove any entry points (which are not shared)
1053 void Method::unlink_method() {
1054 _code = NULL;
1055
1056 Arguments::assert_is_dumping_archive();
1057 // Set the values to what they should be at run time. Note that
1058 // this Method can no longer be executed during dump time.
1059 _i2i_entry = Interpreter::entry_for_cds_method(methodHandle(Thread::current(), this));
1060 _from_interpreted_entry = _i2i_entry;
1061
1062 if (DynamicDumpSharedSpaces) {
1063 assert(_from_compiled_entry != NULL, "sanity");
1064 } else {
1065 // TODO: Simplify the adapter trampoline allocation for static archiving.
1066 // Remove the use of CDSAdapterHandlerEntry.
1067 CDSAdapterHandlerEntry* cds_adapter = (CDSAdapterHandlerEntry*)adapter();
1068 constMethod()->set_adapter_trampoline(cds_adapter->get_adapter_trampoline());
1069 _from_compiled_entry = cds_adapter->get_c2i_entry_trampoline();
1070 assert(*((int*)_from_compiled_entry) == 0,
1071 "must be NULL during dump time, to be initialized at run time");
1072 }
1073
1074 if (is_native()) {
1075 *native_function_addr() = NULL;
1076 set_signature_handler(NULL);
1077 }
1078 NOT_PRODUCT(set_compiled_invocation_count(0);)
1079
1551 memcpy(newm->checked_exceptions_start(),
1552 m->checked_exceptions_start(),
1553 checked_exceptions_len * sizeof(CheckedExceptionElement));
1554 }
1555 // Copy exception table
1556 if (exception_table_len > 0) {
1557 memcpy(newm->exception_table_start(),
1558 m->exception_table_start(),
1559 exception_table_len * sizeof(ExceptionTableElement));
1560 }
1561 // Copy local variable number table
1562 if (localvariable_len > 0) {
1563 memcpy(newm->localvariable_table_start(),
1564 m->localvariable_table_start(),
1565 localvariable_len * sizeof(LocalVariableTableElement));
1566 }
1567 // Copy stackmap table
1568 if (m->has_stackmap_table()) {
1569 int code_attribute_length = m->stackmap_data()->length();
1570 Array<u1>* stackmap_data =
1571 MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_(methodHandle()));
1572 memcpy((void*)stackmap_data->adr_at(0),
1573 (void*)m->stackmap_data()->adr_at(0), code_attribute_length);
1574 newm->set_stackmap_data(stackmap_data);
1575 }
1576
1577 // copy annotations over to new method
1578 newcm->copy_annotations_from(loader_data, cm, CHECK_(methodHandle()));
1579 return newm;
1580 }
1581
1582 vmSymbols::SID Method::klass_id_for_intrinsics(const Klass* holder) {
1583 // if loader is not the default loader (i.e., != NULL), we can't know the intrinsics
1584 // because we are not loading from core libraries
1585 // exception: the AES intrinsics come from lib/ext/sunjce_provider.jar
1586 // which does not use the class default class loader so we check for its loader here
1587 const InstanceKlass* ik = InstanceKlass::cast(holder);
1588 if ((ik->class_loader() != NULL) && !SystemDictionary::is_platform_class_loader(ik->class_loader())) {
1589 return vmSymbols::NO_SID; // regardless of name, no intrinsics here
1590 }
1591
1592 // see if the klass name is well-known:
1593 Symbol* klass_name = ik->name();
1594 return vmSymbols::find_sid(klass_name);
1595 }
1596
1597 void Method::init_intrinsic_id() {
1598 assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");
2197 ClassLoaderData* cld = loader_data;
2198
2199 if (!SafepointSynchronize::is_at_safepoint()) {
2200 // Have to add jmethod_ids() to class loader data thread-safely.
2201 // Also have to add the method to the list safely, which the cld lock
2202 // protects as well.
2203 MutexLocker ml(cld->metaspace_lock(), Mutex::_no_safepoint_check_flag);
2204 if (cld->jmethod_ids() == NULL) {
2205 cld->set_jmethod_ids(new JNIMethodBlock());
2206 }
2207 // jmethodID is a pointer to Method*
2208 return (jmethodID)cld->jmethod_ids()->add_method(m);
2209 } else {
2210 // At safepoint, we are single threaded and can set this.
2211 if (cld->jmethod_ids() == NULL) {
2212 cld->set_jmethod_ids(new JNIMethodBlock());
2213 }
2214 // jmethodID is a pointer to Method*
2215 return (jmethodID)cld->jmethod_ids()->add_method(m);
2216 }
2217 }
2218
2219 jmethodID Method::jmethod_id() {
2220 methodHandle mh(Thread::current(), this);
2221 return method_holder()->get_jmethod_id(mh);
2222 }
2223
2224 // Mark a jmethodID as free. This is called when there is a data race in
2225 // InstanceKlass while creating the jmethodID cache.
2226 void Method::destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID m) {
2227 ClassLoaderData* cld = loader_data;
2228 Method** ptr = (Method**)m;
2229 assert(cld->jmethod_ids() != NULL, "should have method handles");
2230 cld->jmethod_ids()->destroy_method(ptr);
2231 }
2232
2233 void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
2234 // Can't assert the method_holder is the same because the new method has the
2235 // scratch method holder.
2236 assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
2237 == new_method->method_holder()->class_loader() ||
2238 new_method->method_holder()->class_loader() == NULL, // allow Unsafe substitution
2239 "changing to a different class loader");
2240 // Just change the method in place, jmethodID pointer doesn't change.
2241 *((Method**)jmid) = new_method;
|