< prev index next >

src/share/vm/oops/instanceKlass.cpp

Print this page




 357 
 358   if (inner_classes() != NULL &&
 359       inner_classes() != Universe::the_empty_short_array() &&
 360       !inner_classes()->is_shared()) {
 361     MetadataFactory::free_array<jushort>(loader_data, inner_classes());
 362   }
 363   set_inner_classes(NULL);
 364 
 365   // We should deallocate the Annotations instance if it's not in shared spaces.
 366   if (annotations() != NULL && !annotations()->is_shared()) {
 367     MetadataFactory::free_metadata(loader_data, annotations());
 368   }
 369   set_annotations(NULL);
 370 }
 371 
 372 bool InstanceKlass::should_be_initialized() const {
 373   return !is_initialized();
 374 }
 375 
     // Build a fresh itable view for this klass.
     // NOTE(review): allocates a new klassItable on every call (presumably
     // resource-area allocated -- callers should hold a ResourceMark; confirm).
 376 klassItable* InstanceKlass::itable() const {
 377   return new klassItable(instanceKlassHandle(this));
 378 }
 379 
     // Attempt to mark this class initialized without running any Java code.
     // Only active under -XX:+EagerInitialization; bails out whenever real
     // initialization work (a <clinit>, or an uninitialized superclass)
     // would be required.
 380 void InstanceKlass::eager_initialize(Thread *thread) {
 381   if (!EagerInitialization) return;
 382 
 383   if (this->is_not_initialized()) {
 384     // abort if the class has a class initializer
 385     if (this->class_initializer() != NULL) return;
 386 
 387     // abort if it is java.lang.Object (initialization is handled in genesis)
 388     Klass* super = this->super();
 389     if (super == NULL) return;
 390 
 391     // abort if the super class should be initialized
 392     if (!InstanceKlass::cast(super)->is_initialized()) return;
 393 
 394     // call body to expose the this pointer
 395     instanceKlassHandle this_k(thread, this);
 396     eager_initialize_impl(this_k);
 397   }
 398 }
 399 
 400 // JVMTI spec thinks there are signers and protection domain in the
 401 // instanceKlass.  These accessors pretend these fields are there.
 402 // The hprof specification also thinks these fields are in InstanceKlass.
 403 oop InstanceKlass::protection_domain() const {
 404   // return the protection_domain from the mirror
 405   // (the value lives in the java.lang.Class mirror, not in the klass itself)
 406   return java_lang_Class::protection_domain(java_mirror());
 407 }
 407 
 408 // Removing these accessors requires an incompatible change and a CCC request.
 409 objArrayOop InstanceKlass::signers() const {
 410   // return the signers from the mirror (stored on the java.lang.Class mirror)
 411   return java_lang_Class::signers(java_mirror());
 412 }
 413 
     // Return the monitor object guarding this class's initialization state,
     // or NULL once a terminal state was reached and the lock was cleared by
     // fence_and_clear_init_lock() (see the assert below).
 414 oop InstanceKlass::init_lock() const {
 415   // return the init lock from the mirror
 416   oop lock = java_lang_Class::init_lock(java_mirror());
 417   // Prevent reordering with any access of initialization state
 418   // (pairs with the storestore barrier in fence_and_clear_init_lock)
 419   OrderAccess::loadload();
 420   assert((oop)lock != NULL || !is_not_initialized(), // initialized or in_error state
 421          "only fully initialized state can have a null lock");
 422   return lock;
 423 }
 423 
 424 // Set the initialization lock to null so the object can be GC'ed.  Any racing
 425 // threads to get this lock will see a null lock and will not lock.
 426 // That's okay because they all check for initialized state after getting
 427 // the lock and return.
 428 void InstanceKlass::fence_and_clear_init_lock() {
 429   // make sure previous stores are all done, notably the init_state.
 430   OrderAccess::storestore();
     // Pairs with the loadload barrier in init_lock().
 431   java_lang_Class::set_init_lock(java_mirror(), NULL);
 432   assert(!is_not_initialized(), "class must be initialized now");
 433 }
 434 
     // Body of eager_initialize(); takes a handle so the klass stays valid
     // across safepoints. Links the class and, on success, marks it
     // fully_initialized without running a <clinit> (the caller verified
     // there is none).
 435 void InstanceKlass::eager_initialize_impl(instanceKlassHandle this_k) {
 436   EXCEPTION_MARK;
 437   HandleMark hm(THREAD);
 438   Handle init_lock(THREAD, this_k->init_lock());
      // A NULL init_lock means a terminal state was already reached; the
      // ObjectLocker is then constructed as a no-op.
 439   ObjectLocker ol(init_lock, THREAD, init_lock() != NULL);
 440 
 441   // abort if someone beat us to the initialization
 442   if (!this_k->is_not_initialized()) return;  // note: not equivalent to is_initialized()
 443 
 444   ClassState old_state = this_k->init_state();
 445   link_class_impl(this_k, true, THREAD);
 446   if (HAS_PENDING_EXCEPTION) {
 447     CLEAR_PENDING_EXCEPTION;
 448     // Abort if linking the class throws an exception.
 449 
 450     // Use a test to avoid redundantly resetting the state if there's
 451     // no change.  Set_init_state() asserts that state changes make
 452     // progress, whereas here we might just be spinning in place.
 453     if( old_state != this_k->_init_state )
 454       this_k->set_init_state (old_state);
 455   } else {
 456     // linking successful, mark class as initialized
 457     this_k->set_init_state (fully_initialized);
 458     this_k->fence_and_clear_init_lock();
 459     // trace
 460     if (log_is_enabled(Info, class, init)) {
 461       ResourceMark rm(THREAD);
 462       log_info(class, init)("[Initialized %s without side effects]", this_k->external_name());
 463     }
 464   }
 465 }
 466 
 467 
 468 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
 469 // process. The step comments refers to the procedure described in that section.
 470 // Note: implementation moved to static method to expose the this pointer.
 471 void InstanceKlass::initialize(TRAPS) {
 472   if (this->should_be_initialized()) {
 473     instanceKlassHandle this_k(THREAD, this);
     // CHECK propagates any exception raised during initialization to the caller.
 474     initialize_impl(this_k, CHECK);
 475     // Note: at this point the class may be initialized
 476     //       OR it may be in the state of being initialized
 477     //       in case of recursive initialization!
 478   } else {
 479     assert(is_initialized(), "sanity check");
 480   }
 481 }
 482 
 483 
     // Run the bytecode verifier over this class.  'throw_verifyerror' selects
     // whether a verification failure raises an exception or merely makes this
     // function return false.
 484 bool InstanceKlass::verify_code(
 485     instanceKlassHandle this_k, bool throw_verifyerror, TRAPS) {
 486   // 1) Verify the bytecodes
 487   Verifier::Mode mode =
 488     throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
 489   return Verifier::verify(this_k, mode, this_k->should_verify_class(), THREAD);
 490 }
 491 
 492 
 493 // Used exclusively by the shared spaces dump mechanism to prevent
 494 // classes mapped into the shared regions in new VMs from appearing linked.
 495 
 496 void InstanceKlass::unlink_class() {
 497   assert(is_linked(), "must be linked");
     // Reset the state back to 'loaded' so the class will be re-linked on use.
 498   _init_state = loaded;
 499 }
 500 
     // Link (verify and rewrite) this class if it is not linked yet; any
     // failure is raised to the caller via CHECK.
 501 void InstanceKlass::link_class(TRAPS) {
 502   assert(is_loaded(), "must be loaded");
 503   if (!is_linked()) {
 504     instanceKlassHandle this_k(THREAD, this);
 505     link_class_impl(this_k, true, CHECK);
 506   }
 507 }
 508 
 509 // Called to verify that a class can link during initialization, without
 510 // throwing a VerifyError.
     // Returns whether the class ended up linked.  Verification failures do not
     // raise VerifyError here (throw_verifyerror == false); other exceptions
     // from linking still propagate via CHECK_false.
 511 bool InstanceKlass::link_class_or_fail(TRAPS) {
 512   assert(is_loaded(), "must be loaded");
 513   if (!is_linked()) {
 514     instanceKlassHandle this_k(THREAD, this);
 515     link_class_impl(this_k, false, CHECK_false);
 516   }
 517   return is_linked();
 518 }
 519 
 520 bool InstanceKlass::link_class_impl(
 521     instanceKlassHandle this_k, bool throw_verifyerror, TRAPS) {
 522   if (DumpSharedSpaces && this_k->is_in_error_state()) {
 523     // This is for CDS dumping phase only -- we use the in_error_state to indicate that
 524     // the class has failed verification. Throwing the NoClassDefFoundError here is just
 525     // a convenient way to stop repeat attempts to verify the same (bad) class.
 526     //
 527     // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
 528     // if we are executing Java code. This is not a problem for CDS dumping phase since
 529     // it doesn't execute any Java code.
 530     ResourceMark rm(THREAD);
 531     THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
 532                this_k->external_name(), false);
 533   }
 534   // return if already verified
 535   if (this_k->is_linked()) {
 536     return true;
 537   }
 538 
 539   // Timing
 540   // timer handles recursion
 541   assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
 542   JavaThread* jt = (JavaThread*)THREAD;
 543 
 544   // link super class before linking this class
 545   instanceKlassHandle super(THREAD, this_k->super());
 546   if (super.not_null()) {
 547     if (super->is_interface()) {  // check if super class is an interface
 548       ResourceMark rm(THREAD);
 549       Exceptions::fthrow(
 550         THREAD_AND_LOCATION,
 551         vmSymbols::java_lang_IncompatibleClassChangeError(),
 552         "class %s has interface %s as super class",
 553         this_k->external_name(),
 554         super->external_name()
 555       );
 556       return false;
 557     }
 558 
 559     link_class_impl(super, throw_verifyerror, CHECK_false);

 560   }
 561 
 562   // link all interfaces implemented by this class before linking this class
 563   Array<Klass*>* interfaces = this_k->local_interfaces();
 564   int num_interfaces = interfaces->length();
 565   for (int index = 0; index < num_interfaces; index++) {
 566     instanceKlassHandle ih(THREAD, interfaces->at(index));
 567     link_class_impl(ih, throw_verifyerror, CHECK_false);
 568   }
 569 
 570   // in case the class is linked in the process of linking its superclasses
 571   if (this_k->is_linked()) {
 572     return true;
 573   }
 574 
 575   // trace only the link time for this klass that includes
 576   // the verification time
 577   PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
 578                              ClassLoader::perf_class_link_selftime(),
 579                              ClassLoader::perf_classes_linked(),
 580                              jt->get_thread_stat()->perf_recursion_counts_addr(),
 581                              jt->get_thread_stat()->perf_timers_addr(),
 582                              PerfClassTraceTime::CLASS_LINK);
 583 
 584   // verification & rewriting
 585   {
 586     HandleMark hm(THREAD);
 587     Handle init_lock(THREAD, this_k->init_lock());


 625       // a shared class if the class is not loaded by the NULL classloader.
 626       ClassLoaderData * loader_data = this_k->class_loader_data();
 627       if (!(this_k->is_shared() &&
 628             loader_data->is_the_null_class_loader_data())) {
 629         ResourceMark rm(THREAD);
 630         this_k->vtable()->initialize_vtable(true, CHECK_false);
 631         this_k->itable()->initialize_itable(true, CHECK_false);
 632       }
 633 #ifdef ASSERT
 634       else {
 635         ResourceMark rm(THREAD);
 636         this_k->vtable()->verify(tty, true);
 637         // In case itable verification is ever added.
 638         // this_k->itable()->verify(tty, true);
 639       }
 640 #endif
 641       this_k->set_init_state(linked);
 642       if (JvmtiExport::should_post_class_prepare()) {
 643         Thread *thread = THREAD;
 644         assert(thread->is_Java_thread(), "thread->is_Java_thread()");
 645         JvmtiExport::post_class_prepare((JavaThread *) thread, this_k());
 646       }
 647     }
 648   }
 649   return true;
 650 }
 651 
 652 
 653 // Rewrite the byte codes of all of the methods of a class.
 654 // The rewriter must be called exactly once. Rewriting must happen after
 655 // verification but before the first method of the class is executed.
 656 void InstanceKlass::rewrite_class(TRAPS) {
 657   assert(is_loaded(), "must be loaded");
 658   instanceKlassHandle this_k(THREAD, this);
 659   if (this_k->is_rewritten()) {
     // Only classes loaded from the shared archive arrive already rewritten.
 660     assert(this_k()->is_shared(), "rewriting an unshared class?");
 661     return;
 662   }
 663   Rewriter::rewrite(this_k, CHECK);
 664   this_k->set_rewritten();
 665 }
 666 
 667 // Now relocate and link method entry points after class is rewritten.
 668 // This is outside is_rewritten flag. In case of an exception, it can be
 669 // executed more than once.
 670 void InstanceKlass::link_methods(TRAPS) {
 671   int len = methods()->length();
 672   for (int i = len-1; i >= 0; i--) {
 673     methodHandle m(THREAD, methods()->at(i));
 674 
 675     // Set up method entry points for compiler and interpreter.
 676     m->link_method(m, CHECK);
 677   }
 678 }
 679 
 680 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
     // Walks the local interfaces depth-first; only interfaces that themselves
     // declare non-static concrete methods are initialize()d (JLS does not
     // require initializing interfaces that merely inherit defaults).
 681 void InstanceKlass::initialize_super_interfaces(instanceKlassHandle this_k, TRAPS) {
 682   assert (this_k->has_nonstatic_concrete_methods(), "caller should have checked this");
 683   for (int i = 0; i < this_k->local_interfaces()->length(); ++i) {
 684     Klass* iface = this_k->local_interfaces()->at(i);
 685     InstanceKlass* ik = InstanceKlass::cast(iface);
 686 
 687     // Initialization is depth first search ie. we start with top of the inheritance tree
 688     // has_nonstatic_concrete_methods drives searching superinterfaces since it
 689     // means has_nonstatic_concrete_methods in its superinterface hierarchy
 690     if (ik->has_nonstatic_concrete_methods()) {
 691       ik->initialize_super_interfaces(ik, CHECK);
 692     }
 693 
 694     // Only initialize() interfaces that "declare" concrete methods.
 695     if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
 696       ik->initialize(CHECK);
 697     }
 698   }
 699 }
 700 
 701 void InstanceKlass::initialize_impl(instanceKlassHandle this_k, TRAPS) {
 702   HandleMark hm(THREAD);
 703 
 704   // Make sure klass is linked (verified) before initialization
 705   // A class could already be verified, since it has been reflected upon.
 706   this_k->link_class(CHECK);
 707 
 708   DTRACE_CLASSINIT_PROBE(required, this_k(), -1);
 709 
 710   bool wait = false;
 711 
 712   // refer to the JVM book page 47 for description of steps
 713   // Step 1
 714   {
 715     Handle init_lock(THREAD, this_k->init_lock());
 716     ObjectLocker ol(init_lock, THREAD, init_lock() != NULL);
 717 
 718     Thread *self = THREAD; // it's passed the current thread
 719 
 720     // Step 2
 721     // If we were to use wait() instead of waitInterruptibly() then
 722     // we might end up throwing IE from link/symbol resolution sites
 723     // that aren't expected to throw.  This would wreak havoc.  See 6320309.
 724     while(this_k->is_being_initialized() && !this_k->is_reentrant_initialization(self)) {
 725         wait = true;
 726       ol.waitUninterruptibly(CHECK);
 727     }
 728 
 729     // Step 3
 730     if (this_k->is_being_initialized() && this_k->is_reentrant_initialization(self)) {
 731       DTRACE_CLASSINIT_PROBE_WAIT(recursive, this_k(), -1,wait);
 732       return;
 733     }
 734 
 735     // Step 4
 736     if (this_k->is_initialized()) {
 737       DTRACE_CLASSINIT_PROBE_WAIT(concurrent, this_k(), -1,wait);
 738       return;
 739     }
 740 
 741     // Step 5
 742     if (this_k->is_in_error_state()) {
 743       DTRACE_CLASSINIT_PROBE_WAIT(erroneous, this_k(), -1,wait);
 744       ResourceMark rm(THREAD);
 745       const char* desc = "Could not initialize class ";
 746       const char* className = this_k->external_name();
 747       size_t msglen = strlen(desc) + strlen(className) + 1;
 748       char* message = NEW_RESOURCE_ARRAY(char, msglen);
 749       if (NULL == message) {
 750         // Out of memory: can't create detailed error message
 751         THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
 752       } else {
 753         jio_snprintf(message, msglen, "%s%s", desc, className);
 754         THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
 755       }
 756     }
 757 
 758     // Step 6
 759     this_k->set_init_state(being_initialized);
 760     this_k->set_init_thread(self);
 761   }
 762 
 763   // Step 7


 769       super_klass->initialize(THREAD);
 770     }
 771     // If C implements any interface that declares a non-static, concrete method,
 772     // the initialization of C triggers initialization of its super interfaces.
 773     // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
 774     // having a superinterface that declares, non-static, concrete methods
 775     if (!HAS_PENDING_EXCEPTION && this_k->has_nonstatic_concrete_methods()) {
 776       this_k->initialize_super_interfaces(this_k, THREAD);
 777     }
 778 
 779     // If any exceptions, complete abruptly, throwing the same exception as above.
 780     if (HAS_PENDING_EXCEPTION) {
 781       Handle e(THREAD, PENDING_EXCEPTION);
 782       CLEAR_PENDING_EXCEPTION;
 783       {
 784         EXCEPTION_MARK;
 785         // Locks object, set state, and notify all waiting threads
 786         this_k->set_initialization_state_and_notify(initialization_error, THREAD);
 787         CLEAR_PENDING_EXCEPTION;
 788       }
 789       DTRACE_CLASSINIT_PROBE_WAIT(super__failed, this_k(), -1,wait);
 790       THROW_OOP(e());
 791     }
 792   }
 793 
 794 
 795   // Look for aot compiled methods for this klass, including class initializer.
 796   AOTLoader::load_for_klass(this_k, THREAD);
 797 
 798   // Step 8
 799   {
 800     assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
 801     JavaThread* jt = (JavaThread*)THREAD;
 802     DTRACE_CLASSINIT_PROBE_WAIT(clinit, this_k(), -1,wait);
 803     // Timer includes any side effects of class initialization (resolution,
 804     // etc), but not recursive entry into call_class_initializer().
 805     PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
 806                              ClassLoader::perf_class_init_selftime(),
 807                              ClassLoader::perf_classes_inited(),
 808                              jt->get_thread_stat()->perf_recursion_counts_addr(),
 809                              jt->get_thread_stat()->perf_timers_addr(),
 810                              PerfClassTraceTime::CLASS_CLINIT);
 811     this_k->call_class_initializer(THREAD);
 812   }
 813 
 814   // Step 9
 815   if (!HAS_PENDING_EXCEPTION) {
 816     this_k->set_initialization_state_and_notify(fully_initialized, CHECK);
 817     { ResourceMark rm(THREAD);
 818       debug_only(this_k->vtable()->verify(tty, true);)
 819     }
 820   }
 821   else {
 822     // Step 10 and 11
 823     Handle e(THREAD, PENDING_EXCEPTION);
 824     CLEAR_PENDING_EXCEPTION;
 825     // JVMTI has already reported the pending exception
 826     // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
 827     JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
 828     {
 829       EXCEPTION_MARK;
 830       this_k->set_initialization_state_and_notify(initialization_error, THREAD);
 831       CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
 832       // JVMTI has already reported the pending exception
 833       // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
 834       JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
 835     }
 836     DTRACE_CLASSINIT_PROBE_WAIT(error, this_k(), -1,wait);
 837     if (e->is_a(SystemDictionary::Error_klass())) {
 838       THROW_OOP(e());
 839     } else {
 840       JavaCallArguments args(e);
 841       THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
 842                 vmSymbols::throwable_void_signature(),
 843                 &args);
 844     }
 845   }
 846   DTRACE_CLASSINIT_PROBE_WAIT(end, this_k(), -1,wait);
 847 }
 848 
 849 
 850 // Note: implementation moved to static method to expose the this pointer.
     // Transition the class to 'state' and wake up any threads blocked on the
     // init lock (see the _impl below).
 851 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
 852   instanceKlassHandle kh(THREAD, this);
 853   set_initialization_state_and_notify_impl(kh, state, CHECK);
 854 }
 855 
     // Set the terminal initialization state under the init lock, clear the
     // lock, and notify all waiters.
 856 void InstanceKlass::set_initialization_state_and_notify_impl(instanceKlassHandle this_k, ClassState state, TRAPS) {
 857   Handle init_lock(THREAD, this_k->init_lock());
 858   if (init_lock() != NULL) {
 859     ObjectLocker ol(init_lock, THREAD);
 860     this_k->set_init_state(state);
 861     this_k->fence_and_clear_init_lock();
 862     ol.notify_all(CHECK);
 863   } else {
     // The init lock is only NULL after a terminal state was already set, so
     // this branch should be unreachable: the assert condition is false here
     // by construction and fires deliberately in debug builds.
 864     assert(init_lock() != NULL, "The initialization state should never be set twice");
 865     this_k->set_init_state(state);
 866   }
 867 }
 868 
 869 // The embedded _implementor field can only record one implementor.
 870 // When there are more than one implementors, the _implementor field
 871 // is set to the interface Klass* itself. Following are the possible
 872 // values for the _implementor field:
 873 //   NULL                  - no implementor
 874 //   implementor Klass*    - one implementor
 875 //   self                  - more than one implementor
 876 //


 978   // Verify direct super interface
 979   if (this == k) return true;
 980   assert(k->is_interface(), "should be an interface class");
 981   for (int i = 0; i < local_interfaces()->length(); i++) {
 982     if (local_interfaces()->at(i) == k) {
 983       return true;
 984     }
 985   }
 986   return false;
 987 }
 988 
     // Allocate an n-dimensional object array of this class with the given
     // length.  Throws NegativeArraySizeException for length < 0 and
     // OutOfMemoryError when the requested size exceeds the VM array limit.
 989 objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
 990   if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
 991   if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
 992     report_java_out_of_memory("Requested array size exceeds VM limit");
 993     JvmtiExport::post_array_size_exhausted();
 994     THROW_OOP_0(Universe::out_of_memory_error_array_size());
 995   }
 996   int size = objArrayOopDesc::object_size(length);
      // array_klass() may allocate and hence safepoint; CHECK_NULL propagates.
 997   Klass* ak = array_klass(n, CHECK_NULL);
 998   KlassHandle h_ak (THREAD, ak);
 999   objArrayOop o =
1000     (objArrayOop)CollectedHeap::array_allocate(h_ak, size, length, CHECK_NULL);
1001   return o;
1002 }
1003 
      // Register a freshly allocated instance with the finalizer machinery by
      // calling Universe::finalizer_register_method() (Java code).  Returns the
      // instance via the handle since the Java call may safepoint/move it.
 1004 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
 1005   if (TraceFinalizerRegistration) {
 1006     tty->print("Registered ");
 1007     i->print_value_on(tty);
 1008     tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i));
 1009   }
 1010   instanceHandle h_i(THREAD, i);
 1011   // Pass the handle as argument, JavaCalls::call expects oop as jobjects
 1012   JavaValue result(T_VOID);
 1013   JavaCallArguments args(h_i);
 1014   methodHandle mh (THREAD, Universe::finalizer_register_method());
 1015   JavaCalls::call(&result, mh, &args, CHECK_NULL);
 1016   return h_i();
 1017 }
1018 
      // Allocate a new instance of this class on the Java heap, registering it
      // for finalization when required and not deferred to <init> time.
 1019 instanceOop InstanceKlass::allocate_instance(TRAPS) {
 1020   bool has_finalizer_flag = has_finalizer(); // Query before possible GC
 1021   int size = size_helper();  // Query before forming handle.
 1022 
 1023   KlassHandle h_k(THREAD, this);
 1024 
 1025   instanceOop i;
 1026 
 1027   i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);
 1028   if (has_finalizer_flag && !RegisterFinalizersAtInit) {
 1029     i = register_finalizer(i, CHECK_NULL);
 1030   }
 1031   return i;
 1032 }
1033 
      // Throw if this class may not be instantiated: interfaces and abstract
      // classes, and java.lang.Class itself.  'throwError' chooses the Error
      // flavor over the checked-exception flavor.
 1034 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
 1035   if (is_interface() || is_abstract()) {
 1036     ResourceMark rm(THREAD);
 1037     THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
 1038               : vmSymbols::java_lang_InstantiationException(), external_name());
 1039   }
 1040   if (this == SystemDictionary::Class_klass()) {
 1041     ResourceMark rm(THREAD);
 1042     THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
 1043               : vmSymbols::java_lang_IllegalAccessException(), external_name());
 1044   }
 1045 }
1046 
      // Wrapper exposing 'this' as a handle before delegating to the static impl.
 1047 Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
 1048   instanceKlassHandle this_k(THREAD, this);
 1049   return array_klass_impl(this_k, or_null, n, THREAD);
 1050 }
1051 
      // Return (or lazily create) the klass for n-dimensional arrays of this
      // class.  With or_null, never allocates: returns NULL when the array
      // klass does not exist yet.  Creation is serialized under Compile_lock
      // and MultiArray_lock with a re-check (double-checked locking; the
      // release store pairs with array_klasses_acquire()).
 1052 Klass* InstanceKlass::array_klass_impl(instanceKlassHandle this_k, bool or_null, int n, TRAPS) {
 1053   // Need load-acquire for lock-free read
 1054   if (this_k->array_klasses_acquire() == NULL) {
 1055     if (or_null) return NULL;
 1056 
 1057     ResourceMark rm;
 1058     JavaThread *jt = (JavaThread *)THREAD;
 1059     {
 1060       // Atomic creation of array_klasses
 1061       MutexLocker mc(Compile_lock, THREAD);   // for vtables
 1062       MutexLocker ma(MultiArray_lock, THREAD);
 1063 
 1064       // Check if update has already taken place
 1065       if (this_k->array_klasses() == NULL) {
 1066         Klass*    k = ObjArrayKlass::allocate_objArray_klass(this_k->class_loader_data(), 1, this_k, CHECK_NULL);
 1067         // use 'release' to pair with lock-free load
 1068         this_k->release_set_array_klasses(k);
 1069       }
 1070     }
 1071   }
 1072   // array_klasses() will always be set at this point
 1073   ObjArrayKlass* oak = (ObjArrayKlass*)this_k->array_klasses();
 1074   if (or_null) {
 1075     return oak->array_klass_or_null(n);
 1076   }
 1077   return oak->array_klass(n, THREAD);
 1078 }
1079 
1080 Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) {
1081   return array_klass_impl(or_null, 1, THREAD);
1082 }
1083 
      // Run this class's <clinit>, if any; wrapper exposing 'this' as a handle.
 1084 void InstanceKlass::call_class_initializer(TRAPS) {
 1085   instanceKlassHandle ik (THREAD, this);
 1086   call_class_initializer_impl(ik, THREAD);
 1087 }
1088 
1089 static int call_class_initializer_impl_counter = 0;   // for debugging
1090 
1091 Method* InstanceKlass::class_initializer() {
1092   Method* clinit = find_method(
1093       vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1094   if (clinit != NULL && clinit->has_valid_initializer_flags()) {
1095     return clinit;
1096   }
1097   return NULL;
1098 }
1099 
      // Invoke <clinit> via a static Java call; exceptions propagate to the
      // caller via CHECK.  Under ReplayCompiles, initializers may be suppressed
      // so a recorded compilation can be replayed deterministically.
 1100 void InstanceKlass::call_class_initializer_impl(instanceKlassHandle this_k, TRAPS) {
      // note: '&&' binds tighter than '||' -- level >= 2 suppresses only
      // classes loaded by a non-NULL (non-boot) loader.
 1101   if (ReplayCompiles &&
 1102       (ReplaySuppressInitializers == 1 ||
 1103        ReplaySuppressInitializers >= 2 && this_k->class_loader() != NULL)) {
 1104     // Hide the existence of the initializer for the purpose of replaying the compile
 1105     return;
 1106   }
 1107 
 1108   methodHandle h_method(THREAD, this_k->class_initializer());
 1109   assert(!this_k->is_initialized(), "we cannot initialize twice");
 1110   if (log_is_enabled(Info, class, init)) {
 1111     ResourceMark rm;
 1112     outputStream* log = Log(class, init)::info_stream();
 1113     log->print("%d Initializing ", call_class_initializer_impl_counter++);
 1114     this_k->name()->print_value_on(log);
 1115     log->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this_k()));
 1116   }
 1117   if (h_method() != NULL) {
 1118     JavaCallArguments args; // No arguments
 1119     JavaValue result(T_VOID);
 1120     JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
 1121   }
 1122 }
1123 
1124 
1125 void InstanceKlass::mask_for(const methodHandle& method, int bci,
1126   InterpreterOopMap* entry_for) {
1127   // Lazily create the _oop_map_cache at first request
1128   // Lock-free access requires load_ptr_acquire.
1129   OopMapCache* oop_map_cache =
1130       static_cast<OopMapCache*>(OrderAccess::load_ptr_acquire(&_oop_map_cache));
1131   if (oop_map_cache == NULL) {
1132     MutexLocker x(OopMapCacheAlloc_lock);
1133     // Check if _oop_map_cache was allocated while we were waiting for this lock
1134     if ((oop_map_cache = _oop_map_cache) == NULL) {
1135       oop_map_cache = new OopMapCache();


1246   int len = methods()->length();
1247   for (int index = 0; index < len; index++) {
1248     Method* m = methods()->at(index);
1249     assert(m->is_method(), "must be method");
1250     f(m);
1251   }
1252 }
1253 
1254 
1255 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1256   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1257     if (fs.access_flags().is_static()) {
1258       fieldDescriptor& fd = fs.field_descriptor();
1259       cl->do_field(&fd);
1260     }
1261   }
1262 }
1263 
1264 
      // TRAPS variant: apply 'f' (with the class mirror) to each locally
      // declared static field; wrapper exposing 'this' as a handle.
 1265 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
 1266   instanceKlassHandle h_this(THREAD, this);
 1267   do_local_static_fields_impl(h_this, f, mirror, CHECK);
 1268 }
1269 
1270 
      // Iterate the class's own field stream and invoke 'f' for each static
      // field; exceptions from 'f' propagate via CHECK.
 1271 void InstanceKlass::do_local_static_fields_impl(instanceKlassHandle this_k,
 1272                              void f(fieldDescriptor* fd, Handle, TRAPS), Handle mirror, TRAPS) {
 1273   for (JavaFieldStream fs(this_k()); !fs.done(); fs.next()) {
 1274     if (fs.access_flags().is_static()) {
 1275       fieldDescriptor& fd = fs.field_descriptor();
 1276       f(&fd, mirror, CHECK);
 1277     }
 1278   }
 1279 }
1280 
1281 
// qsort-style comparator: orders (offset, ...) int records by ascending
// field offset stored in element 0.
static int compare_fields_by_offset(int* a, int* b) {
  return *a - *b;
}
1285 
1286 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
1287   InstanceKlass* super = superklass();
1288   if (super != NULL) {
1289     super->do_nonstatic_fields(cl);
1290   }
1291   fieldDescriptor fd;
1292   int length = java_fields_count();
1293   // In DebugInfo nonstatic fields are sorted by offset.


 1612 // Do NOT return private or static methods, new in JDK8 which are not externally visible
 1613 // They should only be found in the initial InterfaceMethodRef
      // Linear scan of the transitive interface set; with skip_defaults, a
      // matching default method is rejected and the search continues.
 1614 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
 1615                                                        Symbol* signature,
 1616                                                        DefaultsLookupMode defaults_mode) const {
 1617   Array<Klass*>* all_ifs = transitive_interfaces();
 1618   int num_ifs = all_ifs->length();
 1619   InstanceKlass *ik = NULL;
 1620   for (int i = 0; i < num_ifs; i++) {
 1621     ik = InstanceKlass::cast(all_ifs->at(i));
 1622     Method* m = ik->lookup_method(name, signature);
 1623     if (m != NULL && m->is_public() && !m->is_static() &&
 1624         ((defaults_mode != skip_defaults) || !m->is_default_method())) {
 1625       return m;
 1626     }
 1627   }
 1628   return NULL;
 1629 }
1630 
 1631 /* jni_id_for_impl for jfieldIds only */
      // Slow path of jni_id_for(): create (or find, if another thread raced us)
      // the JNIid for 'offset' under JfieldIdCreation_lock.
 1632 JNIid* InstanceKlass::jni_id_for_impl(instanceKlassHandle this_k, int offset) {
 1633   MutexLocker ml(JfieldIdCreation_lock);
 1634   // Retry lookup after we got the lock
 1635   JNIid* probe = this_k->jni_ids() == NULL ? NULL : this_k->jni_ids()->find(offset);
 1636   if (probe == NULL) {
 1637     // Slow case, allocate new static field identifier
      // New id is pushed onto the head of the list.
 1638     probe = new JNIid(this_k(), offset, this_k->jni_ids());
 1639     this_k->set_jni_ids(probe);
 1640   }
 1641   return probe;
 1642 }
1643 
1644 
1645 /* jni_id_for for jfieldIds only */
1646 JNIid* InstanceKlass::jni_id_for(int offset) {
1647   JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
1648   if (probe == NULL) {
1649     probe = jni_id_for_impl(this, offset);
1650   }
1651   return probe;
1652 }
1653 
1654 u2 InstanceKlass::enclosing_method_data(int offset) const {
1655   const Array<jushort>* const inner_class_list = inner_classes();
1656   if (inner_class_list == NULL) {
1657     return 0;
1658   }


1667 
      // Store the EnclosingMethod attribute's class/method constant-pool
      // indices in the trailing slots of the inner_classes array.  The length
      // check detects whether such a trailing entry was reserved at parse
      // time; if not, the call is a no-op.
 1668 void InstanceKlass::set_enclosing_method_indices(u2 class_index,
 1669                                                  u2 method_index) {
 1670   Array<jushort>* inner_class_list = inner_classes();
 1671   assert (inner_class_list != NULL, "_inner_classes list is not set up");
 1672   int length = inner_class_list->length();
 1673   if (length % inner_class_next_offset == enclosing_method_attribute_size) {
 1674     int index = length - enclosing_method_attribute_size;
 1675     inner_class_list->at_put(
 1676       index + enclosing_method_class_index_offset, class_index);
 1677     inner_class_list->at_put(
 1678       index + enclosing_method_method_index_offset, method_index);
 1679   }
 1680 }
1681 
// Lookup or create a jmethodID.
// This code is called by the VMThread and JavaThreads so the
// locking has to be done very carefully to avoid deadlocks
// and/or other cache consistency problems.
//
jmethodID InstanceKlass::get_jmethod_id(instanceKlassHandle ik_h, const methodHandle& method_h) {
  // idnum is the method's stable index; it is also the cache slot, offset
  // by one because element[0] of the cache stores the cache length.
  size_t idnum = (size_t)method_h->method_idnum();
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  size_t length = 0;
  jmethodID id = NULL;

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from NULL to non-NULL which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store_ptr() to save a new jmethodID
  // in the cache so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID requires uniqueness and
  // creation of the cache itself requires no leaks so a lock is
  // generally acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache can
  // grow and we'll have transitions from non-NULL to bigger non-NULL.
  // Cache creation requires no leaks and we require safety between all
  // cache accesses and freeing of the old cache so a lock is generally
  // acquired when the RedefineClasses() API has been used.

  if (jmeths != NULL) {
    // the cache already exists
    if (!ik_h->idnum_can_increment()) {
      // the cache can't grow so we can just get the current values
      get_jmethod_id_length_value(jmeths, idnum, &length, &id);
    } else {
      // cache can grow so we have to be more careful
      if (Threads::number_of_threads() == 0 ||
          SafepointSynchronize::is_at_safepoint()) {
        // we're single threaded or at a safepoint - no locking needed
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      } else {
        MutexLocker ml(JmethodIdCreation_lock);
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      }
    }
  }
  // implied else:
  // we need to allocate a cache so default length and id values are good

  if (jmeths == NULL ||   // no cache yet
      length <= idnum ||  // cache is too short
      id == NULL) {       // cache doesn't contain entry

    // This function can be called by the VMThread so we have to do all
    // things that might block on a safepoint before grabbing the lock.
    // Otherwise, we can deadlock with the VMThread or have a cache
    // consistency issue. These vars keep track of what we might have
    // to free after the lock is dropped.
    jmethodID  to_dealloc_id     = NULL;
    jmethodID* to_dealloc_jmeths = NULL;

    // may not allocate new_jmeths or use it if we allocate it
    jmethodID* new_jmeths = NULL;
    if (length <= idnum) {
      // allocate a new cache that might be used
      size_t size = MAX2(idnum+1, (size_t)ik_h->idnum_allocated_count());
      new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
      memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
      // cache size is stored in element[0], other elements offset by one
      new_jmeths[0] = (jmethodID)size;
    }

    // allocate a new jmethodID that might be used
    jmethodID new_id = NULL;
    if (method_h->is_old() && !method_h->is_obsolete()) {
      // The method passed in is old (but not obsolete), we need to use the current version
      Method* current_method = ik_h->method_with_idnum((int)idnum);
      assert(current_method != NULL, "old and but not obsolete, so should exist");
      new_id = Method::make_jmethod_id(ik_h->class_loader_data(), current_method);
    } else {
      // It is the current version of the method or an obsolete method,
      // use the version passed in
      new_id = Method::make_jmethod_id(ik_h->class_loader_data(), method_h());
    }

    // Publish (or discard) the speculative allocations above; the winner of
    // the race installs its values, losers get them back via the out-params.
    if (Threads::number_of_threads() == 0 ||
        SafepointSynchronize::is_at_safepoint()) {
      // we're single threaded or at a safepoint - no locking needed
      id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    } else {
      MutexLocker ml(JmethodIdCreation_lock);
      id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    }

    // The lock has been dropped so we can free resources.
    // Free up either the old cache or the new cache if we allocated one.
    if (to_dealloc_jmeths != NULL) {
      FreeHeap(to_dealloc_jmeths);
    }
    // free up the new ID since it wasn't needed
    if (to_dealloc_id != NULL) {
      Method::destroy_jmethod_id(ik_h->class_loader_data(), to_dealloc_id);
    }
  }
  return id;
}
1790 
1791 // Figure out how many jmethodIDs haven't been allocated, and make
1792 // sure space for them is pre-allocated.  This makes getting all
1793 // method ids much, much faster with classes with more than 8
1794 // methods, and has a *substantial* effect on performance with jvmti
1795 // code that loads all jmethodIDs for all classes.
1796 void InstanceKlass::ensure_space_for_methodids(int start_offset) {
1797   int new_jmeths = 0;
1798   int length = methods()->length();
1799   for (int index = start_offset; index < length; index++) {
1800     Method* m = methods()->at(index);
1801     jmethodID id = m->find_jmethod_id_or_null();
1802     if (id == NULL) {
1803       new_jmeths++;
1804     }
1805   }
1806   if (new_jmeths != 0) {
1807     Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
1808   }
1809 }
1810 
1811 // Common code to fetch the jmethodID from the cache or update the
1812 // cache with the new jmethodID. This function should never do anything
1813 // that causes the caller to go to a safepoint or we can deadlock with
1814 // the VMThread or have cache consistency issues.
1815 //
1816 jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
1817             instanceKlassHandle ik_h, size_t idnum, jmethodID new_id,
1818             jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
1819             jmethodID** to_dealloc_jmeths_p) {
1820   assert(new_id != NULL, "sanity check");
1821   assert(to_dealloc_id_p != NULL, "sanity check");
1822   assert(to_dealloc_jmeths_p != NULL, "sanity check");
1823   assert(Threads::number_of_threads() == 0 ||
1824          SafepointSynchronize::is_at_safepoint() ||
1825          JmethodIdCreation_lock->owned_by_self(), "sanity check");
1826 
1827   // reacquire the cache - we are locked, single threaded or at a safepoint
1828   jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
1829   jmethodID  id     = NULL;
1830   size_t     length = 0;
1831 
1832   if (jmeths == NULL ||                         // no cache yet
1833       (length = (size_t)jmeths[0]) <= idnum) {  // cache is too short
1834     if (jmeths != NULL) {
1835       // copy any existing entries from the old cache
1836       for (size_t index = 0; index < length; index++) {
1837         new_jmeths[index+1] = jmeths[index+1];
1838       }
1839       *to_dealloc_jmeths_p = jmeths;  // save old cache for later delete
1840     }
1841     ik_h->release_set_methods_jmethod_ids(jmeths = new_jmeths);
1842   } else {
1843     // fetch jmethodID (if any) from the existing cache
1844     id = jmeths[idnum+1];
1845     *to_dealloc_jmeths_p = new_jmeths;  // save new cache for later delete
1846   }
1847   if (id == NULL) {
1848     // No matching jmethodID in the existing cache or we have a new
1849     // cache or we just grew the cache. This cache write is done here
1850     // by the first thread to win the foot race because a jmethodID
1851     // needs to be unique once it is generally available.
1852     id = new_id;
1853 
1854     // The jmethodID cache can be read while unlocked so we have to
1855     // make sure the new jmethodID is complete before installing it
1856     // in the cache.
1857     OrderAccess::release_store_ptr(&jmeths[idnum+1], id);
1858   } else {
1859     *to_dealloc_id_p = new_id; // save new id for later delete
1860   }
1861   return id;


2041   constants()->remove_unshareable_info();
2042 
2043   assert(_dep_context == DependencyContext::EMPTY, "dependency context is not shareable");
2044 
2045   for (int i = 0; i < methods()->length(); i++) {
2046     Method* m = methods()->at(i);
2047     m->remove_unshareable_info();
2048   }
2049 
2050   // do array classes also.
2051   array_klasses_do(remove_unshareable_in_class);
2052 }
2053 
2054 static void restore_unshareable_in_class(Klass* k, TRAPS) {
2055   // Array classes have null protection domain.
2056   // --> see ArrayKlass::complete_create_array_klass()
2057   k->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
2058 }
2059 
// Rebuild the runtime state that remove_unshareable_info() stripped before
// this class was written into the shared (CDS) archive.
void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) {
  instanceKlassHandle ik(THREAD, this);
  // The package entry must be set up before the generic Klass restore runs.
  ik->set_package(loader_data, CHECK);
  Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);

  // Restore per-method state (e.g. entry points) for every method.
  Array<Method*>* methods = ik->methods();
  int num_methods = methods->length();
  for (int index2 = 0; index2 < num_methods; ++index2) {
    methodHandle m(THREAD, methods->at(index2));
    m->restore_unshareable_info(CHECK);
  }
  if (JvmtiExport::has_redefined_a_class()) {
    // Reinitialize vtable because RedefineClasses may have changed some
    // entries in this vtable for super classes so the CDS vtable might
    // point to old or obsolete entries.  RedefineClasses doesn't fix up
    // vtables in the shared system dictionary, only the main one.
    // It also redefines the itable too so fix that too.
    ResourceMark rm(THREAD);
    ik->vtable()->initialize_vtable(false, CHECK);
    ik->itable()->initialize_itable(false, CHECK);
  }

  // restore constant pool resolved references
  ik->constants()->restore_unshareable_info(CHECK);

  // Also restore any array classes with this class as element type.
  ik->array_klasses_do(restore_unshareable_in_class, CHECK);
}
2087 
2088 // returns true IFF is_in_error_state() has been changed as a result of this call.
2089 bool InstanceKlass::check_sharing_error_state() {
2090   assert(DumpSharedSpaces, "should only be called during dumping");
2091   bool old_state = is_in_error_state();
2092 
2093   if (!is_in_error_state()) {
2094     bool bad = false;
2095     for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) {
2096       if (sup->is_in_error_state()) {
2097         bad = true;
2098         break;
2099       }
2100     }
2101     if (!bad) {
2102       Array<Klass*>* interfaces = transitive_interfaces();
2103       for (int i = 0; i < interfaces->length(); i++) {
2104         Klass* iface = interfaces->at(i);
2105         if (InstanceKlass::cast(iface)->is_in_error_state()) {


2431 // See JLS 3rd edition 8.4.6.1
2432 // Assumes name-signature match
2433 // "this" is InstanceKlass of super_method which must exist
2434 // note that the InstanceKlass of the method in the targetclassname has not always been created yet
2435 bool InstanceKlass::is_override(const methodHandle& super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) {
2436    // Private methods can not be overridden
2437    if (super_method->is_private()) {
2438      return false;
2439    }
2440    // If super method is accessible, then override
2441    if ((super_method->is_protected()) ||
2442        (super_method->is_public())) {
2443      return true;
2444    }
2445    // Package-private methods are not inherited outside of package
2446    assert(super_method->is_package_private(), "must be package private");
2447    return(is_same_class_package(targetclassloader(), targetclassname));
2448 }
2449 
2450 /* defined for now in jvm.cpp, for historical reasons *--
2451 Klass* InstanceKlass::compute_enclosing_class_impl(instanceKlassHandle self,
2452                                                      Symbol*& simple_name_result, TRAPS) {
2453   ...
2454 }
2455 */
2456 
2457 // Only boot and platform class loaders can define classes in "java/" packages.
2458 void InstanceKlass::check_prohibited_package(Symbol* class_name,
2459                                              Handle class_loader,
2460                                              TRAPS) {
2461   ResourceMark rm(THREAD);
2462   if (!class_loader.is_null() &&
2463       !SystemDictionary::is_platform_class_loader(class_loader()) &&
2464       class_name != NULL &&
2465       strncmp(class_name->as_C_string(), JAVAPKG, JAVAPKG_LEN) == 0) {
2466     TempNewSymbol pkg_name = InstanceKlass::package_from_name(class_name, CHECK);
2467     assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'");
2468     char* name = pkg_name->as_C_string();
2469     const char* class_loader_name = SystemDictionary::loader_name(class_loader());
2470     StringUtils::replace_no_expand(name, "/", ".");
2471     const char* msg_text1 = "Class loader (instance of): ";


2505   }
2506 
2507   // Now do the same for class2.
2508   const InstanceKlass* outer2 = InstanceKlass::cast(class2);
2509   for (;;) {
2510     bool ignore_inner_is_member;
2511     Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member,
2512                                                     CHECK_false);
2513     if (next == NULL)  break;
2514     // Might as well check the new outer against all available values.
2515     if (next == class1)  return true;
2516     if (next == outer1)  return true;
2517     outer2 = InstanceKlass::cast(next);
2518   }
2519 
2520   // If by this point we have not found an equality between the
2521   // two classes, we know they are in separate package members.
2522   return false;
2523 }
2524 
2525 bool InstanceKlass::find_inner_classes_attr(instanceKlassHandle k, int* ooff, int* noff, TRAPS) {
2526   constantPoolHandle i_cp(THREAD, k->constants());
2527   for (InnerClassesIterator iter(k); !iter.done(); iter.next()) {
2528     int ioff = iter.inner_class_info_index();
2529     if (ioff != 0) {
2530       // Check to see if the name matches the class we're looking for
2531       // before attempting to find the class.
2532       if (i_cp->klass_name_at_matches(k, ioff)) {
2533         Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
2534         if (k() == inner_klass) {
2535           *ooff = iter.outer_class_info_index();
2536           *noff = iter.inner_name_index();
2537           return true;
2538         }
2539       }
2540     }
2541   }
2542   return false;
2543 }
2544 
2545 InstanceKlass* InstanceKlass::compute_enclosing_class_impl(const InstanceKlass* k,
2546                                                            bool* inner_is_member,
2547                                                            TRAPS) {
2548   InstanceKlass* outer_klass = NULL;
2549   *inner_is_member = false;
2550   int ooff = 0, noff = 0;
2551   if (find_inner_classes_attr(k, &ooff, &noff, THREAD)) {
2552     constantPoolHandle i_cp(THREAD, k->constants());
2553     if (ooff != 0) {
2554       Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);


2563         outer_klass = InstanceKlass::cast(ok);
2564         *inner_is_member = false;
2565       }
2566     }
2567   }
2568 
2569   // If no inner class attribute found for this class.
2570   if (NULL == outer_klass) return NULL;
2571 
2572   // Throws an exception if outer klass has not declared k as an inner klass
2573   // We need evidence that each klass knows about the other, or else
2574   // the system could allow a spoof of an inner class to gain access rights.
2575   Reflection::check_for_inner_class(outer_klass, k, *inner_is_member, CHECK_NULL);
2576   return outer_klass;
2577 }
2578 
2579 jint InstanceKlass::compute_modifier_flags(TRAPS) const {
2580   jint access = access_flags().as_int();
2581 
2582   // But check if it happens to be member class.
2583   instanceKlassHandle ik(THREAD, this);
2584   InnerClassesIterator iter(ik);
2585   for (; !iter.done(); iter.next()) {
2586     int ioff = iter.inner_class_info_index();
2587     // Inner class attribute can be zero, skip it.
2588     // Strange but true:  JVM spec. allows null inner class refs.
2589     if (ioff == 0) continue;
2590 
2591     // only look at classes that are already loaded
2592     // since we are looking for the flags for our self.
2593     Symbol* inner_name = ik->constants()->klass_name_at(ioff);
2594     if ((ik->name() == inner_name)) {
2595       // This is really a member class.
2596       access = iter.inner_access_flags();
2597       break;
2598     }
2599   }
2600   // Remember to strip ACC_SUPER bit
2601   return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
2602 }
2603 
2604 jint InstanceKlass::jvmti_class_status() const {
2605   jint result = 0;
2606 
2607   if (is_linked()) {
2608     result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
2609   }
2610 
2611   if (is_initialized()) {
2612     assert(is_linked(), "Class status is not consistent");
2613     result |= JVMTI_CLASS_STATUS_INITIALIZED;
2614   }


3592           // The previous loop may not find a matching EMCP method, but
3593           // that doesn't mean that we can optimize and not go any
3594           // further back in the PreviousVersion generations. The EMCP
3595           // method for this generation could have already been made obsolete,
3596           // but there still may be an older EMCP method that has not
3597           // been made obsolete.
3598         }
3599 
3600         if (++local_count >= obsolete_method_count) {
3601           // no more obsolete methods so bail out now
3602           break;
3603         }
3604       }
3605     }
3606   }
3607 }
3608 
// Save the scratch_class as the previous version if any of the methods are running.
// The previous_versions are used to set breakpoints in EMCP methods and they are
// also used to clean MethodData links to redefined methods that are no longer running.
void InstanceKlass::add_previous_version(instanceKlassHandle scratch_class,
                                         int emcp_method_count) {
  // RedefineClasses runs in the VMThread at a safepoint, so no locking needed.
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  ResourceMark rm;
  log_trace(redefine, class, iklass, add)
    ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);

  // Clean out old previous versions for this class
  purge_previous_version_list();

  // Mark newly obsolete methods in remaining previous versions.  An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
    // For debugging purposes.
    scratch_class->set_is_scratch_class();
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class());
    return;
  }

  if (emcp_method_count != 0) {
    // At least one method is still running, check for EMCP methods
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (!old_method->is_obsolete() && old_method->on_stack()) {
        // if EMCP method (not obsolete) is on the stack, mark as EMCP so that
        // we can add breakpoints for it.

        // We set the method->on_stack bit during safepoints for class redefinition
        // and use this bit to set the is_running_emcp bit.
        // After the safepoint, the on_stack bit is cleared and the running emcp
        // method may exit.   If so, we would set a breakpoint in a method that
        // is never reached, but this won't be noticeable to the programmer.
        old_method->set_running_emcp(true);
        log_trace(redefine, class, iklass, add)
          ("EMCP method %s is on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
      } else if (!old_method->is_obsolete()) {
        log_trace(redefine, class, iklass, add)
          ("EMCP method %s is NOT on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
      }
    }
  }

  // Add previous version if any methods are still running.
  // Set has_previous_version flag for processing during class unloading.
  _has_previous_versions = true;
  log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
  assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
  // Link scratch_class at the head of this klass' previous-version list.
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class());
} // end add_previous_version()
3673 
3674 #endif // INCLUDE_JVMTI
3675 
3676 Method* InstanceKlass::method_with_idnum(int idnum) {
3677   Method* m = NULL;
3678   if (idnum < methods()->length()) {
3679     m = methods()->at(idnum);
3680   }
3681   if (m == NULL || m->method_idnum() != idnum) {
3682     for (int index = 0; index < methods()->length(); ++index) {
3683       m = methods()->at(index);
3684       if (m->method_idnum() == idnum) {
3685         return m;
3686       }
3687     }
3688     // None found, return null for the caller to handle.
3689     return NULL;
3690   }
3691   return m;




 357 
 358   if (inner_classes() != NULL &&
 359       inner_classes() != Universe::the_empty_short_array() &&
 360       !inner_classes()->is_shared()) {
 361     MetadataFactory::free_array<jushort>(loader_data, inner_classes());
 362   }
 363   set_inner_classes(NULL);
 364 
 365   // We should deallocate the Annotations instance if it's not in shared spaces.
 366   if (annotations() != NULL && !annotations()->is_shared()) {
 367     MetadataFactory::free_metadata(loader_data, annotations());
 368   }
 369   set_annotations(NULL);
 370 }
 371 
 372 bool InstanceKlass::should_be_initialized() const {
 373   return !is_initialized();
 374 }
 375 
 376 klassItable* InstanceKlass::itable() const {
 377   return new klassItable(const_cast<InstanceKlass*>(this));
 378 }
 379 
 380 void InstanceKlass::eager_initialize(Thread *thread) {
 381   if (!EagerInitialization) return;
 382 
 383   if (this->is_not_initialized()) {
 384     // abort if the the class has a class initializer
 385     if (this->class_initializer() != NULL) return;
 386 
 387     // abort if it is java.lang.Object (initialization is handled in genesis)
 388     Klass* super = this->super();
 389     if (super == NULL) return;
 390 
 391     // abort if the super class should be initialized
 392     if (!InstanceKlass::cast(super)->is_initialized()) return;
 393 
 394     // call body to expose the this pointer
 395     eager_initialize_impl(this);

 396   }
 397 }
 398 
 399 // JVMTI spec thinks there are signers and protection domain in the
 400 // instanceKlass.  These accessors pretend these fields are there.
 401 // The hprof specification also thinks these fields are in InstanceKlass.
 402 oop InstanceKlass::protection_domain() const {
 403   // return the protection_domain from the mirror
 404   return java_lang_Class::protection_domain(java_mirror());
 405 }
 406 
 407 // To remove these from requires an incompatible change and CCC request.
 408 objArrayOop InstanceKlass::signers() const {
 409   // return the signers from the mirror
 410   return java_lang_Class::signers(java_mirror());
 411 }
 412 
 413 oop InstanceKlass::init_lock() const {
 414   // return the init lock from the mirror
 415   oop lock = java_lang_Class::init_lock(java_mirror());
 416   // Prevent reordering with any access of initialization state
 417   OrderAccess::loadload();
 418   assert((oop)lock != NULL || !is_not_initialized(), // initialized or in_error state
 419          "only fully initialized state can have a null lock");
 420   return lock;
 421 }
 422 
// Set the initialization lock to null so the object can be GC'ed.  Any racing
// threads to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return.
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  OrderAccess::storestore();
  // Publish: any thread that observes the cleared lock must also observe
  // the final init_state written before the fence above.
  java_lang_Class::set_init_lock(java_mirror(), NULL);
  assert(!is_not_initialized(), "class must be initialized now");
}
 433 
// Eagerly initialize this_k: link it and, if linking succeeds, mark it
// fully_initialized without running any code. The caller (eager_initialize)
// has already verified there is no class initializer to run.
void InstanceKlass::eager_initialize_impl(InstanceKlass* this_k) {
  EXCEPTION_MARK;
  HandleMark hm(THREAD);
  // Take the per-class init lock (may be NULL - then no locking happens).
  Handle init_lock(THREAD, this_k->init_lock());
  ObjectLocker ol(init_lock, THREAD, init_lock() != NULL);

  // abort if someone beat us to the initialization
  if (!this_k->is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = this_k->init_state();
  link_class_impl(this_k, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  Set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if( old_state != this_k->_init_state )
      this_k->set_init_state (old_state);
  } else {
    // linking successful, mark class as initialized
    this_k->set_init_state (fully_initialized);
    // Clear the init lock (with a fence) so it can be GC'ed.
    this_k->fence_and_clear_init_lock();
    // trace
    if (log_is_enabled(Info, class, init)) {
      ResourceMark rm(THREAD);
      log_info(class, init)("[Initialized %s without side effects]", this_k->external_name());
    }
  }
}
 465 
 466 
 467 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
 468 // process. The step comments refers to the procedure described in that section.
 469 // Note: implementation moved to static method to expose the this pointer.
 470 void InstanceKlass::initialize(TRAPS) {
 471   if (this->should_be_initialized()) {
 472     initialize_impl(this, CHECK);

 473     // Note: at this point the class may be initialized
 474     //       OR it may be in the state of being initialized
 475     //       in case of recursive initialization!
 476   } else {
 477     assert(is_initialized(), "sanity check");
 478   }
 479 }
 480 
 481 
 482 bool InstanceKlass::verify_code(
 483     InstanceKlass* this_k, bool throw_verifyerror, TRAPS) {
 484   // 1) Verify the bytecodes
 485   Verifier::Mode mode =
 486     throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
 487   return Verifier::verify(this_k, mode, this_k->should_verify_class(), THREAD);
 488 }
 489 
 490 
// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.

void InstanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  // Roll the class back to the 'loaded' state; it will be re-linked on use.
  _init_state = loaded;
}
 498 
 499 void InstanceKlass::link_class(TRAPS) {
 500   assert(is_loaded(), "must be loaded");
 501   if (!is_linked()) {
 502     link_class_impl(this, true, CHECK);

 503   }
 504 }
 505 
 506 // Called to verify that a class can link during initialization, without
 507 // throwing a VerifyError.
 508 bool InstanceKlass::link_class_or_fail(TRAPS) {
 509   assert(is_loaded(), "must be loaded");
 510   if (!is_linked()) {
 511     link_class_impl(this, false, CHECK_false);

 512   }
 513   return is_linked();
 514 }
 515 
 516 bool InstanceKlass::link_class_impl(
 517     InstanceKlass* this_k, bool throw_verifyerror, TRAPS) {
 518   if (DumpSharedSpaces && this_k->is_in_error_state()) {
 519     // This is for CDS dumping phase only -- we use the in_error_state to indicate that
 520     // the class has failed verification. Throwing the NoClassDefFoundError here is just
 521     // a convenient way to stop repeat attempts to verify the same (bad) class.
 522     //
 523     // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
 524     // if we are executing Java code. This is not a problem for CDS dumping phase since
 525     // it doesn't execute any Java code.
 526     ResourceMark rm(THREAD);
 527     THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
 528                this_k->external_name(), false);
 529   }
 530   // return if already verified
 531   if (this_k->is_linked()) {
 532     return true;
 533   }
 534 
 535   // Timing
 536   // timer handles recursion
 537   assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
 538   JavaThread* jt = (JavaThread*)THREAD;
 539 
 540   // link super class before linking this class
 541   Klass* super = this_k->super();
 542   if (super != NULL) {
 543     if (super->is_interface()) {  // check if super class is an interface
 544       ResourceMark rm(THREAD);
 545       Exceptions::fthrow(
 546         THREAD_AND_LOCATION,
 547         vmSymbols::java_lang_IncompatibleClassChangeError(),
 548         "class %s has interface %s as super class",
 549         this_k->external_name(),
 550         super->external_name()
 551       );
 552       return false;
 553     }
 554 
 555     InstanceKlass* ik_super = InstanceKlass::cast(super);
 556     link_class_impl(ik_super, throw_verifyerror, CHECK_false);
 557   }
 558 
 559   // link all interfaces implemented by this class before linking this class
 560   Array<Klass*>* interfaces = this_k->local_interfaces();
 561   int num_interfaces = interfaces->length();
 562   for (int index = 0; index < num_interfaces; index++) {
 563     InstanceKlass* interk = InstanceKlass::cast(interfaces->at(index));
 564     link_class_impl(interk, throw_verifyerror, CHECK_false);
 565   }
 566 
 567   // in case the class is linked in the process of linking its superclasses
 568   if (this_k->is_linked()) {
 569     return true;
 570   }
 571 
 572   // trace only the link time for this klass that includes
 573   // the verification time
 574   PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
 575                              ClassLoader::perf_class_link_selftime(),
 576                              ClassLoader::perf_classes_linked(),
 577                              jt->get_thread_stat()->perf_recursion_counts_addr(),
 578                              jt->get_thread_stat()->perf_timers_addr(),
 579                              PerfClassTraceTime::CLASS_LINK);
 580 
 581   // verification & rewriting
 582   {
 583     HandleMark hm(THREAD);
 584     Handle init_lock(THREAD, this_k->init_lock());


 622       // a shared class if the class is not loaded by the NULL classloader.
 623       ClassLoaderData * loader_data = this_k->class_loader_data();
 624       if (!(this_k->is_shared() &&
 625             loader_data->is_the_null_class_loader_data())) {
 626         ResourceMark rm(THREAD);
 627         this_k->vtable()->initialize_vtable(true, CHECK_false);
 628         this_k->itable()->initialize_itable(true, CHECK_false);
 629       }
 630 #ifdef ASSERT
 631       else {
 632         ResourceMark rm(THREAD);
 633         this_k->vtable()->verify(tty, true);
 634         // In case itable verification is ever added.
 635         // this_k->itable()->verify(tty, true);
 636       }
 637 #endif
 638       this_k->set_init_state(linked);
 639       if (JvmtiExport::should_post_class_prepare()) {
 640         Thread *thread = THREAD;
 641         assert(thread->is_Java_thread(), "thread->is_Java_thread()");
 642         JvmtiExport::post_class_prepare((JavaThread *) thread, this_k);
 643       }
 644     }
 645   }
 646   return true;
 647 }
 648 
 649 
 650 // Rewrite the byte codes of all of the methods of a class.
 651 // The rewriter must be called exactly once. Rewriting must happen after
 652 // verification but before the first method of the class is executed.
 653 void InstanceKlass::rewrite_class(TRAPS) {
 654   assert(is_loaded(), "must be loaded");
 655   if (is_rewritten()) {
 656     assert(is_shared(), "rewriting an unshared class?");

 657     return;
 658   }
 659   Rewriter::rewrite(this, CHECK);
 660   set_rewritten();
 661 }
 662 
 663 // Now relocate and link method entry points after class is rewritten.
 664 // This is outside is_rewritten flag. In case of an exception, it can be
 665 // executed more than once.
 666 void InstanceKlass::link_methods(TRAPS) {
 667   int len = methods()->length();
 668   for (int i = len-1; i >= 0; i--) {
 669     methodHandle m(THREAD, methods()->at(i));
 670 
 671     // Set up method entry points for compiler and interpreter    .
 672     m->link_method(m, CHECK);
 673   }
 674 }
 675 
 676 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
 677 void InstanceKlass::initialize_super_interfaces(InstanceKlass* this_k, TRAPS) {
 678   assert (this_k->has_nonstatic_concrete_methods(), "caller should have checked this");
 679   for (int i = 0; i < this_k->local_interfaces()->length(); ++i) {
 680     Klass* iface = this_k->local_interfaces()->at(i);
 681     InstanceKlass* ik = InstanceKlass::cast(iface);
 682 
 683     // Initialization is depth first search ie. we start with top of the inheritance tree
 684     // has_nonstatic_concrete_methods drives searching superinterfaces since it
 685     // means has_nonstatic_concrete_methods in its superinterface hierarchy
 686     if (ik->has_nonstatic_concrete_methods()) {
 687       ik->initialize_super_interfaces(ik, CHECK);
 688     }
 689 
 690     // Only initialize() interfaces that "declare" concrete methods.
 691     if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
 692       ik->initialize(CHECK);
 693     }
 694   }
 695 }
 696 
 697 void InstanceKlass::initialize_impl(InstanceKlass* this_k, TRAPS) {
 698   HandleMark hm(THREAD);
 699 
 700   // Make sure klass is linked (verified) before initialization
 701   // A class could already be verified, since it has been reflected upon.
 702   this_k->link_class(CHECK);
 703 
 704   DTRACE_CLASSINIT_PROBE(required, this_k, -1);
 705 
 706   bool wait = false;
 707 
 708   // refer to the JVM book page 47 for description of steps
 709   // Step 1
 710   {
 711     Handle init_lock(THREAD, this_k->init_lock());
 712     ObjectLocker ol(init_lock, THREAD, init_lock() != NULL);
 713 
 714     Thread *self = THREAD; // it's passed the current thread
 715 
 716     // Step 2
 717     // If we were to use wait() instead of waitInterruptibly() then
 718     // we might end up throwing IE from link/symbol resolution sites
 719     // that aren't expected to throw.  This would wreak havoc.  See 6320309.
 720     while(this_k->is_being_initialized() && !this_k->is_reentrant_initialization(self)) {
 721         wait = true;
 722       ol.waitUninterruptibly(CHECK);
 723     }
 724 
 725     // Step 3
 726     if (this_k->is_being_initialized() && this_k->is_reentrant_initialization(self)) {
 727       DTRACE_CLASSINIT_PROBE_WAIT(recursive, this_k, -1,wait);
 728       return;
 729     }
 730 
 731     // Step 4
 732     if (this_k->is_initialized()) {
 733       DTRACE_CLASSINIT_PROBE_WAIT(concurrent, this_k, -1,wait);
 734       return;
 735     }
 736 
 737     // Step 5
 738     if (this_k->is_in_error_state()) {
 739       DTRACE_CLASSINIT_PROBE_WAIT(erroneous, this_k, -1,wait);
 740       ResourceMark rm(THREAD);
 741       const char* desc = "Could not initialize class ";
 742       const char* className = this_k->external_name();
 743       size_t msglen = strlen(desc) + strlen(className) + 1;
 744       char* message = NEW_RESOURCE_ARRAY(char, msglen);
 745       if (NULL == message) {
 746         // Out of memory: can't create detailed error message
 747         THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
 748       } else {
 749         jio_snprintf(message, msglen, "%s%s", desc, className);
 750         THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
 751       }
 752     }
 753 
 754     // Step 6
 755     this_k->set_init_state(being_initialized);
 756     this_k->set_init_thread(self);
 757   }
 758 
 759   // Step 7


 765       super_klass->initialize(THREAD);
 766     }
 767     // If C implements any interface that declares a non-static, concrete method,
 768     // the initialization of C triggers initialization of its super interfaces.
 769     // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
 770     // having a superinterface that declares, non-static, concrete methods
 771     if (!HAS_PENDING_EXCEPTION && this_k->has_nonstatic_concrete_methods()) {
 772       this_k->initialize_super_interfaces(this_k, THREAD);
 773     }
 774 
 775     // If any exceptions, complete abruptly, throwing the same exception as above.
 776     if (HAS_PENDING_EXCEPTION) {
 777       Handle e(THREAD, PENDING_EXCEPTION);
 778       CLEAR_PENDING_EXCEPTION;
 779       {
 780         EXCEPTION_MARK;
 781         // Locks object, set state, and notify all waiting threads
 782         this_k->set_initialization_state_and_notify(initialization_error, THREAD);
 783         CLEAR_PENDING_EXCEPTION;
 784       }
 785       DTRACE_CLASSINIT_PROBE_WAIT(super__failed, this_k, -1,wait);
 786       THROW_OOP(e());
 787     }
 788   }
 789 
 790 
 791   // Look for aot compiled methods for this klass, including class initializer.
 792   AOTLoader::load_for_klass(this_k, THREAD);
 793 
 794   // Step 8
 795   {
 796     assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
 797     JavaThread* jt = (JavaThread*)THREAD;
 798     DTRACE_CLASSINIT_PROBE_WAIT(clinit, this_k, -1,wait);
 799     // Timer includes any side effects of class initialization (resolution,
 800     // etc), but not recursive entry into call_class_initializer().
 801     PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
 802                              ClassLoader::perf_class_init_selftime(),
 803                              ClassLoader::perf_classes_inited(),
 804                              jt->get_thread_stat()->perf_recursion_counts_addr(),
 805                              jt->get_thread_stat()->perf_timers_addr(),
 806                              PerfClassTraceTime::CLASS_CLINIT);
 807     this_k->call_class_initializer(THREAD);
 808   }
 809 
 810   // Step 9
 811   if (!HAS_PENDING_EXCEPTION) {
 812     this_k->set_initialization_state_and_notify(fully_initialized, CHECK);
 813     { ResourceMark rm(THREAD);
 814       debug_only(this_k->vtable()->verify(tty, true);)
 815     }
 816   }
 817   else {
 818     // Step 10 and 11
 819     Handle e(THREAD, PENDING_EXCEPTION);
 820     CLEAR_PENDING_EXCEPTION;
 821     // JVMTI has already reported the pending exception
 822     // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
 823     JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
 824     {
 825       EXCEPTION_MARK;
 826       this_k->set_initialization_state_and_notify(initialization_error, THREAD);
 827       CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
 828       // JVMTI has already reported the pending exception
 829       // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
 830       JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
 831     }
 832     DTRACE_CLASSINIT_PROBE_WAIT(error, this_k, -1,wait);
 833     if (e->is_a(SystemDictionary::Error_klass())) {
 834       THROW_OOP(e());
 835     } else {
 836       JavaCallArguments args(e);
 837       THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
 838                 vmSymbols::throwable_void_signature(),
 839                 &args);
 840     }
 841   }
 842   DTRACE_CLASSINIT_PROBE_WAIT(end, this_k, -1,wait);
 843 }
 844 
 845 
// Note: implementation moved to static method to expose the this pointer.
// Transitions the class to the given init state and wakes any threads
// waiting on the init lock (see initialize_impl, Step 2).
void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  // CHECK propagates any exception raised by notify_all in the impl.
  set_initialization_state_and_notify_impl(this, state, CHECK);

}
 850 
// Sets this_k's initialization state and notifies all threads blocked on the
// init lock waiting for initialization of this_k to complete.
void InstanceKlass::set_initialization_state_and_notify_impl(InstanceKlass* this_k, ClassState state, TRAPS) {
  Handle init_lock(THREAD, this_k->init_lock());
  if (init_lock() != NULL) {
    ObjectLocker ol(init_lock, THREAD);
    this_k->set_init_state(state);
    // Fence, then clear the init lock: once a terminal state is published,
    // readers no longer need to synchronize on the lock.
    this_k->fence_and_clear_init_lock();
    ol.notify_all(CHECK);
  } else {
    // init_lock() is NULL in this branch, so this assert always fires in
    // debug builds: getting here means the lock was already cleared by an
    // earlier (terminal) state transition, i.e. the state was set twice.
    assert(init_lock() != NULL, "The initialization state should never be set twice");
    this_k->set_init_state(state);
  }
}
 863 
 864 // The embedded _implementor field can only record one implementor.
 865 // When there are more than one implementors, the _implementor field
 866 // is set to the interface Klass* itself. Following are the possible
 867 // values for the _implementor field:
 868 //   NULL                  - no implementor
 869 //   implementor Klass*    - one implementor
 870 //   self                  - more than one implementor
 871 //


 973   // Verify direct super interface
 974   if (this == k) return true;
 975   assert(k->is_interface(), "should be an interface class");
 976   for (int i = 0; i < local_interfaces()->length(); i++) {
 977     if (local_interfaces()->at(i) == k) {
 978       return true;
 979     }
 980   }
 981   return false;
 982 }
 983 
 984 objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
 985   if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
 986   if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
 987     report_java_out_of_memory("Requested array size exceeds VM limit");
 988     JvmtiExport::post_array_size_exhausted();
 989     THROW_OOP_0(Universe::out_of_memory_error_array_size());
 990   }
 991   int size = objArrayOopDesc::object_size(length);
 992   Klass* ak = array_klass(n, CHECK_NULL);

 993   objArrayOop o =
 994     (objArrayOop)CollectedHeap::array_allocate(ak, size, length, CHECK_NULL);
 995   return o;
 996 }
 997 
 998 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
 999   if (TraceFinalizerRegistration) {
1000     tty->print("Registered ");
1001     i->print_value_on(tty);
1002     tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i));
1003   }
1004   instanceHandle h_i(THREAD, i);
1005   // Pass the handle as argument, JavaCalls::call expects oop as jobjects
1006   JavaValue result(T_VOID);
1007   JavaCallArguments args(h_i);
1008   methodHandle mh (THREAD, Universe::finalizer_register_method());
1009   JavaCalls::call(&result, mh, &args, CHECK_NULL);
1010   return h_i();
1011 }
1012 
1013 instanceOop InstanceKlass::allocate_instance(TRAPS) {
1014   bool has_finalizer_flag = has_finalizer(); // Query before possible GC
1015   int size = size_helper();  // Query before forming handle.
1016 


1017   instanceOop i;
1018 
1019   i = (instanceOop)CollectedHeap::obj_allocate(this, size, CHECK_NULL);
1020   if (has_finalizer_flag && !RegisterFinalizersAtInit) {
1021     i = register_finalizer(i, CHECK_NULL);
1022   }
1023   return i;
1024 }
1025 
1026 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1027   if (is_interface() || is_abstract()) {
1028     ResourceMark rm(THREAD);
1029     THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1030               : vmSymbols::java_lang_InstantiationException(), external_name());
1031   }
1032   if (this == SystemDictionary::Class_klass()) {
1033     ResourceMark rm(THREAD);
1034     THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1035               : vmSymbols::java_lang_IllegalAccessException(), external_name());
1036   }
1037 }
1038 
// Return the n-dimensional array klass for this element class; delegates to
// the static helper below (which creates the klass unless or_null is true).
Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
  return array_klass_impl(this, or_null, n, THREAD);

}
1042 
1043 Klass* InstanceKlass::array_klass_impl(InstanceKlass* this_k, bool or_null, int n, TRAPS) {
1044   // Need load-acquire for lock-free read
1045   if (this_k->array_klasses_acquire() == NULL) {
1046     if (or_null) return NULL;
1047 
1048     ResourceMark rm;
1049     JavaThread *jt = (JavaThread *)THREAD;
1050     {
1051       // Atomic creation of array_klasses
1052       MutexLocker mc(Compile_lock, THREAD);   // for vtables
1053       MutexLocker ma(MultiArray_lock, THREAD);
1054 
1055       // Check if update has already taken place
1056       if (this_k->array_klasses() == NULL) {
1057         Klass*    k = ObjArrayKlass::allocate_objArray_klass(this_k->class_loader_data(), 1, this_k, CHECK_NULL);
1058         // use 'release' to pair with lock-free load
1059         this_k->release_set_array_klasses(k);
1060       }
1061     }
1062   }
1063   // _this will always be set at this point
1064   ObjArrayKlass* oak = (ObjArrayKlass*)this_k->array_klasses();
1065   if (or_null) {
1066     return oak->array_klass_or_null(n);
1067   }
1068   return oak->array_klass(n, THREAD);
1069 }
1070 
// One-dimensional convenience overload of array_klass_impl above.
Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) {
  return array_klass_impl(or_null, 1, THREAD);
}
1074 
// Run this class' static initializer (<clinit>), if any; implementation is
// in the static _impl method to expose the this pointer.
void InstanceKlass::call_class_initializer(TRAPS) {
  call_class_initializer_impl(this, THREAD);

}
1078 
// Count of class initializations run so far; only used in class,init logging.
static int call_class_initializer_impl_counter = 0;   // for debugging
1080 
1081 Method* InstanceKlass::class_initializer() const {
1082   Method* clinit = find_method(
1083       vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1084   if (clinit != NULL && clinit->has_valid_initializer_flags()) {
1085     return clinit;
1086   }
1087   return NULL;
1088 }
1089 
1090 void InstanceKlass::call_class_initializer_impl(InstanceKlass* this_k, TRAPS) {
1091   if (ReplayCompiles &&
1092       (ReplaySuppressInitializers == 1 ||
1093        ReplaySuppressInitializers >= 2 && this_k->class_loader() != NULL)) {
1094     // Hide the existence of the initializer for the purpose of replaying the compile
1095     return;
1096   }
1097 
1098   methodHandle h_method(THREAD, this_k->class_initializer());
1099   assert(!this_k->is_initialized(), "we cannot initialize twice");
1100   if (log_is_enabled(Info, class, init)) {
1101     ResourceMark rm;
1102     outputStream* log = Log(class, init)::info_stream();
1103     log->print("%d Initializing ", call_class_initializer_impl_counter++);
1104     this_k->name()->print_value_on(log);
1105     log->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this_k));
1106   }
1107   if (h_method() != NULL) {
1108     JavaCallArguments args; // No arguments
1109     JavaValue result(T_VOID);
1110     JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
1111   }
1112 }
1113 
1114 
1115 void InstanceKlass::mask_for(const methodHandle& method, int bci,
1116   InterpreterOopMap* entry_for) {
1117   // Lazily create the _oop_map_cache at first request
1118   // Lock-free access requires load_ptr_acquire.
1119   OopMapCache* oop_map_cache =
1120       static_cast<OopMapCache*>(OrderAccess::load_ptr_acquire(&_oop_map_cache));
1121   if (oop_map_cache == NULL) {
1122     MutexLocker x(OopMapCacheAlloc_lock);
1123     // Check if _oop_map_cache was allocated while we were waiting for this lock
1124     if ((oop_map_cache = _oop_map_cache) == NULL) {
1125       oop_map_cache = new OopMapCache();


1236   int len = methods()->length();
1237   for (int index = 0; index < len; index++) {
1238     Method* m = methods()->at(index);
1239     assert(m->is_method(), "must be method");
1240     f(m);
1241   }
1242 }
1243 
1244 
1245 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1246   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1247     if (fs.access_flags().is_static()) {
1248       fieldDescriptor& fd = fs.field_descriptor();
1249       cl->do_field(&fd);
1250     }
1251   }
1252 }
1253 
1254 
// Apply f(&fd, mirror) to each local static field; f may throw (TRAPS).
void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
  do_local_static_fields_impl(this, f, mirror, CHECK);

}
1258 
1259 
1260 void InstanceKlass::do_local_static_fields_impl(InstanceKlass* this_k,
1261                              void f(fieldDescriptor* fd, Handle, TRAPS), Handle mirror, TRAPS) {
1262   for (JavaFieldStream fs(this_k); !fs.done(); fs.next()) {
1263     if (fs.access_flags().is_static()) {
1264       fieldDescriptor& fd = fs.field_descriptor();
1265       f(&fd, mirror, CHECK);
1266     }
1267   }
1268 }
1269 
1270 
// Comparator ordering fields by ascending offset (a[0], b[0] are offsets).
// Uses an explicit three-way compare instead of subtraction so the result
// cannot suffer signed-integer overflow (UB) for extreme offset values;
// callers only rely on the sign of the result.
static int compare_fields_by_offset(int* a, int* b) {
  return (a[0] < b[0]) ? -1 : ((a[0] > b[0]) ? 1 : 0);
}
1274 
1275 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
1276   InstanceKlass* super = superklass();
1277   if (super != NULL) {
1278     super->do_nonstatic_fields(cl);
1279   }
1280   fieldDescriptor fd;
1281   int length = java_fields_count();
1282   // In DebugInfo nonstatic fields are sorted by offset.


1601 // Do NOT return private or static methods, new in JDK8 which are not externally visible
1602 // They should only be found in the initial InterfaceMethodRef
1603 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
1604                                                        Symbol* signature,
1605                                                        DefaultsLookupMode defaults_mode) const {
1606   Array<Klass*>* all_ifs = transitive_interfaces();
1607   int num_ifs = all_ifs->length();
1608   InstanceKlass *ik = NULL;
1609   for (int i = 0; i < num_ifs; i++) {
1610     ik = InstanceKlass::cast(all_ifs->at(i));
1611     Method* m = ik->lookup_method(name, signature);
1612     if (m != NULL && m->is_public() && !m->is_static() &&
1613         ((defaults_mode != skip_defaults) || !m->is_default_method())) {
1614       return m;
1615     }
1616   }
1617   return NULL;
1618 }
1619 
1620 /* jni_id_for_impl for jfieldIds only */
1621 JNIid* InstanceKlass::jni_id_for_impl(InstanceKlass* this_k, int offset) {
1622   MutexLocker ml(JfieldIdCreation_lock);
1623   // Retry lookup after we got the lock
1624   JNIid* probe = this_k->jni_ids() == NULL ? NULL : this_k->jni_ids()->find(offset);
1625   if (probe == NULL) {
1626     // Slow case, allocate new static field identifier
1627     probe = new JNIid(this_k, offset, this_k->jni_ids());
1628     this_k->set_jni_ids(probe);
1629   }
1630   return probe;
1631 }
1632 
1633 
1634 /* jni_id_for for jfieldIds only */
1635 JNIid* InstanceKlass::jni_id_for(int offset) {
1636   JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
1637   if (probe == NULL) {
1638     probe = jni_id_for_impl(this, offset);
1639   }
1640   return probe;
1641 }
1642 
1643 u2 InstanceKlass::enclosing_method_data(int offset) const {
1644   const Array<jushort>* const inner_class_list = inner_classes();
1645   if (inner_class_list == NULL) {
1646     return 0;
1647   }


1656 
1657 void InstanceKlass::set_enclosing_method_indices(u2 class_index,
1658                                                  u2 method_index) {
1659   Array<jushort>* inner_class_list = inner_classes();
1660   assert (inner_class_list != NULL, "_inner_classes list is not set up");
1661   int length = inner_class_list->length();
1662   if (length % inner_class_next_offset == enclosing_method_attribute_size) {
1663     int index = length - enclosing_method_attribute_size;
1664     inner_class_list->at_put(
1665       index + enclosing_method_class_index_offset, class_index);
1666     inner_class_list->at_put(
1667       index + enclosing_method_method_index_offset, method_index);
1668   }
1669 }
1670 
// Lookup or create a jmethodID.
// This code is called by the VMThread and JavaThreads so the
// locking has to be done very carefully to avoid deadlocks
// and/or other cache consistency problems.
//
// ik       - the klass whose per-klass jmethodID cache is consulted/updated
// method_h - the method to map; may be an old (redefined) version, in which
//            case the id is made for the current version with the same idnum
jmethodID InstanceKlass::get_jmethod_id(InstanceKlass* ik, const methodHandle& method_h) {
  size_t idnum = (size_t)method_h->method_idnum();
  jmethodID* jmeths = ik->methods_jmethod_ids_acquire();
  size_t length = 0;
  jmethodID id = NULL;

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from NULL to non-NULL which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store_ptr() to save a new jmethodID
  // in the cache so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID requires uniqueness and
  // creation of the cache itself requires no leaks so a lock is
  // generally acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache can
  // grow and we'll have transitions from non-NULL to bigger non-NULL.
  // Cache creation requires no leaks and we require safety between all
  // cache accesses and freeing of the old cache so a lock is generally
  // acquired when the RedefineClasses() API has been used.

  if (jmeths != NULL) {
    // the cache already exists
    if (!ik->idnum_can_increment()) {
      // the cache can't grow so we can just get the current values
      get_jmethod_id_length_value(jmeths, idnum, &length, &id);
    } else {
      // cache can grow so we have to be more careful
      if (Threads::number_of_threads() == 0 ||
          SafepointSynchronize::is_at_safepoint()) {
        // we're single threaded or at a safepoint - no locking needed
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      } else {
        MutexLocker ml(JmethodIdCreation_lock);
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      }
    }
  }
  // implied else:
  // we need to allocate a cache so default length and id values are good

  if (jmeths == NULL ||   // no cache yet
      length <= idnum ||  // cache is too short
      id == NULL) {       // cache doesn't contain entry

    // This function can be called by the VMThread so we have to do all
    // things that might block on a safepoint before grabbing the lock.
    // Otherwise, we can deadlock with the VMThread or have a cache
    // consistency issue. These vars keep track of what we might have
    // to free after the lock is dropped.
    jmethodID  to_dealloc_id     = NULL;
    jmethodID* to_dealloc_jmeths = NULL;

    // may not allocate new_jmeths or use it if we allocate it
    jmethodID* new_jmeths = NULL;
    if (length <= idnum) {
      // allocate a new cache that might be used
      size_t size = MAX2(idnum+1, (size_t)ik->idnum_allocated_count());
      new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
      memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
      // cache size is stored in element[0], other elements offset by one
      new_jmeths[0] = (jmethodID)size;
    }

    // allocate a new jmethodID that might be used
    jmethodID new_id = NULL;
    if (method_h->is_old() && !method_h->is_obsolete()) {
      // The method passed in is old (but not obsolete), we need to use the current version
      Method* current_method = ik->method_with_idnum((int)idnum);
      assert(current_method != NULL, "old and but not obsolete, so should exist");
      new_id = Method::make_jmethod_id(ik->class_loader_data(), current_method);
    } else {
      // It is the current version of the method or an obsolete method,
      // use the version passed in
      new_id = Method::make_jmethod_id(ik->class_loader_data(), method_h());
    }

    if (Threads::number_of_threads() == 0 ||
        SafepointSynchronize::is_at_safepoint()) {
      // we're single threaded or at a safepoint - no locking needed
      id = get_jmethod_id_fetch_or_update(ik, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    } else {
      MutexLocker ml(JmethodIdCreation_lock);
      id = get_jmethod_id_fetch_or_update(ik, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    }

    // The lock has been dropped so we can free resources.
    // Free up either the old cache or the new cache if we allocated one.
    if (to_dealloc_jmeths != NULL) {
      FreeHeap(to_dealloc_jmeths);
    }
    // free up the new ID since it wasn't needed
    if (to_dealloc_id != NULL) {
      Method::destroy_jmethod_id(ik->class_loader_data(), to_dealloc_id);
    }
  }
  return id;
}
1779 
1780 // Figure out how many jmethodIDs haven't been allocated, and make
1781 // sure space for them is pre-allocated.  This makes getting all
1782 // method ids much, much faster with classes with more than 8
1783 // methods, and has a *substantial* effect on performance with jvmti
1784 // code that loads all jmethodIDs for all classes.
1785 void InstanceKlass::ensure_space_for_methodids(int start_offset) {
1786   int new_jmeths = 0;
1787   int length = methods()->length();
1788   for (int index = start_offset; index < length; index++) {
1789     Method* m = methods()->at(index);
1790     jmethodID id = m->find_jmethod_id_or_null();
1791     if (id == NULL) {
1792       new_jmeths++;
1793     }
1794   }
1795   if (new_jmeths != 0) {
1796     Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
1797   }
1798 }
1799 
1800 // Common code to fetch the jmethodID from the cache or update the
1801 // cache with the new jmethodID. This function should never do anything
1802 // that causes the caller to go to a safepoint or we can deadlock with
1803 // the VMThread or have cache consistency issues.
1804 //
1805 jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
1806             InstanceKlass* ik, size_t idnum, jmethodID new_id,
1807             jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
1808             jmethodID** to_dealloc_jmeths_p) {
1809   assert(new_id != NULL, "sanity check");
1810   assert(to_dealloc_id_p != NULL, "sanity check");
1811   assert(to_dealloc_jmeths_p != NULL, "sanity check");
1812   assert(Threads::number_of_threads() == 0 ||
1813          SafepointSynchronize::is_at_safepoint() ||
1814          JmethodIdCreation_lock->owned_by_self(), "sanity check");
1815 
1816   // reacquire the cache - we are locked, single threaded or at a safepoint
1817   jmethodID* jmeths = ik->methods_jmethod_ids_acquire();
1818   jmethodID  id     = NULL;
1819   size_t     length = 0;
1820 
1821   if (jmeths == NULL ||                         // no cache yet
1822       (length = (size_t)jmeths[0]) <= idnum) {  // cache is too short
1823     if (jmeths != NULL) {
1824       // copy any existing entries from the old cache
1825       for (size_t index = 0; index < length; index++) {
1826         new_jmeths[index+1] = jmeths[index+1];
1827       }
1828       *to_dealloc_jmeths_p = jmeths;  // save old cache for later delete
1829     }
1830     ik->release_set_methods_jmethod_ids(jmeths = new_jmeths);
1831   } else {
1832     // fetch jmethodID (if any) from the existing cache
1833     id = jmeths[idnum+1];
1834     *to_dealloc_jmeths_p = new_jmeths;  // save new cache for later delete
1835   }
1836   if (id == NULL) {
1837     // No matching jmethodID in the existing cache or we have a new
1838     // cache or we just grew the cache. This cache write is done here
1839     // by the first thread to win the foot race because a jmethodID
1840     // needs to be unique once it is generally available.
1841     id = new_id;
1842 
1843     // The jmethodID cache can be read while unlocked so we have to
1844     // make sure the new jmethodID is complete before installing it
1845     // in the cache.
1846     OrderAccess::release_store_ptr(&jmeths[idnum+1], id);
1847   } else {
1848     *to_dealloc_id_p = new_id; // save new id for later delete
1849   }
1850   return id;


2030   constants()->remove_unshareable_info();
2031 
2032   assert(_dep_context == DependencyContext::EMPTY, "dependency context is not shareable");
2033 
2034   for (int i = 0; i < methods()->length(); i++) {
2035     Method* m = methods()->at(i);
2036     m->remove_unshareable_info();
2037   }
2038 
2039   // do array classes also.
2040   array_klasses_do(remove_unshareable_in_class);
2041 }
2042 
// Klass-closure callback used by array_klasses_do below: restore the
// CDS-stripped state of klass k under the boot loader.
static void restore_unshareable_in_class(Klass* k, TRAPS) {
  // Array classes have null protection domain.
  // --> see ArrayKlass::complete_create_array_klass()
  k->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
}
2048 
2049 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) {
2050   set_package(loader_data, CHECK);

2051   Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
2052 
2053   Array<Method*>* methods = this->methods();
2054   int num_methods = methods->length();
2055   for (int index2 = 0; index2 < num_methods; ++index2) {
2056     methodHandle m(THREAD, methods->at(index2));
2057     m->restore_unshareable_info(CHECK);
2058   }
2059   if (JvmtiExport::has_redefined_a_class()) {
2060     // Reinitialize vtable because RedefineClasses may have changed some
2061     // entries in this vtable for super classes so the CDS vtable might
2062     // point to old or obsolete entries.  RedefineClasses doesn't fix up
2063     // vtables in the shared system dictionary, only the main one.
2064     // It also redefines the itable too so fix that too.
2065     ResourceMark rm(THREAD);
2066     vtable()->initialize_vtable(false, CHECK);
2067     itable()->initialize_itable(false, CHECK);
2068   }
2069 
2070   // restore constant pool resolved references
2071   constants()->restore_unshareable_info(CHECK);
2072 
2073   array_klasses_do(restore_unshareable_in_class, CHECK);
2074 }
2075 
2076 // returns true IFF is_in_error_state() has been changed as a result of this call.
2077 bool InstanceKlass::check_sharing_error_state() {
2078   assert(DumpSharedSpaces, "should only be called during dumping");
2079   bool old_state = is_in_error_state();
2080 
2081   if (!is_in_error_state()) {
2082     bool bad = false;
2083     for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) {
2084       if (sup->is_in_error_state()) {
2085         bad = true;
2086         break;
2087       }
2088     }
2089     if (!bad) {
2090       Array<Klass*>* interfaces = transitive_interfaces();
2091       for (int i = 0; i < interfaces->length(); i++) {
2092         Klass* iface = interfaces->at(i);
2093         if (InstanceKlass::cast(iface)->is_in_error_state()) {


2419 // See JLS 3rd edition 8.4.6.1
2420 // Assumes name-signature match
2421 // "this" is InstanceKlass of super_method which must exist
2422 // note that the InstanceKlass of the method in the targetclassname has not always been created yet
2423 bool InstanceKlass::is_override(const methodHandle& super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) {
2424    // Private methods can not be overridden
2425    if (super_method->is_private()) {
2426      return false;
2427    }
2428    // If super method is accessible, then override
2429    if ((super_method->is_protected()) ||
2430        (super_method->is_public())) {
2431      return true;
2432    }
2433    // Package-private methods are not inherited outside of package
2434    assert(super_method->is_package_private(), "must be package private");
2435    return(is_same_class_package(targetclassloader(), targetclassname));
2436 }
2437 
2438 /* defined for now in jvm.cpp, for historical reasons *--
2439 Klass* InstanceKlass::compute_enclosing_class_impl(InstanceKlass* self,
2440                                                      Symbol*& simple_name_result, TRAPS) {
2441   ...
2442 }
2443 */
2444 
2445 // Only boot and platform class loaders can define classes in "java/" packages.
2446 void InstanceKlass::check_prohibited_package(Symbol* class_name,
2447                                              Handle class_loader,
2448                                              TRAPS) {
2449   ResourceMark rm(THREAD);
2450   if (!class_loader.is_null() &&
2451       !SystemDictionary::is_platform_class_loader(class_loader()) &&
2452       class_name != NULL &&
2453       strncmp(class_name->as_C_string(), JAVAPKG, JAVAPKG_LEN) == 0) {
2454     TempNewSymbol pkg_name = InstanceKlass::package_from_name(class_name, CHECK);
2455     assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'");
2456     char* name = pkg_name->as_C_string();
2457     const char* class_loader_name = SystemDictionary::loader_name(class_loader());
2458     StringUtils::replace_no_expand(name, "/", ".");
2459     const char* msg_text1 = "Class loader (instance of): ";


2493   }
2494 
2495   // Now do the same for class2.
2496   const InstanceKlass* outer2 = InstanceKlass::cast(class2);
2497   for (;;) {
2498     bool ignore_inner_is_member;
2499     Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member,
2500                                                     CHECK_false);
2501     if (next == NULL)  break;
2502     // Might as well check the new outer against all available values.
2503     if (next == class1)  return true;
2504     if (next == outer1)  return true;
2505     outer2 = InstanceKlass::cast(next);
2506   }
2507 
2508   // If by this point we have not found an equality between the
2509   // two classes, we know they are in separate package members.
2510   return false;
2511 }
2512 
2513 bool InstanceKlass::find_inner_classes_attr(const InstanceKlass* k, int* ooff, int* noff, TRAPS) {
2514   constantPoolHandle i_cp(THREAD, k->constants());
2515   for (InnerClassesIterator iter(k); !iter.done(); iter.next()) {
2516     int ioff = iter.inner_class_info_index();
2517     if (ioff != 0) {
2518       // Check to see if the name matches the class we're looking for
2519       // before attempting to find the class.
2520       if (i_cp->klass_name_at_matches(k, ioff)) {
2521         Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
2522         if (k == inner_klass) {
2523           *ooff = iter.outer_class_info_index();
2524           *noff = iter.inner_name_index();
2525           return true;
2526         }
2527       }
2528     }
2529   }
2530   return false;
2531 }
2532 
2533 InstanceKlass* InstanceKlass::compute_enclosing_class_impl(const InstanceKlass* k,
2534                                                            bool* inner_is_member,
2535                                                            TRAPS) {
2536   InstanceKlass* outer_klass = NULL;
2537   *inner_is_member = false;
2538   int ooff = 0, noff = 0;
2539   if (find_inner_classes_attr(k, &ooff, &noff, THREAD)) {
2540     constantPoolHandle i_cp(THREAD, k->constants());
2541     if (ooff != 0) {
2542       Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);


2551         outer_klass = InstanceKlass::cast(ok);
2552         *inner_is_member = false;
2553       }
2554     }
2555   }
2556 
2557   // If no inner class attribute found for this class.
2558   if (NULL == outer_klass) return NULL;
2559 
2560   // Throws an exception if outer klass has not declared k as an inner klass
2561   // We need evidence that each klass knows about the other, or else
2562   // the system could allow a spoof of an inner class to gain access rights.
2563   Reflection::check_for_inner_class(outer_klass, k, *inner_is_member, CHECK_NULL);
2564   return outer_klass;
2565 }
2566 
2567 jint InstanceKlass::compute_modifier_flags(TRAPS) const {
2568   jint access = access_flags().as_int();
2569 
2570   // But check if it happens to be member class.
2571   InnerClassesIterator iter(this);

2572   for (; !iter.done(); iter.next()) {
2573     int ioff = iter.inner_class_info_index();
2574     // Inner class attribute can be zero, skip it.
2575     // Strange but true:  JVM spec. allows null inner class refs.
2576     if (ioff == 0) continue;
2577 
2578     // only look at classes that are already loaded
2579     // since we are looking for the flags for our self.
2580     Symbol* inner_name = constants()->klass_name_at(ioff);
2581     if ((name() == inner_name)) {
2582       // This is really a member class.
2583       access = iter.inner_access_flags();
2584       break;
2585     }
2586   }
2587   // Remember to strip ACC_SUPER bit
2588   return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
2589 }
2590 
2591 jint InstanceKlass::jvmti_class_status() const {
2592   jint result = 0;
2593 
2594   if (is_linked()) {
2595     result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
2596   }
2597 
2598   if (is_initialized()) {
2599     assert(is_linked(), "Class status is not consistent");
2600     result |= JVMTI_CLASS_STATUS_INITIALIZED;
2601   }


3579           // The previous loop may not find a matching EMCP method, but
3580           // that doesn't mean that we can optimize and not go any
3581           // further back in the PreviousVersion generations. The EMCP
3582           // method for this generation could have already been made obsolete,
3583           // but there still may be an older EMCP method that has not
3584           // been made obsolete.
3585         }
3586 
3587         if (++local_count >= obsolete_method_count) {
3588           // no more obsolete methods so bail out now
3589           break;
3590         }
3591       }
3592     }
3593   }
3594 }
3595 
// Save the scratch_class as the previous version if any of its methods are
// still running.  The previous_versions list is used to set breakpoints in
// EMCP (Equivalent Modulo Constant Pool) methods and is also used to clean
// MethodData links to redefined methods that are no longer running.
//
// scratch_class     - holds the old (pre-redefinition) methods/constant pool
// emcp_method_count - number of old methods that are EMCP w.r.t. new versions
void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
                                         int emcp_method_count) {
  // Previous-version bookkeeping mutates shared class metadata; it is
  // restricted to the VM thread (see RedefineClasses VM operation).
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  ResourceMark rm;
  log_trace(redefine, class, iklass, add)
    ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);

  // Clean out old previous versions of this class that are no longer needed.
  purge_previous_version_list();

  // Mark newly obsolete methods in remaining previous versions.  An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
    // For debugging purposes.
    scratch_class->set_is_scratch_class();
    // Nothing references the old version; queue it for deallocation by its loader data.
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
    return;
  }

  if (emcp_method_count != 0) {
    // At least one method is still running, check for EMCP methods
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (!old_method->is_obsolete() && old_method->on_stack()) {
        // if EMCP method (not obsolete) is on the stack, mark as EMCP so that
        // we can add breakpoints for it.

        // We set the method->on_stack bit during safepoints for class redefinition
        // and use this bit to set the is_running_emcp bit.
        // After the safepoint, the on_stack bit is cleared and the running emcp
        // method may exit.   If so, we would set a breakpoint in a method that
        // is never reached, but this won't be noticeable to the programmer.
        old_method->set_running_emcp(true);
        log_trace(redefine, class, iklass, add)
          ("EMCP method %s is on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
      } else if (!old_method->is_obsolete()) {
        log_trace(redefine, class, iklass, add)
          ("EMCP method %s is NOT on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
      }
    }
  }

  // Add previous version if any methods are still running.
  // Set has_previous_version flag for processing during class unloading.
  _has_previous_versions = true;
  log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
  assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
  // Link scratch_class at the head of this class's previous-version chain.
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class);
} // end add_previous_version()
3660 
3661 #endif // INCLUDE_JVMTI
3662 
3663 Method* InstanceKlass::method_with_idnum(int idnum) {
3664   Method* m = NULL;
3665   if (idnum < methods()->length()) {
3666     m = methods()->at(idnum);
3667   }
3668   if (m == NULL || m->method_idnum() != idnum) {
3669     for (int index = 0; index < methods()->length(); ++index) {
3670       m = methods()->at(index);
3671       if (m->method_idnum() == idnum) {
3672         return m;
3673       }
3674     }
3675     // None found, return null for the caller to handle.
3676     return NULL;
3677   }
3678   return m;


< prev index next >