1 /*
   2  * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/javaClasses.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/verifier.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "gc_implementation/shared/markSweep.inline.hpp"
  32 #include "gc_interface/collectedHeap.inline.hpp"
  33 #include "interpreter/oopMapCache.hpp"
  34 #include "interpreter/rewriter.hpp"
  35 #include "jvmtifiles/jvmti.h"
  36 #include "memory/genOopClosures.inline.hpp"
  37 #include "memory/heapInspection.hpp"
  38 #include "memory/metadataFactory.hpp"
  39 #include "memory/oopFactory.hpp"
  40 #include "oops/fieldStreams.hpp"
  41 #include "oops/instanceClassLoaderKlass.hpp"
  42 #include "oops/instanceKlass.hpp"
  43 #include "oops/instanceMirrorKlass.hpp"
  44 #include "oops/instanceOop.hpp"
  45 #include "oops/klass.inline.hpp"
  46 #include "oops/method.hpp"
  47 #include "oops/oop.inline.hpp"
  48 #include "oops/symbol.hpp"
  49 #include "prims/jvmtiExport.hpp"
  50 #include "prims/jvmtiRedefineClassesTrace.hpp"
  51 #include "prims/jvmtiRedefineClasses.hpp"
  52 #include "prims/methodComparator.hpp"
  53 #include "runtime/fieldDescriptor.hpp"
  54 #include "runtime/handles.inline.hpp"
  55 #include "runtime/javaCalls.hpp"
  56 #include "runtime/mutexLocker.hpp"
  57 #include "runtime/thread.inline.hpp"
  58 #include "services/classLoadingService.hpp"
  59 #include "services/threadService.hpp"
  60 #include "utilities/dtrace.hpp"
  61 #include "utilities/macros.hpp"
  62 #if INCLUDE_ALL_GCS
  63 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
  64 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  65 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
  66 #include "gc_implementation/g1/g1RemSet.inline.hpp"
  67 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
  68 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
  69 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.inline.hpp"
  70 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
  71 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
  72 #include "oops/oop.pcgc.inline.hpp"
  73 #endif // INCLUDE_ALL_GCS
  74 #ifdef COMPILER1
  75 #include "c1/c1_Compiler.hpp"
  76 #endif
  77 
#ifdef DTRACE_ENABLED

// DTrace probes for the class initialization state machine (fired from
// initialize() / initialize_impl()).  Payload: UTF-8 class name bytes,
// name length, class loader oop, and a thread type id; the *_WAIT variant
// additionally reports whether this thread had to wait on the init lock.

#ifndef USDT2

// Legacy (pre-USDT2) probe declarations.
HS_DTRACE_PROBE_DECL4(hotspot, class__initialization__required,
  char*, intptr_t, oop, intptr_t);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__recursive,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__concurrent,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__erroneous,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__super__failed,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__clinit,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__error,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__end,
  char*, intptr_t, oop, intptr_t, int);

// Fires the 4-argument probe; 'type' is token-pasted into the probe name.
// 'data'/'len' stay NULL/0 if the class has no name Symbol.
#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)          \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HS_DTRACE_PROBE4(hotspot, class__initialization__##type,     \
      data, len, (clss)->class_loader(), thread_type);           \
  }

// Same as DTRACE_CLASSINIT_PROBE plus the 'wait' flag as the fifth argument.
#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HS_DTRACE_PROBE5(hotspot, class__initialization__##type,     \
      data, len, (clss)->class_loader(), thread_type, wait);     \
  }
#else /* USDT2 */

// USDT2 generates upper-case probe macros; these aliases let the token-pasted
// lower-case 'type' argument resolve to the generated probe names.
#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)          \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (clss)->class_loader(), thread_type);           \
  }

#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (clss)->class_loader(), thread_type, wait);     \
  }
#endif /* USDT2 */

#else //  ndef DTRACE_ENABLED

// No-op stubs when DTrace support is compiled out.
#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait)

#endif //  ndef DTRACE_ENABLED
 167 
// Running count of live InstanceKlass instances; incremented atomically in
// allocate_instance_klass().  (The matching decrement is not in this chunk.)
volatile int InstanceKlass::_total_instanceKlass_count = 0;
 169 
// Allocates and constructs the appropriate InstanceKlass subtype in the
// given ClassLoaderData's metaspace:
//   - InstanceMirrorKlass       for java.lang.Class
//   - InstanceClassLoaderKlass  for java.lang.ClassLoader and its subtypes
//   - InstanceRefKlass          for reference types (rt != REF_NONE)
//   - InstanceKlass             for everything else
// On success the klass is registered with loader_data and the global count
// is bumped; returns NULL with a pending exception (e.g. OOM) on failure.
InstanceKlass* InstanceKlass::allocate_instance_klass(
                                              ClassLoaderData* loader_data,
                                              int vtable_len,
                                              int itable_len,
                                              int static_field_size,
                                              int nonstatic_oop_map_size,
                                              ReferenceType rt,
                                              AccessFlags access_flags,
                                              Symbol* name,
                                              Klass* super_klass,
                                              bool is_anonymous,
                                              TRAPS) {

  int size = InstanceKlass::size(vtable_len, itable_len, nonstatic_oop_map_size,
                                 access_flags.is_interface(), is_anonymous);

  // Allocation
  InstanceKlass* ik;
  if (rt == REF_NONE) {
    if (name == vmSymbols::java_lang_Class()) {
      ik = new (loader_data, size, THREAD) InstanceMirrorKlass(
        vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt,
        access_flags, is_anonymous);
    } else if (name == vmSymbols::java_lang_ClassLoader() ||
          (SystemDictionary::ClassLoader_klass_loaded() &&
          super_klass != NULL &&
          super_klass->is_subtype_of(SystemDictionary::ClassLoader_klass()))) {
      ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(
        vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt,
        access_flags, is_anonymous);
    } else {
      // normal class
      ik = new (loader_data, size, THREAD) InstanceKlass(
        vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt,
        access_flags, is_anonymous);
    }
  } else {
    // reference klass
    ik = new (loader_data, size, THREAD) InstanceRefKlass(
        vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt,
        access_flags, is_anonymous);
  }

  // Check for pending exception before adding to the loader data and incrementing
  // class count.  Can get OOM here.
  if (HAS_PENDING_EXCEPTION) {
    return NULL;
  }

  // Add all classes to our internal class loader list here,
  // including classes in the bootstrap (NULL) class loader.
  loader_data->add_class(ik);

  Atomic::inc(&_total_instanceKlass_count);
  return ik;
}
 226 
 227 
 228 // copy method ordering from resource area to Metaspace
 229 void InstanceKlass::copy_method_ordering(intArray* m, TRAPS) {
 230   if (m != NULL) {
 231     // allocate a new array and copy contents (memcpy?)
 232     _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
 233     for (int i = 0; i < m->length(); i++) {
 234       _method_ordering->at_put(i, m->at(i));
 235     }
 236   } else {
 237     _method_ordering = Universe::the_empty_int_array();
 238   }
 239 }
 240 
 241 
 242 InstanceKlass::InstanceKlass(int vtable_len,
 243                              int itable_len,
 244                              int static_field_size,
 245                              int nonstatic_oop_map_size,
 246                              ReferenceType rt,
 247                              AccessFlags access_flags,
 248                              bool is_anonymous) {
 249   No_Safepoint_Verifier no_safepoint; // until k becomes parsable
 250 
 251   int iksize = InstanceKlass::size(vtable_len, itable_len, nonstatic_oop_map_size,
 252                                    access_flags.is_interface(), is_anonymous);
 253 
 254   set_vtable_length(vtable_len);
 255   set_itable_length(itable_len);
 256   set_static_field_size(static_field_size);
 257   set_nonstatic_oop_map_size(nonstatic_oop_map_size);
 258   set_access_flags(access_flags);
 259   _misc_flags = 0;  // initialize to zero
 260   set_is_anonymous(is_anonymous);
 261   assert(size() == iksize, "wrong size for object");
 262 
 263   set_array_klasses(NULL);
 264   set_methods(NULL);
 265   set_method_ordering(NULL);
 266   set_local_interfaces(NULL);
 267   set_transitive_interfaces(NULL);
 268   init_implementor();
 269   set_fields(NULL, 0);
 270   set_constants(NULL);
 271   set_class_loader_data(NULL);
 272   set_source_file_name_index(0);
 273   set_source_debug_extension(NULL, 0);
 274   set_array_name(NULL);
 275   set_inner_classes(NULL);
 276   set_static_oop_field_count(0);
 277   set_nonstatic_field_size(0);
 278   set_is_marked_dependent(false);
 279   set_init_state(InstanceKlass::allocated);
 280   set_init_thread(NULL);
 281   set_reference_type(rt);
 282   set_oop_map_cache(NULL);
 283   set_jni_ids(NULL);
 284   set_osr_nmethods_head(NULL);
 285   set_breakpoints(NULL);
 286   init_previous_versions();
 287   set_generic_signature_index(0);
 288   release_set_methods_jmethod_ids(NULL);
 289   release_set_methods_cached_itable_indices(NULL);
 290   set_annotations(NULL);
 291   set_jvmti_cached_class_field_map(NULL);
 292   set_initial_method_idnum(0);
 293   _dependencies = NULL;
 294   set_jvmti_cached_class_field_map(NULL);
 295   set_cached_class_file(NULL);
 296   set_initial_method_idnum(0);
 297   set_minor_version(0);
 298   set_major_version(0);
 299   NOT_PRODUCT(_verify_count = 0;)
 300 
 301   // initialize the non-header words to zero
 302   intptr_t* p = (intptr_t*)this;
 303   for (int index = InstanceKlass::header_size(); index < iksize; index++) {
 304     p[index] = NULL_WORD;
 305   }
 306 
 307   // Set temporary value until parseClassFile updates it with the real instance
 308   // size.
 309   set_layout_helper(Klass::instance_layout_helper(0, true));
 310 }
 311 
 312 
 313 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
 314                                        Array<Method*>* methods) {
 315   if (methods != NULL && methods != Universe::the_empty_method_array()) {
 316     for (int i = 0; i < methods->length(); i++) {
 317       Method* method = methods->at(i);
 318       if (method == NULL) continue;  // maybe null if error processing
 319       // Only want to delete methods that are not executing for RedefineClasses.
 320       // The previous version will point to them so they're not totally dangling
 321       assert (!method->on_stack(), "shouldn't be called with methods on stack");
 322       MetadataFactory::free_metadata(loader_data, method);
 323     }
 324     MetadataFactory::free_array<Method*>(loader_data, methods);
 325   }
 326 }
 327 
 328 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
 329                                           Klass* super_klass,
 330                                           Array<Klass*>* local_interfaces,
 331                                           Array<Klass*>* transitive_interfaces) {
 332   // Only deallocate transitive interfaces if not empty, same as super class
 333   // or same as local interfaces.  See code in parseClassFile.
 334   Array<Klass*>* ti = transitive_interfaces;
 335   if (ti != Universe::the_empty_klass_array() && ti != local_interfaces) {
 336     // check that the interfaces don't come from super class
 337     Array<Klass*>* sti = (super_klass == NULL) ? NULL :
 338                     InstanceKlass::cast(super_klass)->transitive_interfaces();
 339     if (ti != sti) {
 340       MetadataFactory::free_array<Klass*>(loader_data, ti);
 341     }
 342   }
 343 
 344   // local interfaces can be empty
 345   if (local_interfaces != Universe::the_empty_klass_array()) {
 346     MetadataFactory::free_array<Klass*>(loader_data, local_interfaces);
 347   }
 348 }
 349 
// This function deallocates the metadata and C heap pointers that the
// InstanceKlass points to.
// Order matters: the mirror is orphaned first, the klass is removed from
// the loader list before its metadata is freed, and arrays shared with
// canonical empty sentinels or with other arrays are never freed.
void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {

  // Orphan the mirror first, CMS thinks it's still live.
  if (java_mirror() != NULL) {
    java_lang_Class::set_klass(java_mirror(), NULL);
  }

  // Need to take this class off the class loader data list.
  loader_data->remove_class(this);

  // The array_klass for this class is created later, after error handling.
  // For class redefinition, we keep the original class so this scratch class
  // doesn't have an array class.  Either way, assert that there is nothing
  // to deallocate.
  assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");

  // Release C heap allocated data that this might point to, which includes
  // reference counting symbol names.
  release_C_heap_structures();

  deallocate_methods(loader_data, methods());
  set_methods(NULL);

  // Only free the ordering array when it is not the shared empty sentinel.
  if (method_ordering() != Universe::the_empty_int_array()) {
    MetadataFactory::free_array<int>(loader_data, method_ordering());
  }
  set_method_ordering(NULL);

  // This array is in Klass, but remove it with the InstanceKlass since
  // this place would be the only caller and it can share memory with transitive
  // interfaces.
  if (secondary_supers() != Universe::the_empty_klass_array() &&
      secondary_supers() != transitive_interfaces()) {
    MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
  }
  set_secondary_supers(NULL);

  deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
  set_transitive_interfaces(NULL);
  set_local_interfaces(NULL);

  // The fields array is freed unconditionally (no shared-sentinel check here).
  MetadataFactory::free_array<jushort>(loader_data, fields());
  set_fields(NULL, 0);

  // If a method from a redefined class is using this constant pool, don't
  // delete it, yet.  The new class's previous version will point to this.
  if (constants() != NULL) {
    assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
    MetadataFactory::free_metadata(loader_data, constants());
    set_constants(NULL);
  }

  if (inner_classes() != Universe::the_empty_short_array()) {
    MetadataFactory::free_array<jushort>(loader_data, inner_classes());
  }
  set_inner_classes(NULL);

  // We should deallocate the Annotations instance
  MetadataFactory::free_metadata(loader_data, annotations());
  set_annotations(NULL);
}
 413 
 414 bool InstanceKlass::should_be_initialized() const {
 415   return !is_initialized();
 416 }
 417 
 418 klassVtable* InstanceKlass::vtable() const {
 419   return new klassVtable(this, start_of_vtable(), vtable_length() / vtableEntry::size());
 420 }
 421 
 422 klassItable* InstanceKlass::itable() const {
 423   return new klassItable(instanceKlassHandle(this));
 424 }
 425 
 426 void InstanceKlass::eager_initialize(Thread *thread) {
 427   if (!EagerInitialization) return;
 428 
 429   if (this->is_not_initialized()) {
 430     // abort if the the class has a class initializer
 431     if (this->class_initializer() != NULL) return;
 432 
 433     // abort if it is java.lang.Object (initialization is handled in genesis)
 434     Klass* super = this->super();
 435     if (super == NULL) return;
 436 
 437     // abort if the super class should be initialized
 438     if (!InstanceKlass::cast(super)->is_initialized()) return;
 439 
 440     // call body to expose the this pointer
 441     instanceKlassHandle this_oop(thread, this);
 442     eager_initialize_impl(this_oop);
 443   }
 444 }
 445 
 446 // JVMTI spec thinks there are signers and protection domain in the
 447 // instanceKlass.  These accessors pretend these fields are there.
 448 // The hprof specification also thinks these fields are in InstanceKlass.
 449 oop InstanceKlass::protection_domain() const {
 450   // return the protection_domain from the mirror
 451   return java_lang_Class::protection_domain(java_mirror());
 452 }
 453 
 454 // To remove these from requires an incompatible change and CCC request.
 455 objArrayOop InstanceKlass::signers() const {
 456   // return the signers from the mirror
 457   return java_lang_Class::signers(java_mirror());
 458 }
 459 
 460 volatile oop InstanceKlass::init_lock() const {
 461   // return the init lock from the mirror
 462   return java_lang_Class::init_lock(java_mirror());
 463 }
 464 
// Fast-path initialization for classes without a class initializer:
// under the init lock, link the class and, if linking succeeds, mark it
// fully_initialized directly (skipping the full initialize_impl state
// machine).  Caller (eager_initialize) has already checked the
// preconditions.  Swallows any linking exception and rolls the state back.
void InstanceKlass::eager_initialize_impl(instanceKlassHandle this_oop) {
  EXCEPTION_MARK;
  volatile oop init_lock = this_oop->init_lock();
  ObjectLocker ol(init_lock, THREAD);

  // abort if someone beat us to the initialization
  if (!this_oop->is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = this_oop->init_state();
  link_class_impl(this_oop, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  Set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if( old_state != this_oop->_init_state )
      this_oop->set_init_state (old_state);
  } else {
    // linking successful, mark class as initialized
    this_oop->set_init_state (fully_initialized);
    // trace
    if (TraceClassInitialization) {
      ResourceMark rm(THREAD);
      tty->print_cr("[Initialized %s without side effects]", this_oop->external_name());
    }
  }
}
 494 
 495 
// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refer to the procedure described in that section.
// Note: implementation moved to static method to expose the this pointer.
 499 void InstanceKlass::initialize(TRAPS) {
 500   if (this->should_be_initialized()) {
 501     HandleMark hm(THREAD);
 502     instanceKlassHandle this_oop(THREAD, this);
 503     initialize_impl(this_oop, CHECK);
 504     // Note: at this point the class may be initialized
 505     //       OR it may be in the state of being initialized
 506     //       in case of recursive initialization!
 507   } else {
 508     assert(is_initialized(), "sanity check");
 509   }
 510 }
 511 
 512 
 513 bool InstanceKlass::verify_code(
 514     instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
 515   // 1) Verify the bytecodes
 516   Verifier::Mode mode =
 517     throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
 518   return Verifier::verify(this_oop, mode, this_oop->should_verify_class(), CHECK_false);
 519 }
 520 
 521 
// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.

// Drops the class back to the 'loaded' state.  Assigns _init_state
// directly, presumably because set_init_state() asserts that state
// changes make progress and this is a deliberate backward transition
// — TODO confirm against set_init_state().
void InstanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}
 529 
 530 void InstanceKlass::link_class(TRAPS) {
 531   assert(is_loaded(), "must be loaded");
 532   if (!is_linked()) {
 533     HandleMark hm(THREAD);
 534     instanceKlassHandle this_oop(THREAD, this);
 535     link_class_impl(this_oop, true, CHECK);
 536   }
 537 }
 538 
 539 // Called to verify that a class can link during initialization, without
 540 // throwing a VerifyError.
 541 bool InstanceKlass::link_class_or_fail(TRAPS) {
 542   assert(is_loaded(), "must be loaded");
 543   if (!is_linked()) {
 544     HandleMark hm(THREAD);
 545     instanceKlassHandle this_oop(THREAD, this);
 546     link_class_impl(this_oop, false, CHECK_false);
 547   }
 548   return is_linked();
 549 }
 550 
// Core linking routine: links the super class and local interfaces first
// (recursively), then — under the init lock — verifies, rewrites, links
// method entry points and initializes the vtable/itable, finally moving
// the class to the 'linked' state.  Returns true once the class is linked,
// false if verification fails with throw_verifyerror == false; may also
// throw (NoClassDefFoundError for classes in the error state,
// IncompatibleClassChangeError for an interface super, or errors
// propagated from recursive linking via CHECK_false).
bool InstanceKlass::link_class_impl(
    instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
  // check for error state
  if (this_oop->is_in_error_state()) {
    ResourceMark rm(THREAD);
    THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
               this_oop->external_name(), false);
  }
  // return if already verified
  if (this_oop->is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;

  // link super class before linking this class
  instanceKlassHandle super(THREAD, this_oop->super());
  if (super.not_null()) {
    if (super->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        this_oop->external_name(),
        super->external_name()
      );
      return false;
    }

    link_class_impl(super, throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<Klass*>* interfaces = this_oop->local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    HandleMark hm(THREAD);
    instanceKlassHandle ih(THREAD, interfaces->at(index));
    link_class_impl(ih, throw_verifyerror, CHECK_false);
  }

  // in case the class is linked in the process of linking its superclasses
  if (this_oop->is_linked()) {
    return true;
  }

  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    volatile oop init_lock = this_oop->init_lock();
    ObjectLocker ol(init_lock, THREAD);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten

    // Re-check linked state under the lock (another thread may have won).
    if (!this_oop->is_linked()) {
      if (!this_oop->is_rewritten()) {
        {
          // Timer includes any side effects of class verification (resolution,
          // etc), but not recursive entry into verify_code().
          PerfClassTraceTime timer(ClassLoader::perf_class_verify_time(),
                                   ClassLoader::perf_class_verify_selftime(),
                                   ClassLoader::perf_classes_verified(),
                                   jt->get_thread_stat()->perf_recursion_counts_addr(),
                                   jt->get_thread_stat()->perf_timers_addr(),
                                   PerfClassTraceTime::CLASS_VERIFY);
          bool verify_ok = verify_code(this_oop, throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (this_oop->is_linked()) {
          return true;
        }

        // also sets rewritten
        this_oop->rewrite_class(CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      this_oop->link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      if (!this_oop()->is_shared()) {
        ResourceMark rm(THREAD);
        this_oop->vtable()->initialize_vtable(true, CHECK_false);
        this_oop->itable()->initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        ResourceMark rm(THREAD);
        this_oop->vtable()->verify(tty, true);
        // In case itable verification is ever added.
        // this_oop->itable()->verify(tty, true);
      }
#endif
      this_oop->set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this_oop());
      }
    }
  }
  return true;
}
 676 
 677 
 678 // Rewrite the byte codes of all of the methods of a class.
 679 // The rewriter must be called exactly once. Rewriting must happen after
 680 // verification but before the first method of the class is executed.
 681 void InstanceKlass::rewrite_class(TRAPS) {
 682   assert(is_loaded(), "must be loaded");
 683   instanceKlassHandle this_oop(THREAD, this);
 684   if (this_oop->is_rewritten()) {
 685     assert(this_oop()->is_shared(), "rewriting an unshared class?");
 686     return;
 687   }
 688   Rewriter::rewrite(this_oop, CHECK);
 689   this_oop->set_rewritten();
 690 }
 691 
// Now relocate and link method entry points after class is rewritten.
// This is outside is_rewritten flag. In case of an exception, it can be
// executed more than once.
// Iterates the methods array in reverse; an exception from link_method
// propagates immediately via CHECK.
void InstanceKlass::link_methods(TRAPS) {
  int len = methods()->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, methods()->at(i));

    // Set up method entry points for compiler and interpreter.
    m->link_method(m, CHECK);

    // This is for JVMTI and unrelated to relocator but the last thing we do
#ifdef ASSERT
    // Debug-only stress test: compare each method against itself and its
    // four predecessors; a method must always compare EMCP-equal to itself.
    if (StressMethodComparator) {
      ResourceMark rm(THREAD);
      static int nmc = 0;
      for (int j = i; j >= 0 && j >= i-4; j--) {
        if ((++nmc % 1000) == 0)  tty->print_cr("Have run MethodComparator %d times...", nmc);
        bool z = MethodComparator::methods_EMCP(m(),
                   methods()->at(j));
        if (j == i && !z) {
          tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
          assert(z, "method must compare equal to itself");
        }
      }
    }
#endif //ASSERT
  }
}
 721 
 722 
void InstanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
  // Runs the class initialization procedure of JVMS 5.5; the numbered
  // "Step" comments below follow the specification's numbering.
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_oop->link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, InstanceKlass::cast(this_oop()), -1);

  bool wait = false;  // records whether we blocked on another initializer

  // refer to the JVM book page 47 for description of steps
  // Step 1: synchronize on the class's init lock
  {
    volatile oop init_lock = this_oop->init_lock();
    ObjectLocker ol(init_lock, THREAD);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2: wait while another thread is mid-initialization.
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while(this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) {
        wait = true;
      ol.waitUninterruptibly(CHECK);
    }

    // Step 3: recursive initialization by the same thread - nothing to do
    if (this_oop->is_being_initialized() && this_oop->is_reentrant_initialization(self)) {
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, InstanceKlass::cast(this_oop()), -1,wait);
      return;
    }

    // Step 4: another thread finished the initialization while we waited
    if (this_oop->is_initialized()) {
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, InstanceKlass::cast(this_oop()), -1,wait);
      return;
    }

    // Step 5: a previous attempt failed - throw NoClassDefFoundError
    if (this_oop->is_in_error_state()) {
      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, InstanceKlass::cast(this_oop()), -1,wait);
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = this_oop->external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      char* message = NEW_RESOURCE_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6: claim the initialization for the current thread
    this_oop->set_init_state(being_initialized);
    this_oop->set_init_thread(self);
  }
  // init lock is released here; other threads now block in Step 2 above

  // Step 7: initialize the superclass first (skipped for interfaces)
  Klass* super_klass = this_oop->super();
  if (super_klass != NULL && !this_oop->is_interface() && super_klass->should_be_initialized()) {
    super_klass->initialize(THREAD);

    if (HAS_PENDING_EXCEPTION) {
      // Superclass failed: mark this class erroneous, wake waiters, and
      // rethrow the superclass's exception.
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        this_oop->set_initialization_state_and_notify(initialization_error, THREAD); // Locks object, set state, and notify all waiting threads
        CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, superclass initialization error is thrown below
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, InstanceKlass::cast(this_oop()), -1,wait);
      THROW_OOP(e());
    }
  }

  if (this_oop->has_default_methods()) {
    // Step 7.5: initialize any interfaces which have default methods
    for (int i = 0; i < this_oop->local_interfaces()->length(); ++i) {
      Klass* iface = this_oop->local_interfaces()->at(i);
      InstanceKlass* ik = InstanceKlass::cast(iface);
      if (ik->has_default_methods() && ik->should_be_initialized()) {
        ik->initialize(THREAD);

        if (HAS_PENDING_EXCEPTION) {
          // Same failure protocol as Step 7 above.
          Handle e(THREAD, PENDING_EXCEPTION);
          CLEAR_PENDING_EXCEPTION;
          {
            EXCEPTION_MARK;
            // Locks object, set state, and notify all waiting threads
            this_oop->set_initialization_state_and_notify(
                initialization_error, THREAD);

            // ignore any exception thrown, superclass initialization error is
            // thrown below
            CLEAR_PENDING_EXCEPTION;
          }
          DTRACE_CLASSINIT_PROBE_WAIT(
              super__failed, InstanceKlass::cast(this_oop()), -1, wait);
          THROW_OOP(e());
        }
      }
    }
  }

  // Step 8: execute the class initializer <clinit>
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, InstanceKlass::cast(this_oop()), -1,wait);
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                             ClassLoader::perf_class_init_selftime(),
                             ClassLoader::perf_classes_inited(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_CLINIT);
    this_oop->call_class_initializer(THREAD);
  }

  // Step 9: success - mark fully_initialized and notify all waiters
  if (!HAS_PENDING_EXCEPTION) {
    this_oop->set_initialization_state_and_notify(fully_initialized, CHECK);
    { ResourceMark rm(THREAD);
      debug_only(this_oop->vtable()->verify(tty, true);)
    }
  }
  else {
    // Step 10 and 11: <clinit> threw - mark erroneous, notify waiters, and
    // rethrow (wrapped in ExceptionInInitializerError unless it is an Error)
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    {
      EXCEPTION_MARK;
      this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, InstanceKlass::cast(this_oop()), -1,wait);
    if (e->is_a(SystemDictionary::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, InstanceKlass::cast(this_oop()), -1,wait);
}
 874 
 875 
 876 // Note: implementation moved to static method to expose the this pointer.
 877 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
 878   instanceKlassHandle kh(THREAD, this);
 879   set_initialization_state_and_notify_impl(kh, state, CHECK);
 880 }
 881 
void InstanceKlass::set_initialization_state_and_notify_impl(instanceKlassHandle this_oop, ClassState state, TRAPS) {
  // Change the init state under the class's init lock and wake every
  // thread waiting on it (see Step 2 of initialize_impl).
  volatile oop init_lock = this_oop->init_lock();
  ObjectLocker ol(init_lock, THREAD);
  this_oop->set_init_state(state);
  ol.notify_all(CHECK);
}
 888 
 889 // The embedded _implementor field can only record one implementor.
 890 // When there are more than one implementors, the _implementor field
 891 // is set to the interface Klass* itself. Following are the possible
 892 // values for the _implementor field:
 893 //   NULL                  - no implementor
 894 //   implementor Klass*    - one implementor
 895 //   self                  - more than one implementor
 896 //
 897 // The _implementor field only exists for interfaces.
void InstanceKlass::add_implementor(Klass* k) {
  // Record k as an implementor of this interface (and, recursively, of
  // all interfaces this one extends). Caller must hold the Compile_lock.
  assert(Compile_lock->owned_by_self(), "");
  assert(is_interface(), "not interface");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (InstanceKlass::cast(k)->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  Klass* sk = InstanceKlass::cast(k)->super();
  if (sk != NULL && InstanceKlass::cast(sk)->implements_interface(this))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  Klass* ik = implementor();
  if (ik == NULL) {
    set_implementor(k);
  } else if (ik != this) {
    // There is already an implementor. Use itself as an indicator of
    // more than one implementors.
    set_implementor(this);
  }
  // (ik == this means "many implementors" is already recorded: no change)

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    InstanceKlass::cast(local_interfaces()->at(index))->add_implementor(k);
  }
}
 929 
 930 void InstanceKlass::init_implementor() {
 931   if (is_interface()) {
 932     set_implementor(NULL);
 933   }
 934 }
 935 
 936 
 937 void InstanceKlass::process_interfaces(Thread *thread) {
 938   // link this class into the implementors list of every interface it implements
 939   Klass* this_as_klass_oop = this;
 940   for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
 941     assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
 942     InstanceKlass* interf = InstanceKlass::cast(local_interfaces()->at(i));
 943     assert(interf->is_interface(), "expected interface");
 944     interf->add_implementor(this_as_klass_oop);
 945   }
 946 }
 947 
 948 bool InstanceKlass::can_be_primary_super_slow() const {
 949   if (is_interface())
 950     return false;
 951   else
 952     return Klass::can_be_primary_super_slow();
 953 }
 954 
GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots) {
  // The secondaries are the implemented interfaces.
  // Returns NULL when an existing array was installed directly;
  // otherwise returns a growable copy for the caller to finish and install.
  InstanceKlass* ik = InstanceKlass::cast(this);
  Array<Klass*>* interfaces = ik->transitive_interfaces();
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Must share this for correct bootstrapping!
    set_secondary_supers(Universe::the_empty_klass_array());
    return NULL;
  } else if (num_extra_slots == 0) {
    // The secondary super list is exactly the same as the transitive interfaces.
    // Redefine classes has to be careful not to delete this!
    set_secondary_supers(interfaces);
    return NULL;
  } else {
    // Copy transitive interfaces to a temporary growable array to be constructed
    // into the secondary super list with extra slots.
    GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
    for (int i = 0; i < interfaces->length(); i++) {
      secondaries->push(interfaces->at(i));
    }
    return secondaries;
  }
}
 979 
 980 bool InstanceKlass::compute_is_subtype_of(Klass* k) {
 981   if (k->is_interface()) {
 982     return implements_interface(k);
 983   } else {
 984     return Klass::compute_is_subtype_of(k);
 985   }
 986 }
 987 
 988 bool InstanceKlass::implements_interface(Klass* k) const {
 989   if (this == k) return true;
 990   assert(k->is_interface(), "should be an interface class");
 991   for (int i = 0; i < transitive_interfaces()->length(); i++) {
 992     if (transitive_interfaces()->at(i) == k) {
 993       return true;
 994     }
 995   }
 996   return false;
 997 }
 998 
objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
  // Allocate an n-dimensional array whose element class is this klass,
  // with the given outermost length. Throws NegativeArraySizeException
  // or OutOfMemoryError for invalid / oversized lengths.
  if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
  if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
    report_java_out_of_memory("Requested array size exceeds VM limit");
    JvmtiExport::post_array_size_exhausted();
    THROW_OOP_0(Universe::out_of_memory_error_array_size());
  }
  int size = objArrayOopDesc::object_size(length);
  Klass* ak = array_klass(n, CHECK_NULL);  // may create the array klass lazily
  KlassHandle h_ak (THREAD, ak);
  objArrayOop o =
    (objArrayOop)CollectedHeap::array_allocate(h_ak, size, length, CHECK_NULL);
  return o;
}
1013 
instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
  // Register instance i for finalization by calling the JDK's finalizer
  // register method. Returns the instance re-read from the handle, since
  // the Java call may trigger GC.
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", (address)i);
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh (THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  return h_i();
}
1028 
instanceOop InstanceKlass::allocate_instance(TRAPS) {
  // Allocate a new instance of this klass on the Java heap; if the class
  // has a finalizer and registration is not deferred to Object.<init>
  // (RegisterFinalizersAtInit), register the new instance immediately.
  bool has_finalizer_flag = has_finalizer(); // Query before possible GC
  int size = size_helper();  // Query before forming handle.

  KlassHandle h_k(THREAD, this);

  instanceOop i;

  i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);
  if (has_finalizer_flag && !RegisterFinalizersAtInit) {
    i = register_finalizer(i, CHECK_NULL);
  }
  return i;
}
1043 
void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
  // Throw (the Error flavor when throwError, else the Exception flavor)
  // if this class may not be instantiated directly: interfaces, abstract
  // classes, and java.lang.Class itself.
  if (is_interface() || is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
              : vmSymbols::java_lang_InstantiationException(), external_name());
  }
  // Direct instantiation of java.lang.Class is rejected.
  if (this == SystemDictionary::Class_klass()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
              : vmSymbols::java_lang_IllegalAccessException(), external_name());
  }
}
1056 
1057 Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
1058   instanceKlassHandle this_oop(THREAD, this);
1059   return array_klass_impl(this_oop, or_null, n, THREAD);
1060 }
1061 
1062 Klass* InstanceKlass::array_klass_impl(instanceKlassHandle this_oop, bool or_null, int n, TRAPS) {
1063   if (this_oop->array_klasses() == NULL) {
1064     if (or_null) return NULL;
1065 
1066     ResourceMark rm;
1067     JavaThread *jt = (JavaThread *)THREAD;
1068     {
1069       // Atomic creation of array_klasses
1070       MutexLocker mc(Compile_lock, THREAD);   // for vtables
1071       MutexLocker ma(MultiArray_lock, THREAD);
1072 
1073       // Check if update has already taken place
1074       if (this_oop->array_klasses() == NULL) {
1075         Klass*    k = ObjArrayKlass::allocate_objArray_klass(this_oop->class_loader_data(), 1, this_oop, CHECK_NULL);
1076         this_oop->set_array_klasses(k);
1077       }
1078     }
1079   }
1080   // _this will always be set at this point
1081   ObjArrayKlass* oak = (ObjArrayKlass*)this_oop->array_klasses();
1082   if (or_null) {
1083     return oak->array_klass_or_null(n);
1084   }
1085   return oak->array_klass(n, CHECK_NULL);
1086 }
1087 
Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) {
  // Convenience overload for the 1-dimensional array klass.
  return array_klass_impl(or_null, 1, THREAD);
}
1091 
1092 void InstanceKlass::call_class_initializer(TRAPS) {
1093   instanceKlassHandle ik (THREAD, this);
1094   call_class_initializer_impl(ik, THREAD);
1095 }
1096 
1097 static int call_class_initializer_impl_counter = 0;   // for debugging
1098 
1099 Method* InstanceKlass::class_initializer() {
1100   Method* clinit = find_method(
1101       vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1102   if (clinit != NULL && clinit->has_valid_initializer_flags()) {
1103     return clinit;
1104   }
1105   return NULL;
1106 }
1107 
1108 void InstanceKlass::call_class_initializer_impl(instanceKlassHandle this_oop, TRAPS) {
1109   if (ReplayCompiles &&
1110       (ReplaySuppressInitializers == 1 ||
1111        ReplaySuppressInitializers >= 2 && this_oop->class_loader() != NULL)) {
1112     // Hide the existence of the initializer for the purpose of replaying the compile
1113     return;
1114   }
1115 
1116   methodHandle h_method(THREAD, this_oop->class_initializer());
1117   assert(!this_oop->is_initialized(), "we cannot initialize twice");
1118   if (TraceClassInitialization) {
1119     tty->print("%d Initializing ", call_class_initializer_impl_counter++);
1120     this_oop->name()->print_value();
1121     tty->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", (address)this_oop());
1122   }
1123   if (h_method() != NULL) {
1124     JavaCallArguments args; // No arguments
1125     JavaValue result(T_VOID);
1126     JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
1127   }
1128 }
1129 
1130 
void InstanceKlass::mask_for(methodHandle method, int bci,
  InterpreterOopMap* entry_for) {
  // Fill *entry_for with the interpreter oop map for method at bci,
  // lazily allocating the per-klass OopMapCache on first use.
  // Dirty read, then double-check under a lock.
  // NOTE(review): the unlocked first read relies on the pointer store
  // being atomic; there is no explicit memory barrier here.
  if (_oop_map_cache == NULL) {
    // Otherwise, allocate a new one.
    MutexLocker x(OopMapCacheAlloc_lock);
    // First time use. Allocate a cache in C heap
    if (_oop_map_cache == NULL) {
      _oop_map_cache = new OopMapCache();
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
  _oop_map_cache->lookup(method, bci, entry_for);
}
1145 
1146 
1147 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1148   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1149     Symbol* f_name = fs.name();
1150     Symbol* f_sig  = fs.signature();
1151     if (f_name == name && f_sig == sig) {
1152       fd->initialize(const_cast<InstanceKlass*>(this), fs.index());
1153       return true;
1154     }
1155   }
1156   return false;
1157 }
1158 
1159 
1160 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1161   const int n = local_interfaces()->length();
1162   for (int i = 0; i < n; i++) {
1163     Klass* intf1 = local_interfaces()->at(i);
1164     assert(intf1->is_interface(), "just checking type");
1165     // search for field in current interface
1166     if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
1167       assert(fd->is_static(), "interface field must be static");
1168       return intf1;
1169     }
1170     // search for field in direct superinterfaces
1171     Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
1172     if (intf2 != NULL) return intf2;
1173   }
1174   // otherwise field lookup fails
1175   return NULL;
1176 }
1177 
1178 
1179 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1180   // search order according to newest JVM spec (5.4.3.2, p.167).
1181   // 1) search for field in current klass
1182   if (find_local_field(name, sig, fd)) {
1183     return const_cast<InstanceKlass*>(this);
1184   }
1185   // 2) search for field recursively in direct superinterfaces
1186   { Klass* intf = find_interface_field(name, sig, fd);
1187     if (intf != NULL) return intf;
1188   }
1189   // 3) apply field lookup recursively if superclass exists
1190   { Klass* supr = super();
1191     if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
1192   }
1193   // 4) otherwise field lookup fails
1194   return NULL;
1195 }
1196 
1197 
1198 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
1199   // search order according to newest JVM spec (5.4.3.2, p.167).
1200   // 1) search for field in current klass
1201   if (find_local_field(name, sig, fd)) {
1202     if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
1203   }
1204   // 2) search for field recursively in direct superinterfaces
1205   if (is_static) {
1206     Klass* intf = find_interface_field(name, sig, fd);
1207     if (intf != NULL) return intf;
1208   }
1209   // 3) apply field lookup recursively if superclass exists
1210   { Klass* supr = super();
1211     if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
1212   }
1213   // 4) otherwise field lookup fails
1214   return NULL;
1215 }
1216 
1217 
1218 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1219   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1220     if (fs.offset() == offset) {
1221       fd->initialize(const_cast<InstanceKlass*>(this), fs.index());
1222       if (fd->is_static() == is_static) return true;
1223     }
1224   }
1225   return false;
1226 }
1227 
1228 
1229 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1230   Klass* klass = const_cast<InstanceKlass*>(this);
1231   while (klass != NULL) {
1232     if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
1233       return true;
1234     }
1235     klass = klass->super();
1236   }
1237   return false;
1238 }
1239 
1240 
1241 void InstanceKlass::methods_do(void f(Method* method)) {
1242   int len = methods()->length();
1243   for (int index = 0; index < len; index++) {
1244     Method* m = methods()->at(index);
1245     assert(m->is_method(), "must be method");
1246     f(m);
1247   }
1248 }
1249 
1250 
1251 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1252   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1253     if (fs.access_flags().is_static()) {
1254       fieldDescriptor fd;
1255       fd.initialize(this, fs.index());
1256       cl->do_field(&fd);
1257     }
1258   }
1259 }
1260 
1261 
void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, TRAPS), TRAPS) {
  // TRAPS-capable variant: handleize the receiver and delegate.
  instanceKlassHandle h_this(THREAD, this);
  do_local_static_fields_impl(h_this, f, CHECK);
}
1266 
1267 
void InstanceKlass::do_local_static_fields_impl(instanceKlassHandle this_oop, void f(fieldDescriptor* fd, TRAPS), TRAPS) {
  // Apply f to each declared static field; if f throws, CHECK aborts
  // the iteration and propagates the exception.
  for (JavaFieldStream fs(this_oop()); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor fd;
      fd.initialize(this_oop(), fs.index());
      f(&fd, CHECK);
    }
  }
}
1277 
1278 
// qsort comparator for the (offset, field-index) int pairs built in
// do_nonstatic_fields: orders pairs by ascending field offset.
static int compare_fields_by_offset(int* a, int* b) {
  return a[0] - b[0];
}
1282 
void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
  // Apply cl to all nonstatic fields, inherited ones first (recursion
  // into the superclass), each klass's own fields ordered by offset.
  InstanceKlass* super = superklass();
  if (super != NULL) {
    super->do_nonstatic_fields(cl);
  }
  fieldDescriptor fd;
  int length = java_fields_count();
  // In DebugInfo nonstatic fields are sorted by offset.
  // Build flat (offset, field-index) int pairs for the nonstatic fields.
  int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
  int j = 0;
  for (int i = 0; i < length; i += 1) {
    fd.initialize(this, i);
    if (!fd.is_static()) {
      fields_sorted[j + 0] = fd.offset();
      fields_sorted[j + 1] = i;
      j += 2;
    }
  }
  if (j > 0) {
    length = j;  // reused: now the count of ints actually filled in
    // _sort_Fn is defined in growableArray.hpp.
    qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
    for (int i = 0; i < length; i += 2) {
      fd.initialize(this, fields_sorted[i + 1]);
      assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
  FREE_C_HEAP_ARRAY(int, fields_sorted, mtClass);
}
1313 
1314 
void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) {
  // TRAPS-capable iteration over the array klasses of this element type.
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f, THREAD);
}
1319 
1320 void InstanceKlass::array_klasses_do(void f(Klass* k)) {
1321   if (array_klasses() != NULL)
1322     ArrayKlass::cast(array_klasses())->array_klasses_do(f);
1323 }
1324 
1325 #ifdef ASSERT
1326 static int linear_search(Array<Method*>* methods, Symbol* name, Symbol* signature) {
1327   int len = methods->length();
1328   for (int index = 0; index < len; index++) {
1329     Method* m = methods->at(index);
1330     assert(m->is_method(), "must be method");
1331     if (m->signature() == signature && m->name() == name) {
1332        return index;
1333     }
1334   }
1335   return -1;
1336 }
1337 #endif
1338 
1339 static int binary_search(Array<Method*>* methods, Symbol* name) {
1340   int len = methods->length();
1341   // methods are sorted, so do binary search
1342   int l = 0;
1343   int h = len - 1;
1344   while (l <= h) {
1345     int mid = (l + h) >> 1;
1346     Method* m = methods->at(mid);
1347     assert(m->is_method(), "must be method");
1348     int res = m->name()->fast_compare(name);
1349     if (res == 0) {
1350       return mid;
1351     } else if (res < 0) {
1352       l = mid + 1;
1353     } else {
1354       h = mid - 1;
1355     }
1356   }
1357   return -1;
1358 }
1359 
Method* InstanceKlass::find_method(Symbol* name, Symbol* signature) const {
  // Search only this klass's own methods (no supers, no interfaces).
  return InstanceKlass::find_method(methods(), name, signature);
}
1363 
1364 Method* InstanceKlass::find_method(
1365     Array<Method*>* methods, Symbol* name, Symbol* signature) {
1366   int hit = binary_search(methods, name);
1367   if (hit != -1) {
1368     Method* m = methods->at(hit);
1369     // Do linear search to find matching signature.  First, quick check
1370     // for common case
1371     if (m->signature() == signature) return m;
1372     // search downwards through overloaded methods
1373     int i;
1374     for (i = hit - 1; i >= 0; --i) {
1375         Method* m = methods->at(i);
1376         assert(m->is_method(), "must be method");
1377         if (m->name() != name) break;
1378         if (m->signature() == signature) return m;
1379     }
1380     // search upwards
1381     for (i = hit + 1; i < methods->length(); ++i) {
1382         Method* m = methods->at(i);
1383         assert(m->is_method(), "must be method");
1384         if (m->name() != name) break;
1385         if (m->signature() == signature) return m;
1386     }
1387     // not found
1388 #ifdef ASSERT
1389     int index = linear_search(methods, name, signature);
1390     assert(index == -1, err_msg("binary search should have found entry %d", index));
1391 #endif
1392   }
1393   return NULL;
1394 }
1395 
int InstanceKlass::find_method_by_name(Symbol* name, int* end) {
  // Delegates to the static variant using this klass's method array.
  return find_method_by_name(methods(), name, end);
}
1399 
1400 int InstanceKlass::find_method_by_name(
1401     Array<Method*>* methods, Symbol* name, int* end_ptr) {
1402   assert(end_ptr != NULL, "just checking");
1403   int start = binary_search(methods, name);
1404   int end = start + 1;
1405   if (start != -1) {
1406     while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
1407     while (end < methods->length() && (methods->at(end))->name() == name) ++end;
1408     *end_ptr = end;
1409     return start;
1410   }
1411   return -1;
1412 }
1413 
1414 Method* InstanceKlass::uncached_lookup_method(Symbol* name, Symbol* signature) const {
1415   Klass* klass = const_cast<InstanceKlass*>(this);
1416   while (klass != NULL) {
1417     Method* method = InstanceKlass::cast(klass)->find_method(name, signature);
1418     if (method != NULL) return method;
1419     klass = InstanceKlass::cast(klass)->super();
1420   }
1421   return NULL;
1422 }
1423 
1424 // lookup a method in all the interfaces that this class implements
1425 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
1426                                                          Symbol* signature) const {
1427   Array<Klass*>* all_ifs = transitive_interfaces();
1428   int num_ifs = all_ifs->length();
1429   InstanceKlass *ik = NULL;
1430   for (int i = 0; i < num_ifs; i++) {
1431     ik = InstanceKlass::cast(all_ifs->at(i));
1432     Method* m = ik->lookup_method(name, signature);
1433     if (m != NULL) {
1434       return m;
1435     }
1436   }
1437   return NULL;
1438 }
1439 
1440 /* jni_id_for_impl for jfieldIds only */
/* jni_id_for_impl for jfieldIds only */
JNIid* InstanceKlass::jni_id_for_impl(instanceKlassHandle this_oop, int offset) {
  // Slow path of jni_id_for: take the creation lock, re-check, and
  // prepend a new JNIid to the klass's list if still absent.
  MutexLocker ml(JfieldIdCreation_lock);
  // Retry lookup after we got the lock
  JNIid* probe = this_oop->jni_ids() == NULL ? NULL : this_oop->jni_ids()->find(offset);
  if (probe == NULL) {
    // Slow case, allocate new static field identifier
    probe = new JNIid(this_oop(), offset, this_oop->jni_ids());
    this_oop->set_jni_ids(probe);
  }
  return probe;
}
1452 
1453 
/* jni_id_for for jfieldIds only */
JNIid* InstanceKlass::jni_id_for(int offset) {
  // Fast path: unlocked lookup in the existing list; fall back to the
  // locked allocation path on a miss.
  JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
  if (probe == NULL) {
    probe = jni_id_for_impl(this, offset);
  }
  return probe;
}
1462 
u2 InstanceKlass::enclosing_method_data(int offset) {
  // Read one u2 of the EnclosingMethod attribute, which (when present)
  // is stored as an extra record at the tail of the inner_classes array.
  // Returns 0 when no record exists.
  Array<jushort>* inner_class_list = inner_classes();
  if (inner_class_list == NULL) {
    return 0;
  }
  int length = inner_class_list->length();
  // A length that is an exact multiple of the inner-class record size
  // means no enclosing-method record was appended.
  if (length % inner_class_next_offset == 0) {
    return 0;
  } else {
    int index = length - enclosing_method_attribute_size;
    assert(offset < enclosing_method_attribute_size, "invalid offset");
    return inner_class_list->at(index + offset);
  }
}
1477 
void InstanceKlass::set_enclosing_method_indices(u2 class_index,
                                                 u2 method_index) {
  // Update the EnclosingMethod record stored at the tail of the
  // inner_classes array; silently a no-op when no record is present.
  Array<jushort>* inner_class_list = inner_classes();
  assert (inner_class_list != NULL, "_inner_classes list is not set up");
  int length = inner_class_list->length();
  if (length % inner_class_next_offset == enclosing_method_attribute_size) {
    int index = length - enclosing_method_attribute_size;
    inner_class_list->at_put(
      index + enclosing_method_class_index_offset, class_index);
    inner_class_list->at_put(
      index + enclosing_method_method_index_offset, method_index);
  }
}
1491 
1492 // Lookup or create a jmethodID.
1493 // This code is called by the VMThread and JavaThreads so the
1494 // locking has to be done very carefully to avoid deadlocks
1495 // and/or other cache consistency problems.
1496 //
// ik_h     - the klass whose jmethodID cache is consulted and/or updated
// method_h - the method to map; if it is old (redefined) but not obsolete,
//            the id for the *current* version of the method is returned
jmethodID InstanceKlass::get_jmethod_id(instanceKlassHandle ik_h, methodHandle method_h) {
  // idnum is the method's stable index used as the cache slot
  size_t idnum = (size_t)method_h->method_idnum();
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  size_t length = 0;
  jmethodID id = NULL;

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from NULL to non-NULL which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store_ptr() to save a new jmethodID
  // in the cache so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID requires uniqueness and
  // creation of the cache itself requires no leaks so a lock is
  // generally acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache can
  // grow and we'll have transitions from non-NULL to bigger non-NULL.
  // Cache creation requires no leaks and we require safety between all
  // cache accesses and freeing of the old cache so a lock is generally
  // acquired when the RedefineClasses() API has been used.

  if (jmeths != NULL) {
    // the cache already exists
    if (!ik_h->idnum_can_increment()) {
      // the cache can't grow so we can just get the current values
      get_jmethod_id_length_value(jmeths, idnum, &length, &id);
    } else {
      // cache can grow so we have to be more careful
      if (Threads::number_of_threads() == 0 ||
          SafepointSynchronize::is_at_safepoint()) {
        // we're single threaded or at a safepoint - no locking needed
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      } else {
        MutexLocker ml(JmethodIdCreation_lock);
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      }
    }
  }
  // implied else:
  // we need to allocate a cache so default length and id values are good

  if (jmeths == NULL ||   // no cache yet
      length <= idnum ||  // cache is too short
      id == NULL) {       // cache doesn't contain entry

    // This function can be called by the VMThread so we have to do all
    // things that might block on a safepoint before grabbing the lock.
    // Otherwise, we can deadlock with the VMThread or have a cache
    // consistency issue. These vars keep track of what we might have
    // to free after the lock is dropped.
    jmethodID  to_dealloc_id     = NULL;
    jmethodID* to_dealloc_jmeths = NULL;

    // may not allocate new_jmeths or use it if we allocate it
    jmethodID* new_jmeths = NULL;
    if (length <= idnum) {
      // allocate a new cache that might be used; +1 because element[0]
      // of the cache stores its own length (see below)
      size_t size = MAX2(idnum+1, (size_t)ik_h->idnum_allocated_count());
      new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
      memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
      // cache size is stored in element[0], other elements offset by one
      new_jmeths[0] = (jmethodID)size;
    }

    // allocate a new jmethodID that might be used
    jmethodID new_id = NULL;
    if (method_h->is_old() && !method_h->is_obsolete()) {
      // The method passed in is old (but not obsolete), we need to use the current version
      Method* current_method = ik_h->method_with_idnum((int)idnum);
      assert(current_method != NULL, "old and but not obsolete, so should exist");
      new_id = Method::make_jmethod_id(ik_h->class_loader_data(), current_method);
    } else {
      // It is the current version of the method or an obsolete method,
      // use the version passed in
      new_id = Method::make_jmethod_id(ik_h->class_loader_data(), method_h());
    }

    if (Threads::number_of_threads() == 0 ||
        SafepointSynchronize::is_at_safepoint()) {
      // we're single threaded or at a safepoint - no locking needed
      id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    } else {
      MutexLocker ml(JmethodIdCreation_lock);
      id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    }

    // The lock has been dropped so we can free resources.
    // Free up either the old cache or the new cache if we allocated one.
    if (to_dealloc_jmeths != NULL) {
      FreeHeap(to_dealloc_jmeths);
    }
    // free up the new ID since it wasn't needed
    if (to_dealloc_id != NULL) {
      Method::destroy_jmethod_id(ik_h->class_loader_data(), to_dealloc_id);
    }
  }
  return id;
}
1600 
1601 
1602 // Common code to fetch the jmethodID from the cache or update the
1603 // cache with the new jmethodID. This function should never do anything
1604 // that causes the caller to go to a safepoint or we can deadlock with
1605 // the VMThread or have cache consistency issues.
1606 //
// The caller preallocates new_id (and possibly new_jmeths); this function
// either installs each one or hands it back through *to_dealloc_id_p /
// *to_dealloc_jmeths_p so the caller can free it after the lock is dropped.
jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
            instanceKlassHandle ik_h, size_t idnum, jmethodID new_id,
            jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
            jmethodID** to_dealloc_jmeths_p) {
  assert(new_id != NULL, "sanity check");
  assert(to_dealloc_id_p != NULL, "sanity check");
  assert(to_dealloc_jmeths_p != NULL, "sanity check");
  assert(Threads::number_of_threads() == 0 ||
         SafepointSynchronize::is_at_safepoint() ||
         JmethodIdCreation_lock->owned_by_self(), "sanity check");

  // reacquire the cache - we are locked, single threaded or at a safepoint
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  jmethodID  id     = NULL;
  size_t     length = 0;

  if (jmeths == NULL ||                         // no cache yet
      (length = (size_t)jmeths[0]) <= idnum) {  // cache is too short
    if (jmeths != NULL) {
      // copy any existing entries from the old cache
      for (size_t index = 0; index < length; index++) {
        new_jmeths[index+1] = jmeths[index+1];
      }
      *to_dealloc_jmeths_p = jmeths;  // save old cache for later delete
    }
    // publish the (possibly grown) cache with a release store so racing
    // lock-free readers never see a partially filled array
    ik_h->release_set_methods_jmethod_ids(jmeths = new_jmeths);
  } else {
    // fetch jmethodID (if any) from the existing cache
    id = jmeths[idnum+1];
    *to_dealloc_jmeths_p = new_jmeths;  // save new cache for later delete
  }
  if (id == NULL) {
    // No matching jmethodID in the existing cache or we have a new
    // cache or we just grew the cache. This cache write is done here
    // by the first thread to win the foot race because a jmethodID
    // needs to be unique once it is generally available.
    id = new_id;

    // The jmethodID cache can be read while unlocked so we have to
    // make sure the new jmethodID is complete before installing it
    // in the cache.
    OrderAccess::release_store_ptr(&jmeths[idnum+1], id);
  } else {
    *to_dealloc_id_p = new_id; // save new id for later delete
  }
  return id;
}
1654 
1655 
1656 // Common code to get the jmethodID cache length and the jmethodID
1657 // value at index idnum if there is one.
1658 //
1659 void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
1660        size_t idnum, size_t *length_p, jmethodID* id_p) {
1661   assert(cache != NULL, "sanity check");
1662   assert(length_p != NULL, "sanity check");
1663   assert(id_p != NULL, "sanity check");
1664 
1665   // cache size is stored in element[0], other elements offset by one
1666   *length_p = (size_t)cache[0];
1667   if (*length_p <= idnum) {  // cache is too short
1668     *id_p = NULL;
1669   } else {
1670     *id_p = cache[idnum+1];  // fetch jmethodID (if any)
1671   }
1672 }
1673 
1674 
1675 // Lookup a jmethodID, NULL if not found.  Do no blocking, no allocations, no handles
1676 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
1677   size_t idnum = (size_t)method->method_idnum();
1678   jmethodID* jmeths = methods_jmethod_ids_acquire();
1679   size_t length;                                // length assigned as debugging crumb
1680   jmethodID id = NULL;
1681   if (jmeths != NULL &&                         // If there is a cache
1682       (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
1683     id = jmeths[idnum+1];                       // Look up the id (may be NULL)
1684   }
1685   return id;
1686 }
1687 
1688 
1689 // Cache an itable index
// idnum - the method's stable index used as the cache slot
// index - the itable index value to cache for that method
void InstanceKlass::set_cached_itable_index(size_t idnum, int index) {
  int* indices = methods_cached_itable_indices_acquire();
  int* to_dealloc_indices = NULL;

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from NULL to non-NULL which is safe because we use
  // release_set_methods_cached_itable_indices() to advertise the
  // new cache. A partially constructed cache should never be seen
  // by a racing thread. Cache reads and writes proceed without a
  // lock, but creation of the cache itself requires no leaks so a
  // lock is generally acquired in that case.
  //
  // If the RedefineClasses() API has been used, then this cache can
  // grow and we'll have transitions from non-NULL to bigger non-NULL.
  // Cache creation requires no leaks and we require safety between all
  // cache accesses and freeing of the old cache so a lock is generally
  // acquired when the RedefineClasses() API has been used.

  if (indices == NULL || idnum_can_increment()) {
    // we need a cache or the cache can grow
    MutexLocker ml(JNICachedItableIndex_lock);
    // reacquire the cache to see if another thread already did the work
    indices = methods_cached_itable_indices_acquire();
    size_t length = 0;
    // cache size is stored in element[0], other elements offset by one
    if (indices == NULL || (length = (size_t)indices[0]) <= idnum) {
      size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
      int* new_indices = NEW_C_HEAP_ARRAY(int, size+1, mtClass);
      new_indices[0] = (int)size;
      // copy any existing entries
      size_t i;
      for (i = 0; i < length; i++) {
        new_indices[i+1] = indices[i+1];
      }
      // Set all the rest to -1 (-1 == "not cached", see cached_itable_index)
      for (i = length; i < size; i++) {
        new_indices[i+1] = -1;
      }
      if (indices != NULL) {
        // We have an old cache to delete so save it for after we
        // drop the lock.
        to_dealloc_indices = indices;
      }
      release_set_methods_cached_itable_indices(indices = new_indices);
    }

    if (idnum_can_increment()) {
      // this cache can grow so we have to write to it safely
      // (i.e. while still holding the lock)
      indices[idnum+1] = index;
    }
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  if (!idnum_can_increment()) {
    // The cache cannot grow and this JNI itable index value does not
    // have to be unique like a jmethodID. If there is a race to set it,
    // it doesn't matter.
    indices[idnum+1] = index;
  }

  if (to_dealloc_indices != NULL) {
    // we allocated a new cache so free the old one
    FreeHeap(to_dealloc_indices);
  }
}
1757 
1758 
1759 // Retrieve a cached itable index
1760 int InstanceKlass::cached_itable_index(size_t idnum) {
1761   int* indices = methods_cached_itable_indices_acquire();
1762   if (indices != NULL && ((size_t)indices[0]) > idnum) {
1763      // indices exist and are long enough, retrieve possible cached
1764     return indices[idnum+1];
1765   }
1766   return -1;
1767 }
1768 
1769 
1770 //
1771 // Walk the list of dependent nmethods searching for nmethods which
1772 // are dependent on the changes that were passed in and mark them for
1773 // deoptimization.  Returns the number of nmethods found.
1774 //
1775 int InstanceKlass::mark_dependent_nmethods(DepChange& changes) {
1776   assert_locked_or_safepoint(CodeCache_lock);
1777   int found = 0;
1778   nmethodBucket* b = _dependencies;
1779   while (b != NULL) {
1780     nmethod* nm = b->get_nmethod();
1781     // since dependencies aren't removed until an nmethod becomes a zombie,
1782     // the dependency list may contain nmethods which aren't alive.
1783     if (nm->is_alive() && !nm->is_marked_for_deoptimization() && nm->check_dependency_on(changes)) {
1784       if (TraceDependencies) {
1785         ResourceMark rm;
1786         tty->print_cr("Marked for deoptimization");
1787         tty->print_cr("  context = %s", this->external_name());
1788         changes.print();
1789         nm->print();
1790         nm->print_dependencies();
1791       }
1792       nm->mark_for_deoptimization();
1793       found++;
1794     }
1795     b = b->next();
1796   }
1797   return found;
1798 }
1799 
1800 
1801 //
1802 // Add an nmethodBucket to the list of dependencies for this nmethod.
1803 // It's possible that an nmethod has multiple dependencies on this klass
1804 // so a count is kept for each bucket to guarantee that creation and
1805 // deletion of dependencies is consistent.
1806 //
1807 void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
1808   assert_locked_or_safepoint(CodeCache_lock);
1809   nmethodBucket* b = _dependencies;
1810   nmethodBucket* last = NULL;
1811   while (b != NULL) {
1812     if (nm == b->get_nmethod()) {
1813       b->increment();
1814       return;
1815     }
1816     b = b->next();
1817   }
1818   _dependencies = new nmethodBucket(nm, _dependencies);
1819 }
1820 
1821 
1822 //
1823 // Decrement count of the nmethod in the dependency list and remove
1824 // the bucket competely when the count goes to 0.  This method must
1825 // find a corresponding bucket otherwise there's a bug in the
1826 // recording of dependecies.
1827 //
1828 void InstanceKlass::remove_dependent_nmethod(nmethod* nm) {
1829   assert_locked_or_safepoint(CodeCache_lock);
1830   nmethodBucket* b = _dependencies;
1831   nmethodBucket* last = NULL;
1832   while (b != NULL) {
1833     if (nm == b->get_nmethod()) {
1834       if (b->decrement() == 0) {
1835         if (last == NULL) {
1836           _dependencies = b->next();
1837         } else {
1838           last->set_next(b->next());
1839         }
1840         delete b;
1841       }
1842       return;
1843     }
1844     last = b;
1845     b = b->next();
1846   }
1847 #ifdef ASSERT
1848   tty->print_cr("### %s can't find dependent nmethod:", this->external_name());
1849   nm->print();
1850 #endif // ASSERT
1851   ShouldNotReachHere();
1852 }
1853 
1854 
1855 #ifndef PRODUCT
1856 void InstanceKlass::print_dependent_nmethods(bool verbose) {
1857   nmethodBucket* b = _dependencies;
1858   int idx = 0;
1859   while (b != NULL) {
1860     nmethod* nm = b->get_nmethod();
1861     tty->print("[%d] count=%d { ", idx++, b->count());
1862     if (!verbose) {
1863       nm->print_on(tty, "nmethod");
1864       tty->print_cr(" } ");
1865     } else {
1866       nm->print();
1867       nm->print_dependencies();
1868       tty->print_cr("--- } ");
1869     }
1870     b = b->next();
1871   }
1872 }
1873 
1874 
1875 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
1876   nmethodBucket* b = _dependencies;
1877   while (b != NULL) {
1878     if (nm == b->get_nmethod()) {
1879       return true;
1880     }
1881     b = b->next();
1882   }
1883   return false;
1884 }
1885 #endif //PRODUCT
1886 
1887 
1888 // Garbage collection
1889 
#ifdef ASSERT
// Debug-only oop sanity checks used as the 'assert_fn' parameter of the
// iteration macros below. Each takes a pointer to an oop/narrowOop slot,
// decodes it, and (if non-null) asserts the referent lies in the
// expected heap region.
template <class T> void assert_is_in(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in(o), "should be in heap");
  }
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_closed_subset(o),
           err_msg("should be in closed *p " INTPTR_FORMAT " " INTPTR_FORMAT, (address)p, (address)o));
  }
}
template <class T> void assert_is_in_reserved(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  }
}
// No-op check for iterations that need no verification.
template <class T> void assert_nothing(T *p) {}

#else
// Product builds: all checks compile away to nothing.
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT
1921 
1922 //
1923 // Macros that iterate over areas of oops which are specialized on type of
1924 // oop pointer either narrow or wide, depending on UseCompressedOops
1925 //
1926 // Parameters are:
1927 //   T         - type of oop to point to (either oop or narrowOop)
1928 //   start_p   - starting pointer for region to iterate over
1929 //   count     - number of oops or narrowOops to iterate over
1930 //   do_oop    - action to perform on each oop (it's arbitrary C code which
1931 //               makes it more efficient to put in a macro rather than making
1932 //               it a template function)
1933 //   assert_fn - assert function which is template function because performance
1934 //               doesn't matter when enabled.
// Forward iteration: visit 'count' slots of type T starting at start_p,
// applying assert_fn then do_oop to each slot pointer p.
#define InstanceKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* p         = (T*)(start_p);             \
  T* const end = p + (count);               \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}
1947 
// Reverse iteration: same as above but visits slots from the last down
// to the first.
#define InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* const start = (T*)(start_p);           \
  T*       p     = start + (count);         \
  while (start < p) {                       \
    --p;                                    \
    (assert_fn)(p);                         \
    do_oop;                                 \
  }                                         \
}
1960 
// Bounded forward iteration: like the forward variant, but the visited
// range is clipped to the [low, high) region (both bounds must be
// T-aligned).
#define InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,             \
  do_oop, assert_fn)                        \
{                                           \
  T* const l = (T*)(low);                   \
  T* const h = (T*)(high);                  \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,   \
         "bounded region must be properly aligned"); \
  T* p       = (T*)(start_p);               \
  T* end     = p + (count);                 \
  if (p < l) p = l;                         \
  if (end > h) end = h;                     \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}
1980 
1981 
1982 // The following macros call specialized macros, passing either oop or
1983 // narrowOop as the specialization type.  These test the UseCompressedOops
1984 // flag.
// Apply do_oop to every oop field of obj described by this klass's
// nonstatic oop maps, forward order, specialized on UseCompressedOops.
#define InstanceKlass_OOP_MAP_ITERATE(obj, do_oop, assert_fn)            \
{                                                                        \
  /* Compute oopmap block range. The common case                         \
     is nonstatic_oop_map_size == 1. */                                  \
  OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();          \
  if (UseCompressedOops) {                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop,                   \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  } else {                                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_OOP_ITERATE(oop,                         \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  }                                                                      \
}
2007 
// Same as InstanceKlass_OOP_MAP_ITERATE but walks the oop maps (and the
// slots within each map) in reverse order.
#define InstanceKlass_OOP_MAP_REVERSE_ITERATE(obj, do_oop, assert_fn)    \
{                                                                        \
  OopMapBlock* const start_map = start_of_nonstatic_oop_maps();          \
  OopMapBlock* map             = start_map + nonstatic_oop_map_count();  \
  if (UseCompressedOops) {                                               \
    while (start_map < map) {                                            \
      --map;                                                             \
      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(narrowOop,           \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        do_oop, assert_fn)                                               \
    }                                                                    \
  } else {                                                               \
    while (start_map < map) {                                            \
      --map;                                                             \
      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(oop,                 \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        do_oop, assert_fn)                                               \
    }                                                                    \
  }                                                                      \
}
2028 
// Bounded variant: only visits oop fields lying within [low, high).
#define InstanceKlass_BOUNDED_OOP_MAP_ITERATE(obj, low, high, do_oop,    \
                                              assert_fn)                 \
{                                                                        \
  /* Compute oopmap block range. The common case is                      \
     nonstatic_oop_map_size == 1, so we accept the                       \
     usually non-existent extra overhead of examining                    \
     all the maps. */                                                    \
  OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();          \
  if (UseCompressedOops) {                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,           \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        low, high,                                                       \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  } else {                                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,                 \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        low, high,                                                       \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  }                                                                      \
}
2056 
// Serial mark-sweep marking: follow the klass in the header, then mark
// and push every oop held in obj's instance fields.
void InstanceKlass::oop_follow_contents(oop obj) {
  assert(obj != NULL, "can't follow the content of NULL object");
  MarkSweep::follow_klass(obj->klass());
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    MarkSweep::mark_and_push(p), \
    assert_is_in_closed_subset)
}
2065 
#if INCLUDE_ALL_GCS
// Parallel-compact analogue of the above, using the given compaction
// manager's marking stack.
void InstanceKlass::oop_follow_contents(ParCompactionManager* cm,
                                        oop obj) {
  assert(obj != NULL, "can't follow the content of NULL object");
  PSParallelCompact::follow_klass(cm, obj->klass());
  // Only mark the header and let the scan of the meta-data mark
  // everything else.
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    PSParallelCompact::mark_and_push(cm, p), \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS
2079 
2080 // closure's do_metadata() method dictates whether the given closure should be
2081 // applied to the klass ptr in the object header.
2082 
// Expands to an assertion plus an 'if' guard: the statement that follows
// the macro use runs only when the closure asks for metadata.
#define if_do_metadata_checked(closure, nv_suffix)                    \
  /* Make sure the non-virtual and the virtual versions match. */     \
  assert(closure->do_metadata##nv_suffix() == closure->do_metadata(), \
      "Inconsistency in do_metadata");                                \
  if (closure->do_metadata##nv_suffix())
2088 
// Defines oop_oop_iterate<nv_suffix> for one closure type: optionally
// visits the header klass, then applies the closure to each instance
// field oop; returns the object size in words.
#define InstanceKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                             \
int InstanceKlass::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
  /* header */                                                          \
  if_do_metadata_checked(closure, nv_suffix) {                          \
    closure->do_klass##nv_suffix(obj->klass());                         \
  }                                                                     \
  InstanceKlass_OOP_MAP_ITERATE(                                        \
    obj,                                                                \
    SpecializationStats::                                               \
      record_do_oop_call##nv_suffix(SpecializationStats::ik);           \
    (closure)->do_oop##nv_suffix(p),                                    \
    assert_is_in_closed_subset)                                         \
  return size_helper();                                                 \
}
2105 
#if INCLUDE_ALL_GCS
// Backwards variant of the iterator definition above (reverse oop-map
// order); only needed when all GCs are built in.
#define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int InstanceKlass::oop_oop_iterate_backwards##nv_suffix(oop obj,                \
                                              OopClosureType* closure) {        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik); \
  /* header */                                                                  \
  if_do_metadata_checked(closure, nv_suffix) {                                  \
    closure->do_klass##nv_suffix(obj->klass());                                 \
  }                                                                             \
  /* instance variables */                                                      \
  InstanceKlass_OOP_MAP_REVERSE_ITERATE(                                        \
    obj,                                                                        \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::ik);\
    (closure)->do_oop##nv_suffix(p),                                            \
    assert_is_in_closed_subset)                                                 \
   return size_helper();                                                        \
}
#endif // INCLUDE_ALL_GCS
2125 
// Bounded (MemRegion-limited) variant: the closure only sees oops that
// lie within mr; the header klass is only visited if obj itself is in mr.
#define InstanceKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \
                                                                        \
int InstanceKlass::oop_oop_iterate##nv_suffix##_m(oop obj,              \
                                                  OopClosureType* closure, \
                                                  MemRegion mr) {          \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
  if_do_metadata_checked(closure, nv_suffix) {                           \
    if (mr.contains(obj)) {                                              \
      closure->do_klass##nv_suffix(obj->klass());                        \
    }                                                                    \
  }                                                                      \
  InstanceKlass_BOUNDED_OOP_MAP_ITERATE(                                 \
    obj, mr.start(), mr.end(),                                           \
    (closure)->do_oop##nv_suffix(p),                                     \
    assert_is_in_closed_subset)                                          \
  return size_helper();                                                  \
}
2143 
// Expand the iterator definitions above for every closure type in the
// ALL_OOP_OOP_ITERATE_CLOSURES_* lists; backwards variants only when
// all GCs are built in.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
2152 
// Mark-sweep compaction: adjust every instance-field oop of obj (and the
// header klass) to its new location; returns the object size in words.
int InstanceKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    MarkSweep::adjust_pointer(p), \
    assert_is_in)
  MarkSweep::adjust_klass(obj->klass());
  return size;
}
2162 
2163 #if INCLUDE_ALL_GCS
// Parallel scavenge: claim or forward each instance-field oop of obj
// that should be scavenged, in reverse oop-map order.
void InstanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
    obj, \
    if (PSScavenge::should_scavenge(p)) { \
      pm->claim_or_forward_depth(p); \
    }, \
    assert_nothing )
}
2172 
// Parallel compaction: adjust every instance-field oop of obj, then the
// header; returns the object size in words.
int InstanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = size_helper();
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    PSParallelCompact::adjust_pointer(p), \
    assert_is_in)
  obj->update_header(cm);
  return size;
}
2182 
2183 #endif // INCLUDE_ALL_GCS
2184 
2185 void InstanceKlass::clean_implementors_list(BoolObjectClosure* is_alive) {
2186   assert(is_loader_alive(is_alive), "this klass should be live");
2187   if (is_interface()) {
2188     if (ClassUnloading) {
2189       Klass* impl = implementor();
2190       if (impl != NULL) {
2191         if (!impl->is_loader_alive(is_alive)) {
2192           // remove this guy
2193           Klass** klass = adr_implementor();
2194           assert(klass != NULL, "null klass");
2195           if (klass != NULL) {
2196             *klass = NULL;
2197           }
2198         }
2199       }
2200     }
2201   }
2202 }
2203 
2204 void InstanceKlass::clean_method_data(BoolObjectClosure* is_alive) {
2205   for (int m = 0; m < methods()->length(); m++) {
2206     MethodData* mdo = methods()->at(m)->method_data();
2207     if (mdo != NULL) {
2208       for (ProfileData* data = mdo->first_data();
2209            mdo->is_valid(data);
2210            data = mdo->next_data(data)) {
2211         data->clean_weak_klass_links(is_alive);
2212       }
2213     }
2214   }
2215 }
2216 
2217 
2218 static void remove_unshareable_in_class(Klass* k) {
2219   // remove klass's unshareable info
2220   k->remove_unshareable_info();
2221 }
2222 
2223 void InstanceKlass::remove_unshareable_info() {
2224   Klass::remove_unshareable_info();
2225   // Unlink the class
2226   if (is_linked()) {
2227     unlink_class();
2228   }
2229   init_implementor();
2230 
2231   constants()->remove_unshareable_info();
2232 
2233   for (int i = 0; i < methods()->length(); i++) {
2234     Method* m = methods()->at(i);
2235     m->remove_unshareable_info();
2236   }
2237 
2238   // do array classes also.
2239   array_klasses_do(remove_unshareable_in_class);
2240 }
2241 
2242 void restore_unshareable_in_class(Klass* k, TRAPS) {
2243   k->restore_unshareable_info(CHECK);
2244 }
2245 
// Rebuilds, after the CDS archive is mapped in, the state that
// remove_unshareable_info() stripped: method linkage and C++ vtable
// pointers, (if classes were redefined) the Java vtable/itable entries,
// the constant pool's resolved references, and the same for array klasses.
// May throw (TRAPS).
void InstanceKlass::restore_unshareable_info(TRAPS) {
  Klass::restore_unshareable_info(CHECK);
  instanceKlassHandle ik(THREAD, this);

  Array<Method*>* methods = ik->methods();
  int num_methods = methods->length();
  for (int index2 = 0; index2 < num_methods; ++index2) {
    methodHandle m(THREAD, methods->at(index2));
    // Re-link each method (entry points etc. are not valid in the archive).
    m()->link_method(m, CHECK);
    // restore method's vtable by calling a virtual function
    m->restore_vtable();
  }
  if (JvmtiExport::has_redefined_a_class()) {
    // Reinitialize vtable because RedefineClasses may have changed some
    // entries in this vtable for super classes so the CDS vtable might
    // point to old or obsolete entries.  RedefineClasses doesn't fix up
    // vtables in the shared system dictionary, only the main one.
    // It also redefines the itable too so fix that too.
    ResourceMark rm(THREAD);
    ik->vtable()->initialize_vtable(false, CHECK);
    ik->itable()->initialize_itable(false, CHECK);
  }

  // restore constant pool resolved references
  ik->constants()->restore_unshareable_info(CHECK);

  ik->array_klasses_do(restore_unshareable_in_class, CHECK);
}
2274 
2275 static void clear_all_breakpoints(Method* m) {
2276   m->clear_all_breakpoints();
2277 }
2278 
2279 
2280 void InstanceKlass::notify_unload_class(InstanceKlass* ik) {
2281   // notify the debugger
2282   if (JvmtiExport::should_post_class_unload()) {
2283     JvmtiExport::post_class_unload(ik);
2284   }
2285 
2286   // notify ClassLoadingService of class unload
2287   ClassLoadingService::notify_class_unloaded(ik);
2288 }
2289 
2290 void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) {
2291   // Clean up C heap
2292   ik->release_C_heap_structures();
2293   ik->constants()->release_C_heap_structures();
2294 }
2295 
// Frees all C-heap data owned by this klass when it is unloaded: the oop map
// cache, JNI field/method id structures, the MemberNameTable, cached itable
// indices, nmethod dependency buckets, breakpoints, previous-version records
// from RedefineClasses, the cached class file, and symbol refcounts.
// The constant pool is NOT released here (see the comment below).
void InstanceKlass::release_C_heap_structures() {

  // Can't release the constant pool here because the constant pool can be
  // deallocated separately from the InstanceKlass for default methods and
  // redefine classes.

  // Deallocate oop map cache
  if (_oop_map_cache != NULL) {
    delete _oop_map_cache;
    _oop_map_cache = NULL;
  }

  // Deallocate JNI identifiers for jfieldIDs
  JNIid::deallocate(jni_ids());
  set_jni_ids(NULL);

  // Deallocate the jmethodID cache, if one was ever allocated.
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  if (jmeths != (jmethodID*)NULL) {
    release_set_methods_jmethod_ids(NULL);
    FreeHeap(jmeths);
  }

  // Deallocate MemberNameTable
  {
    // Skip the lock if we are already at a safepoint (no contention possible).
    Mutex* lock_or_null = SafepointSynchronize::is_at_safepoint() ? NULL : MemberNameTable_lock;
    MutexLockerEx ml(lock_or_null, Mutex::_no_safepoint_check_flag);
    MemberNameTable* mnt = member_names();
    if (mnt != NULL) {
      delete mnt;
      set_member_names(NULL);
    }
  }

  // Deallocate the cached itable indices, if any.
  int* indices = methods_cached_itable_indices_acquire();
  if (indices != (int*)NULL) {
    release_set_methods_cached_itable_indices(NULL);
    FreeHeap(indices);
  }

  // release dependencies
  nmethodBucket* b = _dependencies;
  _dependencies = NULL;
  while (b != NULL) {
    nmethodBucket* next = b->next();
    delete b;
    b = next;
  }

  // Deallocate breakpoint records
  if (breakpoints() != 0x0) {
    methods_do(clear_all_breakpoints);
    assert(breakpoints() == 0x0, "should have cleared breakpoints");
  }

  // deallocate information about previous versions
  if (_previous_versions != NULL) {
    for (int i = _previous_versions->length() - 1; i >= 0; i--) {
      PreviousVersionNode * pv_node = _previous_versions->at(i);
      delete pv_node;
    }
    delete _previous_versions;
    _previous_versions = NULL;
  }

  // deallocate the cached class file
  if (_cached_class_file != NULL) {
    os::free(_cached_class_file, mtClass);
    _cached_class_file = NULL;
  }

  // Decrement symbol reference counts associated with the unloaded class.
  if (_name != NULL) _name->decrement_refcount();
  // unreference array name derived from this class name (arrays of an unloaded
  // class can't be referenced anymore).
  if (_array_name != NULL)  _array_name->decrement_refcount();
  if (_source_debug_extension != NULL) FREE_C_HEAP_ARRAY(char, _source_debug_extension, mtClass);

  assert(_total_instanceKlass_count >= 1, "Sanity check");
  Atomic::dec(&_total_instanceKlass_count);
}
2376 
2377 void InstanceKlass::set_source_debug_extension(char* array, int length) {
2378   if (array == NULL) {
2379     _source_debug_extension = NULL;
2380   } else {
2381     // Adding one to the attribute length in order to store a null terminator
2382     // character could cause an overflow because the attribute length is
2383     // already coded with an u4 in the classfile, but in practice, it's
2384     // unlikely to happen.
2385     assert((length+1) > length, "Overflow checking");
2386     char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
2387     for (int i = 0; i < length; i++) {
2388       sde[i] = array[i];
2389     }
2390     sde[length] = '\0';
2391     _source_debug_extension = sde;
2392   }
2393 }
2394 
2395 address InstanceKlass::static_field_addr(int offset) {
2396   return (address)(offset + InstanceMirrorKlass::offset_of_static_fields() + (intptr_t)java_mirror());
2397 }
2398 
2399 
2400 const char* InstanceKlass::signature_name() const {
2401   const char* src = (const char*) (name()->as_C_string());
2402   const int src_length = (int)strlen(src);
2403   char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
2404   int src_index = 0;
2405   int dest_index = 0;
2406   dest[dest_index++] = 'L';
2407   while (src_index < src_length) {
2408     dest[dest_index++] = src[src_index++];
2409   }
2410   dest[dest_index++] = ';';
2411   dest[dest_index] = '\0';
2412   return dest;
2413 }
2414 
// Different versions of is_same_class_package
2416 bool InstanceKlass::is_same_class_package(Klass* class2) {
2417   Klass* class1 = this;
2418   oop classloader1 = InstanceKlass::cast(class1)->class_loader();
2419   Symbol* classname1 = class1->name();
2420 
2421   if (class2->oop_is_objArray()) {
2422     class2 = ObjArrayKlass::cast(class2)->bottom_klass();
2423   }
2424   oop classloader2;
2425   if (class2->oop_is_instance()) {
2426     classloader2 = InstanceKlass::cast(class2)->class_loader();
2427   } else {
2428     assert(class2->oop_is_typeArray(), "should be type array");
2429     classloader2 = NULL;
2430   }
2431   Symbol* classname2 = class2->name();
2432 
2433   return InstanceKlass::is_same_class_package(classloader1, classname1,
2434                                               classloader2, classname2);
2435 }
2436 
2437 bool InstanceKlass::is_same_class_package(oop classloader2, Symbol* classname2) {
2438   Klass* class1 = this;
2439   oop classloader1 = InstanceKlass::cast(class1)->class_loader();
2440   Symbol* classname1 = class1->name();
2441 
2442   return InstanceKlass::is_same_class_package(classloader1, classname1,
2443                                               classloader2, classname2);
2444 }
2445 
2446 // return true if two classes are in the same package, classloader
2447 // and classname information is enough to determine a class's package
// return true if two classes are in the same package, classloader
// and classname information is enough to determine a class's package
bool InstanceKlass::is_same_class_package(oop class_loader1, Symbol* class_name1,
                                          oop class_loader2, Symbol* class_name2) {
  if (class_loader1 != class_loader2) {
    return false;
  } else if (class_name1 == class_name2) {
    return true;                // skip painful bytewise comparison
  } else {
    ResourceMark rm;

    // The Symbol*'s are in UTF8 encoding. Since we only need to check explicitly
    // for ASCII characters ('/', 'L', '['), we can keep them in UTF8 encoding.
    // Otherwise, we just compare jbyte values between the strings.
    const jbyte *name1 = class_name1->base();
    const jbyte *name2 = class_name2->base();

    // The package is everything before the last '/'.
    const jbyte *last_slash1 = UTF8::strrchr(name1, class_name1->utf8_length(), '/');
    const jbyte *last_slash2 = UTF8::strrchr(name2, class_name2->utf8_length(), '/');

    if ((last_slash1 == NULL) || (last_slash2 == NULL)) {
      // One of the two doesn't have a package.  Only return true
      // if the other one also doesn't have a package.
      return last_slash1 == last_slash2;
    } else {
      // Skip over '['s
      // (array names arrive in signature form, e.g. "[Lfoo/Bar;"; the
      // package comparison must start at the element class name)
      if (*name1 == '[') {
        do {
          name1++;
        } while (*name1 == '[');
        if (*name1 != 'L') {
          // Something is terribly wrong.  Shouldn't be here.
          return false;
        }
      }
      if (*name2 == '[') {
        do {
          name2++;
        } while (*name2 == '[');
        if (*name2 != 'L') {
          // Something is terribly wrong.  Shouldn't be here.
          return false;
        }
      }

      // Check that package part is identical
      int length1 = last_slash1 - name1;
      int length2 = last_slash2 - name2;

      return UTF8::equal(name1, length1, name2, length2);
    }
  }
}
2499 
2500 // Returns true iff super_method can be overridden by a method in targetclassname
// See JLS 3rd edition 8.4.6.1
2502 // Assumes name-signature match
2503 // "this" is InstanceKlass of super_method which must exist
2504 // note that the InstanceKlass of the method in the targetclassname has not always been created yet
2505 bool InstanceKlass::is_override(methodHandle super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) {
2506    // Private methods can not be overridden
2507    if (super_method->is_private()) {
2508      return false;
2509    }
2510    // If super method is accessible, then override
2511    if ((super_method->is_protected()) ||
2512        (super_method->is_public())) {
2513      return true;
2514    }
2515    // Package-private methods are not inherited outside of package
2516    assert(super_method->is_package_private(), "must be package private");
2517    return(is_same_class_package(targetclassloader(), targetclassname));
2518 }
2519 
2520 /* defined for now in jvm.cpp, for historical reasons *--
2521 Klass* InstanceKlass::compute_enclosing_class_impl(instanceKlassHandle self,
2522                                                      Symbol*& simple_name_result, TRAPS) {
2523   ...
2524 }
2525 */
2526 
2527 // tell if two classes have the same enclosing class (at package level)
bool InstanceKlass::is_same_package_member_impl(instanceKlassHandle class1,
                                                Klass* class2_oop, TRAPS) {
  // Trivial equality, and non-instance klasses can never be package members.
  if (class2_oop == class1())                       return true;
  if (!class2_oop->oop_is_instance())  return false;
  instanceKlassHandle class2(THREAD, class2_oop);

  // must be in same package before we try anything else
  if (!class1->is_same_class_package(class2->class_loader(), class2->name()))
    return false;

  // As long as there is an outer1.getEnclosingClass,
  // shift the search outward.
  instanceKlassHandle outer1 = class1;
  for (;;) {
    // As we walk along, look for equalities between outer1 and class2.
    // Eventually, the walks will terminate as outer1 stops
    // at the top-level class around the original class.
    bool ignore_inner_is_member;
    Klass* next = outer1->compute_enclosing_class(&ignore_inner_is_member,
                                                    CHECK_false);
    if (next == NULL)  break;
    if (next == class2())  return true;
    outer1 = instanceKlassHandle(THREAD, next);
  }

  // Now do the same for class2.
  instanceKlassHandle outer2 = class2;
  for (;;) {
    bool ignore_inner_is_member;
    Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member,
                                                    CHECK_false);
    if (next == NULL)  break;
    // Might as well check the new outer against all available values.
    if (next == class1())  return true;
    if (next == outer1())  return true;
    outer2 = instanceKlassHandle(THREAD, next);
  }

  // If by this point we have not found an equality between the
  // two classes, we know they are in separate package members.
  return false;
}
2570 
2571 
2572 jint InstanceKlass::compute_modifier_flags(TRAPS) const {
2573   jint access = access_flags().as_int();
2574 
2575   // But check if it happens to be member class.
2576   instanceKlassHandle ik(THREAD, this);
2577   InnerClassesIterator iter(ik);
2578   for (; !iter.done(); iter.next()) {
2579     int ioff = iter.inner_class_info_index();
2580     // Inner class attribute can be zero, skip it.
2581     // Strange but true:  JVM spec. allows null inner class refs.
2582     if (ioff == 0) continue;
2583 
2584     // only look at classes that are already loaded
2585     // since we are looking for the flags for our self.
2586     Symbol* inner_name = ik->constants()->klass_name_at(ioff);
2587     if ((ik->name() == inner_name)) {
2588       // This is really a member class.
2589       access = iter.inner_access_flags();
2590       break;
2591     }
2592   }
2593   // Remember to strip ACC_SUPER bit
2594   return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
2595 }
2596 
2597 jint InstanceKlass::jvmti_class_status() const {
2598   jint result = 0;
2599 
2600   if (is_linked()) {
2601     result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
2602   }
2603 
2604   if (is_initialized()) {
2605     assert(is_linked(), "Class status is not consistent");
2606     result |= JVMTI_CLASS_STATUS_INITIALIZED;
2607   }
2608   if (is_in_error_state()) {
2609     result |= JVMTI_CLASS_STATUS_ERROR;
2610   }
2611   return result;
2612 }
2613 
// Finds the itable section for the interface 'holder' and returns the method
// at the given index within it.  Throws IncompatibleClassChangeError if this
// klass does not implement the interface, and AbstractMethodError if the
// itable slot is empty.
Method* InstanceKlass::method_at_itable(Klass* holder, int index, TRAPS) {
  itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
  // The first offset entry points past the offset table, which bounds the
  // number of interface entries to search.
  int method_table_offset_in_words = ioe->offset()/wordSize;
  int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
                       / itableOffsetEntry::size();

  for (int cnt = 0 ; ; cnt ++, ioe ++) {
    // If the interface isn't implemented by the receiver class,
    // the VM should throw IncompatibleClassChangeError.
    if (cnt >= nof_interfaces) {
      THROW_NULL(vmSymbols::java_lang_IncompatibleClassChangeError());
    }

    Klass* ik = ioe->interface_klass();
    if (ik == holder) break;
  }

  itableMethodEntry* ime = ioe->first_method_entry(this);
  Method* m = ime[index].method();
  if (m == NULL) {
    THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
  }
  return m;
}
2638 
2639 // On-stack replacement stuff
// On-stack replacement stuff
// Links the OSR nmethod n at the head of this klass's OSR list.  With tiered
// compilation, also raises the method's recorded highest OSR level and makes
// lower-level OSR versions for the same bci not-entrant.
void InstanceKlass::add_osr_nmethod(nmethod* n) {
  // only one compilation can be active
  NEEDS_CLEANUP
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  assert(n->is_osr_method(), "wrong kind of nmethod");
  n->set_osr_link(osr_nmethods_head());
  set_osr_nmethods_head(n);
  // Raise the highest osr level if necessary
  if (TieredCompilation) {
    Method* m = n->method();
    m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level()));
  }
  // Remember to unlock again
  OsrList_lock->unlock();

  // Get rid of the osr methods for the same bci that have lower levels.
  // (done outside the lock; lookup_osr_nmethod takes the lock itself)
  if (TieredCompilation) {
    for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
      nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
      if (inv != NULL && inv->is_in_use()) {
        inv->make_not_entrant();
      }
    }
  }
}
2666 
2667 
// Unlinks n from this klass's OSR nmethod list.  With tiered compilation,
// also recomputes the method's highest OSR compilation level from the
// entries that remain on the list.
void InstanceKlass::remove_osr_nmethod(nmethod* n) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  assert(n->is_osr_method(), "wrong kind of nmethod");
  nmethod* last = NULL;
  nmethod* cur  = osr_nmethods_head();
  int max_level = CompLevel_none;  // Find the max comp level excluding n
  Method* m = n->method();
  // Search for match
  while(cur != NULL && cur != n) {
    if (TieredCompilation) {
      // Find max level before n
      max_level = MAX2(max_level, cur->comp_level());
    }
    last = cur;
    cur = cur->osr_link();
  }
  nmethod* next = NULL;
  if (cur == n) {
    // Found it: splice it out of the singly linked list.
    next = cur->osr_link();
    if (last == NULL) {
      // Remove first element
      set_osr_nmethods_head(next);
    } else {
      last->set_osr_link(next);
    }
  }
  n->set_osr_link(NULL);
  if (TieredCompilation) {
    cur = next;
    while (cur != NULL) {
      // Find max level after n
      max_level = MAX2(max_level, cur->comp_level());
      cur = cur->osr_link();
    }
    m->set_highest_osr_comp_level(max_level);
  }
  // Remember to unlock again
  OsrList_lock->unlock();
}
2708 
// Searches this klass's OSR list for an nmethod of method m at the given bci
// (any bci when bci == InvocationEntryBci).  When match_level is true, only
// an exact comp_level match is returned; otherwise the best (highest-level)
// candidate with comp_level >= the requested level is returned, or NULL.
nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  nmethod* osr = osr_nmethods_head();
  nmethod* best = NULL;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    // There can be a time when a c1 osr method exists but we are waiting
    // for a c2 version. When c2 completes its osr nmethod we will trash
    // the c1 version and only be able to find the c2 version. However
    // while we overflow in the c1 code at back branches we don't want to
    // try and switch to the same code as we are already running

    if (osr->method() == m &&
        (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
      if (match_level) {
        if (osr->comp_level() == comp_level) {
          // Found a match - return it.
          OsrList_lock->unlock();
          return osr;
        }
      } else {
        if (best == NULL || (osr->comp_level() > best->comp_level())) {
          if (osr->comp_level() == CompLevel_highest_tier) {
            // Found the best possible - return it.
            OsrList_lock->unlock();
            return osr;
          }
          best = osr;
        }
      }
    }
    osr = osr->osr_link();
  }
  OsrList_lock->unlock();
  if (best != NULL && best->comp_level() >= comp_level && match_level == false) {
    return best;
  }
  return NULL;
}
2749 
// Stores a weak reference to the given java.lang.invoke.MemberName oop at
// the given method-idnum slot of this klass's MemberNameTable.
void InstanceKlass::add_member_name(int index, Handle mem_name) {
  // NOTE(review): the weak global handle is created before taking the lock,
  // presumably because JNIHandles::make_weak_global may safepoint while the
  // No_Safepoint_Verifier below forbids safepoints inside the critical
  // section -- confirm before reordering.
  jweak mem_name_wref = JNIHandles::make_weak_global(mem_name);
  MutexLocker ml(MemberNameTable_lock);
  assert(0 <= index && index < idnum_allocated_count(), "index is out of bounds");
  DEBUG_ONLY(No_Safepoint_Verifier nsv);

  // Lazily allocate the table on first use, sized by the method idnum count.
  if (_member_names == NULL) {
    _member_names = new (ResourceObj::C_HEAP, mtClass) MemberNameTable(idnum_allocated_count());
  }
  _member_names->add_member_name(index, mem_name_wref);
}
2761 
2762 oop InstanceKlass::get_member_name(int index) {
2763   MutexLocker ml(MemberNameTable_lock);
2764   assert(0 <= index && index < idnum_allocated_count(), "index is out of bounds");
2765   DEBUG_ONLY(No_Safepoint_Verifier nsv);
2766 
2767   if (_member_names == NULL) {
2768     return NULL;
2769   }
2770   oop mem_name =_member_names->get_member_name(index);
2771   return mem_name;
2772 }
2773 
2774 // -----------------------------------------------------------------------------------------------------
2775 // Printing
2776 
2777 #ifndef PRODUCT
2778 
2779 #define BULLET  " - "
2780 
// Human-readable names for the class init states, indexed by _init_state.
static const char* state_names[] = {
  "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
};
2784 
// Debug printing (non-product builds only): dumps the klass's sizes, state,
// hierarchy, methods, interfaces, annotations, previous versions, fields and
// oop maps to the given stream, one BULLET-prefixed line per item.
void InstanceKlass::print_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  Klass::print_on(st);

  st->print(BULLET"instance size:     %d", size_helper());                        st->cr();
  st->print(BULLET"klass size:        %d", size());                               st->cr();
  st->print(BULLET"access:            "); access_flags().print_on(st);            st->cr();
  st->print(BULLET"state:             "); st->print_cr(state_names[_init_state]);
  st->print(BULLET"name:              "); name()->print_value_on(st);             st->cr();
  st->print(BULLET"super:             "); super()->print_value_on_maybe_null(st); st->cr();
  st->print(BULLET"sub:               ");
  // Walk the sibling chain of subclasses, capping output at
  // MaxSubklassPrintSize entries.
  Klass* sub = subklass();
  int n;
  for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) {
    if (n < MaxSubklassPrintSize) {
      sub->print_value_on(st);
      st->print("   ");
    }
  }
  if (n >= MaxSubklassPrintSize) st->print("(%d more klasses...)", n - MaxSubklassPrintSize);
  st->cr();

  if (is_interface()) {
    st->print_cr(BULLET"nof implementors:  %d", nof_implementors());
    if (nof_implementors() == 1) {
      st->print_cr(BULLET"implementor:    ");
      st->print("   ");
      implementor()->print_value_on(st);
      st->cr();
    }
  }

  st->print(BULLET"arrays:            "); array_klasses()->print_value_on_maybe_null(st); st->cr();
  st->print(BULLET"methods:           "); methods()->print_value_on(st);                  st->cr();
  if (Verbose) {
    // With -verbose, list every method individually.
    Array<Method*>* method_array = methods();
    for(int i = 0; i < method_array->length(); i++) {
      st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
    }
  }
  st->print(BULLET"method ordering:   "); method_ordering()->print_value_on(st);       st->cr();
  st->print(BULLET"local interfaces:  "); local_interfaces()->print_value_on(st);      st->cr();
  st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
  st->print(BULLET"constants:         "); constants()->print_value_on(st);         st->cr();
  if (class_loader_data() != NULL) {
    st->print(BULLET"class loader data:  ");
    class_loader_data()->print_value_on(st);
    st->cr();
  }
  st->print(BULLET"host class:        "); host_klass()->print_value_on_maybe_null(st); st->cr();
  if (source_file_name() != NULL) {
    st->print(BULLET"source file:       ");
    source_file_name()->print_value_on(st);
    st->cr();
  }
  if (source_debug_extension() != NULL) {
    st->print(BULLET"source debug extension:       ");
    st->print("%s", source_debug_extension());
    st->cr();
  }
  st->print(BULLET"class annotations:       "); class_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"class type annotations:  "); class_type_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"field annotations:       "); fields_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"field type annotations:  "); fields_type_annotations()->print_value_on(st); st->cr();
  {
    ResourceMark rm;
    // PreviousVersionInfo objects returned via PreviousVersionWalker
    // contain a GrowableArray of handles. We have to clean up the
    // GrowableArray _after_ the PreviousVersionWalker destructor
    // has destroyed the handles.
    {
      bool have_pv = false;
      PreviousVersionWalker pvw((InstanceKlass*)this);
      for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
           pv_info != NULL; pv_info = pvw.next_previous_version()) {
        if (!have_pv)
          st->print(BULLET"previous version:  ");
        have_pv = true;
        pv_info->prev_constant_pool_handle()()->print_value_on(st);
      }
      if (have_pv)  st->cr();
    } // pvw is cleaned up
  } // rm is cleaned up

  if (generic_signature() != NULL) {
    st->print(BULLET"generic signature: ");
    generic_signature()->print_value_on(st);
    st->cr();
  }
  st->print(BULLET"inner classes:     "); inner_classes()->print_value_on(st);     st->cr();
  st->print(BULLET"java mirror:       "); java_mirror()->print_value_on(st);       st->cr();
  st->print(BULLET"vtable length      %d  (start addr: " INTPTR_FORMAT ")", vtable_length(), start_of_vtable());  st->cr();
  st->print(BULLET"itable length      %d (start addr: " INTPTR_FORMAT ")", itable_length(), start_of_itable()); st->cr();
  st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
  FieldPrinter print_static_field(st);
  ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
  st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
  FieldPrinter print_nonstatic_field(st);
  ((InstanceKlass*)this)->do_nonstatic_fields(&print_nonstatic_field);

  st->print(BULLET"non-static oop maps: ");
  OopMapBlock* map     = start_of_nonstatic_oop_maps();
  OopMapBlock* end_map = map + nonstatic_oop_map_count();
  while (map < end_map) {
    st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
    map++;
  }
  st->cr();
}
2894 
2895 #endif //PRODUCT
2896 
2897 void InstanceKlass::print_value_on(outputStream* st) const {
2898   assert(is_klass(), "must be klass");
2899   name()->print_value_on(st);
2900 }
2901 
2902 #ifndef PRODUCT
2903 
2904 void FieldPrinter::do_field(fieldDescriptor* fd) {
2905   _st->print(BULLET);
2906    if (_obj == NULL) {
2907      fd->print_on(_st);
2908      _st->cr();
2909    } else {
2910      fd->print_on_for(_st, _obj);
2911      _st->cr();
2912    }
2913 }
2914 
2915 
// Debug printing of an instance of this klass.  Strings, Class mirrors and
// MethodTypes get extra type-specific detail beyond the generic field dump.
void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
  Klass::oop_print_on(obj, st);

  if (this == SystemDictionary::String_klass()) {
    typeArrayOop value  = java_lang_String::value(obj);
    juint        offset = java_lang_String::offset(obj);
    juint        length = java_lang_String::length(obj);
    // Only print the string if its value array and bounds look sane.
    if (value != NULL &&
        value->is_typeArray() &&
        offset          <= (juint) value->length() &&
        offset + length <= (juint) value->length()) {
      st->print(BULLET"string: ");
      Handle h_obj(obj);
      java_lang_String::print(h_obj, st);
      st->cr();
      if (!WizardMode)  return;  // that is enough
    }
  }

  st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
  FieldPrinter print_field(st, obj);
  do_nonstatic_fields(&print_field);

  if (this == SystemDictionary::Class_klass()) {
    st->print(BULLET"signature: ");
    java_lang_Class::print_signature(obj, st);
    st->cr();
    Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
    st->print(BULLET"fake entry for mirror: ");
    mirrored_klass->print_value_on_maybe_null(st);
    st->cr();
    Klass* array_klass = java_lang_Class::array_klass(obj);
    st->print(BULLET"fake entry for array: ");
    array_klass->print_value_on_maybe_null(st);
    st->cr();
    st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
    st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
    // For class mirrors, also dump the static fields they hold.
    Klass* real_klass = java_lang_Class::as_Klass(obj);
    if (real_klass != NULL && real_klass->oop_is_instance()) {
      InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(BULLET"signature: ");
    java_lang_invoke_MethodType::print_signature(obj, st);
    st->cr();
  }
}
2963 
2964 #endif //PRODUCT
2965 
// Short-form printing of an instance: "a <klassname> <address>", with extra
// detail for Strings, Class mirrors, MethodTypes, boxes, LambdaForms and
// MemberNames.
void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
  if (this == SystemDictionary::String_klass()
      && java_lang_String::value(obj) != NULL) {
    ResourceMark rm;
    // Long strings are truncated to a 12-char prefix plus the total length.
    int len = java_lang_String::length(obj);
    int plen = (len < 24 ? len : 12);
    char* str = java_lang_String::as_utf8_string(obj, 0, plen);
    st->print(" = \"%s\"", str);
    if (len > plen)
      st->print("...[%d]", len);
  } else if (this == SystemDictionary::Class_klass()) {
    Klass* k = java_lang_Class::as_Klass(obj);
    st->print(" = ");
    if (k != NULL) {
      k->print_value_on(st);
    } else {
      // A mirror with no klass is a primitive type's mirror.
      const char* tname = type2name(java_lang_Class::primitive_type(obj));
      st->print("%s", tname ? tname : "type?");
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(" = ");
    java_lang_invoke_MethodType::print_signature(obj, st);
  } else if (java_lang_boxing_object::is_instance(obj)) {
    st->print(" = ");
    java_lang_boxing_object::print(obj, st);
  } else if (this == SystemDictionary::LambdaForm_klass()) {
    oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
    if (vmentry != NULL) {
      st->print(" => ");
      vmentry->print_value_on(st);
    }
  } else if (this == SystemDictionary::MemberName_klass()) {
    Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
    if (vmtarget != NULL) {
      st->print(" = ");
      vmtarget->print_value_on(st);
    } else {
      // Unresolved MemberName: print it as "clazz.name".
      java_lang_invoke_MemberName::clazz(obj)->print_value_on(st);
      st->print(".");
      java_lang_invoke_MemberName::name(obj)->print_value_on(st);
    }
  }
}
3012 
// For instance klasses the internal name is simply the external name;
// delegates to external_name().
const char* InstanceKlass::internal_name() const {
  return external_name();
}
3016 
3017 #if INCLUDE_SERVICES
3018 // Size Statistics
3019 void InstanceKlass::collect_statistics(KlassSizeStats *sz) const {
3020   Klass::collect_statistics(sz);
3021 
3022   sz->_inst_size  = HeapWordSize * size_helper();
3023   sz->_vtab_bytes = HeapWordSize * align_object_offset(vtable_length());
3024   sz->_itab_bytes = HeapWordSize * align_object_offset(itable_length());
3025   sz->_nonstatic_oopmap_bytes = HeapWordSize *
3026         ((is_interface() || is_anonymous()) ?
3027          align_object_offset(nonstatic_oop_map_size()) :
3028          nonstatic_oop_map_size());
3029 
3030   int n = 0;
3031   n += (sz->_methods_array_bytes         = sz->count_array(methods()));
3032   n += (sz->_method_ordering_bytes       = sz->count_array(method_ordering()));
3033   n += (sz->_local_interfaces_bytes      = sz->count_array(local_interfaces()));
3034   n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces()));
3035   n += (sz->_fields_bytes                = sz->count_array(fields()));
3036   n += (sz->_inner_classes_bytes         = sz->count_array(inner_classes()));
3037   sz->_ro_bytes += n;
3038 
3039   const ConstantPool* cp = constants();
3040   if (cp) {
3041     cp->collect_statistics(sz);
3042   }
3043 
3044   const Annotations* anno = annotations();
3045   if (anno) {
3046     anno->collect_statistics(sz);
3047   }
3048 
3049   const Array<Method*>* methods_array = methods();
3050   if (methods()) {
3051     for (int i = 0; i < methods_array->length(); i++) {
3052       Method* method = methods_array->at(i);
3053       if (method) {
3054         sz->_method_count ++;
3055         method->collect_statistics(sz);
3056       }
3057     }
3058   }
3059 }
3060 #endif // INCLUDE_SERVICES
3061 
3062 // Verification
3063 
// Closure applied to every oop field of an object during verification:
// each field must hold either NULL or a well-formed oop; otherwise print
// diagnostics (failing slot, heap state) and abort the VM.
class VerifyFieldClosure: public OopClosure {
 protected:
  // Shared by both the full-oop and narrow-oop entry points below.
  template <class T> void do_oop_work(T* p) {
    oop obj = oopDesc::load_decode_heap_oop(p);
    if (!obj->is_oop_or_null()) {
      tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p, (address)obj);
      Universe::print();
      guarantee(false, "boom");
    }
  }
 public:
  virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
};
3078 
// Verify the internal consistency of this InstanceKlass: generic Klass
// invariants, presence in the SystemDictionary, vtable, subclass/sibling
// links, implementor field, interface arrays, method array sorting,
// method-ordering permutation, JNI ids, and miscellaneous fields.
void InstanceKlass::verify_on(outputStream* st, bool check_dictionary) {
#ifndef PRODUCT
  // Avoid redundant verifies, this really should be in product.
  if (_verify_count == Universe::verify_count()) return;
  _verify_count = Universe::verify_count();
#endif

  // Verify Klass
  Klass::verify_on(st, check_dictionary);

  // Verify that klass is present in SystemDictionary if not already
  // verifying the SystemDictionary.
  if (is_loaded() && !is_anonymous() && check_dictionary) {
    Symbol* h_name = name();
    SystemDictionary::verify_obj_klass_present(h_name, class_loader_data());
  }

  // Verify vtables
  if (is_linked()) {
    ResourceMark rm;
    // $$$ This used to be done only for m/s collections.  Doing it
    // always seemed a valid generalization.  (DLD -- 6/00)
    vtable()->verify(st);
  }

  // Verify first subklass
  if (subklass_oop() != NULL) {
    guarantee(subklass_oop()->is_klass(), "should be klass");
  }

  // Verify siblings
  Klass* super = this->super();
  Klass* sib = next_sibling();
  if (sib != NULL) {
    if (sib == this) {
      fatal(err_msg("subclass points to itself " PTR_FORMAT, sib));
    }

    guarantee(sib->is_klass(), "should be klass");
    guarantee(sib->super() == super, "siblings should have same superklass");
  }

  // Verify implementor fields
  Klass* im = implementor();
  if (im != NULL) {
    guarantee(is_interface(), "only interfaces should have implementor set");
    guarantee(im->is_klass(), "should be klass");
    // NOTE(review): im == this appears to be a tolerated sentinel value for
    // an interface implementor — confirm against implementor() semantics.
    guarantee(!im->is_interface() || im == this,
      "implementors cannot be interfaces");
  }

  // Verify local interfaces
  if (local_interfaces()) {
    Array<Klass*>* local_interfaces = this->local_interfaces();
    for (int j = 0; j < local_interfaces->length(); j++) {
      Klass* e = local_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
    }
  }

  // Verify transitive interfaces
  if (transitive_interfaces() != NULL) {
    Array<Klass*>* transitive_interfaces = this->transitive_interfaces();
    for (int j = 0; j < transitive_interfaces->length(); j++) {
      Klass* e = transitive_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
    }
  }

  // Verify methods
  if (methods() != NULL) {
    Array<Method*>* methods = this->methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    // Methods must be sorted by name (fast_compare order) for binary search.
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify method ordering
  if (method_ordering() != NULL) {
    Array<int>* method_ordering = this->method_ordering();
    int length = method_ordering->length();
    if (JvmtiExport::can_maintain_original_method_order() ||
        ((UseSharedSpaces || DumpSharedSpaces) && length != 0)) {
      guarantee(length == methods()->length(), "invalid method ordering length");
      // The ordering must be a permutation of 0..length-1; check that every
      // entry is in range and that the entries sum to length*(length-1)/2.
      jlong sum = 0;
      for (int j = 0; j < length; j++) {
        int original_index = method_ordering->at(j);
        guarantee(original_index >= 0, "invalid method ordering index");
        guarantee(original_index < length, "invalid method ordering index");
        sum += original_index;
      }
      // Verify sum of indices 0,1,...,length-1
      guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
    } else {
      guarantee(length == 0, "invalid method ordering length");
    }
  }

  // Verify JNI static field identifiers
  if (jni_ids() != NULL) {
    jni_ids()->verify(this);
  }

  // Verify other fields
  if (array_klasses() != NULL) {
    guarantee(array_klasses()->is_klass(), "should be klass");
  }
  if (constants() != NULL) {
    guarantee(constants()->is_constantPool(), "should be constant pool");
  }
  const Klass* host = host_klass();
  if (host != NULL) {
    guarantee(host->is_klass(), "should be klass");
  }
}
3199 
3200 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
3201   Klass::oop_verify_on(obj, st);
3202   VerifyFieldClosure blk;
3203   obj->oop_iterate_no_header(&blk);
3204 }
3205 
3206 
3207 // JNIid class for jfieldIDs only
3208 // Note to reviewers:
3209 // These JNI functions are just moved over to column 1 and not changed
3210 // in the compressed oops workspace.
// Construct a JNIid node recording the field at 'offset' within 'holder',
// linked in front of an existing list via 'next'.
JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
  _holder = holder;
  _offset = offset;
  _next = next;
  // Debug-only flag; set elsewhere when this id identifies a static field.
  debug_only(_is_static_field_id = false;)
}
3217 
3218 
3219 JNIid* JNIid::find(int offset) {
3220   JNIid* current = this;
3221   while (current != NULL) {
3222     if (current->offset() == offset) return current;
3223     current = current->next();
3224   }
3225   return NULL;
3226 }
3227 
3228 void JNIid::deallocate(JNIid* current) {
3229   while (current != NULL) {
3230     JNIid* next = current->next();
3231     delete current;
3232     current = next;
3233   }
3234 }
3235 
3236 
3237 void JNIid::verify(Klass* holder) {
3238   int first_field_offset  = InstanceMirrorKlass::offset_of_static_fields();
3239   int end_field_offset;
3240   end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);
3241 
3242   JNIid* current = this;
3243   while (current != NULL) {
3244     guarantee(current->holder() == holder, "Invalid klass in JNIid");
3245 #ifdef ASSERT
3246     int o = current->offset();
3247     if (current->is_static_field_id()) {
3248       guarantee(o >= first_field_offset  && o < end_field_offset,  "Invalid static field offset in JNIid");
3249     }
3250 #endif
3251     current = current->next();
3252   }
3253 }
3254 
3255 
3256 #ifdef ASSERT
// Debug-only setter that checks initialization-state transitions are
// monotonic: shared (CDS) classes may re-assert their current state
// (<=), all others must move strictly forward (<).  Resetting to
// 'allocated' is always permitted.
void InstanceKlass::set_init_state(ClassState state) {
  bool good_state = is_shared() ? (_init_state <= state)
                                               : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
  _init_state = (u1)state;
}
3263 #endif
3264 
3265 
3266 // RedefineClasses() support for previous versions:
3267 
3268 // Purge previous versions
// Walk ik's previous-versions list and delete every version whose
// constant pool is no longer on any stack (none of its methods can be
// executing).  For versions still alive, drop EMCP method refs that are
// not on the stack; when emcp_method_count == 0 the surviving methods
// are marked obsolete instead (cleaned up later).
static void purge_previous_versions_internal(InstanceKlass* ik, int emcp_method_count) {
  if (ik->previous_versions() != NULL) {
    // This klass has previous versions so see what we can cleanup
    // while it is safe to do so.

    int deleted_count = 0;    // leave debugging breadcrumbs
    int live_count = 0;
    ClassLoaderData* loader_data = ik->class_loader_data() == NULL ?
                       ClassLoaderData::the_null_class_loader_data() :
                       ik->class_loader_data();

    // RC_TRACE macro has an embedded ResourceMark
    RC_TRACE(0x00000200, ("purge: %s: previous version length=%d",
      ik->external_name(), ik->previous_versions()->length()));

    // Iterate backwards so remove_at(i) does not disturb unvisited indices.
    for (int i = ik->previous_versions()->length() - 1; i >= 0; i--) {
      // check the previous versions array
      PreviousVersionNode * pv_node = ik->previous_versions()->at(i);
      ConstantPool* cp_ref = pv_node->prev_constant_pool();
      assert(cp_ref != NULL, "cp ref was unexpectedly cleared");

      ConstantPool* pvcp = cp_ref;
      if (!pvcp->on_stack()) {
        // If the constant pool isn't on stack, none of the methods
        // are executing.  Delete all the methods, the constant pool and
        // and this previous version node.
        GrowableArray<Method*>* method_refs = pv_node->prev_EMCP_methods();
        if (method_refs != NULL) {
          for (int j = method_refs->length() - 1; j >= 0; j--) {
            Method* method = method_refs->at(j);
            assert(method != NULL, "method ref was unexpectedly cleared");
            method_refs->remove_at(j);
            // method will be freed with associated class.
          }
        }
        // Remove the constant pool
        delete pv_node;
        // Since we are traversing the array backwards, we don't have to
        // do anything special with the index.
        ik->previous_versions()->remove_at(i);
        deleted_count++;
        continue;
      } else {
        RC_TRACE(0x00000200, ("purge: previous version @%d is alive", i));
        assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
        guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
        live_count++;
      }

      // At least one method is live in this previous version, clean out
      // the others or mark them as obsolete.
      GrowableArray<Method*>* method_refs = pv_node->prev_EMCP_methods();
      if (method_refs != NULL) {
        RC_TRACE(0x00000200, ("purge: previous methods length=%d",
          method_refs->length()));
        for (int j = method_refs->length() - 1; j >= 0; j--) {
          Method* method = method_refs->at(j);
          assert(method != NULL, "method ref was unexpectedly cleared");

          // Remove the emcp method if it's not executing
          // If it's been made obsolete by a redefinition of a non-emcp
          // method, mark it as obsolete but leave it to clean up later.
          if (!method->on_stack()) {
            method_refs->remove_at(j);
          } else if (emcp_method_count == 0) {
            method->set_is_obsolete();
          } else {
            // RC_TRACE macro has an embedded ResourceMark
            RC_TRACE(0x00000200,
              ("purge: %s(%s): prev method @%d in version @%d is alive",
              method->name()->as_C_string(),
              method->signature()->as_C_string(), j, i));
          }
        }
      }
    }
    assert(ik->previous_versions()->length() == live_count, "sanity check");
    RC_TRACE(0x00000200,
      ("purge: previous version stats: live=%d, deleted=%d", live_count,
      deleted_count));
  }
}
3351 
3352 // External interface for use during class unloading.
void InstanceKlass::purge_previous_versions(InstanceKlass* ik) {
  // Call with >0 emcp methods since they are not currently being redefined.
  // A non-zero count prevents the internal purge from marking still-running
  // EMCP methods obsolete.
  purge_previous_versions_internal(ik, 1);
}
3357 
3358 
3359 // Potentially add an information node that contains pointers to the
3360 // interesting parts of the previous version of the_class.
3361 // This is also where we clean out any unused references.
3362 // Note that while we delete nodes from the _previous_versions
3363 // array, we never delete the array itself until the klass is
3364 // unloaded. The has_been_redefined() query depends on that fact.
3365 //
// ikh: the class version being replaced by the current RedefineClasses().
// emcp_methods: bit i is set iff old method i is EMCP (equivalent modulo
//   constant pool); emcp_method_count is the number of set bits.
// Must run in the VMThread at a safepoint.
void InstanceKlass::add_previous_version(instanceKlassHandle ikh,
       BitMap* emcp_methods, int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  if (_previous_versions == NULL) {
    // This is the first previous version so make some space.
    // Start with 2 elements under the assumption that the class
    // won't be redefined much.
    _previous_versions =  new (ResourceObj::C_HEAP, mtClass)
                            GrowableArray<PreviousVersionNode *>(2, true);
  }

  ConstantPool* cp_ref = ikh->constants();

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE(0x00000400, ("adding previous version ref for %s @%d, EMCP_cnt=%d "
                        "on_stack=%d",
    ikh->external_name(), _previous_versions->length(), emcp_method_count,
    cp_ref->on_stack()));

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to create a new PreviousVersionNode. However,
  // we still need to examine older previous versions below.
  Array<Method*>* old_methods = ikh->methods();

  if (cp_ref->on_stack()) {
  PreviousVersionNode * pv_node = NULL;
  if (emcp_method_count == 0) {
      // non-shared ConstantPool gets a reference
      pv_node = new PreviousVersionNode(cp_ref, !cp_ref->is_shared(), NULL);
    RC_TRACE(0x00000400,
        ("add: all methods are obsolete; flushing any EMCP refs"));
  } else {
    int local_count = 0;
      GrowableArray<Method*>* method_refs = new (ResourceObj::C_HEAP, mtClass)
        GrowableArray<Method*>(emcp_method_count, true);
    for (int i = 0; i < old_methods->length(); i++) {
      if (emcp_methods->at(i)) {
          // this old method is EMCP. Save it only if it's on the stack
          Method* old_method = old_methods->at(i);
          if (old_method->on_stack()) {
            method_refs->append(old_method);
          }
        if (++local_count >= emcp_method_count) {
          // no more EMCP methods so bail out now
          break;
        }
      }
    }
      // non-shared ConstantPool gets a reference
      pv_node = new PreviousVersionNode(cp_ref, !cp_ref->is_shared(), method_refs);
    }
    // append new previous version.
  _previous_versions->append(pv_node);
  }

  // Since the caller is the VMThread and we are at a safepoint, this
  // is a good time to clear out unused references.

  RC_TRACE(0x00000400, ("add: previous version length=%d",
    _previous_versions->length()));

  // Purge previous versions not executing on the stack
  purge_previous_versions_internal(this, emcp_method_count);

  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions->length() > 0) {
    // We have a mix of obsolete and EMCP methods so we have to
    // clear out any matching EMCP method entries the hard way.
    int local_count = 0;
    for (int i = 0; i < old_methods->length(); i++) {
      if (!emcp_methods->at(i)) {
        // only obsolete methods are interesting
        Method* old_method = old_methods->at(i);
        Symbol* m_name = old_method->name();
        Symbol* m_signature = old_method->signature();

        // we might not have added the last entry
        for (int j = _previous_versions->length() - 1; j >= 0; j--) {
          // check the previous versions array for non executing obsolete methods
          PreviousVersionNode * pv_node = _previous_versions->at(j);

          GrowableArray<Method*>* method_refs = pv_node->prev_EMCP_methods();
          if (method_refs == NULL) {
            // We have run into a PreviousVersion generation where
            // all methods were made obsolete during that generation's
            // RedefineClasses() operation. At the time of that
            // operation, all EMCP methods were flushed so we don't
            // have to go back any further.
            //
            // A NULL method_refs is different than an empty method_refs.
            // We cannot infer any optimizations about older generations
            // from an empty method_refs for the current generation.
            break;
          }

          for (int k = method_refs->length() - 1; k >= 0; k--) {
            Method* method = method_refs->at(k);

            if (!method->is_obsolete() &&
                method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              // and remove the reference.
              RC_TRACE(0x00000400,
                ("add: %s(%s): flush obsolete method @%d in version @%d",
                m_name->as_C_string(), m_signature->as_C_string(), k, j));

              method->set_is_obsolete();
              // Leave obsolete methods on the previous version list to
              // clean up later.
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been deleted,
          // but there still may be an older EMCP method that has not
          // been deleted.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
} // end add_previous_version()
3503 
3504 
3505 // Determine if InstanceKlass has a previous version.
bool InstanceKlass::has_previous_version() const {
  // The _previous_versions array itself is never deleted until the klass
  // is unloaded, so a non-empty array reliably signals a past redefinition.
  return (_previous_versions != NULL && _previous_versions->length() > 0);
} // end has_previous_version()
3509 
3510 
3511 Method* InstanceKlass::method_with_idnum(int idnum) {
3512   Method* m = NULL;
3513   if (idnum < methods()->length()) {
3514     m = methods()->at(idnum);
3515   }
3516   if (m == NULL || m->method_idnum() != idnum) {
3517     for (int index = 0; index < methods()->length(); ++index) {
3518       m = methods()->at(index);
3519       if (m->method_idnum() == idnum) {
3520         return m;
3521       }
3522     }
3523   }
3524   return m;
3525 }
3526 
// Length of the cached class file data for this klass; delegates to
// VM_RedefineClasses for the actual decoding of _cached_class_file.
jint InstanceKlass::get_cached_class_file_len() {
  return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
}
3530 
// Pointer to the cached class file bytes for this klass; delegates to
// VM_RedefineClasses for the actual decoding of _cached_class_file.
unsigned char * InstanceKlass::get_cached_class_file_bytes() {
  return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
}
3534 
3535 
3536 // Construct a PreviousVersionNode entry for the array hung off
3537 // the InstanceKlass.
PreviousVersionNode::PreviousVersionNode(ConstantPool* prev_constant_pool,
  bool prev_cp_is_weak, GrowableArray<Method*>* prev_EMCP_methods) {

  _prev_constant_pool = prev_constant_pool;
  // Whether the constant pool reference is weak; callers pass
  // !cp->is_shared() (see add_previous_version()).
  _prev_cp_is_weak = prev_cp_is_weak;
  // May be NULL: all methods in this version were made obsolete.
  _prev_EMCP_methods = prev_EMCP_methods;
}
3545 
3546 
3547 // Destroy a PreviousVersionNode
3548 PreviousVersionNode::~PreviousVersionNode() {
3549   if (_prev_constant_pool != NULL) {
3550     _prev_constant_pool = NULL;
3551   }
3552 
3553   if (_prev_EMCP_methods != NULL) {
3554     delete _prev_EMCP_methods;
3555   }
3556 }
3557 
3558 
3559 // Construct a PreviousVersionInfo entry
// Capture handle-based views of a PreviousVersionNode's constant pool and
// EMCP methods so they stay safe to use while the walker is active.
PreviousVersionInfo::PreviousVersionInfo(PreviousVersionNode *pv_node) {
  _prev_constant_pool_handle = constantPoolHandle();  // NULL handle
  _prev_EMCP_method_handles = NULL;

  ConstantPool* cp = pv_node->prev_constant_pool();
  assert(cp != NULL, "constant pool ref was unexpectedly cleared");
  if (cp == NULL) {
    return;  // robustness
  }

  // make the ConstantPool* safe to return
  _prev_constant_pool_handle = constantPoolHandle(cp);

  GrowableArray<Method*>* method_refs = pv_node->prev_EMCP_methods();
  if (method_refs == NULL) {
    // the InstanceKlass did not have any EMCP methods
    return;
  }

  // Not C-heap allocated (see ~PreviousVersionInfo), so it is never
  // explicitly deleted.
  _prev_EMCP_method_handles = new GrowableArray<methodHandle>(10);

  int n_methods = method_refs->length();
  for (int i = 0; i < n_methods; i++) {
    Method* method = method_refs->at(i);
    assert (method != NULL, "method has been cleared");
    if (method == NULL) {
      continue;  // robustness
    }
    // make the Method* safe to return
    _prev_EMCP_method_handles->append(methodHandle(method));
  }
}
3592 
3593 
3594 // Destroy a PreviousVersionInfo
PreviousVersionInfo::~PreviousVersionInfo() {
  // Since _prev_EMCP_method_handles is not C-heap allocated, we
  // don't have to delete it.  The handles it contains are released when
  // the walker's HandleMark is destroyed (see ~PreviousVersionWalker).
}
3599 
3600 
3601 // Construct a helper for walking the previous versions array
// Begin a walk over ik's previous-versions array, starting at index 0.
PreviousVersionWalker::PreviousVersionWalker(InstanceKlass *ik) {
  _previous_versions = ik->previous_versions();   // may be NULL
  _current_index = 0;
  // _hm needs no initialization
  _current_p = NULL;   // no PreviousVersionInfo handed out yet
}
3608 
3609 
3610 // Destroy a PreviousVersionWalker
PreviousVersionWalker::~PreviousVersionWalker() {
  // Delete the current info just in case the caller didn't walk to
  // the end of the previous versions list. No harm if _current_p is
  // already NULL (deleting NULL is a no-op).
  delete _current_p;

  // When _hm is destroyed, all the Handles returned in
  // PreviousVersionInfo objects will be destroyed.
  // Also, after this destructor is finished it will be
  // safe to delete the GrowableArray allocated in the
  // PreviousVersionInfo objects.
}
3623 
3624 
3625 // Return the interesting information for the next previous version
3626 // of the klass. Returns NULL if there are no more previous versions.
PreviousVersionInfo* PreviousVersionWalker::next_previous_version() {
  if (_previous_versions == NULL) {
    // no previous versions so nothing to return
    return NULL;
  }

  delete _current_p;  // cleanup the previous info for the caller
  _current_p = NULL;  // reset to NULL so we don't delete same object twice

  int length = _previous_versions->length();

  while (_current_index < length) {
    PreviousVersionNode * pv_node = _previous_versions->at(_current_index++);
    // C-heap allocated; owned by this walker until the next call to
    // next_previous_version() or the walker's destruction.
    PreviousVersionInfo * pv_info = new (ResourceObj::C_HEAP, mtClass)
                                          PreviousVersionInfo(pv_node);

    constantPoolHandle cp_h = pv_info->prev_constant_pool_handle();
    assert (!cp_h.is_null(), "null cp found in previous version");

    // The caller will need to delete pv_info when they are done with it.
    _current_p = pv_info;
    return pv_info;
  }

  // all of the underlying nodes' info has been deleted
  return NULL;
} // end next_previous_version()