/*
 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/verifier.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compileBroker.hpp"
#include "gc_implementation/shared/markSweep.inline.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "interpreter/oopMapCache.hpp"
#include "interpreter/rewriter.hpp"
#include "jvmtifiles/jvmti.h"
#include "memory/genOopClosures.inline.hpp"
#include "memory/oopFactory.hpp"
#include "memory/permGen.hpp"
#include "oops/fieldStreams.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/instanceOop.hpp"
#include "oops/methodOop.hpp"
#include "oops/objArrayKlassKlass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiRedefineClassesTrace.hpp"
#include "runtime/fieldDescriptor.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/mutexLocker.hpp"
#include "services/threadService.hpp"
#include "utilities/dtrace.hpp"
#ifdef TARGET_OS_FAMILY_linux
# include "thread_linux.inline.hpp"
#endif
#ifdef TARGET_OS_FAMILY_solaris
# include "thread_solaris.inline.hpp"
#endif
#ifdef TARGET_OS_FAMILY_windows
# include "thread_windows.inline.hpp"
#endif
#ifdef TARGET_OS_FAMILY_bsd
# include "thread_bsd.inline.hpp"
#endif
#ifndef SERIALGC
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/g1OopClosures.inline.hpp"
#include "gc_implementation/g1/g1RemSet.inline.hpp"
#include "gc_implementation/g1/heapRegionSeq.inline.hpp"
#include "gc_implementation/parNew/parOopClosures.inline.hpp"
#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
#include "oops/oop.pcgc.inline.hpp"
#endif
#ifdef COMPILER1
#include "c1/c1_Compiler.hpp"
#endif

#ifdef DTRACE_ENABLED

#ifndef USDT2

HS_DTRACE_PROBE_DECL4(hotspot, class__initialization__required,
  char*, intptr_t, oop, intptr_t);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__recursive,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__concurrent,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__erroneous,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__super__failed,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__clinit,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__error,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__end,
  char*, intptr_t, oop, intptr_t, int);

#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)          \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HS_DTRACE_PROBE4(hotspot, class__initialization__##type,     \
      data, len, (clss)->class_loader(), thread_type);           \
  }

#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HS_DTRACE_PROBE5(hotspot, class__initialization__##type,     \
      data, len, (clss)->class_loader(), thread_type, wait);     \
  }
#else /* USDT2 */

#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)          \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (clss)->class_loader(), thread_type);           \
  }

#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (clss)->class_loader(), thread_type, wait);     \
  }
#endif /* USDT2 */

#else //  ndef DTRACE_ENABLED

#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait)

#endif //  ndef DTRACE_ENABLED

bool instanceKlass::should_be_initialized() const {
  return !is_initialized();
}

klassVtable* instanceKlass::vtable() const {
  return new klassVtable(as_klassOop(), start_of_vtable(), vtable_length() / vtableEntry::size());
}

klassItable* instanceKlass::itable() const {
  return new klassItable(as_klassOop());
}

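// Eagerly initialize classes that can be proven free of initialization side
// effects: no <clinit> of their own and a superclass that is already
// initialized. Guarded by the EagerInitialization flag.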
void instanceKlass::eager_initialize(Thread *thread) {
  if (!EagerInitialization) return;

  if (this->is_not_initialized()) {
    // abort if the class has a class initializer
    if (this->class_initializer() != NULL) return;

    // abort if it is java.lang.Object (initialization is handled in genesis)
    klassOop super = this->super();
    if (super == NULL) return;

    // abort if the super class should be initialized
    if (!instanceKlass::cast(super)->is_initialized()) return;

    // call body to expose the this pointer
    instanceKlassHandle this_oop(thread, this->as_klassOop());
    eager_initialize_impl(this_oop);
  }
}


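// Takes the init lock and, unless another thread initialized the class in
// the meantime, links the class and marks it fully_initialized without
// running an initializer (the caller has checked that none exists).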
void instanceKlass::eager_initialize_impl(instanceKlassHandle this_oop) {
  EXCEPTION_MARK;
  ObjectLocker ol(this_oop, THREAD);

  // abort if someone beat us to the initialization
  if (!this_oop->is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = this_oop->init_state();
  link_class_impl(this_oop, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if (old_state != this_oop->_init_state)
      this_oop->set_init_state(old_state);
  } else {
    // linking successful, mark class as initialized
    this_oop->set_init_state(fully_initialized);
    // trace
    if (TraceClassInitialization) {
      ResourceMark rm(THREAD);
      tty->print_cr("[Initialized %s without side effects]", this_oop->external_name());
    }
  }
}

// See "The Java Virtual Machine Specification" section 2.16.5 for a detailed explanation
// of the class initialization process. The step comments refer to the procedure
// described in that section.
// Note: implementation moved to static method to expose the this pointer.
void instanceKlass::initialize(TRAPS) {
  if (this->should_be_initialized()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    initialize_impl(this_oop, CHECK);
    // Note: at this point the class may be initialized
    //       OR it may be in the state of being initialized
    //       in case of recursive initialization!
  } else {
    assert(is_initialized(), "sanity check");
  }
}


bool instanceKlass::verify_code(
    instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
  // 1) Verify the bytecodes
  Verifier::Mode mode =
    throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
  return Verifier::verify(this_oop, mode, this_oop->should_verify_class(), CHECK_false);
}


// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.

void instanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}

void instanceKlass::link_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    link_class_impl(this_oop, true, CHECK);
  }
}

// Called to verify that a class can link during initialization, without
// throwing a VerifyError.
bool instanceKlass::link_class_or_fail(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    link_class_impl(this_oop, false, CHECK_false);
  }
  return is_linked();
}

bool instanceKlass::link_class_impl(
    instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
  // check for error state
  if (this_oop->is_in_error_state()) {
    ResourceMark rm(THREAD);
    THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
               this_oop->external_name(), false);
  }
  // return if already linked
  if (this_oop->is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;

  // link super class before linking this class
  instanceKlassHandle super(THREAD, this_oop->super());
  if (super.not_null()) {
    if (super->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        this_oop->external_name(),
        super->external_name()
      );
      return false;
    }

    link_class_impl(super, throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  objArrayHandle interfaces (THREAD, this_oop->local_interfaces());
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    HandleMark hm(THREAD);
    instanceKlassHandle ih(THREAD, klassOop(interfaces->obj_at(index)));
    link_class_impl(ih, throw_verifyerror, CHECK_false);
  }

  // in case the class is linked in the process of linking its superclasses
  if (this_oop->is_linked()) {
    return true;
  }

  // trace the link time for this klass, which includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    ObjectLocker ol(this_oop, THREAD);
    // rewritten will have been set if a loader constraint error was found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten
    if (!this_oop->is_linked()) {
      if (!this_oop->is_rewritten()) {
        {
          // Timer includes any side effects of class verification (resolution,
          // etc), but not recursive entry into verify_code().
          PerfClassTraceTime timer(ClassLoader::perf_class_verify_time(),
                                   ClassLoader::perf_class_verify_selftime(),
                                   ClassLoader::perf_classes_verified(),
                                   jt->get_thread_stat()->perf_recursion_counts_addr(),
                                   jt->get_thread_stat()->perf_timers_addr(),
                                   PerfClassTraceTime::CLASS_VERIFY);
          bool verify_ok = verify_code(this_oop, throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (this_oop->is_linked()) {
          return true;
        }

        // also sets rewritten
        this_oop->rewrite_class(CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      this_oop->relocate_and_link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new methodOops.
      // also does loader constraint checking
      if (!this_oop()->is_shared()) {
        ResourceMark rm(THREAD);
        this_oop->vtable()->initialize_vtable(true, CHECK_false);
        this_oop->itable()->initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        ResourceMark rm(THREAD);
        this_oop->vtable()->verify(tty, true);
        // In case itable verification is ever added.
        // this_oop->itable()->verify(tty, true);
      }
#endif
      this_oop->set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this_oop());
      }
    }
  }
  return true;
}


// Rewrite the byte codes of all of the methods of a class.
// The rewriter must be called exactly once. Rewriting must happen after
// verification but before the first method of the class is executed.
void instanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  instanceKlassHandle this_oop(THREAD, this->as_klassOop());
  if (this_oop->is_rewritten()) {
    assert(this_oop()->is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this_oop, CHECK);
  this_oop->set_rewritten();
}

// Now relocate and link method entry points after class is rewritten.
// This is not guarded by the is_rewritten flag, so in case of an exception
// it can be executed more than once.
void instanceKlass::relocate_and_link_methods(TRAPS) {
  assert(is_loaded(), "must be loaded");
  instanceKlassHandle this_oop(THREAD, this->as_klassOop());
  Rewriter::relocate_and_link(this_oop, CHECK);
}


void instanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_oop->link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, instanceKlass::cast(this_oop()), -1);

  bool wait = false;

  // refer to the JVM book page 47 for description of steps
  // Step 1
  { ObjectLocker ol(this_oop, THREAD);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2
    // If we were to use wait() instead of waitUninterruptibly() then
    // we might end up throwing InterruptedException from link/symbol
    // resolution sites that aren't expected to throw.  This would wreak
    // havoc.  See 6320309.
    while (this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) {
      wait = true;
      ol.waitUninterruptibly(CHECK);
    }

    // Step 3
    if (this_oop->is_being_initialized() && this_oop->is_reentrant_initialization(self)) {
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, instanceKlass::cast(this_oop()), -1, wait);
      return;
    }

    // Step 4
    if (this_oop->is_initialized()) {
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, instanceKlass::cast(this_oop()), -1, wait);
      return;
    }

    // Step 5
    if (this_oop->is_in_error_state()) {
      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, instanceKlass::cast(this_oop()), -1, wait);
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = this_oop->external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      char* message = NEW_RESOURCE_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6
    this_oop->set_init_state(being_initialized);
    this_oop->set_init_thread(self);
  }

  // Step 7
  klassOop super_klass = this_oop->super();
  if (super_klass != NULL && !this_oop->is_interface() && Klass::cast(super_klass)->should_be_initialized()) {
    Klass::cast(super_klass)->initialize(THREAD);

    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        // Locks object, sets state, and notifies all waiting threads
        this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
        CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, superclass initialization error is thrown below
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, instanceKlass::cast(this_oop()), -1, wait);
      THROW_OOP(e());
    }
  }

  // Step 8
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, instanceKlass::cast(this_oop()), -1, wait);
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                             ClassLoader::perf_class_init_selftime(),
                             ClassLoader::perf_classes_inited(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_CLINIT);
    this_oop->call_class_initializer(THREAD);
  }

  // Step 9
  if (!HAS_PENDING_EXCEPTION) {
    this_oop->set_initialization_state_and_notify(fully_initialized, CHECK);
    { ResourceMark rm(THREAD);
      debug_only(this_oop->vtable()->verify(tty, true);)
    }
  }
  else {
    // Step 10 and 11
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    {
      EXCEPTION_MARK;
      this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, instanceKlass::cast(this_oop()), -1, wait);
    if (e->is_a(SystemDictionary::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, instanceKlass::cast(this_oop()), -1, wait);
}


// Note: implementation moved to static method to expose the this pointer.
void instanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  instanceKlassHandle kh(THREAD, this->as_klassOop());
  set_initialization_state_and_notify_impl(kh, state, CHECK);
}

void instanceKlass::set_initialization_state_and_notify_impl(instanceKlassHandle this_oop, ClassState state, TRAPS) {
  ObjectLocker ol(this_oop, THREAD);
  this_oop->set_init_state(state);
  ol.notify_all(CHECK);
}

// The embedded _implementor field can only record one implementor.
// When there is more than one implementor, the _implementor field
// is set to the interface klassOop itself. Following are the possible
// values for the _implementor field:
//   NULL                  - no implementor
//   implementor klassOop  - one implementor
//   self                  - more than one implementor
//
// The _implementor field only exists for interfaces.
void instanceKlass::add_implementor(klassOop k) {
  assert(Compile_lock->owned_by_self(), "");
  assert(is_interface(), "not interface");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (instanceKlass::cast(k)->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  klassOop sk = instanceKlass::cast(k)->super();
  if (sk != NULL && instanceKlass::cast(sk)->implements_interface(as_klassOop()))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  klassOop ik = implementor();
  if (ik == NULL) {
    set_implementor(k);
  } else if (ik != this->as_klassOop()) {
    // There is already an implementor. Use self as an indicator of
    // more than one implementor.
    set_implementor(this->as_klassOop());
  }

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    instanceKlass::cast(klassOop(local_interfaces()->obj_at(index)))->add_implementor(k);
  }
}

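// Only interfaces carry the _implementor field, so only interfaces have it
// cleared here; for all other klasses this is a no-op.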
void instanceKlass::init_implementor() {
  if (is_interface()) {
    set_implementor(NULL);
  }
}


void instanceKlass::process_interfaces(Thread *thread) {
  // link this class into the implementors list of every interface it implements
  KlassHandle this_as_oop (thread, this->as_klassOop());
  for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
    assert(local_interfaces()->obj_at(i)->is_klass(), "must be a klass");
    instanceKlass* interf = instanceKlass::cast(klassOop(local_interfaces()->obj_at(i)));
    assert(interf->is_interface(), "expected interface");
    interf->add_implementor(this_as_oop());
  }
}

bool instanceKlass::can_be_primary_super_slow() const {
  if (is_interface())
    return false;
  else
    return Klass::can_be_primary_super_slow();
}

objArrayOop instanceKlass::compute_secondary_supers(int num_extra_slots, TRAPS) {
  // The secondaries are the implemented interfaces.
  instanceKlass* ik = instanceKlass::cast(as_klassOop());
  objArrayHandle interfaces (THREAD, ik->transitive_interfaces());
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    return Universe::the_empty_system_obj_array();
  } else if (num_extra_slots == 0) {
    return interfaces();
  } else {
    // a mix of both
    objArrayOop secondaries = oopFactory::new_system_objArray(num_secondaries, CHECK_NULL);
    for (int i = 0; i < interfaces->length(); i++) {
      secondaries->obj_at_put(num_extra_slots+i, interfaces->obj_at(i));
    }
    return secondaries;
  }
}

bool instanceKlass::compute_is_subtype_of(klassOop k) {
  if (Klass::cast(k)->is_interface()) {
    return implements_interface(k);
  } else {
    return Klass::compute_is_subtype_of(k);
  }
}

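// Returns true if k is this klass itself or occurs among this klass's
// transitive interfaces (simple linear scan).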
bool instanceKlass::implements_interface(klassOop k) const {
  if (as_klassOop() == k) return true;
  assert(Klass::cast(k)->is_interface(), "should be an interface class");
  for (int i = 0; i < transitive_interfaces()->length(); i++) {
    if (transitive_interfaces()->obj_at(i) == k) {
      return true;
    }
  }
  return false;
}

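// Allocate an array of this klass, where n is the number of array
// dimensions and length is the outermost length; throws
// NegativeArraySizeException or OutOfMemoryError on bad sizes.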
objArrayOop instanceKlass::allocate_objArray(int n, int length, TRAPS) {
  if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
  if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
    report_java_out_of_memory("Requested array size exceeds VM limit");
    JvmtiExport::post_array_size_exhausted();
    THROW_OOP_0(Universe::out_of_memory_error_array_size());
  }
  int size = objArrayOopDesc::object_size(length);
  klassOop ak = array_klass(n, CHECK_NULL);
  KlassHandle h_ak (THREAD, ak);
  objArrayOop o =
    (objArrayOop)CollectedHeap::array_allocate(h_ak, size, length, CHECK_NULL);
  return o;
}

instanceOop instanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", (address)i);
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument; JavaCalls::call expects oops as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh (THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  return h_i();
}

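// Allocate a new instance of this klass on the Java heap. If the class has
// a finalizer and RegisterFinalizersAtInit is off, the instance is
// registered as finalizable here instead of in Object.<init>.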
instanceOop instanceKlass::allocate_instance(TRAPS) {
  assert(!oop_is_instanceMirror(), "wrong allocation path");
  bool has_finalizer_flag = has_finalizer(); // Query before possible GC
  int size = size_helper();  // Query before forming handle.

  KlassHandle h_k(THREAD, as_klassOop());

  instanceOop i;

  i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);
  if (has_finalizer_flag && !RegisterFinalizersAtInit) {
    i = register_finalizer(i, CHECK_NULL);
  }
  return i;
}

instanceOop instanceKlass::allocate_permanent_instance(TRAPS) {
  // Finalizer registration occurs in the Object.<init> constructor
  // and constructors normally aren't run when allocating perm
  // instances so simply disallow finalizable perm objects.  This can
  // be relaxed if a need for it is found.
  assert(!has_finalizer(), "perm objects not allowed to have finalizers");
  assert(!oop_is_instanceMirror(), "wrong allocation path");
  int size = size_helper();  // Query before forming handle.
  KlassHandle h_k(THREAD, as_klassOop());
  instanceOop i = (instanceOop)
    CollectedHeap::permanent_obj_allocate(h_k, size, CHECK_NULL);
  return i;
}

void instanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
  if (is_interface() || is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
              : vmSymbols::java_lang_InstantiationException(), external_name());
  }
  if (as_klassOop() == SystemDictionary::Class_klass()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
              : vmSymbols::java_lang_IllegalAccessException(), external_name());
  }
}

klassOop instanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
  instanceKlassHandle this_oop(THREAD, as_klassOop());
  return array_klass_impl(this_oop, or_null, n, THREAD);
}

klassOop instanceKlass::array_klass_impl(instanceKlassHandle this_oop, bool or_null, int n, TRAPS) {
  if (this_oop->array_klasses() == NULL) {
    if (or_null) return NULL;

    ResourceMark rm;
    JavaThread *jt = (JavaThread *)THREAD;
    {
      // Atomic creation of array_klasses
      MutexLocker mc(Compile_lock, THREAD);   // for vtables
      MutexLocker ma(MultiArray_lock, THREAD);

      // Check if update has already taken place
      if (this_oop->array_klasses() == NULL) {
        objArrayKlassKlass* oakk =
          (objArrayKlassKlass*)Universe::objArrayKlassKlassObj()->klass_part();

        klassOop  k = oakk->allocate_objArray_klass(1, this_oop, CHECK_NULL);
        this_oop->set_array_klasses(k);
      }
    }
  }
  // array_klasses() will always be set at this point
  objArrayKlass* oak = (objArrayKlass*)this_oop->array_klasses()->klass_part();
  if (or_null) {
    return oak->array_klass_or_null(n);
  }
  return oak->array_klass(n, CHECK_NULL);
}

klassOop instanceKlass::array_klass_impl(bool or_null, TRAPS) {
  return array_klass_impl(or_null, 1, THREAD);
}

void instanceKlass::call_class_initializer(TRAPS) {
  instanceKlassHandle ik (THREAD, as_klassOop());
  call_class_initializer_impl(ik, THREAD);
}

static int call_class_initializer_impl_counter = 0;   // for debugging

methodOop instanceKlass::class_initializer() {
  methodOop clinit = find_method(
      vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
  if (clinit != NULL && clinit->has_valid_initializer_flags()) {
    return clinit;
  }
  return NULL;
}

void instanceKlass::call_class_initializer_impl(instanceKlassHandle this_oop, TRAPS) {
  methodHandle h_method(THREAD, this_oop->class_initializer());
  assert(!this_oop->is_initialized(), "we cannot initialize twice");
  if (TraceClassInitialization) {
    tty->print("%d Initializing ", call_class_initializer_impl_counter++);
    this_oop->name()->print_value();
    tty->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", (address)this_oop());
  }
  if (h_method() != NULL) {
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}


void instanceKlass::mask_for(methodHandle method, int bci,
  InterpreterOopMap* entry_for) {
  // Dirty read of the cache, then double-check under a lock.
  if (_oop_map_cache == NULL) {
    MutexLocker x(OopMapCacheAlloc_lock);
    // First time use. Allocate a cache in C heap.
    if (_oop_map_cache == NULL) {
      _oop_map_cache = new OopMapCache();
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
  _oop_map_cache->lookup(method, bci, entry_for);
}


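// Search only the fields declared by this klass for a name/signature match;
// fills in the field descriptor and returns true on success.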
bool instanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(as_klassOop()); !fs.done(); fs.next()) {
    Symbol* f_name = fs.name();
    Symbol* f_sig  = fs.signature();
    if (f_name == name && f_sig == sig) {
      fd->initialize(as_klassOop(), fs.index());
      return true;
    }
  }
  return false;
}


void instanceKlass::shared_symbols_iterate(SymbolClosure* closure) {
  Klass::shared_symbols_iterate(closure);
  closure->do_symbol(&_generic_signature);
  closure->do_symbol(&_source_file_name);
  closure->do_symbol(&_source_debug_extension);

  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    int name_index = fs.name_index();
    closure->do_symbol(constants()->symbol_at_addr(name_index));
    int sig_index  = fs.signature_index();
    closure->do_symbol(constants()->symbol_at_addr(sig_index));
  }
}


klassOop instanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  const int n = local_interfaces()->length();
  for (int i = 0; i < n; i++) {
    klassOop intf1 = klassOop(local_interfaces()->obj_at(i));
    assert(Klass::cast(intf1)->is_interface(), "just checking type");
    // search for field in current interface
    if (instanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
      assert(fd->is_static(), "interface field must be static");
      return intf1;
    }
    // search for field in direct superinterfaces
    klassOop intf2 = instanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
    if (intf2 != NULL) return intf2;
  }
  // otherwise field lookup fails
  return NULL;
}


klassOop instanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    return as_klassOop();
  }
  // 2) search for field recursively in direct superinterfaces
  { klassOop intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { klassOop supr = super();
    if (supr != NULL) return instanceKlass::cast(supr)->find_field(name, sig, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


klassOop instanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    if (fd->is_static() == is_static) return as_klassOop();
  }
  // 2) search for field recursively in direct superinterfaces
  if (is_static) {
    klassOop intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { klassOop supr = super();
    if (supr != NULL) return instanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


bool instanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(as_klassOop()); !fs.done(); fs.next()) {
    if (fs.offset() == offset) {
      fd->initialize(as_klassOop(), fs.index());
      if (fd->is_static() == is_static) return true;
    }
  }
  return false;
}


bool instanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  klassOop klass = as_klassOop();
  while (klass != NULL) {
    if (instanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
      return true;
    }
    klass = Klass::cast(klass)->super();
  }
  return false;
}


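// Apply f to each method declared by this klass.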
void instanceKlass::methods_do(void f(methodOop method)) {
  int len = methods()->length();
  for (int index = 0; index < len; index++) {
    methodOop m = methodOop(methods()->obj_at(index));
    assert(m->is_method(), "must be method");
    f(m);
  }
}


void instanceKlass::do_local_static_fields(FieldClosure* cl) {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor fd;
      fd.initialize(as_klassOop(), fs.index());
      cl->do_field(&fd);
    }
  }
}


void instanceKlass::do_local_static_fields(void f(fieldDescriptor*, TRAPS), TRAPS) {
  instanceKlassHandle h_this(THREAD, as_klassOop());
  do_local_static_fields_impl(h_this, f, CHECK);
}


void instanceKlass::do_local_static_fields_impl(instanceKlassHandle this_oop, void f(fieldDescriptor* fd, TRAPS), TRAPS) {
  for (JavaFieldStream fs(this_oop()); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor fd;
      fd.initialize(this_oop(), fs.index());
      f(&fd, CHECK);
    }
  }
}


static int compare_fields_by_offset(int* a, int* b) {
  return a[0] - b[0];
}

void instanceKlass::do_nonstatic_fields(FieldClosure* cl) {
  instanceKlass* super = superklass();
  if (super != NULL) {
    super->do_nonstatic_fields(cl);
  }
  fieldDescriptor fd;
  int length = java_fields_count();
  // In DebugInfo nonstatic fields are sorted by offset.
  int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
  int j = 0;
  for (int i = 0; i < length; i += 1) {
    fd.initialize(as_klassOop(), i);
    if (!fd.is_static()) {
      fields_sorted[j + 0] = fd.offset();
      fields_sorted[j + 1] = i;
      j += 2;
    }
  }
  if (j > 0) {
    length = j;
    // _sort_Fn is defined in growableArray.hpp.
    qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
    for (int i = 0; i < length; i += 2) {
      fd.initialize(as_klassOop(), fields_sorted[i + 1]);
      assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
  FREE_C_HEAP_ARRAY(int, fields_sorted, mtClass);
}


void instanceKlass::array_klasses_do(void f(klassOop k)) {
  if (array_klasses() != NULL)
    arrayKlass::cast(array_klasses())->array_klasses_do(f);
}


void instanceKlass::with_array_klasses_do(void f(klassOop k)) {
  f(as_klassOop());
  array_klasses_do(f);
}

#ifdef ASSERT
static int linear_search(objArrayOop methods, Symbol* name, Symbol* signature) {
  int len = methods->length();
  for (int index = 0; index < len; index++) {
    methodOop m = (methodOop)(methods->obj_at(index));
    assert(m->is_method(), "must be method");
    if (m->signature() == signature && m->name() == name) {
       return index;
    }
  }
  return -1;
}
#endif

methodOop instanceKlass::find_method(Symbol* name, Symbol* signature) const {
  return instanceKlass::find_method(methods(), name, signature);
}

methodOop instanceKlass::find_method(objArrayOop methods, Symbol* name, Symbol* signature) {
  int len = methods->length();
  // methods are sorted, so do binary search
  int l = 0;
  int h = len - 1;
  while (l <= h) {
    int mid = (l + h) >> 1;
    methodOop m = (methodOop)methods->obj_at(mid);
    assert(m->is_method(), "must be method");
    int res = m->name()->fast_compare(name);
    if (res == 0) {
      // found matching name; do linear search to find matching signature
      // first, quick check for common case
      if (m->signature() == signature) return m;
      // search downwards through overloaded methods
      int i;
      for (i = mid - 1; i >= l; i--) {
        methodOop m = (methodOop)methods->obj_at(i);
        assert(m->is_method(), "must be method");
        if (m->name() != name) break;
        if (m->signature() == signature) return m;
      }
      // search upwards
      for (i = mid + 1; i <= h; i++) {
        methodOop m = (methodOop)methods->obj_at(i);
        assert(m->is_method(), "must be method");
        if (m->name() != name) break;
        if (m->signature() == signature) return m;
      }
      // not found
#ifdef ASSERT
      int index = linear_search(methods, name, signature);
      assert(index == -1, err_msg("binary search should have found entry %d", index));
#endif
      return NULL;
    } else if (res < 0) {
      l = mid + 1;
    } else {
      h = mid - 1;
    }
  }
#ifdef ASSERT
  int index = linear_search(methods, name, signature);
  assert(index == -1, err_msg("binary search should have found entry %d", index));
#endif
  return NULL;
}

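// Uncached method lookup by name and signature: walks this klass and its
// superclasses only; superinterfaces are not searched.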
methodOop instanceKlass::uncached_lookup_method(Symbol* name, Symbol* signature) const {
  klassOop klass = as_klassOop();
  while (klass != NULL) {
    methodOop method = instanceKlass::cast(klass)->find_method(name, signature);
    if (method != NULL) return method;
    klass = instanceKlass::cast(klass)->super();
  }
  return NULL;
}

// lookup a method in all the interfaces that this class implements
methodOop instanceKlass::lookup_method_in_all_interfaces(Symbol* name,
                                                         Symbol* signature) const {
  objArrayOop all_ifs = instanceKlass::cast(as_klassOop())->transitive_interfaces();
  int num_ifs = all_ifs->length();
  instanceKlass *ik = NULL;
  for (int i = 0; i < num_ifs; i++) {
    ik = instanceKlass::cast(klassOop(all_ifs->obj_at(i)));
    methodOop m = ik->lookup_method(name, signature);
    if (m != NULL) {
      return m;
    }
  }
  return NULL;
}

/* jni_id_for_impl for jfieldIds only */
JNIid* instanceKlass::jni_id_for_impl(instanceKlassHandle this_oop, int offset) {
  MutexLocker ml(JfieldIdCreation_lock);
  // Retry lookup after we got the lock
  JNIid* probe = this_oop->jni_ids() == NULL ? NULL : this_oop->jni_ids()->find(offset);
  if (probe == NULL) {
    // Slow case, allocate new static field identifier
    probe = new JNIid(this_oop->as_klassOop(), offset, this_oop->jni_ids());
    this_oop->set_jni_ids(probe);
  }
  return probe;
}


/* jni_id_for for jfieldIds only */
JNIid* instanceKlass::jni_id_for(int offset) {
  JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
  if (probe == NULL) {
    probe = jni_id_for_impl(this->as_klassOop(), offset);
  }
  return probe;
}

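// When an EnclosingMethod attribute is present, its class and method
// indices occupy the last enclosing_method_attribute_size slots of the
// _inner_classes array; offset selects a u2 within that trailing record.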
1142 u2 instanceKlass::enclosing_method_data(int offset) {
1143   typeArrayOop inner_class_list = inner_classes();
1144   if (inner_class_list == NULL) {
1145     return 0;
1146   }
1147   int length = inner_class_list->length();
1148   if (length % inner_class_next_offset == 0) {
1149     return 0;
1150   } else {
1151     int index = length - enclosing_method_attribute_size;
1152     typeArrayHandle inner_class_list_h(inner_class_list);
1153     assert(offset < enclosing_method_attribute_size, "invalid offset");
1154     return inner_class_list_h->ushort_at(index + offset);
1155   }
1156 }
1157 
1158 void instanceKlass::set_enclosing_method_indices(u2 class_index,
1159                                                  u2 method_index) {
1160   typeArrayOop inner_class_list = inner_classes();
1161   assert (inner_class_list != NULL, "_inner_classes list is not set up");
1162   int length = inner_class_list->length();
1163   if (length % inner_class_next_offset == enclosing_method_attribute_size) {
1164     int index = length - enclosing_method_attribute_size;
1165     typeArrayHandle inner_class_list_h(inner_class_list);
1166     inner_class_list_h->ushort_at_put(
1167       index + enclosing_method_class_index_offset, class_index);
1168     inner_class_list_h->ushort_at_put(
1169       index + enclosing_method_method_index_offset, method_index);
1170   }
1171 }
1172 
1173 // Lookup or create a jmethodID.
1174 // This code is called by the VMThread and JavaThreads so the
1175 // locking has to be done very carefully to avoid deadlocks
1176 // and/or other cache consistency problems.
1177 //
1178 jmethodID instanceKlass::get_jmethod_id(instanceKlassHandle ik_h, methodHandle method_h) {
1179   size_t idnum = (size_t)method_h->method_idnum();
1180   jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
1181   size_t length = 0;
1182   jmethodID id = NULL;
1183 
1184   // We use a double-check locking idiom here because this cache is
1185   // performance sensitive. In the normal system, this cache only
1186   // transitions from NULL to non-NULL which is safe because we use
1187   // release_set_methods_jmethod_ids() to advertise the new cache.
1188   // A partially constructed cache should never be seen by a racing
1189   // thread. We also use release_store_ptr() to save a new jmethodID
1190   // in the cache so a partially constructed jmethodID should never be
1191   // seen either. Cache reads of existing jmethodIDs proceed without a
1192   // lock, but cache writes of a new jmethodID requires uniqueness and
1193   // creation of the cache itself requires no leaks so a lock is
1194   // generally acquired in those two cases.
1195   //
1196   // If the RedefineClasses() API has been used, then this cache can
1197   // grow and we'll have transitions from non-NULL to bigger non-NULL.
1198   // Cache creation requires no leaks and we require safety between all
1199   // cache accesses and freeing of the old cache so a lock is generally
1200   // acquired when the RedefineClasses() API has been used.
1201 
1202   if (jmeths != NULL) {
1203     // the cache already exists
1204     if (!ik_h->idnum_can_increment()) {
1205       // the cache can't grow so we can just get the current values
1206       get_jmethod_id_length_value(jmeths, idnum, &length, &id);
1207     } else {
1208       // cache can grow so we have to be more careful
1209       if (Threads::number_of_threads() == 0 ||
1210           SafepointSynchronize::is_at_safepoint()) {
1211         // we're single threaded or at a safepoint - no locking needed
1212         get_jmethod_id_length_value(jmeths, idnum, &length, &id);
1213       } else {
1214         MutexLocker ml(JmethodIdCreation_lock);
1215         get_jmethod_id_length_value(jmeths, idnum, &length, &id);
1216       }
1217     }
1218   }
1219   // implied else:
1220   // we need to allocate a cache so default length and id values are good
1221 
1222   if (jmeths == NULL ||   // no cache yet
1223       length <= idnum ||  // cache is too short
1224       id == NULL) {       // cache doesn't contain entry
1225 
1226     // This function can be called by the VMThread so we have to do all
1227     // things that might block on a safepoint before grabbing the lock.
1228     // Otherwise, we can deadlock with the VMThread or have a cache
1229     // consistency issue. These vars keep track of what we might have
1230     // to free after the lock is dropped.
1231     jmethodID  to_dealloc_id     = NULL;
1232     jmethodID* to_dealloc_jmeths = NULL;
1233 
1234     // may not allocate new_jmeths or use it if we allocate it
1235     jmethodID* new_jmeths = NULL;
1236     if (length <= idnum) {
1237       // allocate a new cache that might be used
1238       size_t size = MAX2(idnum+1, (size_t)ik_h->idnum_allocated_count());
1239       new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
1240       memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
1241       // cache size is stored in element[0], other elements offset by one
1242       new_jmeths[0] = (jmethodID)size;
1243     }
1244 
1245     // allocate a new jmethodID that might be used
1246     jmethodID new_id = NULL;
1247     if (method_h->is_old() && !method_h->is_obsolete()) {
1248       // The method passed in is old (but not obsolete), we need to use the current version
1249       methodOop current_method = ik_h->method_with_idnum((int)idnum);
1250       assert(current_method != NULL, "old and but not obsolete, so should exist");
1251       methodHandle current_method_h(current_method == NULL? method_h() : current_method);
1252       new_id = JNIHandles::make_jmethod_id(current_method_h);
1253     } else {
1254       // It is the current version of the method or an obsolete method,
1255       // use the version passed in
1256       new_id = JNIHandles::make_jmethod_id(method_h);
1257     }
1258 
1259     if (Threads::number_of_threads() == 0 ||
1260         SafepointSynchronize::is_at_safepoint()) {
1261       // we're single threaded or at a safepoint - no locking needed
1262       id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
1263                                           &to_dealloc_id, &to_dealloc_jmeths);
1264     } else {
1265       MutexLocker ml(JmethodIdCreation_lock);
1266       id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
1267                                           &to_dealloc_id, &to_dealloc_jmeths);
1268     }
1269 
1270     // The lock has been dropped so we can free resources.
1271     // Free up either the old cache or the new cache if we allocated one.
1272     if (to_dealloc_jmeths != NULL) {
1273       FreeHeap(to_dealloc_jmeths);
1274     }
1275     // free up the new ID since it wasn't needed
1276     if (to_dealloc_id != NULL) {
1277       JNIHandles::destroy_jmethod_id(to_dealloc_id);
1278     }
1279   }
1280   return id;
1281 }
1282 
1283 
1284 // Common code to fetch the jmethodID from the cache or update the
1285 // cache with the new jmethodID. This function should never do anything
1286 // that causes the caller to go to a safepoint or we can deadlock with
1287 // the VMThread or have cache consistency issues.
1288 //
1289 jmethodID instanceKlass::get_jmethod_id_fetch_or_update(
1290             instanceKlassHandle ik_h, size_t idnum, jmethodID new_id,
1291             jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
1292             jmethodID** to_dealloc_jmeths_p) {
1293   assert(new_id != NULL, "sanity check");
1294   assert(to_dealloc_id_p != NULL, "sanity check");
1295   assert(to_dealloc_jmeths_p != NULL, "sanity check");
1296   assert(Threads::number_of_threads() == 0 ||
1297          SafepointSynchronize::is_at_safepoint() ||
1298          JmethodIdCreation_lock->owned_by_self(), "sanity check");
1299 
1300   // reacquire the cache - we are locked, single threaded or at a safepoint
1301   jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
1302   jmethodID  id     = NULL;
1303   size_t     length = 0;
1304 
1305   if (jmeths == NULL ||                         // no cache yet
1306       (length = (size_t)jmeths[0]) <= idnum) {  // cache is too short
1307     if (jmeths != NULL) {
1308       // copy any existing entries from the old cache
1309       for (size_t index = 0; index < length; index++) {
1310         new_jmeths[index+1] = jmeths[index+1];
1311       }
1312       *to_dealloc_jmeths_p = jmeths;  // save old cache for later delete
1313     }
1314     ik_h->release_set_methods_jmethod_ids(jmeths = new_jmeths);
1315   } else {
1316     // fetch jmethodID (if any) from the existing cache
1317     id = jmeths[idnum+1];
1318     *to_dealloc_jmeths_p = new_jmeths;  // save new cache for later delete
1319   }
1320   if (id == NULL) {
1321     // No matching jmethodID in the existing cache or we have a new
1322     // cache or we just grew the cache. This cache write is done here
1323     // by the first thread to win the foot race because a jmethodID
1324     // needs to be unique once it is generally available.
1325     id = new_id;
1326 
1327     // The jmethodID cache can be read while unlocked so we have to
1328     // make sure the new jmethodID is complete before installing it
1329     // in the cache.
1330     OrderAccess::release_store_ptr(&jmeths[idnum+1], id);
1331   } else {
1332     *to_dealloc_id_p = new_id; // save new id for later delete
1333   }
1334   return id;
1335 }
1336 
1337 
1338 // Common code to get the jmethodID cache length and the jmethodID
1339 // value at index idnum if there is one.
1340 //
1341 void instanceKlass::get_jmethod_id_length_value(jmethodID* cache,
1342        size_t idnum, size_t *length_p, jmethodID* id_p) {
1343   assert(cache != NULL, "sanity check");
1344   assert(length_p != NULL, "sanity check");
1345   assert(id_p != NULL, "sanity check");
1346 
1347   // cache size is stored in element[0], other elements offset by one
1348   *length_p = (size_t)cache[0];
1349   if (*length_p <= idnum) {  // cache is too short
1350     *id_p = NULL;
1351   } else {
1352     *id_p = cache[idnum+1];  // fetch jmethodID (if any)
1353   }
1354 }
1355 
1356 
// Look up a jmethodID; returns NULL if not found. Does no blocking, no allocations, no handles.
1358 jmethodID instanceKlass::jmethod_id_or_null(methodOop method) {
1359   size_t idnum = (size_t)method->method_idnum();
1360   jmethodID* jmeths = methods_jmethod_ids_acquire();
1361   size_t length;                                // length assigned as debugging crumb
1362   jmethodID id = NULL;
1363   if (jmeths != NULL &&                         // If there is a cache
1364       (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
1365     id = jmeths[idnum+1];                       // Look up the id (may be NULL)
1366   }
1367   return id;
1368 }
1369 
1370 
1371 // Cache an itable index
1372 void instanceKlass::set_cached_itable_index(size_t idnum, int index) {
1373   int* indices = methods_cached_itable_indices_acquire();
1374   int* to_dealloc_indices = NULL;
1375 
1376   // We use a double-check locking idiom here because this cache is
1377   // performance sensitive. In the normal system, this cache only
1378   // transitions from NULL to non-NULL which is safe because we use
1379   // release_set_methods_cached_itable_indices() to advertise the
1380   // new cache. A partially constructed cache should never be seen
1381   // by a racing thread. Cache reads and writes proceed without a
  // lock, but creation of the cache itself must not leak memory, so a
  // lock is generally acquired in that case.
1384   //
1385   // If the RedefineClasses() API has been used, then this cache can
1386   // grow and we'll have transitions from non-NULL to bigger non-NULL.
1387   // Cache creation requires no leaks and we require safety between all
1388   // cache accesses and freeing of the old cache so a lock is generally
1389   // acquired when the RedefineClasses() API has been used.
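  //
  // A minimal sketch of the double-check idiom used below, assuming a
  // hypothetical cache_acquire()/release_set_cache() pair with the same
  // acquire/release semantics as the accessors in this file:
  //
  //   int* c = cache_acquire();          // first check, no lock held
  //   if (c == NULL) {
  //     MutexLocker ml(SomeCache_lock);  // hypothetical lock
  //     c = cache_acquire();             // second check, under the lock
  //     if (c == NULL) {
  //       c = allocate_and_fill_cache(); // hypothetical helper
  //       release_set_cache(c);          // publish fully-built cache
  //     }
  //   }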
1390 
1391   if (indices == NULL || idnum_can_increment()) {
1392     // we need a cache or the cache can grow
1393     MutexLocker ml(JNICachedItableIndex_lock);
1394     // reacquire the cache to see if another thread already did the work
1395     indices = methods_cached_itable_indices_acquire();
1396     size_t length = 0;
1397     // cache size is stored in element[0], other elements offset by one
1398     if (indices == NULL || (length = (size_t)indices[0]) <= idnum) {
1399       size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
1400       int* new_indices = NEW_C_HEAP_ARRAY(int, size+1, mtClass);
1401       new_indices[0] = (int)size;
1402       // copy any existing entries
1403       size_t i;
1404       for (i = 0; i < length; i++) {
1405         new_indices[i+1] = indices[i+1];
1406       }
1407       // Set all the rest to -1
1408       for (i = length; i < size; i++) {
1409         new_indices[i+1] = -1;
1410       }
1411       if (indices != NULL) {
1412         // We have an old cache to delete so save it for after we
1413         // drop the lock.
1414         to_dealloc_indices = indices;
1415       }
1416       release_set_methods_cached_itable_indices(indices = new_indices);
1417     }
1418 
1419     if (idnum_can_increment()) {
1420       // this cache can grow so we have to write to it safely
1421       indices[idnum+1] = index;
1422     }
1423   } else {
1424     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
1425   }
1426 
1427   if (!idnum_can_increment()) {
1428     // The cache cannot grow and this JNI itable index value does not
1429     // have to be unique like a jmethodID. If there is a race to set it,
1430     // it doesn't matter.
1431     indices[idnum+1] = index;
1432   }
1433 
1434   if (to_dealloc_indices != NULL) {
1435     // we allocated a new cache so free the old one
1436     FreeHeap(to_dealloc_indices);
1437   }
1438 }
1439 
1440 
1441 // Retrieve a cached itable index
1442 int instanceKlass::cached_itable_index(size_t idnum) {
1443   int* indices = methods_cached_itable_indices_acquire();
1444   if (indices != NULL && ((size_t)indices[0]) > idnum) {
    // indices exist and are long enough; retrieve the possibly cached index
1446     return indices[idnum+1];
1447   }
1448   return -1;
1449 }
1450 
1451 
1452 //
1453 // Walk the list of dependent nmethods searching for nmethods which
1454 // are dependent on the changes that were passed in and mark them for
1455 // deoptimization.  Returns the number of nmethods found.
1456 //
1457 int instanceKlass::mark_dependent_nmethods(DepChange& changes) {
1458   assert_locked_or_safepoint(CodeCache_lock);
1459   int found = 0;
1460   nmethodBucket* b = _dependencies;
1461   while (b != NULL) {
1462     nmethod* nm = b->get_nmethod();
1463     // since dependencies aren't removed until an nmethod becomes a zombie,
1464     // the dependency list may contain nmethods which aren't alive.
1465     if (nm->is_alive() && !nm->is_marked_for_deoptimization() && nm->check_dependency_on(changes)) {
1466       if (TraceDependencies) {
1467         ResourceMark rm;
1468         tty->print_cr("Marked for deoptimization");
1469         tty->print_cr("  context = %s", this->external_name());
1470         changes.print();
1471         nm->print();
1472         nm->print_dependencies();
1473       }
1474       nm->mark_for_deoptimization();
1475       found++;
1476     }
1477     b = b->next();
1478   }
1479   return found;
1480 }
1481 
1482 
1483 //
1484 // Add an nmethodBucket to the list of dependencies for this nmethod.
1485 // It's possible that an nmethod has multiple dependencies on this klass
1486 // so a count is kept for each bucket to guarantee that creation and
1487 // deletion of dependencies is consistent.
1488 //
1489 void instanceKlass::add_dependent_nmethod(nmethod* nm) {
1490   assert_locked_or_safepoint(CodeCache_lock);
1491   nmethodBucket* b = _dependencies;
1493   while (b != NULL) {
1494     if (nm == b->get_nmethod()) {
1495       b->increment();
1496       return;
1497     }
1498     b = b->next();
1499   }
1500   _dependencies = new nmethodBucket(nm, _dependencies);
1501 }
1502 
1503 
1504 //
1505 // Decrement count of the nmethod in the dependency list and remove
// the bucket completely when the count goes to 0.  This method must
// find a corresponding bucket otherwise there's a bug in the
// recording of dependencies.
1509 //
1510 void instanceKlass::remove_dependent_nmethod(nmethod* nm) {
1511   assert_locked_or_safepoint(CodeCache_lock);
1512   nmethodBucket* b = _dependencies;
1513   nmethodBucket* last = NULL;
1514   while (b != NULL) {
1515     if (nm == b->get_nmethod()) {
1516       if (b->decrement() == 0) {
1517         if (last == NULL) {
1518           _dependencies = b->next();
1519         } else {
1520           last->set_next(b->next());
1521         }
1522         delete b;
1523       }
1524       return;
1525     }
1526     last = b;
1527     b = b->next();
1528   }
1529 #ifdef ASSERT
1530   tty->print_cr("### %s can't find dependent nmethod:", this->external_name());
1531   nm->print();
1532 #endif // ASSERT
1533   ShouldNotReachHere();
1534 }
1535 
1536 
1537 #ifndef PRODUCT
1538 void instanceKlass::print_dependent_nmethods(bool verbose) {
1539   nmethodBucket* b = _dependencies;
1540   int idx = 0;
1541   while (b != NULL) {
1542     nmethod* nm = b->get_nmethod();
1543     tty->print("[%d] count=%d { ", idx++, b->count());
1544     if (!verbose) {
1545       nm->print_on(tty, "nmethod");
1546       tty->print_cr(" } ");
1547     } else {
1548       nm->print();
1549       nm->print_dependencies();
1550       tty->print_cr("--- } ");
1551     }
1552     b = b->next();
1553   }
1554 }
1555 
1556 
1557 bool instanceKlass::is_dependent_nmethod(nmethod* nm) {
1558   nmethodBucket* b = _dependencies;
1559   while (b != NULL) {
1560     if (nm == b->get_nmethod()) {
1561       return true;
1562     }
1563     b = b->next();
1564   }
1565   return false;
1566 }
1567 #endif //PRODUCT
1568 
1569 
1570 #ifdef ASSERT
1571 template <class T> void assert_is_in(T *p) {
1572   T heap_oop = oopDesc::load_heap_oop(p);
1573   if (!oopDesc::is_null(heap_oop)) {
1574     oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
1575     assert(Universe::heap()->is_in(o), "should be in heap");
1576   }
1577 }
1578 template <class T> void assert_is_in_closed_subset(T *p) {
1579   T heap_oop = oopDesc::load_heap_oop(p);
1580   if (!oopDesc::is_null(heap_oop)) {
1581     oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
1582     assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");
1583   }
1584 }
1585 template <class T> void assert_is_in_reserved(T *p) {
1586   T heap_oop = oopDesc::load_heap_oop(p);
1587   if (!oopDesc::is_null(heap_oop)) {
1588     oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
1589     assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
1590   }
1591 }
1592 template <class T> void assert_nothing(T *p) {}
1593 
1594 #else
1595 template <class T> void assert_is_in(T *p) {}
1596 template <class T> void assert_is_in_closed_subset(T *p) {}
1597 template <class T> void assert_is_in_reserved(T *p) {}
1598 template <class T> void assert_nothing(T *p) {}
1599 #endif // ASSERT
1600 
1601 //
1602 // Macros that iterate over areas of oops which are specialized on type of
1603 // oop pointer either narrow or wide, depending on UseCompressedOops
1604 //
1605 // Parameters are:
1606 //   T         - type of oop to point to (either oop or narrowOop)
1607 //   start_p   - starting pointer for region to iterate over
1608 //   count     - number of oops or narrowOops to iterate over
1609 //   do_oop    - action to perform on each oop (it's arbitrary C code which
1610 //               makes it more efficient to put in a macro rather than making
1611 //               it a template function)
1612 //   assert_fn - assert function which is template function because performance
1613 //               doesn't matter when enabled.
1614 #define InstanceKlass_SPECIALIZED_OOP_ITERATE( \
1615   T, start_p, count, do_oop,                \
1616   assert_fn)                                \
1617 {                                           \
1618   T* p         = (T*)(start_p);             \
1619   T* const end = p + (count);               \
1620   while (p < end) {                         \
1621     (assert_fn)(p);                         \
1622     do_oop;                                 \
1623     ++p;                                    \
1624   }                                         \
1625 }
1626 
1627 #define InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE( \
1628   T, start_p, count, do_oop,                \
1629   assert_fn)                                \
1630 {                                           \
1631   T* const start = (T*)(start_p);           \
1632   T*       p     = start + (count);         \
1633   while (start < p) {                       \
1634     --p;                                    \
1635     (assert_fn)(p);                         \
1636     do_oop;                                 \
1637   }                                         \
1638 }
1639 
1640 #define InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
1641   T, start_p, count, low, high,             \
1642   do_oop, assert_fn)                        \
1643 {                                           \
1644   T* const l = (T*)(low);                   \
1645   T* const h = (T*)(high);                  \
1646   assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
1647          mask_bits((intptr_t)h, sizeof(T)-1) == 0,   \
1648          "bounded region must be properly aligned"); \
1649   T* p       = (T*)(start_p);               \
1650   T* end     = p + (count);                 \
1651   if (p < l) p = l;                         \
1652   if (end > h) end = h;                     \
1653   while (p < end) {                         \
1654     (assert_fn)(p);                         \
1655     do_oop;                                 \
1656     ++p;                                    \
1657   }                                         \
1658 }
1659 
1660 
1661 // The following macros call specialized macros, passing either oop or
1662 // narrowOop as the specialization type.  These test the UseCompressedOops
1663 // flag.
1664 #define InstanceKlass_OOP_MAP_ITERATE(obj, do_oop, assert_fn)            \
1665 {                                                                        \
1666   /* Compute oopmap block range. The common case                         \
1667      is nonstatic_oop_map_size == 1. */                                  \
1668   OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
1669   OopMapBlock* const end_map = map + nonstatic_oop_map_count();          \
1670   if (UseCompressedOops) {                                               \
1671     while (map < end_map) {                                              \
1672       InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop,                   \
1673         obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
1674         do_oop, assert_fn)                                               \
1675       ++map;                                                             \
1676     }                                                                    \
1677   } else {                                                               \
1678     while (map < end_map) {                                              \
1679       InstanceKlass_SPECIALIZED_OOP_ITERATE(oop,                         \
1680         obj->obj_field_addr<oop>(map->offset()), map->count(),           \
1681         do_oop, assert_fn)                                               \
1682       ++map;                                                             \
1683     }                                                                    \
1684   }                                                                      \
1685 }
1686 
1687 #define InstanceKlass_OOP_MAP_REVERSE_ITERATE(obj, do_oop, assert_fn)    \
1688 {                                                                        \
1689   OopMapBlock* const start_map = start_of_nonstatic_oop_maps();          \
1690   OopMapBlock* map             = start_map + nonstatic_oop_map_count();  \
1691   if (UseCompressedOops) {                                               \
1692     while (start_map < map) {                                            \
1693       --map;                                                             \
1694       InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(narrowOop,           \
1695         obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
1696         do_oop, assert_fn)                                               \
1697     }                                                                    \
1698   } else {                                                               \
1699     while (start_map < map) {                                            \
1700       --map;                                                             \
1701       InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(oop,                 \
1702         obj->obj_field_addr<oop>(map->offset()), map->count(),           \
1703         do_oop, assert_fn)                                               \
1704     }                                                                    \
1705   }                                                                      \
1706 }
1707 
1708 #define InstanceKlass_BOUNDED_OOP_MAP_ITERATE(obj, low, high, do_oop,    \
1709                                               assert_fn)                 \
1710 {                                                                        \
1711   /* Compute oopmap block range. The common case is                      \
1712      nonstatic_oop_map_size == 1, so we accept the                       \
1713      usually non-existent extra overhead of examining                    \
1714      all the maps. */                                                    \
1715   OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
1716   OopMapBlock* const end_map = map + nonstatic_oop_map_count();          \
1717   if (UseCompressedOops) {                                               \
1718     while (map < end_map) {                                              \
1719       InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,           \
1720         obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
1721         low, high,                                                       \
1722         do_oop, assert_fn)                                               \
1723       ++map;                                                             \
1724     }                                                                    \
1725   } else {                                                               \
1726     while (map < end_map) {                                              \
1727       InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,                 \
1728         obj->obj_field_addr<oop>(map->offset()), map->count(),           \
1729         low, high,                                                       \
1730         do_oop, assert_fn)                                               \
1731       ++map;                                                             \
1732     }                                                                    \
1733   }                                                                      \
1734 }
1735 
1736 void instanceKlass::oop_follow_contents(oop obj) {
1737   assert(obj != NULL, "can't follow the content of NULL object");
1738   obj->follow_header();
1739   InstanceKlass_OOP_MAP_ITERATE( \
1740     obj, \
1741     MarkSweep::mark_and_push(p), \
1742     assert_is_in_closed_subset)
1743 }
1744 
1745 #ifndef SERIALGC
1746 void instanceKlass::oop_follow_contents(ParCompactionManager* cm,
1747                                         oop obj) {
1748   assert(obj != NULL, "can't follow the content of NULL object");
1749   obj->follow_header(cm);
1750   InstanceKlass_OOP_MAP_ITERATE( \
1751     obj, \
1752     PSParallelCompact::mark_and_push(cm, p), \
1753     assert_is_in)
1754 }
1755 #endif // SERIALGC
1756 
// closure's do_header() method dictates whether the given closure should be
1758 // applied to the klass ptr in the object header.
1759 
1760 #define InstanceKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
1761                                                                              \
1762 int instanceKlass::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
1763   SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
1764   /* header */                                                          \
1765   if (closure->do_header()) {                                           \
1766     obj->oop_iterate_header(closure);                                   \
1767   }                                                                     \
1768   InstanceKlass_OOP_MAP_ITERATE(                                        \
1769     obj,                                                                \
1770     SpecializationStats::                                               \
1771       record_do_oop_call##nv_suffix(SpecializationStats::ik);           \
1772     (closure)->do_oop##nv_suffix(p),                                    \
1773     assert_is_in_closed_subset)                                         \
1774   return size_helper();                                                 \
1775 }
1776 
1777 #ifndef SERIALGC
1778 #define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
1779                                                                                 \
1780 int instanceKlass::oop_oop_iterate_backwards##nv_suffix(oop obj,                \
1781                                               OopClosureType* closure) {        \
1782   SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik); \
1783   /* header */                                                                  \
1784   if (closure->do_header()) {                                                   \
1785     obj->oop_iterate_header(closure);                                           \
1786   }                                                                             \
1787   /* instance variables */                                                      \
1788   InstanceKlass_OOP_MAP_REVERSE_ITERATE(                                        \
1789     obj,                                                                        \
1790     SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::ik);\
1791     (closure)->do_oop##nv_suffix(p),                                            \
1792     assert_is_in_closed_subset)                                                 \
1793    return size_helper();                                                        \
1794 }
1795 #endif // !SERIALGC
1796 
1797 #define InstanceKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \
1798                                                                         \
1799 int instanceKlass::oop_oop_iterate##nv_suffix##_m(oop obj,              \
1800                                                   OopClosureType* closure, \
1801                                                   MemRegion mr) {          \
1802   SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
1803   if (closure->do_header()) {                                            \
1804     obj->oop_iterate_header(closure, mr);                                \
1805   }                                                                      \
1806   InstanceKlass_BOUNDED_OOP_MAP_ITERATE(                                 \
1807     obj, mr.start(), mr.end(),                                           \
1808     (closure)->do_oop##nv_suffix(p),                                     \
1809     assert_is_in_closed_subset)                                          \
1810   return size_helper();                                                  \
1811 }
1812 
1813 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN)
1814 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN)
1815 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
1816 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
1817 #ifndef SERIALGC
1818 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
1819 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
1820 #endif // !SERIALGC
1821 
1822 int instanceKlass::oop_adjust_pointers(oop obj) {
1823   int size = size_helper();
1824   InstanceKlass_OOP_MAP_ITERATE( \
1825     obj, \
1826     MarkSweep::adjust_pointer(p), \
1827     assert_is_in)
1828   obj->adjust_header();
1829   return size;
1830 }
1831 
1832 #ifndef SERIALGC
1833 void instanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
1834   InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
1835     obj, \
1836     if (PSScavenge::should_scavenge(p)) { \
1837       pm->claim_or_forward_depth(p); \
1838     }, \
1839     assert_nothing )
1840 }
1841 
1842 int instanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
1843   InstanceKlass_OOP_MAP_ITERATE( \
1844     obj, \
1845     PSParallelCompact::adjust_pointer(p), \
1846     assert_nothing)
1847   return size_helper();
1848 }
1849 
1850 #endif // SERIALGC
1851 
1852 // This klass is alive but the implementor link is not followed/updated.
1853 // Subklass and sibling links are handled by Klass::follow_weak_klass_links
1854 
1855 void instanceKlass::follow_weak_klass_links(
1856   BoolObjectClosure* is_alive, OopClosure* keep_alive) {
1857   assert(is_alive->do_object_b(as_klassOop()), "this oop should be live");
1858 
1859   if (is_interface()) {
1860     if (ClassUnloading) {
1861       klassOop impl = implementor();
1862       if (impl != NULL) {
1863         if (!is_alive->do_object_b(impl)) {
          // the implementor is dead; remove it
1865           *adr_implementor() = NULL;
1866         }
1867       }
1868     } else {
1869       assert(adr_implementor() != NULL, "just checking");
1870       keep_alive->do_oop(adr_implementor());
1871     }
1872   }
1873 
1874   Klass::follow_weak_klass_links(is_alive, keep_alive);
1875 }
1876 
1877 void instanceKlass::remove_unshareable_info() {
1878   Klass::remove_unshareable_info();
1879   init_implementor();
1880 }
1881 
1882 static void clear_all_breakpoints(methodOop m) {
1883   m->clear_all_breakpoints();
1884 }
1885 
1886 void instanceKlass::release_C_heap_structures() {
1887   // Deallocate oop map cache
1888   if (_oop_map_cache != NULL) {
1889     delete _oop_map_cache;
1890     _oop_map_cache = NULL;
1891   }
1892 
1893   // Deallocate JNI identifiers for jfieldIDs
1894   JNIid::deallocate(jni_ids());
1895   set_jni_ids(NULL);
1896 
1897   jmethodID* jmeths = methods_jmethod_ids_acquire();
1898   if (jmeths != (jmethodID*)NULL) {
1899     release_set_methods_jmethod_ids(NULL);
1900     FreeHeap(jmeths);
1901   }
1902 
1903   int* indices = methods_cached_itable_indices_acquire();
1904   if (indices != (int*)NULL) {
1905     release_set_methods_cached_itable_indices(NULL);
1906     FreeHeap(indices);
1907   }
1908 
1909   // release dependencies
1910   nmethodBucket* b = _dependencies;
1911   _dependencies = NULL;
1912   while (b != NULL) {
1913     nmethodBucket* next = b->next();
1914     delete b;
1915     b = next;
1916   }
1917 
1918   // Deallocate breakpoint records
1919   if (breakpoints() != 0x0) {
1920     methods_do(clear_all_breakpoints);
1921     assert(breakpoints() == 0x0, "should have cleared breakpoints");
1922   }
1923 
1924   // deallocate information about previous versions
1925   if (_previous_versions != NULL) {
1926     for (int i = _previous_versions->length() - 1; i >= 0; i--) {
1927       PreviousVersionNode * pv_node = _previous_versions->at(i);
1928       delete pv_node;
1929     }
1930     delete _previous_versions;
1931     _previous_versions = NULL;
1932   }
1933 
1934   // deallocate the cached class file
1935   if (_cached_class_file_bytes != NULL) {
1936     os::free(_cached_class_file_bytes, mtClass);
1937     _cached_class_file_bytes = NULL;
1938     _cached_class_file_len = 0;
1939   }
1940 
1941   // Decrement symbol reference counts associated with the unloaded class.
1942   if (_name != NULL) _name->decrement_refcount();
1943   // unreference array name derived from this class name (arrays of an unloaded
1944   // class can't be referenced anymore).
1945   if (_array_name != NULL)  _array_name->decrement_refcount();
1946   if (_source_file_name != NULL) _source_file_name->decrement_refcount();
1947   if (_source_debug_extension != NULL) _source_debug_extension->decrement_refcount();
1948   // walk constant pool and decrement symbol reference counts
1949   _constants->unreference_symbols();
1950 }
1951 
1952 void instanceKlass::set_source_file_name(Symbol* n) {
1953   _source_file_name = n;
1954   if (_source_file_name != NULL) _source_file_name->increment_refcount();
1955 }
1956 
1957 void instanceKlass::set_source_debug_extension(Symbol* n) {
1958   _source_debug_extension = n;
1959   if (_source_debug_extension != NULL) _source_debug_extension->increment_refcount();
1960 }
1961 
1962 address instanceKlass::static_field_addr(int offset) {
1963   return (address)(offset + instanceMirrorKlass::offset_of_static_fields() + (intptr_t)java_mirror());
1964 }
1965 
1966 
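// Build the JVM type-descriptor form of this class's name in a
// resource-allocated buffer; for example, a class named
// "java/lang/String" yields "Ljava/lang/String;".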
1967 const char* instanceKlass::signature_name() const {
1968   const char* src = (const char*) (name()->as_C_string());
1969   const int src_length = (int)strlen(src);
1970   char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
1971   int src_index = 0;
1972   int dest_index = 0;
1973   dest[dest_index++] = 'L';
1974   while (src_index < src_length) {
1975     dest[dest_index++] = src[src_index++];
1976   }
1977   dest[dest_index++] = ';';
1978   dest[dest_index] = '\0';
1979   return dest;
1980 }
1981 
// different versions of is_same_class_package
1983 bool instanceKlass::is_same_class_package(klassOop class2) {
1984   klassOop class1 = as_klassOop();
1985   oop classloader1 = instanceKlass::cast(class1)->class_loader();
1986   Symbol* classname1 = Klass::cast(class1)->name();
1987 
1988   if (Klass::cast(class2)->oop_is_objArray()) {
1989     class2 = objArrayKlass::cast(class2)->bottom_klass();
1990   }
1991   oop classloader2;
1992   if (Klass::cast(class2)->oop_is_instance()) {
1993     classloader2 = instanceKlass::cast(class2)->class_loader();
1994   } else {
1995     assert(Klass::cast(class2)->oop_is_typeArray(), "should be type array");
1996     classloader2 = NULL;
1997   }
1998   Symbol* classname2 = Klass::cast(class2)->name();
1999 
2000   return instanceKlass::is_same_class_package(classloader1, classname1,
2001                                               classloader2, classname2);
2002 }
2003 
2004 bool instanceKlass::is_same_class_package(oop classloader2, Symbol* classname2) {
2005   klassOop class1 = as_klassOop();
2006   oop classloader1 = instanceKlass::cast(class1)->class_loader();
2007   Symbol* classname1 = Klass::cast(class1)->name();
2008 
2009   return instanceKlass::is_same_class_package(classloader1, classname1,
2010                                               classloader2, classname2);
2011 }
2012 
2013 // return true if two classes are in the same package, classloader
2014 // and classname information is enough to determine a class's package
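// For example (illustrative): "java/lang/String" and "java/lang/Integer"
// loaded by the same loader share the package "java/lang", while
// "java/lang/String" and "java/util/List" do not; leading '[' characters
// of array names are skipped before the package parts are compared.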
2015 bool instanceKlass::is_same_class_package(oop class_loader1, Symbol* class_name1,
2016                                           oop class_loader2, Symbol* class_name2) {
2017   if (class_loader1 != class_loader2) {
2018     return false;
2019   } else if (class_name1 == class_name2) {
2020     return true;                // skip painful bytewise comparison
2021   } else {
2022     ResourceMark rm;
2023 
2024     // The Symbol*'s are in UTF8 encoding. Since we only need to check explicitly
2025     // for ASCII characters ('/', 'L', '['), we can keep them in UTF8 encoding.
2026     // Otherwise, we just compare jbyte values between the strings.
2027     const jbyte *name1 = class_name1->base();
2028     const jbyte *name2 = class_name2->base();
2029 
2030     const jbyte *last_slash1 = UTF8::strrchr(name1, class_name1->utf8_length(), '/');
2031     const jbyte *last_slash2 = UTF8::strrchr(name2, class_name2->utf8_length(), '/');
2032 
2033     if ((last_slash1 == NULL) || (last_slash2 == NULL)) {
2034       // One of the two doesn't have a package.  Only return true
2035       // if the other one also doesn't have a package.
2036       return last_slash1 == last_slash2;
2037     } else {
2038       // Skip over '['s
2039       if (*name1 == '[') {
2040         do {
2041           name1++;
2042         } while (*name1 == '[');
2043         if (*name1 != 'L') {
2044           // Something is terribly wrong.  Shouldn't be here.
2045           return false;
2046         }
2047       }
2048       if (*name2 == '[') {
2049         do {
2050           name2++;
2051         } while (*name2 == '[');
2052         if (*name2 != 'L') {
2053           // Something is terribly wrong.  Shouldn't be here.
2054           return false;
2055         }
2056       }
2057 
2058       // Check that package part is identical
2059       int length1 = last_slash1 - name1;
2060       int length2 = last_slash2 - name2;
2061 
2062       return UTF8::equal(name1, length1, name2, length2);
2063     }
2064   }
2065 }
2066 
// Returns true iff super_method can be overridden by a method in targetclassname
// See JLS 3rd edition 8.4.6.1
// Assumes name-signature match
// "this" is the instanceKlass of super_method, which must exist
// Note that the instanceKlass of the method in targetclassname may not have been created yet
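// For example (illustrative): a public or protected super_method is always
// overridable; a private one never is; a package-private one is overridable
// only from the same runtime package as decided by is_same_class_package.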
2072 bool instanceKlass::is_override(methodHandle super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) {
   // Private methods cannot be overridden
2074    if (super_method->is_private()) {
2075      return false;
2076    }
2077    // If super method is accessible, then override
2078    if ((super_method->is_protected()) ||
2079        (super_method->is_public())) {
2080      return true;
2081    }
2082    // Package-private methods are not inherited outside of package
2083    assert(super_method->is_package_private(), "must be package private");
2084    return(is_same_class_package(targetclassloader(), targetclassname));
2085 }
2086 
/* defined for now in jvm.cpp, for historical reasons:
2088 klassOop instanceKlass::compute_enclosing_class_impl(instanceKlassHandle self,
2089                                                      Symbol*& simple_name_result, TRAPS) {
2090   ...
2091 }
2092 */
2093 
// tell whether two classes are members of the same top-level class, i.e.
// have the same enclosing class (at package level)
2095 bool instanceKlass::is_same_package_member_impl(instanceKlassHandle class1,
2096                                                 klassOop class2_oop, TRAPS) {
2097   if (class2_oop == class1->as_klassOop())          return true;
2098   if (!Klass::cast(class2_oop)->oop_is_instance())  return false;
2099   instanceKlassHandle class2(THREAD, class2_oop);
2100 
2101   // must be in same package before we try anything else
2102   if (!class1->is_same_class_package(class2->class_loader(), class2->name()))
2103     return false;
2104 
2105   // As long as there is an outer1.getEnclosingClass,
2106   // shift the search outward.
2107   instanceKlassHandle outer1 = class1;
2108   for (;;) {
2109     // As we walk along, look for equalities between outer1 and class2.
2110     // Eventually, the walks will terminate as outer1 stops
2111     // at the top-level class around the original class.
2112     bool ignore_inner_is_member;
2113     klassOop next = outer1->compute_enclosing_class(&ignore_inner_is_member,
2114                                                     CHECK_false);
2115     if (next == NULL)  break;
2116     if (next == class2())  return true;
2117     outer1 = instanceKlassHandle(THREAD, next);
2118   }
2119 
2120   // Now do the same for class2.
2121   instanceKlassHandle outer2 = class2;
2122   for (;;) {
2123     bool ignore_inner_is_member;
2124     klassOop next = outer2->compute_enclosing_class(&ignore_inner_is_member,
2125                                                     CHECK_false);
2126     if (next == NULL)  break;
2127     // Might as well check the new outer against all available values.
2128     if (next == class1())  return true;
2129     if (next == outer1())  return true;
2130     outer2 = instanceKlassHandle(THREAD, next);
2131   }
2132 
2133   // If by this point we have not found an equality between the
2134   // two classes, we know they are in separate package members.
2135   return false;
2136 }
2137 
2138 
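// Compute the modifier flags to report for this class: if the class
// appears as a member class in its own InnerClasses attribute, the
// inner_access_flags from that entry win; ACC_SUPER is stripped and the
// result is masked down to the flags writable in class files.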
2139 jint instanceKlass::compute_modifier_flags(TRAPS) const {
2140   klassOop k = as_klassOop();
2141   jint access = access_flags().as_int();
2142 
2143   // But check if it happens to be member class.
2144   instanceKlassHandle ik(THREAD, k);
2145   InnerClassesIterator iter(ik);
2146   for (; !iter.done(); iter.next()) {
2147     int ioff = iter.inner_class_info_index();
2148     // Inner class attribute can be zero, skip it.
2149     // Strange but true:  JVM spec. allows null inner class refs.
2150     if (ioff == 0) continue;
2151 
    // only look at classes that are already loaded
    // since we are looking for the flags of this class itself.
2154     Symbol* inner_name = ik->constants()->klass_name_at(ioff);
2155     if ((ik->name() == inner_name)) {
2156       // This is really a member class.
2157       access = iter.inner_access_flags();
2158       break;
2159     }
2160   }
2161   // Remember to strip ACC_SUPER bit
2162   return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
2163 }
2164 
2165 jint instanceKlass::jvmti_class_status() const {
2166   jint result = 0;
2167 
2168   if (is_linked()) {
2169     result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
2170   }
2171 
2172   if (is_initialized()) {
2173     assert(is_linked(), "Class status is not consistent");
2174     result |= JVMTI_CLASS_STATUS_INITIALIZED;
2175   }
2176   if (is_in_error_state()) {
2177     result |= JVMTI_CLASS_STATUS_ERROR;
2178   }
2179   return result;
2180 }
2181 
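// Itable layout, as assumed from the accessors used below (a sketch, not
// a normative description): the itable begins with a row of
// itableOffsetEntry records, one per implemented interface, each pairing
// an interface klass with the offset of that interface's block of
// itableMethodEntry slots:
//
//   [ (iface_0, off_0) (iface_1, off_1) ... ] [ methods of iface_0 ] ...
//
// The loop below scans the offset row for `holder` and then indexes into
// its method block.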
2182 methodOop instanceKlass::method_at_itable(klassOop holder, int index, TRAPS) {
2183   itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2184   int method_table_offset_in_words = ioe->offset()/wordSize;
2185   int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
2186                        / itableOffsetEntry::size();
2187 
  for (int cnt = 0; ; cnt++, ioe++) {
2189     // If the interface isn't implemented by the receiver class,
2190     // the VM should throw IncompatibleClassChangeError.
2191     if (cnt >= nof_interfaces) {
2192       THROW_0(vmSymbols::java_lang_IncompatibleClassChangeError());
2193     }
2194 
2195     klassOop ik = ioe->interface_klass();
2196     if (ik == holder) break;
2197   }
2198 
2199   itableMethodEntry* ime = ioe->first_method_entry(as_klassOop());
2200   methodOop m = ime[index].method();
2201   if (m == NULL) {
2202     THROW_0(vmSymbols::java_lang_AbstractMethodError());
2203   }
2204   return m;
2205 }
2206 
2207 // On-stack replacement stuff
2208 void instanceKlass::add_osr_nmethod(nmethod* n) {
2209   // only one compilation can be active
2210   NEEDS_CLEANUP
2211   // This is a short non-blocking critical region, so the no safepoint check is ok.
2212   OsrList_lock->lock_without_safepoint_check();
2213   assert(n->is_osr_method(), "wrong kind of nmethod");
2214   n->set_osr_link(osr_nmethods_head());
2215   set_osr_nmethods_head(n);
2216   // Raise the highest osr level if necessary
2217   if (TieredCompilation) {
2218     methodOop m = n->method();
2219     m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level()));
2220   }
2221   // Remember to unlock again
2222   OsrList_lock->unlock();
2223 
2224   // Get rid of the osr methods for the same bci that have lower levels.
2225   if (TieredCompilation) {
2226     for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
2227       nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
2228       if (inv != NULL && inv->is_in_use()) {
2229         inv->make_not_entrant();
2230       }
2231     }
2232   }
2233 }
2234 
2235 
2236 void instanceKlass::remove_osr_nmethod(nmethod* n) {
2237   // This is a short non-blocking critical region, so the no safepoint check is ok.
2238   OsrList_lock->lock_without_safepoint_check();
2239   assert(n->is_osr_method(), "wrong kind of nmethod");
2240   nmethod* last = NULL;
2241   nmethod* cur  = osr_nmethods_head();
2242   int max_level = CompLevel_none;  // Find the max comp level excluding n
2243   methodOop m = n->method();
2244   // Search for match
  while (cur != NULL && cur != n) {
2246     if (TieredCompilation) {
2247       // Find max level before n
2248       max_level = MAX2(max_level, cur->comp_level());
2249     }
2250     last = cur;
2251     cur = cur->osr_link();
2252   }
2253   nmethod* next = NULL;
2254   if (cur == n) {
2255     next = cur->osr_link();
2256     if (last == NULL) {
2257       // Remove first element
2258       set_osr_nmethods_head(next);
2259     } else {
2260       last->set_osr_link(next);
2261     }
2262   }
2263   n->set_osr_link(NULL);
2264   if (TieredCompilation) {
2265     cur = next;
2266     while (cur != NULL) {
2267       // Find max level after n
2268       max_level = MAX2(max_level, cur->comp_level());
2269       cur = cur->osr_link();
2270     }
2271     m->set_highest_osr_comp_level(max_level);
2272   }
2273   // Remember to unlock again
2274   OsrList_lock->unlock();
2275 }
2276 
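// Find an OSR nmethod for method m at bci; a bci of InvocationEntryBci
// matches any entry bci. With match_level set, only an exact comp_level
// match is returned; otherwise the best (highest-level) candidate at or
// above comp_level is returned, or NULL if none qualifies.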
2277 nmethod* instanceKlass::lookup_osr_nmethod(const methodOop m, int bci, int comp_level, bool match_level) const {
2278   // This is a short non-blocking critical region, so the no safepoint check is ok.
2279   OsrList_lock->lock_without_safepoint_check();
2280   nmethod* osr = osr_nmethods_head();
2281   nmethod* best = NULL;
2282   while (osr != NULL) {
2283     assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    // There can be a time when a c1 osr method exists but we are waiting
    // for a c2 version. When c2 completes its osr nmethod we will trash
    // the c1 version and only be able to find the c2 version. However,
    // while we overflow in the c1 code at back branches, we don't want to
    // try to switch to the same code that we are already running.
2289 
2290     if (osr->method() == m &&
2291         (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
2292       if (match_level) {
2293         if (osr->comp_level() == comp_level) {
2294           // Found a match - return it.
2295           OsrList_lock->unlock();
2296           return osr;
2297         }
2298       } else {
2299         if (best == NULL || (osr->comp_level() > best->comp_level())) {
2300           if (osr->comp_level() == CompLevel_highest_tier) {
2301             // Found the best possible - return it.
2302             OsrList_lock->unlock();
2303             return osr;
2304           }
2305           best = osr;
2306         }
2307       }
2308     }
2309     osr = osr->osr_link();
2310   }
2311   OsrList_lock->unlock();
2312   if (best != NULL && best->comp_level() >= comp_level && match_level == false) {
2313     return best;
2314   }
2315   return NULL;
2316 }
2317 
2318 // -----------------------------------------------------------------------------------------------------
2319 #ifndef PRODUCT
2320 
2321 // Printing
2322 
2323 #define BULLET  " - "
2324 
2325 void FieldPrinter::do_field(fieldDescriptor* fd) {
  _st->print(BULLET);
  if (_obj == NULL) {
    fd->print_on(_st);
    _st->cr();
  } else {
    fd->print_on_for(_st, _obj);
    _st->cr();
  }
2334 }
2335 
2336 
2337 void instanceKlass::oop_print_on(oop obj, outputStream* st) {
2338   Klass::oop_print_on(obj, st);
2339 
2340   if (as_klassOop() == SystemDictionary::String_klass()) {
2341     typeArrayOop value  = java_lang_String::value(obj);
2342     juint        offset = java_lang_String::offset(obj);
2343     juint        length = java_lang_String::length(obj);
2344     if (value != NULL &&
2345         value->is_typeArray() &&
2346         offset          <= (juint) value->length() &&
2347         offset + length <= (juint) value->length()) {
2348       st->print(BULLET"string: ");
2349       Handle h_obj(obj);
2350       java_lang_String::print(h_obj, st);
2351       st->cr();
2352       if (!WizardMode)  return;  // that is enough
2353     }
2354   }
2355 
2356   st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
2357   FieldPrinter print_field(st, obj);
2358   do_nonstatic_fields(&print_field);
2359 
2360   if (as_klassOop() == SystemDictionary::Class_klass()) {
2361     st->print(BULLET"signature: ");
2362     java_lang_Class::print_signature(obj, st);
2363     st->cr();
2364     klassOop mirrored_klass = java_lang_Class::as_klassOop(obj);
2365     st->print(BULLET"fake entry for mirror: ");
2366     mirrored_klass->print_value_on(st);
2367     st->cr();
2368     st->print(BULLET"fake entry resolved_constructor: ");
2369     methodOop ctor = java_lang_Class::resolved_constructor(obj);
2370     ctor->print_value_on(st);
2371     klassOop array_klass = java_lang_Class::array_klass(obj);
2372     st->cr();
2373     st->print(BULLET"fake entry for array: ");
2374     array_klass->print_value_on(st);
2375     st->cr();
2376     st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
2377     st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
2378     klassOop real_klass = java_lang_Class::as_klassOop(obj);
2379     if (real_klass != NULL && real_klass->klass_part()->oop_is_instance()) {
2380       instanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
2381     }
2382   } else if (as_klassOop() == SystemDictionary::MethodType_klass()) {
2383     st->print(BULLET"signature: ");
2384     java_lang_invoke_MethodType::print_signature(obj, st);
2385     st->cr();
2386   }
2387 }
2388 
2389 #endif //PRODUCT
2390 
2391 void instanceKlass::oop_print_value_on(oop obj, outputStream* st) {
2392   st->print("a ");
2393   name()->print_value_on(st);
2394   obj->print_address_on(st);
2395   if (as_klassOop() == SystemDictionary::String_klass()
2396       && java_lang_String::value(obj) != NULL) {
2397     ResourceMark rm;
2398     int len = java_lang_String::length(obj);
2399     int plen = (len < 24 ? len : 12);
2400     char* str = java_lang_String::as_utf8_string(obj, 0, plen);
2401     st->print(" = \"%s\"", str);
2402     if (len > plen)
2403       st->print("...[%d]", len);
2404   } else if (as_klassOop() == SystemDictionary::Class_klass()) {
2405     klassOop k = java_lang_Class::as_klassOop(obj);
2406     st->print(" = ");
2407     if (k != NULL) {
2408       k->print_value_on(st);
2409     } else {
2410       const char* tname = type2name(java_lang_Class::primitive_type(obj));
2411       st->print("%s", tname ? tname : "type?");
2412     }
2413   } else if (as_klassOop() == SystemDictionary::MethodType_klass()) {
2414     st->print(" = ");
2415     java_lang_invoke_MethodType::print_signature(obj, st);
2416   } else if (java_lang_boxing_object::is_instance(obj)) {
2417     st->print(" = ");
2418     java_lang_boxing_object::print(obj, st);
2419   }
2420 }
2421 
2422 const char* instanceKlass::internal_name() const {
2423   return external_name();
2424 }
2425 
2426 // Verification
2427 
2428 class VerifyFieldClosure: public OopClosure {
2429  protected:
2430   template <class T> void do_oop_work(T* p) {
2431     guarantee(Universe::heap()->is_in_closed_subset(p), "should be in heap");
2432     oop obj = oopDesc::load_decode_heap_oop(p);
2433     if (!obj->is_oop_or_null()) {
2434       tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p, (address)obj);
2435       Universe::print();
2436       guarantee(false, "boom");
2437     }
2438   }
2439  public:
2440   virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
2441   virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
2442 };
2443 
2444 void instanceKlass::oop_verify_on(oop obj, outputStream* st) {
2445   Klass::oop_verify_on(obj, st);
2446   VerifyFieldClosure blk;
2447   oop_oop_iterate(obj, &blk);
2448 }
2449 
2450 // JNIid class for jfieldIDs only
2454 JNIid::JNIid(klassOop holder, int offset, JNIid* next) {
2455   _holder = holder;
2456   _offset = offset;
2457   _next = next;
2458   debug_only(_is_static_field_id = false;)
2459 }
2460 
2461 
2462 JNIid* JNIid::find(int offset) {
2463   JNIid* current = this;
2464   while (current != NULL) {
2465     if (current->offset() == offset) return current;
2466     current = current->next();
2467   }
2468   return NULL;
2469 }
2470 
2471 void JNIid::oops_do(OopClosure* f) {
2472   for (JNIid* cur = this; cur != NULL; cur = cur->next()) {
2473     f->do_oop(cur->holder_addr());
2474   }
2475 }
2476 
2477 void JNIid::deallocate(JNIid* current) {
2478   while (current != NULL) {
2479     JNIid* next = current->next();
2480     delete current;
2481     current = next;
2482   }
2483 }
2484 
2485 
2486 void JNIid::verify(klassOop holder) {
2487   int first_field_offset  = instanceMirrorKlass::offset_of_static_fields();
2488   int end_field_offset;
2489   end_field_offset = first_field_offset + (instanceKlass::cast(holder)->static_field_size() * wordSize);
2490 
2491   JNIid* current = this;
2492   while (current != NULL) {
2493     guarantee(current->holder() == holder, "Invalid klass in JNIid");
2494 #ifdef ASSERT
2495     int o = current->offset();
2496     if (current->is_static_field_id()) {
2497       guarantee(o >= first_field_offset  && o < end_field_offset,  "Invalid static field offset in JNIid");
2498     }
2499 #endif
2500     current = current->next();
2501   }
2502 }
2503 
2504 
2505 #ifdef ASSERT
2506 void instanceKlass::set_init_state(ClassState state) {
2507   bool good_state = as_klassOop()->is_shared() ? (_init_state <= state)
2508                                                : (_init_state < state);
2509   assert(good_state || state == allocated, "illegal state transition");
2510   _init_state = (u1)state;
2511 }
2512 #endif
2513 
2514 
2515 // RedefineClasses() support for previous versions:
2516 
2517 // Add an information node that contains weak references to the
2518 // interesting parts of the previous version of the_class.
2519 // This is also where we clean out any unused weak references.
2520 // Note that while we delete nodes from the _previous_versions
2521 // array, we never delete the array itself until the klass is
2522 // unloaded. The has_been_redefined() query depends on that fact.
2523 //
2524 void instanceKlass::add_previous_version(instanceKlassHandle ikh,
2525        BitMap* emcp_methods, int emcp_method_count) {
2526   assert(Thread::current()->is_VM_thread(),
2527          "only VMThread can add previous versions");
2528 
2529   if (_previous_versions == NULL) {
2530     // This is the first previous version so make some space.
2531     // Start with 2 elements under the assumption that the class
2532     // won't be redefined much.
2533     _previous_versions =  new (ResourceObj::C_HEAP, mtClass)
2534                             GrowableArray<PreviousVersionNode *>(2, true);
2535   }
2536 
2537   // RC_TRACE macro has an embedded ResourceMark
2538   RC_TRACE(0x00000100, ("adding previous version ref for %s @%d, EMCP_cnt=%d",
2539     ikh->external_name(), _previous_versions->length(), emcp_method_count));
2540   constantPoolHandle cp_h(ikh->constants());
2541   jobject cp_ref;
2542   if (cp_h->is_shared()) {
2543     // a shared ConstantPool requires a regular reference; a weak
2544     // reference would be collectible
2545     cp_ref = JNIHandles::make_global(cp_h);
2546   } else {
2547     cp_ref = JNIHandles::make_weak_global(cp_h);
2548   }
2549   PreviousVersionNode * pv_node = NULL;
2550   objArrayOop old_methods = ikh->methods();
2551 
2552   if (emcp_method_count == 0) {
2553     // non-shared ConstantPool gets a weak reference
2554     pv_node = new PreviousVersionNode(cp_ref, !cp_h->is_shared(), NULL);
2555     RC_TRACE(0x00000400,
2556       ("add: all methods are obsolete; flushing any EMCP weak refs"));
2557   } else {
2558     int local_count = 0;
2559     GrowableArray<jweak>* method_refs = new (ResourceObj::C_HEAP, mtClass)
2560       GrowableArray<jweak>(emcp_method_count, true);
2561     for (int i = 0; i < old_methods->length(); i++) {
2562       if (emcp_methods->at(i)) {
2563         // this old method is EMCP so save a weak ref
2564         methodOop old_method = (methodOop) old_methods->obj_at(i);
2565         methodHandle old_method_h(old_method);
2566         jweak method_ref = JNIHandles::make_weak_global(old_method_h);
2567         method_refs->append(method_ref);
2568         if (++local_count >= emcp_method_count) {
2569           // no more EMCP methods so bail out now
2570           break;
2571         }
2572       }
2573     }
2574     // non-shared ConstantPool gets a weak reference
2575     pv_node = new PreviousVersionNode(cp_ref, !cp_h->is_shared(), method_refs);
2576   }
2577 
2578   _previous_versions->append(pv_node);
2579 
2580   // Using weak references allows the interesting parts of previous
2581   // classes to be GC'ed when they are no longer needed. Since the
2582   // caller is the VMThread and we are at a safepoint, this is a good
2583   // time to clear out unused weak references.
2584 
2585   RC_TRACE(0x00000400, ("add: previous version length=%d",
2586     _previous_versions->length()));
2587 
2588   // skip the last entry since we just added it
2589   for (int i = _previous_versions->length() - 2; i >= 0; i--) {
    // check the previous versions array for GC'ed weak refs
2591     pv_node = _previous_versions->at(i);
2592     cp_ref = pv_node->prev_constant_pool();
2593     assert(cp_ref != NULL, "cp ref was unexpectedly cleared");
2594     if (cp_ref == NULL) {
2595       delete pv_node;
2596       _previous_versions->remove_at(i);
2597       // Since we are traversing the array backwards, we don't have to
2598       // do anything special with the index.
2599       continue;  // robustness
2600     }
2601 
2602     constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
2603     if (cp == NULL) {
2604       // this entry has been GC'ed so remove it
2605       delete pv_node;
2606       _previous_versions->remove_at(i);
2607       // Since we are traversing the array backwards, we don't have to
2608       // do anything special with the index.
2609       continue;
2610     } else {
2611       RC_TRACE(0x00000400, ("add: previous version @%d is alive", i));
2612     }
2613 
2614     GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
2615     if (method_refs != NULL) {
2616       RC_TRACE(0x00000400, ("add: previous methods length=%d",
2617         method_refs->length()));
2618       for (int j = method_refs->length() - 1; j >= 0; j--) {
2619         jweak method_ref = method_refs->at(j);
2620         assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
2621         if (method_ref == NULL) {
2622           method_refs->remove_at(j);
2623           // Since we are traversing the array backwards, we don't have to
2624           // do anything special with the index.
2625           continue;  // robustness
2626         }
2627 
2628         methodOop method = (methodOop)JNIHandles::resolve(method_ref);
2629         if (method == NULL || emcp_method_count == 0) {
2630           // This method entry has been GC'ed or the current
2631           // RedefineClasses() call has made all methods obsolete
2632           // so remove it.
2633           JNIHandles::destroy_weak_global(method_ref);
2634           method_refs->remove_at(j);
2635         } else {
2636           // RC_TRACE macro has an embedded ResourceMark
2637           RC_TRACE(0x00000400,
2638             ("add: %s(%s): previous method @%d in version @%d is alive",
2639             method->name()->as_C_string(), method->signature()->as_C_string(),
2640             j, i));
2641         }
2642       }
2643     }
2644   }
2645 
2646   int obsolete_method_count = old_methods->length() - emcp_method_count;
2647 
2648   if (emcp_method_count != 0 && obsolete_method_count != 0 &&
2649       _previous_versions->length() > 1) {
2650     // We have a mix of obsolete and EMCP methods. If there is more
2651     // than the previous version that we just added, then we have to
2652     // clear out any matching EMCP method entries the hard way.
2653     int local_count = 0;
2654     for (int i = 0; i < old_methods->length(); i++) {
2655       if (!emcp_methods->at(i)) {
2656         // only obsolete methods are interesting
2657         methodOop old_method = (methodOop) old_methods->obj_at(i);
2658         Symbol* m_name = old_method->name();
2659         Symbol* m_signature = old_method->signature();
2660 
2661         // skip the last entry since we just added it
2662         for (int j = _previous_versions->length() - 2; j >= 0; j--) {
          // check the previous versions array for GC'ed weak refs
2664           pv_node = _previous_versions->at(j);
2665           cp_ref = pv_node->prev_constant_pool();
2666           assert(cp_ref != NULL, "cp ref was unexpectedly cleared");
2667           if (cp_ref == NULL) {
2668             delete pv_node;
2669             _previous_versions->remove_at(j);
2670             // Since we are traversing the array backwards, we don't have to
2671             // do anything special with the index.
2672             continue;  // robustness
2673           }
2674 
2675           constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
2676           if (cp == NULL) {
2677             // this entry has been GC'ed so remove it
2678             delete pv_node;
2679             _previous_versions->remove_at(j);
2680             // Since we are traversing the array backwards, we don't have to
2681             // do anything special with the index.
2682             continue;
2683           }
2684 
2685           GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
2686           if (method_refs == NULL) {
2687             // We have run into a PreviousVersion generation where
2688             // all methods were made obsolete during that generation's
2689             // RedefineClasses() operation. At the time of that
2690             // operation, all EMCP methods were flushed so we don't
2691             // have to go back any further.
2692             //
2693             // A NULL method_refs is different than an empty method_refs.
2694             // We cannot infer any optimizations about older generations
2695             // from an empty method_refs for the current generation.
2696             break;
2697           }
2698 
2699           for (int k = method_refs->length() - 1; k >= 0; k--) {
2700             jweak method_ref = method_refs->at(k);
2701             assert(method_ref != NULL,
2702               "weak method ref was unexpectedly cleared");
2703             if (method_ref == NULL) {
2704               method_refs->remove_at(k);
2705               // Since we are traversing the array backwards, we don't
2706               // have to do anything special with the index.
2707               continue;  // robustness
2708             }
2709 
2710             methodOop method = (methodOop)JNIHandles::resolve(method_ref);
2711             if (method == NULL) {
2712               // this method entry has been GC'ed so skip it
2713               JNIHandles::destroy_weak_global(method_ref);
2714               method_refs->remove_at(k);
2715               continue;
2716             }
2717 
2718             if (method->name() == m_name &&
2719                 method->signature() == m_signature) {
2720               // The current RedefineClasses() call has made all EMCP
2721               // versions of this method obsolete so mark it as obsolete
2722               // and remove the weak ref.
2723               RC_TRACE(0x00000400,
2724                 ("add: %s(%s): flush obsolete method @%d in version @%d",
2725                 m_name->as_C_string(), m_signature->as_C_string(), k, j));
2726 
2727               method->set_is_obsolete();
2728               JNIHandles::destroy_weak_global(method_ref);
2729               method_refs->remove_at(k);
2730               break;
2731             }
2732           }
2733 
2734           // The previous loop may not find a matching EMCP method, but
2735           // that doesn't mean that we can optimize and not go any
2736           // further back in the PreviousVersion generations. The EMCP
2737           // method for this generation could have already been GC'ed,
2738           // but there still may be an older EMCP method that has not
2739           // been GC'ed.
2740         }
2741 
2742         if (++local_count >= obsolete_method_count) {
2743           // no more obsolete methods so bail out now
2744           break;
2745         }
2746       }
2747     }
2748   }
2749 } // end add_previous_version()


// Determine if instanceKlass has a previous version.
bool instanceKlass::has_previous_version() const {
  if (_previous_versions == NULL) {
    // no previous versions array so the answer is easy
    return false;
  }

  for (int i = _previous_versions->length() - 1; i >= 0; i--) {
    // Check the previous versions array for an info node that hasn't
    // been GC'ed
    PreviousVersionNode * pv_node = _previous_versions->at(i);

    jobject cp_ref = pv_node->prev_constant_pool();
    assert(cp_ref != NULL, "cp reference was unexpectedly cleared");
    if (cp_ref == NULL) {
      continue;  // robustness
    }

    constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
    if (cp != NULL) {
      // we have at least one previous version
      return true;
    }

    // We don't have to check the method refs. If the constant pool has
    // been GC'ed then so have the methods.
  }

  // all of the underlying nodes' info has been GC'ed
  return false;
} // end has_previous_version()

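// Return the method with the given idnum, or NULL if none exists. The
// idnum usually matches the method's slot in the methods array (the
// fast path below), but the slot can change (for example, after a
// RedefineClasses() operation), so fall back to a linear search.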
methodOop instanceKlass::method_with_idnum(int idnum) {
  methodOop m = NULL;
  if (idnum < methods()->length()) {
    m = (methodOop) methods()->obj_at(idnum);
  }
  if (m == NULL || m->method_idnum() != idnum) {
    for (int index = 0; index < methods()->length(); ++index) {
      m = (methodOop) methods()->obj_at(index);
      if (m->method_idnum() == idnum) {
        return m;
      }
    }
    // none found; return NULL instead of whichever method the search
    // examined last
    return NULL;
  }
  return m;
}


// Set the annotation at 'idnum' to 'anno'.
// We don't want to create or extend the array if 'anno' is NULL, since that is the
// default value.  However, if the array exists and is long enough, we must store the
// NULL value so that a stale entry does not remain.
void instanceKlass::set_methods_annotations_of(int idnum, typeArrayOop anno, objArrayOop* md_p) {
  objArrayOop md = *md_p;
  if (md != NULL && md->length() > idnum) {
    md->obj_at_put(idnum, anno);
  } else if (anno != NULL) {
    // create the array
    int length = MAX2(idnum+1, (int)_idnum_allocated_count);
    md = oopFactory::new_system_objArray(length, Thread::current());
    if (*md_p != NULL) {
      // copy the existing entries
      for (int index = 0; index < (*md_p)->length(); index++) {
        md->obj_at_put(index, (*md_p)->obj_at(index));
      }
    }
    set_annotations(md, md_p);
    md->obj_at_put(idnum, anno);
  } // if no array and idnum isn't included there is nothing to do
}
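// For example (a sketch of the cases above, using a hypothetical caller):
// with no existing array, set_methods_annotations_of(3, anno, &md)
// allocates a system objArray sized to cover all allocated idnums,
// installs it via set_annotations(), and stores 'anno' at slot 3. The
// same call with anno == NULL and no existing array is a no-op.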

// Construct a PreviousVersionNode entry for the array hung off
// the instanceKlass.
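// The node takes ownership of the JNI handles passed in; they are
// destroyed in ~PreviousVersionNode() below.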
PreviousVersionNode::PreviousVersionNode(jobject prev_constant_pool,
  bool prev_cp_is_weak, GrowableArray<jweak>* prev_EMCP_methods) {

  _prev_constant_pool = prev_constant_pool;
  _prev_cp_is_weak = prev_cp_is_weak;
  _prev_EMCP_methods = prev_EMCP_methods;
}


// Destroy a PreviousVersionNode
PreviousVersionNode::~PreviousVersionNode() {
  if (_prev_constant_pool != NULL) {
    if (_prev_cp_is_weak) {
      JNIHandles::destroy_weak_global(_prev_constant_pool);
    } else {
      JNIHandles::destroy_global(_prev_constant_pool);
    }
  }

  if (_prev_EMCP_methods != NULL) {
    for (int i = _prev_EMCP_methods->length() - 1; i >= 0; i--) {
      jweak method_ref = _prev_EMCP_methods->at(i);
      if (method_ref != NULL) {
        JNIHandles::destroy_weak_global(method_ref);
      }
    }
    delete _prev_EMCP_methods;
  }
}


// Construct a PreviousVersionInfo entry
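// The handles created here remain valid for as long as the enclosing
// HandleMark (the owning PreviousVersionWalker's _hm) is active; see
// ~PreviousVersionWalker() below.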
PreviousVersionInfo::PreviousVersionInfo(PreviousVersionNode *pv_node) {
  _prev_constant_pool_handle = constantPoolHandle();  // NULL handle
  _prev_EMCP_method_handles = NULL;

  jobject cp_ref = pv_node->prev_constant_pool();
  assert(cp_ref != NULL, "constant pool ref was unexpectedly cleared");
  if (cp_ref == NULL) {
    return;  // robustness
  }

  constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
  if (cp == NULL) {
    // Weak reference has been GC'ed. Since the constant pool has been
    // GC'ed, the methods have also been GC'ed.
    return;
  }

  // make the constantPoolOop safe to return
  _prev_constant_pool_handle = constantPoolHandle(cp);

  GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
  if (method_refs == NULL) {
    // the instanceKlass did not have any EMCP methods
    return;
  }

  _prev_EMCP_method_handles = new GrowableArray<methodHandle>(10);

  int n_methods = method_refs->length();
  for (int i = 0; i < n_methods; i++) {
    jweak method_ref = method_refs->at(i);
    assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
    if (method_ref == NULL) {
      continue;  // robustness
    }

    methodOop method = (methodOop)JNIHandles::resolve(method_ref);
    if (method == NULL) {
      // this entry has been GC'ed so skip it
      continue;
    }

    // make the methodOop safe to return
    _prev_EMCP_method_handles->append(methodHandle(method));
  }
}


// Destroy a PreviousVersionInfo
PreviousVersionInfo::~PreviousVersionInfo() {
  // Since _prev_EMCP_method_handles is not C-heap allocated, we
  // don't have to delete it.
}


// Construct a helper for walking the previous versions array
PreviousVersionWalker::PreviousVersionWalker(instanceKlass *ik) {
  _previous_versions = ik->previous_versions();
  _current_index = 0;
  // _hm needs no initialization
  _current_p = NULL;
}


// Destroy a PreviousVersionWalker
PreviousVersionWalker::~PreviousVersionWalker() {
  // Delete the current info just in case the caller didn't walk to
  // the end of the previous versions list. No harm if _current_p is
  // already NULL.
  delete _current_p;

  // When _hm is destroyed, all the Handles returned in
  // PreviousVersionInfo objects will be destroyed.
  // Also, after this destructor is finished it will be
  // safe to delete the GrowableArray allocated in the
  // PreviousVersionInfo objects.
}


// Return the interesting information for the next previous version
// of the klass. Returns NULL if there are no more previous versions.
PreviousVersionInfo* PreviousVersionWalker::next_previous_version() {
  if (_previous_versions == NULL) {
    // no previous versions so nothing to return
    return NULL;
  }

  delete _current_p;  // clean up the previous info for the caller
  _current_p = NULL;  // reset to NULL so we don't delete the same object twice

  int length = _previous_versions->length();

  while (_current_index < length) {
    PreviousVersionNode * pv_node = _previous_versions->at(_current_index++);
    PreviousVersionInfo * pv_info = new (ResourceObj::C_HEAP, mtClass)
                                          PreviousVersionInfo(pv_node);

    constantPoolHandle cp_h = pv_info->prev_constant_pool_handle();
    if (cp_h.is_null()) {
      delete pv_info;

      // The underlying node's info has been GC'ed so try the next one.
      // We don't have to check the methods. If the constant pool has
      // been GC'ed then so have the methods.
      continue;
    }

    // Found a node with non-GC'ed info so return it. The walker keeps
    // ownership of pv_info and deletes it on the next call or in the
    // destructor, so the caller must not delete it.
    _current_p = pv_info;
    return pv_info;
  }

  // all of the underlying nodes' info has been GC'ed
  return NULL;
} // end next_previous_version()
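

// Typical iteration pattern (a sketch only; real callers live in the
// JVM/TI RedefineClasses code and may differ):
//
//   PreviousVersionWalker pvw(ik);
//   for (PreviousVersionInfo* pv_info = pvw.next_previous_version();
//        pv_info != NULL; pv_info = pvw.next_previous_version()) {
//     constantPoolHandle cp_h = pv_info->prev_constant_pool_handle();
//     // ... examine cp_h and the EMCP method handles ...
//   }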