1 /*
   2  * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/metadataOnStackMark.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/verifier.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "interpreter/oopMapCache.hpp"
  32 #include "interpreter/rewriter.hpp"
  33 #include "memory/gcLocker.hpp"
  34 #include "memory/metadataFactory.hpp"
  35 #include "memory/metaspaceShared.hpp"
  36 #include "memory/universe.inline.hpp"
  37 #include "oops/fieldStreams.hpp"
  38 #include "oops/klassVtable.hpp"
  39 #include "prims/jvmtiImpl.hpp"
  40 #include "prims/jvmtiRedefineClasses.hpp"
  41 #include "prims/methodComparator.hpp"
  42 #include "runtime/deoptimization.hpp"
  43 #include "runtime/relocator.hpp"
  44 #include "utilities/bitMap.inline.hpp"
  45 
  46 
// Static scratch state shared by the redefinition code while a single
// class pair is being processed: the old/new method arrays, the derived
// partition of those methods into matching / deleted / added sets (with
// their lengths), and the Klass currently being redefined.
Array<Method*>* VM_RedefineClasses::_old_methods = NULL;
Array<Method*>* VM_RedefineClasses::_new_methods = NULL;
Method**  VM_RedefineClasses::_matching_old_methods = NULL;
Method**  VM_RedefineClasses::_matching_new_methods = NULL;
Method**  VM_RedefineClasses::_deleted_methods      = NULL;
Method**  VM_RedefineClasses::_added_methods        = NULL;
int         VM_RedefineClasses::_matching_methods_length = 0;
int         VM_RedefineClasses::_deleted_methods_length  = 0;
int         VM_RedefineClasses::_added_methods_length    = 0;
Klass*      VM_RedefineClasses::_the_class_oop = NULL;
  57 
  58 
  59 VM_RedefineClasses::VM_RedefineClasses(jint class_count,
  60                                        const jvmtiClassDefinition *class_defs,
  61                                        JvmtiClassLoadKind class_load_kind) {
  62   _class_count = class_count;
  63   _class_defs = class_defs;
  64   _class_load_kind = class_load_kind;
  65   _res = JVMTI_ERROR_NONE;
  66 }
  67 
  68 bool VM_RedefineClasses::doit_prologue() {
  69   if (_class_count == 0) {
  70     _res = JVMTI_ERROR_NONE;
  71     return false;
  72   }
  73   if (_class_defs == NULL) {
  74     _res = JVMTI_ERROR_NULL_POINTER;
  75     return false;
  76   }
  77   for (int i = 0; i < _class_count; i++) {
  78     if (_class_defs[i].klass == NULL) {
  79       _res = JVMTI_ERROR_INVALID_CLASS;
  80       return false;
  81     }
  82     if (_class_defs[i].class_byte_count == 0) {
  83       _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
  84       return false;
  85     }
  86     if (_class_defs[i].class_bytes == NULL) {
  87       _res = JVMTI_ERROR_NULL_POINTER;
  88       return false;
  89     }
  90   }
  91 
  92   // Start timer after all the sanity checks; not quite accurate, but
  93   // better than adding a bunch of stop() calls.
  94   RC_TIMER_START(_timer_vm_op_prologue);
  95 
  96   // We first load new class versions in the prologue, because somewhere down the
  97   // call chain it is required that the current thread is a Java thread.
  98   _res = load_new_class_versions(Thread::current());
  99   if (_res != JVMTI_ERROR_NONE) {
 100     // free any successfully created classes, since none are redefined
 101     for (int i = 0; i < _class_count; i++) {
 102       if (_scratch_classes[i] != NULL) {
 103         ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
 104         // Free the memory for this class at class unloading time.  Not before
 105         // because CMS might think this is still live.
 106         cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
 107       }
 108     }
 109     // Free os::malloc allocated memory in load_new_class_version.
 110     os::free(_scratch_classes);
 111     RC_TIMER_STOP(_timer_vm_op_prologue);
 112     return false;
 113   }
 114 
 115   RC_TIMER_STOP(_timer_vm_op_prologue);
 116   return true;
 117 }
 118 
// The actual redefinition work, executed at a safepoint.  Installs each
// scratch class created in doit_prologue() and hands the scratch class
// memory over to its ClassLoaderData for deferred deallocation.
void VM_RedefineClasses::doit() {
  Thread *thread = Thread::current();

  if (UseSharedSpaces) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
      RC_TRACE_WITH_THREAD(0x00000001, thread,
        ("failed to remap shared readonly space to readwrite, private"));
      _res = JVMTI_ERROR_INTERNAL;
      return;
    }
  }

  // Mark methods seen on stack and everywhere else so old methods are not
  // cleaned up if they're on the stack.
  MetadataOnStackMark md_on_stack;
  HandleMark hm(thread);   // make sure any handles created are deleted
                           // before the stack walk again.

  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
    ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
    // Free the memory for this class at class unloading time.  Not before
    // because CMS might think this is still live.
    cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
    // Clear the slot; the deallocate list now owns this scratch class.
    _scratch_classes[i] = NULL;
  }

  // Disable any dependent concurrent compilations
  SystemDictionary::notice_modification();

  // Set flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::set_has_redefined_a_class();

// check_class() is optionally called for product bits, but is
// always called for non-product bits.
#ifdef PRODUCT
  if (RC_TRACE_ENABLED(0x00004000)) {
#endif
    RC_TRACE_WITH_THREAD(0x00004000, thread, ("calling check_class"));
    SystemDictionary::classes_do(check_class, thread);
#ifdef PRODUCT
  }
#endif
}
 168 
 169 void VM_RedefineClasses::doit_epilogue() {
 170   // Free os::malloc allocated memory.
 171   os::free(_scratch_classes);
 172 
 173   if (RC_TRACE_ENABLED(0x00000004)) {
 174     // Used to have separate timers for "doit" and "all", but the timer
 175     // overhead skewed the measurements.
 176     jlong doit_time = _timer_rsc_phase1.milliseconds() +
 177                       _timer_rsc_phase2.milliseconds();
 178     jlong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;
 179 
 180     RC_TRACE(0x00000004, ("vm_op: all=" UINT64_FORMAT
 181       "  prologue=" UINT64_FORMAT "  doit=" UINT64_FORMAT, all_time,
 182       _timer_vm_op_prologue.milliseconds(), doit_time));
 183     RC_TRACE(0x00000004,
 184       ("redefine_single_class: phase1=" UINT64_FORMAT "  phase2=" UINT64_FORMAT,
 185        _timer_rsc_phase1.milliseconds(), _timer_rsc_phase2.milliseconds()));
 186   }
 187 }
 188 
 189 bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
 190   // classes for primitives cannot be redefined
 191   if (java_lang_Class::is_primitive(klass_mirror)) {
 192     return false;
 193   }
 194   Klass* the_class_oop = java_lang_Class::as_Klass(klass_mirror);
 195   // classes for arrays cannot be redefined
 196   if (the_class_oop == NULL || !the_class_oop->oop_is_instance()) {
 197     return false;
 198   }
 199   return true;
 200 }
 201 
 202 // Append the current entry at scratch_i in scratch_cp to *merge_cp_p
 203 // where the end of *merge_cp_p is specified by *merge_cp_length_p. For
 204 // direct CP entries, there is just the current entry to append. For
 205 // indirect and double-indirect CP entries, there are zero or more
 206 // referenced CP entries along with the current entry to append.
 207 // Indirect and double-indirect CP entries are handled by recursive
 208 // calls to append_entry() as needed. The referenced CP entries are
 209 // always appended to *merge_cp_p before the referee CP entry. These
 210 // referenced CP entries may already exist in *merge_cp_p in which case
 211 // there is nothing extra to append and only the current entry is
 212 // appended.
void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
       TRAPS) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    case JVM_CONSTANT_Class:
    {
      // revert the copy to JVM_CONSTANT_UnresolvedClass
      (*merge_cp_p)->unresolved_klass_at_put(*merge_cp_length_p,
        scratch_cp->klass_name_at(scratch_i));

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // advance past both slots; the second slot carries the
      // JVM_CONSTANT_Invalid tag (see the Invalid case below)
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // Symbol*s so this entry can be directly appended.
    case JVM_CONSTANT_String:      // fall through

    // These were indirect CP entries, but they have been changed into
    // Symbol*s so these entries can be directly appended.
    case JVM_CONSTANT_UnresolvedClass:  // fall through
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      // recursively merge the referenced name and signature entries first
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
                                                         merge_cp_length_p, THREAD);

      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
                                                              merge_cp_p, merge_cp_length_p,
                                                              THREAD);

      // If the referenced entries already exist in *merge_cp_p at the
      // same indices, then new_name_ref_i == name_ref_i and
      // new_signature_ref_i == signature_ref_i. In that case, all we
      // are appending is the current entry.
      if (new_name_ref_i != name_ref_i) {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d name_ref_index change: %d to %d",
          *merge_cp_length_p, name_ref_i, new_name_ref_i));
      }
      if (new_signature_ref_i != signature_ref_i) {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
          *merge_cp_length_p, signature_ref_i, new_signature_ref_i));
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // recursively merge the referenced class entry and the (indirect)
      // NameAndType entry before appending the referee entry itself
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      const char *entry_name;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d",
          entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i));
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        RC_TRACE(0x00080000,
          ("%s entry@%d name_and_type_index changed: %d to %d",
          entry_name, *merge_cp_length_p, name_and_type_ref_i,
          new_name_and_type_ref_i));
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodType:
    {
      int ref_i = scratch_cp->method_type_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        RC_TRACE(0x00080000,
                 ("MethodType entry@%d ref_index change: %d to %d",
                  *merge_cp_length_p, ref_i, new_ref_i));
      }
      (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodHandle:
    {
      // the ref_kind is a bare value (JVM_REF_*), not a CP index, so
      // only the referenced entry needs merging
      int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i);
      int ref_i = scratch_cp->method_handle_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        RC_TRACE(0x00080000,
                 ("MethodHandle entry@%d ref_index change: %d to %d",
                  *merge_cp_length_p, ref_i, new_ref_i));
      }
      (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_InvokeDynamic:
    {
      // TBD: cross-checks and possible extra appends into CP and bsm operands
      // are needed as well. This issue is tracked by a separate bug 8007037.
      int bss_idx = scratch_cp->invoke_dynamic_bootstrap_specifier_index(scratch_i);

      int ref_i = scratch_cp->invoke_dynamic_name_and_type_ref_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        RC_TRACE(0x00080000,
                 ("InvokeDynamic entry@%d name_and_type ref_index change: %d to %d",
                  *merge_cp_length_p, ref_i, new_ref_i));
      }

      (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, bss_idx, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be here, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
    // here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb: bad_value is otherwise unused, but makes the
      // offending tag visible in a debugger or crash dump
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
 464 
 465 
 466 int VM_RedefineClasses::find_or_append_indirect_entry(constantPoolHandle scratch_cp,
 467       int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {
 468 
 469   int new_ref_i = ref_i;
 470   bool match = (ref_i < *merge_cp_length_p) &&
 471                scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i, THREAD);
 472 
 473   if (!match) {
 474     // forward reference in *merge_cp_p or not a direct match
 475     int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p, THREAD);
 476     if (found_i != 0) {
 477       guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
 478       // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
 479       new_ref_i = found_i;
 480       map_index(scratch_cp, ref_i, found_i);
 481     } else {
 482       // no match found so we have to append this entry to *merge_cp_p
 483       append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p, THREAD);
 484       // The above call to append_entry() can only append one entry
 485       // so the post call query of *merge_cp_length_p is only for
 486       // the sake of consistency.
 487       new_ref_i = *merge_cp_length_p - 1;
 488     }
 489   }
 490 
 491   return new_ref_i;
 492 } // end find_or_append_indirect_entry()
 493 
 494 
 495 void VM_RedefineClasses::swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class, TRAPS) {
 496   AnnotationArray* save;
 497 
 498   Annotations* sca = scratch_class->annotations();
 499   if (sca == NULL) return;
 500 
 501   save = sca->get_method_annotations_of(i);
 502   sca->set_method_annotations_of(scratch_class, i, sca->get_method_annotations_of(j), CHECK);
 503   sca->set_method_annotations_of(scratch_class, j, save, CHECK);
 504 
 505   save = sca->get_method_parameter_annotations_of(i);
 506   sca->set_method_parameter_annotations_of(scratch_class, i, sca->get_method_parameter_annotations_of(j), CHECK);
 507   sca->set_method_parameter_annotations_of(scratch_class, j, save, CHECK);
 508 
 509   save = sca->get_method_default_annotations_of(i);
 510   sca->set_method_default_annotations_of(scratch_class, i, sca->get_method_default_annotations_of(j), CHECK);
 511   sca->set_method_default_annotations_of(scratch_class, j, save, CHECK);
 512 }
 513 
 514 
 515 jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
 516              instanceKlassHandle the_class,
 517              instanceKlassHandle scratch_class) {
 518   int i;
 519 
 520   // Check superclasses, or rather their names, since superclasses themselves can be
 521   // requested to replace.
 522   // Check for NULL superclass first since this might be java.lang.Object
 523   if (the_class->super() != scratch_class->super() &&
 524       (the_class->super() == NULL || scratch_class->super() == NULL ||
 525        the_class->super()->name() !=
 526        scratch_class->super()->name())) {
 527     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
 528   }
 529 
 530   // Check if the number, names and order of directly implemented interfaces are the same.
 531   // I think in principle we should just check if the sets of names of directly implemented
 532   // interfaces are the same, i.e. the order of declaration (which, however, if changed in the
 533   // .java file, also changes in .class file) should not matter. However, comparing sets is
 534   // technically a bit more difficult, and, more importantly, I am not sure at present that the
 535   // order of interfaces does not matter on the implementation level, i.e. that the VM does not
 536   // rely on it somewhere.
 537   Array<Klass*>* k_interfaces = the_class->local_interfaces();
 538   Array<Klass*>* k_new_interfaces = scratch_class->local_interfaces();
 539   int n_intfs = k_interfaces->length();
 540   if (n_intfs != k_new_interfaces->length()) {
 541     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
 542   }
 543   for (i = 0; i < n_intfs; i++) {
 544     if (k_interfaces->at(i)->name() !=
 545         k_new_interfaces->at(i)->name()) {
 546       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
 547     }
 548   }
 549 
 550   // Check whether class is in the error init state.
 551   if (the_class->is_in_error_state()) {
 552     // TBD #5057930: special error code is needed in 1.6
 553     return JVMTI_ERROR_INVALID_CLASS;
 554   }
 555 
 556   // Check whether class modifiers are the same.
 557   jushort old_flags = (jushort) the_class->access_flags().get_flags();
 558   jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
 559   if (old_flags != new_flags) {
 560     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
 561   }
 562 
 563   // Check if the number, names, types and order of fields declared in these classes
 564   // are the same.
 565   JavaFieldStream old_fs(the_class);
 566   JavaFieldStream new_fs(scratch_class);
 567   for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
 568     // access
 569     old_flags = old_fs.access_flags().as_short();
 570     new_flags = new_fs.access_flags().as_short();
 571     if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
 572       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
 573     }
 574     // offset
 575     if (old_fs.offset() != new_fs.offset()) {
 576       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
 577     }
 578     // name and signature
 579     Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
 580     Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
 581     Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
 582     Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
 583     if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
 584       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
 585     }
 586   }
 587 
 588   // If both streams aren't done then we have a differing number of
 589   // fields.
 590   if (!old_fs.done() || !new_fs.done()) {
 591     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
 592   }
 593 
 594   // Do a parallel walk through the old and new methods. Detect
 595   // cases where they match (exist in both), have been added in
 596   // the new methods, or have been deleted (exist only in the
 597   // old methods).  The class file parser places methods in order
 598   // by method name, but does not order overloaded methods by
 599   // signature.  In order to determine what fate befell the methods,
 600   // this code places the overloaded new methods that have matching
 601   // old methods in the same order as the old methods and places
 602   // new overloaded methods at the end of overloaded methods of
 603   // that name. The code for this order normalization is adapted
 604   // from the algorithm used in InstanceKlass::find_method().
 605   // Since we are swapping out of order entries as we find them,
 606   // we only have to search forward through the overloaded methods.
 607   // Methods which are added and have the same name as an existing
 608   // method (but different signature) will be put at the end of
 609   // the methods with that name, and the name mismatch code will
 610   // handle them.
 611   Array<Method*>* k_old_methods(the_class->methods());
 612   Array<Method*>* k_new_methods(scratch_class->methods());
 613   int n_old_methods = k_old_methods->length();
 614   int n_new_methods = k_new_methods->length();
 615   Thread* thread = Thread::current();
 616 
 617   int ni = 0;
 618   int oi = 0;
 619   while (true) {
 620     Method* k_old_method;
 621     Method* k_new_method;
 622     enum { matched, added, deleted, undetermined } method_was = undetermined;
 623 
 624     if (oi >= n_old_methods) {
 625       if (ni >= n_new_methods) {
 626         break; // we've looked at everything, done
 627       }
 628       // New method at the end
 629       k_new_method = k_new_methods->at(ni);
 630       method_was = added;
 631     } else if (ni >= n_new_methods) {
 632       // Old method, at the end, is deleted
 633       k_old_method = k_old_methods->at(oi);
 634       method_was = deleted;
 635     } else {
 636       // There are more methods in both the old and new lists
 637       k_old_method = k_old_methods->at(oi);
 638       k_new_method = k_new_methods->at(ni);
 639       if (k_old_method->name() != k_new_method->name()) {
 640         // Methods are sorted by method name, so a mismatch means added
 641         // or deleted
 642         if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
 643           method_was = added;
 644         } else {
 645           method_was = deleted;
 646         }
 647       } else if (k_old_method->signature() == k_new_method->signature()) {
 648         // Both the name and signature match
 649         method_was = matched;
 650       } else {
 651         // The name matches, but the signature doesn't, which means we have to
 652         // search forward through the new overloaded methods.
 653         int nj;  // outside the loop for post-loop check
 654         for (nj = ni + 1; nj < n_new_methods; nj++) {
 655           Method* m = k_new_methods->at(nj);
 656           if (k_old_method->name() != m->name()) {
 657             // reached another method name so no more overloaded methods
 658             method_was = deleted;
 659             break;
 660           }
 661           if (k_old_method->signature() == m->signature()) {
 662             // found a match so swap the methods
 663             k_new_methods->at_put(ni, m);
 664             k_new_methods->at_put(nj, k_new_method);
 665             k_new_method = m;
 666             method_was = matched;
 667             break;
 668           }
 669         }
 670 
 671         if (nj >= n_new_methods) {
 672           // reached the end without a match; so method was deleted
 673           method_was = deleted;
 674         }
 675       }
 676     }
 677 
 678     switch (method_was) {
 679     case matched:
 680       // methods match, be sure modifiers do too
 681       old_flags = (jushort) k_old_method->access_flags().get_flags();
 682       new_flags = (jushort) k_new_method->access_flags().get_flags();
 683       if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
 684         return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
 685       }
 686       {
 687         u2 new_num = k_new_method->method_idnum();
 688         u2 old_num = k_old_method->method_idnum();
 689         if (new_num != old_num) {
 690           Method* idnum_owner = scratch_class->method_with_idnum(old_num);
 691           if (idnum_owner != NULL) {
 692             // There is already a method assigned this idnum -- switch them
 693             idnum_owner->set_method_idnum(new_num);
 694           }
 695           k_new_method->set_method_idnum(old_num);
 696           swap_all_method_annotations(old_num, new_num, scratch_class, thread);
 697            if (thread->has_pending_exception()) {
 698              return JVMTI_ERROR_OUT_OF_MEMORY;
 699            }
 700         }
 701       }
 702       RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]",
 703                             k_new_method->name_and_sig_as_C_string(), ni,
 704                             k_old_method->name_and_sig_as_C_string(), oi));
 705       // advance to next pair of methods
 706       ++oi;
 707       ++ni;
 708       break;
 709     case added:
 710       // method added, see if it is OK
 711       new_flags = (jushort) k_new_method->access_flags().get_flags();
 712       if ((new_flags & JVM_ACC_PRIVATE) == 0
 713            // hack: private should be treated as final, but alas
 714           || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
 715          ) {
 716         // new methods must be private
 717         return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
 718       }
 719       {
 720         u2 num = the_class->next_method_idnum();
 721         if (num == ConstMethod::UNSET_IDNUM) {
 722           // cannot add any more methods
 723           return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
 724         }
 725         u2 new_num = k_new_method->method_idnum();
 726         Method* idnum_owner = scratch_class->method_with_idnum(num);
 727         if (idnum_owner != NULL) {
 728           // There is already a method assigned this idnum -- switch them
 729           idnum_owner->set_method_idnum(new_num);
 730         }
 731         k_new_method->set_method_idnum(num);
 732         swap_all_method_annotations(new_num, num, scratch_class, thread);
 733         if (thread->has_pending_exception()) {
 734           return JVMTI_ERROR_OUT_OF_MEMORY;
 735         }
 736       }
 737       RC_TRACE(0x00008000, ("Method added: new: %s [%d]",
 738                             k_new_method->name_and_sig_as_C_string(), ni));
 739       ++ni; // advance to next new method
 740       break;
 741     case deleted:
 742       // method deleted, see if it is OK
 743       old_flags = (jushort) k_old_method->access_flags().get_flags();
 744       if ((old_flags & JVM_ACC_PRIVATE) == 0
 745            // hack: private should be treated as final, but alas
 746           || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
 747          ) {
 748         // deleted methods must be private
 749         return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
 750       }
 751       RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]",
 752                             k_old_method->name_and_sig_as_C_string(), oi));
 753       ++oi; // advance to next old method
 754       break;
 755     default:
 756       ShouldNotReachHere();
 757     }
 758   }
 759 
 760   return JVMTI_ERROR_NONE;
 761 }
 762 
 763 
 764 // Find new constant pool index value for old constant pool index value
 765 // by seaching the index map. Returns zero (0) if there is no mapped
 766 // value for the old constant pool index.
 767 int VM_RedefineClasses::find_new_index(int old_index) {
 768   if (_index_map_count == 0) {
 769     // map is empty so nothing can be found
 770     return 0;
 771   }
 772 
 773   if (old_index < 1 || old_index >= _index_map_p->length()) {
 774     // The old_index is out of range so it is not mapped. This should
 775     // not happen in regular constant pool merging use, but it can
 776     // happen if a corrupt annotation is processed.
 777     return 0;
 778   }
 779 
 780   int value = _index_map_p->at(old_index);
 781   if (value == -1) {
 782     // the old_index is not mapped
 783     return 0;
 784   }
 785 
 786   return value;
 787 } // end find_new_index()
 788 
 789 
 790 // Returns true if the current mismatch is due to a resolved/unresolved
 791 // class pair. Otherwise, returns false.
 792 bool VM_RedefineClasses::is_unresolved_class_mismatch(constantPoolHandle cp1,
 793        int index1, constantPoolHandle cp2, int index2) {
 794 
 795   jbyte t1 = cp1->tag_at(index1).value();
 796   if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
 797     return false;  // wrong entry type; not our special case
 798   }
 799 
 800   jbyte t2 = cp2->tag_at(index2).value();
 801   if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
 802     return false;  // wrong entry type; not our special case
 803   }
 804 
 805   if (t1 == t2) {
 806     return false;  // not a mismatch; not our special case
 807   }
 808 
 809   char *s1 = cp1->klass_name_at(index1)->as_C_string();
 810   char *s2 = cp2->klass_name_at(index2)->as_C_string();
 811   if (strcmp(s1, s2) != 0) {
 812     return false;  // strings don't match; not our special case
 813   }
 814 
 815   return true;  // made it through the gauntlet; this is our special case
 816 } // end is_unresolved_class_mismatch()
 817 
 818 
// Load the new class file bytes for each class being redefined: parse
// the bytes into a scratch class, run the redefinition compatibility
// checks, verify the bytecodes, merge the new constant pool with the
// old one, and rewrite/link the scratch class methods. The resulting
// scratch classes are recorded in _scratch_classes. Returns
// JVMTI_ERROR_NONE on success or the JVMTI error code for the first
// failure; on error the caller deallocates any scratch classes that
// were already created (see comment at _scratch_classes assignment).
jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {

  // For consistency allocate memory using os::malloc wrapper.
  _scratch_classes = (Klass**)
    os::malloc(sizeof(Klass*) * _class_count, mtClass);
  if (_scratch_classes == NULL) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }
  // Zero initialize the _scratch_classes array.
  for (int i = 0; i < _class_count; i++) {
    _scratch_classes[i] = NULL;
  }

  ResourceMark rm(THREAD);

  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  // state can only be NULL if the current thread is exiting which
  // should not happen since we're trying to do a RedefineClasses
  guarantee(state != NULL, "exiting thread calling load_new_class_versions");
  for (int i = 0; i < _class_count; i++) {
    // Create HandleMark so that any handles created while loading new class
    // versions are deleted. Constant pools are deallocated while merging
    // constant pools
    HandleMark hm(THREAD);

    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives cannot be redefined
    if (!is_modifiable_class(mirror)) {
      return JVMTI_ERROR_UNMODIFIABLE_CLASS;
    }
    Klass* the_class_oop = java_lang_Class::as_Klass(mirror);
    instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
    Symbol*  the_class_sym = the_class->name();

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), _class_load_kind,
      os::available_memory() >> 10));

    // Wrap the agent-supplied class bytes in a stream for the parser.
    ClassFileStream st((u1*) _class_defs[i].class_bytes,
      _class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__");

    // Parse the stream.
    Handle the_class_loader(THREAD, the_class->class_loader());
    Handle protection_domain(THREAD, the_class->protection_domain());
    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(&the_class, _class_load_kind);

    Klass* k = SystemDictionary::parse_stream(the_class_sym,
                                                the_class_loader,
                                                protection_domain,
                                                &st,
                                                THREAD);
    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    instanceKlassHandle scratch_class (THREAD, k);

    // Need to clean up allocated InstanceKlass if there's an error so assign
    // the result here. Caller deallocates all the scratch classes in case of
    // an error.
    _scratch_classes[i] = k;

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'",
        ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;

      // Map the parse exception to the corresponding JVMTI error code.
      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(&the_class, &scratch_class, state);
      Verifier::verify(
        scratch_class, Verifier::ThrowException, true, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    if (VerifyMergedCPBytecodes) {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(&the_class, &scratch_class, state);
        Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD,
          ("verify_byte_codes post merge-CP exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Rewrite the scratch class for the constant pool cache and then
    // link its methods; either step may raise a pending exception.
    Rewriter::rewrite(scratch_class, THREAD);
    if (!HAS_PENDING_EXCEPTION) {
      scratch_class->link_methods(THREAD);
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), os::available_memory() >> 10));
  }

  return JVMTI_ERROR_NONE;
}
1015 
1016 
1017 // Map old_index to new_index as needed. scratch_cp is only needed
1018 // for RC_TRACE() calls.
1019 void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
1020        int old_index, int new_index) {
1021   if (find_new_index(old_index) != 0) {
1022     // old_index is already mapped
1023     return;
1024   }
1025 
1026   if (old_index == new_index) {
1027     // no mapping is needed
1028     return;
1029   }
1030 
1031   _index_map_p->at_put(old_index, new_index);
1032   _index_map_count++;
1033 
1034   RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
1035     scratch_cp->tag_at(old_index).value(), old_index, new_index));
1036 } // end map_index()
1037 
1038 
// Merge old_cp and scratch_cp and return the results of the merge via
// merge_cp_p. The number of entries in *merge_cp_p is returned via
// merge_cp_length_p. The entries in old_cp occupy the same locations
// in *merge_cp_p. Also creates a map of indices from entries in
// scratch_cp to the corresponding entry in *merge_cp_p. Index map
// entries are only created for entries in scratch_cp that occupy a
// different location in *merge_cp_p.
// Returns true on success, false when a robustness check on the
// caller-supplied arguments fails. Can safepoint (TRAPS/CHECK_0).
bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
       constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
       int *merge_cp_length_p, TRAPS) {

  if (merge_cp_p == NULL) {
    assert(false, "caller must provide scratch constantPool");
    return false; // robustness
  }
  if (merge_cp_length_p == NULL) {
    assert(false, "caller must provide scratch CP length");
    return false; // robustness
  }
  // Worst case we need old_cp->length() + scratch_cp()->length(),
  // but the caller might be smart so make sure we have at least
  // the minimum.
  if ((*merge_cp_p)->length() < old_cp->length()) {
    assert(false, "merge area too small");
    return false; // robustness
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
    scratch_cp->length()));

  {
    // Pass 0:
    // The old_cp is copied to *merge_cp_p; this means that any code
    // using old_cp does not have to change. This work looks like a
    // perfect fit for ConstantPool*::copy_cp_to(), but we need to
    // handle one special case:
    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
    // This will make verification happy.

    int old_i;  // index into old_cp

    // index zero (0) is not used in constantPools
    for (old_i = 1; old_i < old_cp->length(); old_i++) {
      // leave debugging crumb
      jbyte old_tag = old_cp->tag_at(old_i).value();
      switch (old_tag) {
      case JVM_CONSTANT_Class:
      case JVM_CONSTANT_UnresolvedClass:
        // revert the copy to JVM_CONSTANT_UnresolvedClass
        // May be resolving while calling this so do the same for
        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
        (*merge_cp_p)->unresolved_klass_at_put(old_i,
          old_cp->klass_name_at(old_i));
        break;

      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // just copy the entry to *merge_cp_p, but double and long take
        // two constant pool entries
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        old_i++;  // skip the second (invalid) slot of the 8-byte constant
        break;

      default:
        // just copy the entry to *merge_cp_p
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        break;
      }
    } // end for each old_cp entry

    // the invokedynamic operands (bootstrap method info) are copied as a unit
    ConstantPool::copy_operands(old_cp, *merge_cp_p, CHECK_0);

    // We don't need to sanity check that *merge_cp_length_p is within
    // *merge_cp_p bounds since we have the minimum on-entry check above.
    (*merge_cp_length_p) = old_i;
  }

  // merge_cp_len should be the same as old_cp->length() at this point
  // so this trace message is really a "warm-and-breathing" message.
  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));

  int scratch_i;  // index into scratch_cp
  {
    // Pass 1a:
    // Compare scratch_cp entries to the old_cp entries that we have
    // already copied to *merge_cp_p. In this pass, we are eliminating
    // exact duplicates (matching entry at same index) so we only
    // compare entries in the common indice range.
    int increment = 1;
    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
        scratch_i, CHECK_0);
      if (match) {
        // found a match at the same index so nothing more to do
        continue;
      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
                                              *merge_cp_p, scratch_i)) {
        // The mismatch in compare_entry_to() above is because of a
        // resolved versus unresolved class entry at the same index
        // with the same string value. Since Pass 0 reverted any
        // class entries to unresolved class entries in *merge_cp_p,
        // we go with the unresolved class entry.
        continue;
      }

      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
        CHECK_0);
      if (found_i != 0) {
        guarantee(found_i != scratch_i,
          "compare_entry_to() and find_matching_entry() do not agree");

        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // The find_matching_entry() call above could fail to find a match
      // due to a resolved versus unresolved class or string entry situation
      // like we solved above with the is_unresolved_*_mismatch() calls.
      // However, we would have to call is_unresolved_*_mismatch() over
      // all of *merge_cp_p (potentially) and that doesn't seem to be
      // worth the time.

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }
  }

  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
    *merge_cp_length_p, scratch_i, _index_map_count));

  if (scratch_i < scratch_cp->length()) {
    // Pass 1b:
    // old_cp is smaller than scratch_cp so there are entries in
    // scratch_cp that we have not yet processed. We take care of
    // those now. There is no same-index shortcut here: each entry is
    // either found elsewhere in *merge_cp_p or appended.
    int increment = 1;
    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      int found_i =
        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
      if (found_i != 0) {
        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }

    RC_TRACE_WITH_THREAD(0x00020000, THREAD,
      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
      *merge_cp_length_p, scratch_i, _index_map_count));
  }

  return true;
} // end merge_constant_pools()
1230 
1231 
1232 // Scoped object to clean up the constant pool(s) created for merging
1233 class MergeCPCleaner {
1234   ClassLoaderData*   _loader_data;
1235   ConstantPool*      _cp;
1236   ConstantPool*      _scratch_cp;
1237  public:
1238   MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
1239                  _loader_data(loader_data), _cp(merge_cp), _scratch_cp(NULL) {}
1240   ~MergeCPCleaner() {
1241     _loader_data->add_to_deallocate_list(_cp);
1242     if (_scratch_cp != NULL) {
1243       _loader_data->add_to_deallocate_list(_scratch_cp);
1244     }
1245   }
1246   void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
1247 };
1248 
// Merge constant pools between the_class and scratch_class and
// potentially rewrite bytecodes in scratch_class to use the merged
// constant pool. Returns JVMTI_ERROR_NONE on success; returns
// JVMTI_ERROR_INTERNAL if the merge or rewrite fails or if the class
// was redefined concurrently.
jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
             instanceKlassHandle the_class, instanceKlassHandle scratch_class,
             TRAPS) {
  // worst case merged constant pool length is old and new combined
  int merge_cp_length = the_class->constants()->length()
        + scratch_class->constants()->length();

  // Constant pools are not easily reused so we allocate a new one
  // each time.
  // merge_cp is created unsafe for concurrent GC processing.  It
  // should be marked safe before discarding it. Even though
  // garbage,  if it crosses a card boundary, it may be scanned
  // in order to find the start of the first complete object on the card.
  ClassLoaderData* loader_data = the_class->class_loader_data();
  ConstantPool* merge_cp_oop =
    ConstantPool::allocate(loader_data,
                                  merge_cp_length,
                                  THREAD);
  // queue merge_cp_oop (and later the replaced scratch cp) for
  // deallocation when this scope exits, on any return path
  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);

  HandleMark hm(THREAD);  // make sure handles are cleared before
                          // MergeCPCleaner clears out merge_cp_oop
  constantPoolHandle merge_cp(THREAD, merge_cp_oop);

  // Get constants() from the old class because it could have been rewritten
  // while we were at a safepoint allocating a new constant pool.
  constantPoolHandle old_cp(THREAD, the_class->constants());
  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());

  // If the length changed, the class was redefined out from under us. Return
  // an error.
  if (merge_cp_length != the_class->constants()->length()
         + scratch_class->constants()->length()) {
    return JVMTI_ERROR_INTERNAL;
  }

  // Update the version number of the constant pool
  merge_cp->increment_and_save_version(old_cp->version());

  ResourceMark rm(THREAD);
  _index_map_count = 0;
  // -1 marks entries that have no mapping (see find_new_index())
  _index_map_p = new intArray(scratch_cp->length(), -1);

  // reference to the cp holder is needed for copy_operands()
  merge_cp->set_pool_holder(scratch_class());
  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
                  &merge_cp_length, THREAD);
  merge_cp->set_pool_holder(NULL);

  if (!result) {
    // The merge can fail due to memory allocation failure or due
    // to robustness checks.
    return JVMTI_ERROR_INTERNAL;
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));

  if (_index_map_count == 0) {
    // there is nothing to map between the new and merged constant pools

    if (old_cp->length() == scratch_cp->length()) {
      // The old and new constant pools are the same length and the
      // index map is empty. This means that the three constant pools
      // are equivalent (but not the same). Unfortunately, the new
      // constant pool has not gone through link resolution nor have
      // the new class bytecodes gone through constant pool cache
      // rewriting so we can't use the old constant pool with the new
      // class.

      // toss the merged constant pool at return
    } else if (old_cp->length() < scratch_cp->length()) {
      // The old constant pool has fewer entries than the new constant
      // pool and the index map is empty. This means the new constant
      // pool is a superset of the old constant pool. However, the old
      // class bytecodes have already gone through constant pool cache
      // rewriting so we can't use the new constant pool with the old
      // class.

      // toss the merged constant pool at return
    } else {
      // The old constant pool has more entries than the new constant
      // pool and the index map is empty. This means that both the old
      // and merged constant pools are supersets of the new constant
      // pool.

      // Replace the new constant pool with a shrunken copy of the
      // merged constant pool
      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length, THREAD);
      // The new constant pool replaces scratch_cp so have cleaner clean it up.
      // It can't be cleaned up while there are handles to it.
      cp_cleaner.add_scratch_cp(scratch_cp());
    }
  } else {
    if (RC_TRACE_ENABLED(0x00040000)) {
      // don't want to loop unless we are tracing
      int count = 0;
      for (int i = 1; i < _index_map_p->length(); i++) {
        int value = _index_map_p->at(i);

        if (value != -1) {
          RC_TRACE_WITH_THREAD(0x00040000, THREAD,
            ("index_map[%d]: old=%d new=%d", count, i, value));
          count++;
        }
      }
    }

    // We have entries mapped between the new and merged constant pools
    // so we have to rewrite some constant pool references.
    if (!rewrite_cp_refs(scratch_class, THREAD)) {
      return JVMTI_ERROR_INTERNAL;
    }

    // Replace the new constant pool with a shrunken copy of the
    // merged constant pool so now the rewritten bytecodes have
    // valid references; the previous new constant pool will get
    // GCed.
    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length, THREAD);
    // The new constant pool replaces scratch_cp so have cleaner clean it up.
    // It can't be cleaned up while there are handles to it.
    cp_cleaner.add_scratch_cp(scratch_cp());
  }

  return JVMTI_ERROR_NONE;
} // end merge_cp_and_rewrite()
1378 
1379 
1380 // Rewrite constant pool references in klass scratch_class.
1381 bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
1382        TRAPS) {
1383 
1384   // rewrite constant pool references in the methods:
1385   if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
1386     // propagate failure back to caller
1387     return false;
1388   }
1389 
1390   // rewrite constant pool references in the class_annotations:
1391   if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
1392     // propagate failure back to caller
1393     return false;
1394   }
1395 
1396   // rewrite constant pool references in the fields_annotations:
1397   if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
1398     // propagate failure back to caller
1399     return false;
1400   }
1401 
1402   // rewrite constant pool references in the methods_annotations:
1403   if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
1404     // propagate failure back to caller
1405     return false;
1406   }
1407 
1408   // rewrite constant pool references in the methods_parameter_annotations:
1409   if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
1410          THREAD)) {
1411     // propagate failure back to caller
1412     return false;
1413   }
1414 
1415   // rewrite constant pool references in the methods_default_annotations:
1416   if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
1417          THREAD)) {
1418     // propagate failure back to caller
1419     return false;
1420   }
1421 
1422   return true;
1423 } // end rewrite_cp_refs()
1424 
1425 
1426 // Rewrite constant pool references in the methods.
1427 bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
1428        instanceKlassHandle scratch_class, TRAPS) {
1429 
1430   Array<Method*>* methods = scratch_class->methods();
1431 
1432   if (methods == NULL || methods->length() == 0) {
1433     // no methods so nothing to do
1434     return true;
1435   }
1436 
1437   // rewrite constant pool references in the methods:
1438   for (int i = methods->length() - 1; i >= 0; i--) {
1439     methodHandle method(THREAD, methods->at(i));
1440     methodHandle new_method;
1441     rewrite_cp_refs_in_method(method, &new_method, CHECK_false);
1442     if (!new_method.is_null()) {
1443       // the method has been replaced so save the new method version
1444       methods->at_put(i, new_method());
1445     }
1446   }
1447 
1448   return true;
1449 }
1450 
1451 
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// Bytecodes that embed a constant pool index are patched in place with
// the mapped index from find_new_index(). The exceptional case is ldc:
// when its one-byte index maps to a value that no longer fits in a u1
// (or StressLdcRewrite forces the path), the instruction must grow to
// ldc_w; the Relocator then produces a replacement Method*, which is
// handed back to the caller through new_method_p so the containing
// class can be updated. If no replacement was needed, *new_method_p is
// left as a null handle.
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the Method*, then the bytecodes will also move which
  // will likely cause a crash. We create a No_Safepoint_Verifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  No_Safepoint_Verifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(method(), bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      case Bytecodes::_ldc:
      {
        // ldc carries a one-byte constant pool index operand
        int cp_index = *(bcp + 1);
        // find_new_index() returns 0 when the index is not remapped
        int new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
              bcp, cp_index, new_index));
            *(bcp + 1) = new_index;
          } else {
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
              Bytecodes::name(c), bcp, cp_index, new_index));
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4]; // max instruction size is 4 bytes
            // build the replacement instruction in a local buffer;
            // bcp now points at the buffer, not at the bytecodes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, NULL /* no RelocatorListener needed */);
            methodHandle m;
            {
              // The Relocator may allocate (and thus safepoint), so the
              // no-safepoint check is suspended for this call only.
              Pause_No_Safepoint_Verifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, THREAD);
              if (m.is_null() || HAS_PENDING_EXCEPTION) {
                guarantee(false, "insert_space_at() failed");
              }
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method; re-read the current instruction so
            // bci and bc_length stay valid for the relocated bytecodes
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray      : // fall through
      case Bytecodes::_checkcast      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_instanceof     : // fall through
      case Bytecodes::_invokedynamic  : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial  : // fall through
      case Bytecodes::_invokestatic   : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_ldc_w          : // fall through
      case Bytecodes::_ldc2_w         : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new            : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_putstatic      :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        int new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
            bcp, cp_index, new_index));
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the ConstantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and ConstantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
    }
  } // end for each bytecode
} // end rewrite_cp_refs_in_method()
1584 
1585 
1586 // Rewrite constant pool references in the class_annotations field.
1587 bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
1588        instanceKlassHandle scratch_class, TRAPS) {
1589 
1590   AnnotationArray* class_annotations = scratch_class->class_annotations();
1591   if (class_annotations == NULL || class_annotations->length() == 0) {
1592     // no class_annotations so nothing to do
1593     return true;
1594   }
1595 
1596   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1597     ("class_annotations length=%d", class_annotations->length()));
1598 
1599   int byte_i = 0;  // byte index into class_annotations
1600   return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
1601            THREAD);
1602 }
1603 
1604 
1605 // Rewrite constant pool references in an annotations typeArray. This
1606 // "structure" is adapted from the RuntimeVisibleAnnotations_attribute
1607 // that is described in section 4.8.15 of the 2nd-edition of the VM spec:
1608 //
1609 // annotations_typeArray {
1610 //   u2 num_annotations;
1611 //   annotation annotations[num_annotations];
1612 // }
1613 //
1614 bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
1615        AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
1616 
1617   if ((byte_i_ref + 2) > annotations_typeArray->length()) {
1618     // not enough room for num_annotations field
1619     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1620       ("length() is too small for num_annotations field"));
1621     return false;
1622   }
1623 
1624   u2 num_annotations = Bytes::get_Java_u2((address)
1625                          annotations_typeArray->adr_at(byte_i_ref));
1626   byte_i_ref += 2;
1627 
1628   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1629     ("num_annotations=%d", num_annotations));
1630 
1631   int calc_num_annotations = 0;
1632   for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
1633     if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
1634            byte_i_ref, THREAD)) {
1635       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1636         ("bad annotation_struct at %d", calc_num_annotations));
1637       // propagate failure back to caller
1638       return false;
1639     }
1640   }
1641   assert(num_annotations == calc_num_annotations, "sanity check");
1642 
1643   return true;
1644 } // end rewrite_cp_refs_in_annotations_typeArray()
1645 
1646 
1647 // Rewrite constant pool references in the annotation struct portion of
1648 // an annotations_typeArray. This "structure" is from section 4.8.15 of
1649 // the 2nd-edition of the VM spec:
1650 //
1651 // struct annotation {
1652 //   u2 type_index;
1653 //   u2 num_element_value_pairs;
1654 //   {
1655 //     u2 element_name_index;
1656 //     element_value value;
1657 //   } element_value_pairs[num_element_value_pairs];
1658 // }
1659 //
1660 bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
1661        AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
1662   if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
1663     // not enough room for smallest annotation_struct
1664     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1665       ("length() is too small for annotation_struct"));
1666     return false;
1667   }
1668 
1669   u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
1670                     byte_i_ref, "mapped old type_index=%d", THREAD);
1671 
1672   u2 num_element_value_pairs = Bytes::get_Java_u2((address)
1673                                  annotations_typeArray->adr_at(byte_i_ref));
1674   byte_i_ref += 2;
1675 
1676   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1677     ("type_index=%d  num_element_value_pairs=%d", type_index,
1678     num_element_value_pairs));
1679 
1680   int calc_num_element_value_pairs = 0;
1681   for (; calc_num_element_value_pairs < num_element_value_pairs;
1682        calc_num_element_value_pairs++) {
1683     if ((byte_i_ref + 2) > annotations_typeArray->length()) {
1684       // not enough room for another element_name_index, let alone
1685       // the rest of another component
1686       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1687         ("length() is too small for element_name_index"));
1688       return false;
1689     }
1690 
1691     u2 element_name_index = rewrite_cp_ref_in_annotation_data(
1692                               annotations_typeArray, byte_i_ref,
1693                               "mapped old element_name_index=%d", THREAD);
1694 
1695     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1696       ("element_name_index=%d", element_name_index));
1697 
1698     if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
1699            byte_i_ref, THREAD)) {
1700       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1701         ("bad element_value at %d", calc_num_element_value_pairs));
1702       // propagate failure back to caller
1703       return false;
1704     }
1705   } // end for each component
1706   assert(num_element_value_pairs == calc_num_element_value_pairs,
1707     "sanity check");
1708 
1709   return true;
1710 } // end rewrite_cp_refs_in_annotation_struct()
1711 
1712 
1713 // Rewrite a constant pool reference at the current position in
1714 // annotations_typeArray if needed. Returns the original constant
1715 // pool reference if a rewrite was not needed or the new constant
1716 // pool reference if a rewrite was needed.
1717 u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
1718      AnnotationArray* annotations_typeArray, int &byte_i_ref,
1719      const char * trace_mesg, TRAPS) {
1720 
1721   address cp_index_addr = (address)
1722     annotations_typeArray->adr_at(byte_i_ref);
1723   u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
1724   u2 new_cp_index = find_new_index(old_cp_index);
1725   if (new_cp_index != 0) {
1726     RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
1727     Bytes::put_Java_u2(cp_index_addr, new_cp_index);
1728     old_cp_index = new_cp_index;
1729   }
1730   byte_i_ref += 2;
1731   return old_cp_index;
1732 }
1733 
1734 
// Rewrite constant pool references in the element_value portion of an
// annotations_typeArray. This "structure" is from section 4.8.15.1 of
// the 2nd-edition of the VM spec:
//
// struct element_value {
//   u1 tag;
//   union {
//     u2 const_value_index;
//     {
//       u2 type_name_index;
//       u2 const_name_index;
//     } enum_const_value;
//     u2 class_info_index;
//     annotation annotation_value;
//     struct {
//       u2 num_values;
//       element_value values[num_values];
//     } array_value;
//   } value;
// }
//
// The tag selects which union member follows. This function recurses
// for nested annotations (tag '@') and array values (tag '[').
// byte_i_ref is advanced past the whole element_value on success.
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for a tag"));
    return false;
  }

  u1 tag = annotations_typeArray->at(byte_i_ref);
  byte_i_ref++;
  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case 'B':  // byte
    case 'C':  // char
    case 'D':  // double
    case 'F':  // float
    case 'I':  // int
    case 'J':  // long
    case 'S':  // short
    case 'Z':  // boolean

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a const_value_index"));
        return false;
      }

      // const_value_index is a cp ref; rewrite it in place if mapped
      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "mapped old const_value_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("const_value_index=%d", const_value_index));
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value (two u2 cp refs)
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a enum_const_value"));
        return false;
      }

      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "mapped old type_name_index=%d", THREAD);

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old const_name_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("type_name_index=%d  const_name_index=%d", type_name_index,
        const_name_index));
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a class_info_index"));
        return false;
      }

      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old class_info_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("class_info_index=%d", class_info_index));
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
             byte_i_ref, THREAD)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case '[':
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a num_values field"));
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("num_values=%d", num_values));

      // recurse once per array element
      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        if (!rewrite_cp_refs_in_element_value(
               annotations_typeArray, byte_i_ref, THREAD)) {
          RC_TRACE_WITH_THREAD(0x02000000, THREAD,
            ("bad nested element_value at %d", calc_num_values));
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      // unknown tag: the element_value cannot be decoded
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("bad tag=0x%x", tag));
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
1892 
1893 
1894 // Rewrite constant pool references in a fields_annotations field.
1895 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
1896        instanceKlassHandle scratch_class, TRAPS) {
1897 
1898   Annotations* sca = scratch_class->annotations();
1899   if (sca == NULL) return true;
1900 
1901   Array<AnnotationArray*>* fields_annotations = sca->fields_annotations();
1902 
1903   if (fields_annotations == NULL || fields_annotations->length() == 0) {
1904     // no fields_annotations so nothing to do
1905     return true;
1906   }
1907 
1908   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1909     ("fields_annotations length=%d", fields_annotations->length()));
1910 
1911   for (int i = 0; i < fields_annotations->length(); i++) {
1912     AnnotationArray* field_annotations = fields_annotations->at(i);
1913     if (field_annotations == NULL || field_annotations->length() == 0) {
1914       // this field does not have any annotations so skip it
1915       continue;
1916     }
1917 
1918     int byte_i = 0;  // byte index into field_annotations
1919     if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
1920            THREAD)) {
1921       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1922         ("bad field_annotations at %d", i));
1923       // propagate failure back to caller
1924       return false;
1925     }
1926   }
1927 
1928   return true;
1929 } // end rewrite_cp_refs_in_fields_annotations()
1930 
1931 
1932 // Rewrite constant pool references in a methods_annotations field.
1933 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
1934        instanceKlassHandle scratch_class, TRAPS) {
1935 
1936   Annotations* sca = scratch_class->annotations();
1937   if (sca == NULL) return true;
1938 
1939   Array<AnnotationArray*>* methods_annotations = sca->methods_annotations();
1940 
1941   if (methods_annotations == NULL || methods_annotations->length() == 0) {
1942     // no methods_annotations so nothing to do
1943     return true;
1944   }
1945 
1946   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1947     ("methods_annotations length=%d", methods_annotations->length()));
1948 
1949   for (int i = 0; i < methods_annotations->length(); i++) {
1950     AnnotationArray* method_annotations = methods_annotations->at(i);
1951     if (method_annotations == NULL || method_annotations->length() == 0) {
1952       // this method does not have any annotations so skip it
1953       continue;
1954     }
1955 
1956     int byte_i = 0;  // byte index into method_annotations
1957     if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
1958            THREAD)) {
1959       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1960         ("bad method_annotations at %d", i));
1961       // propagate failure back to caller
1962       return false;
1963     }
1964   }
1965 
1966   return true;
1967 } // end rewrite_cp_refs_in_methods_annotations()
1968 
1969 
1970 // Rewrite constant pool references in a methods_parameter_annotations
1971 // field. This "structure" is adapted from the
1972 // RuntimeVisibleParameterAnnotations_attribute described in section
1973 // 4.8.17 of the 2nd-edition of the VM spec:
1974 //
1975 // methods_parameter_annotations_typeArray {
1976 //   u1 num_parameters;
1977 //   {
1978 //     u2 num_annotations;
1979 //     annotation annotations[num_annotations];
1980 //   } parameter_annotations[num_parameters];
1981 // }
1982 //
1983 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
1984        instanceKlassHandle scratch_class, TRAPS) {
1985 
1986   Annotations* sca = scratch_class->annotations();
1987   if (sca == NULL) return true;
1988 
1989   Array<AnnotationArray*>* methods_parameter_annotations =
1990     sca->methods_parameter_annotations();
1991 
1992   if (methods_parameter_annotations == NULL
1993       || methods_parameter_annotations->length() == 0) {
1994     // no methods_parameter_annotations so nothing to do
1995     return true;
1996   }
1997 
1998   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1999     ("methods_parameter_annotations length=%d",
2000     methods_parameter_annotations->length()));
2001 
2002   for (int i = 0; i < methods_parameter_annotations->length(); i++) {
2003     AnnotationArray* method_parameter_annotations = methods_parameter_annotations->at(i);
2004     if (method_parameter_annotations == NULL
2005         || method_parameter_annotations->length() == 0) {
2006       // this method does not have any parameter annotations so skip it
2007       continue;
2008     }
2009 
2010     if (method_parameter_annotations->length() < 1) {
2011       // not enough room for a num_parameters field
2012       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2013         ("length() is too small for a num_parameters field at %d", i));
2014       return false;
2015     }
2016 
2017     int byte_i = 0;  // byte index into method_parameter_annotations
2018 
2019     u1 num_parameters = method_parameter_annotations->at(byte_i);
2020     byte_i++;
2021 
2022     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2023       ("num_parameters=%d", num_parameters));
2024 
2025     int calc_num_parameters = 0;
2026     for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
2027       if (!rewrite_cp_refs_in_annotations_typeArray(
2028              method_parameter_annotations, byte_i, THREAD)) {
2029         RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2030           ("bad method_parameter_annotations at %d", calc_num_parameters));
2031         // propagate failure back to caller
2032         return false;
2033       }
2034     }
2035     assert(num_parameters == calc_num_parameters, "sanity check");
2036   }
2037 
2038   return true;
2039 } // end rewrite_cp_refs_in_methods_parameter_annotations()
2040 
2041 
2042 // Rewrite constant pool references in a methods_default_annotations
2043 // field. This "structure" is adapted from the AnnotationDefault_attribute
2044 // that is described in section 4.8.19 of the 2nd-edition of the VM spec:
2045 //
2046 // methods_default_annotations_typeArray {
2047 //   element_value default_value;
2048 // }
2049 //
2050 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
2051        instanceKlassHandle scratch_class, TRAPS) {
2052 
2053   Annotations* sca = scratch_class->annotations();
2054   if (sca == NULL) return true;
2055 
2056   Array<AnnotationArray*>* methods_default_annotations =
2057     sca->methods_default_annotations();
2058 
2059   if (methods_default_annotations == NULL
2060       || methods_default_annotations->length() == 0) {
2061     // no methods_default_annotations so nothing to do
2062     return true;
2063   }
2064 
2065   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2066     ("methods_default_annotations length=%d",
2067     methods_default_annotations->length()));
2068 
2069   for (int i = 0; i < methods_default_annotations->length(); i++) {
2070     AnnotationArray* method_default_annotations = methods_default_annotations->at(i);
2071     if (method_default_annotations == NULL
2072         || method_default_annotations->length() == 0) {
2073       // this method does not have any default annotations so skip it
2074       continue;
2075     }
2076 
2077     int byte_i = 0;  // byte index into method_default_annotations
2078 
2079     if (!rewrite_cp_refs_in_element_value(
2080            method_default_annotations, byte_i, THREAD)) {
2081       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2082         ("bad default element_value at %d", i));
2083       // propagate failure back to caller
2084       return false;
2085     }
2086   }
2087 
2088   return true;
2089 } // end rewrite_cp_refs_in_methods_default_annotations()
2090 
2091 
// Rewrite constant pool references in the method's stackmap table.
// These "structures" are adapted from the StackMapTable_attribute that
// is described in section 4.8.4 of the 6.0 version of the VM spec
// (dated 2005.10.26):
// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
//
// stack_map {
//   u2 number_of_entries;
//   stack_map_frame entries[number_of_entries];
// }
//
// Only the verification_type_info entries can contain constant pool
// references; offset_delta fields are plain bytecode offsets and are
// skipped over unchanged.
void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
       methodHandle method, TRAPS) {

  if (!method->has_stackmap_table()) {
    return;
  }

  // walk the raw stackmap bytes with a cursor bounded by stackmap_end
  AnnotationArray* stackmap_data = method->stackmap_data();
  address stackmap_p = (address)stackmap_data->adr_at(0);
  address stackmap_end = stackmap_p + stackmap_data->length();

  assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
  u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
  stackmap_p += 2;

  RC_TRACE_WITH_THREAD(0x04000000, THREAD,
    ("number_of_entries=%u", number_of_entries));

  // walk through each stack_map_frame
  u2 calc_number_of_entries = 0;
  for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
    // The stack_map_frame structure is a u1 frame_type followed by
    // 0 or more bytes of data:
    //
    // union stack_map_frame {
    //   same_frame;
    //   same_locals_1_stack_item_frame;
    //   same_locals_1_stack_item_frame_extended;
    //   chop_frame;
    //   same_frame_extended;
    //   append_frame;
    //   full_frame;
    // }

    assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
    // The Linux compiler does not like frame_type to be u1 or u2. It
    // issues the following warning for the first if-statement below:
    //
    // "warning: comparison is always true due to limited range of data type"
    //
    u4 frame_type = *stackmap_p;
    stackmap_p++;

    // same_frame {
    //   u1 frame_type = SAME; /* 0-63 */
    // }
    if (frame_type >= 0 && frame_type <= 63) {
      // nothing more to do for same_frame
    }

    // same_locals_1_stack_item_frame {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
    //   verification_type_info stack[1];
    // }
    else if (frame_type >= 64 && frame_type <= 127) {
      // one verification_type_info to rewrite; the callee advances
      // stackmap_p past it
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type, THREAD);
    }

    // reserved for future use
    else if (frame_type >= 128 && frame_type <= 246) {
      // nothing more to do for reserved frame_types
    }

    // same_locals_1_stack_item_frame_extended {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
    //   u2 offset_delta;
    //   verification_type_info stack[1];
    // }
    else if (frame_type == 247) {
      // skip offset_delta (not a constant pool reference)
      stackmap_p += 2;
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type, THREAD);
    }

    // chop_frame {
    //   u1 frame_type = CHOP; /* 248-250 */
    //   u2 offset_delta;
    // }
    else if (frame_type >= 248 && frame_type <= 250) {
      // skip offset_delta (not a constant pool reference)
      stackmap_p += 2;
    }

    // same_frame_extended {
    //   u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/
    //   u2 offset_delta;
    // }
    else if (frame_type == 251) {
      // skip offset_delta (not a constant pool reference)
      stackmap_p += 2;
    }

    // append_frame {
    //   u1 frame_type = APPEND; /* 252-254 */
    //   u2 offset_delta;
    //   verification_type_info locals[frame_type - 251];
    // }
    else if (frame_type >= 252 && frame_type <= 254) {
      assert(stackmap_p + 2 <= stackmap_end,
        "no room for offset_delta");
      stackmap_p += 2;
      // frame_type encodes the number of appended locals (1-3)
      u1 len = frame_type - 251;
      for (u1 i = 0; i < len; i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }
    }

    // full_frame {
    //   u1 frame_type = FULL_FRAME; /* 255 */
    //   u2 offset_delta;
    //   u2 number_of_locals;
    //   verification_type_info locals[number_of_locals];
    //   u2 number_of_stack_items;
    //   verification_type_info stack[number_of_stack_items];
    // }
    else if (frame_type == 255) {
      assert(stackmap_p + 2 + 2 <= stackmap_end,
        "no room for smallest full_frame");
      stackmap_p += 2;

      u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
      stackmap_p += 2;

      for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }

      // Use the largest size for the number_of_stack_items, but only get
      // the right number of bytes.
      u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
      stackmap_p += 2;

      for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }
    }
  } // end for each stack_map_frame
  assert(number_of_entries == calc_number_of_entries, "sanity check");
} // end rewrite_cp_refs_in_stack_map_table()
2244 
2245 
// Rewrite constant pool references in the verification type info
// portion of the method's stackmap table. These "structures" are
// adapted from the StackMapTable_attribute that is described in
// section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26);
// see also section 4.7.4 of current editions of the JVM spec.
//
// The verification_type_info structure is a u1 tag followed by 0 or
// more bytes of data:
//
// union verification_type_info {
//   Top_variable_info;
//   Integer_variable_info;
//   Float_variable_info;
//   Long_variable_info;
//   Double_variable_info;
//   Null_variable_info;
//   UninitializedThis_variable_info;
//   Object_variable_info;
//   Uninitialized_variable_info;
// }
//
// Parameters:
//   stackmap_p_ref - in/out cursor into the stackmap table; on return it
//                    has been advanced past the verification_type_info
//                    entry that was consumed
//   stackmap_end   - address one past the last byte of the stackmap table
//   frame_i        - index of the enclosing stack_map_frame (tracing only)
//   frame_type     - type of the enclosing stack_map_frame (tracing only)
//
void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
       u1 frame_type, TRAPS) {

  assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
  u1 tag = *stackmap_p_ref;
  stackmap_p_ref++;

  switch (tag) {
  // Top_variable_info {
  //   u1 tag = ITEM_Top; /* 0 */
  // }
  // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
  case 0:  // fall through

  // Integer_variable_info {
  //   u1 tag = ITEM_Integer; /* 1 */
  // }
  case ITEM_Integer:  // fall through

  // Float_variable_info {
  //   u1 tag = ITEM_Float; /* 2 */
  // }
  case ITEM_Float:  // fall through

  // Double_variable_info {
  //   u1 tag = ITEM_Double; /* 3 */
  // }
  case ITEM_Double:  // fall through

  // Long_variable_info {
  //   u1 tag = ITEM_Long; /* 4 */
  // }
  case ITEM_Long:  // fall through

  // Null_variable_info {
  //   u1 tag = ITEM_Null; /* 5 */
  // }
  case ITEM_Null:  // fall through

  // UninitializedThis_variable_info {
  //   u1 tag = ITEM_UninitializedThis; /* 6 */
  // }
  case ITEM_UninitializedThis:
    // nothing more to do for the above tag types
    break;

  // Object_variable_info {
  //   u1 tag = ITEM_Object; /* 7 */
  //   u2 cpool_index;
  // }
  case ITEM_Object:
  {
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
    u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
    u2 new_cp_index = find_new_index(cpool_index);
    if (new_cp_index != 0) {
      RC_TRACE_WITH_THREAD(0x04000000, THREAD,
        ("mapped old cpool_index=%d", cpool_index));
      // rewrite the cpool_index in place in the stackmap table
      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
      cpool_index = new_cp_index;
    }
    stackmap_p_ref += 2;

    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
      ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i,
      frame_type, cpool_index));
  } break;

  // Uninitialized_variable_info {
  //   u1 tag = ITEM_Uninitialized; /* 8 */
  //   u2 offset;
  // }
  case ITEM_Uninitialized:
    // offset is a bytecode offset, not a constant pool index, so just skip it
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
    stackmap_p_ref += 2;
    break;

  default:
    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
      ("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag));
    ShouldNotReachHere();
    break;
  } // end switch (tag)
} // end rewrite_cp_refs_in_verification_type_info()
2352 
2353 
2354 // Change the constant pool associated with klass scratch_class to
2355 // scratch_cp. If shrink is true, then scratch_cp_length elements
2356 // are copied from scratch_cp to a smaller constant pool and the
2357 // smaller constant pool is associated with scratch_class.
2358 void VM_RedefineClasses::set_new_constant_pool(
2359        ClassLoaderData* loader_data,
2360        instanceKlassHandle scratch_class, constantPoolHandle scratch_cp,
2361        int scratch_cp_length, TRAPS) {
2362   assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
2363 
2364   // scratch_cp is a merged constant pool and has enough space for a
2365   // worst case merge situation. We want to associate the minimum
2366   // sized constant pool with the klass to save space.
2367   constantPoolHandle smaller_cp(THREAD,
2368           ConstantPool::allocate(loader_data, scratch_cp_length, THREAD));
2369 
2370   // preserve version() value in the smaller copy
2371   int version = scratch_cp->version();
2372   assert(version != 0, "sanity check");
2373   smaller_cp->set_version(version);
2374 
2375   // attach klass to new constant pool
2376   // reference to the cp holder is needed for copy_operands()
2377   smaller_cp->set_pool_holder(scratch_class());
2378 
2379   scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
2380   scratch_cp = smaller_cp;
2381 
2382   // attach new constant pool to klass
2383   scratch_class->set_constants(scratch_cp());
2384 
2385   int i;  // for portability
2386 
2387   // update each field in klass to use new constant pool indices as needed
2388   for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) {
2389     jshort cur_index = fs.name_index();
2390     jshort new_index = find_new_index(cur_index);
2391     if (new_index != 0) {
2392       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2393         ("field-name_index change: %d to %d", cur_index, new_index));
2394       fs.set_name_index(new_index);
2395     }
2396     cur_index = fs.signature_index();
2397     new_index = find_new_index(cur_index);
2398     if (new_index != 0) {
2399       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2400         ("field-signature_index change: %d to %d", cur_index, new_index));
2401       fs.set_signature_index(new_index);
2402     }
2403     cur_index = fs.initval_index();
2404     new_index = find_new_index(cur_index);
2405     if (new_index != 0) {
2406       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2407         ("field-initval_index change: %d to %d", cur_index, new_index));
2408       fs.set_initval_index(new_index);
2409     }
2410     cur_index = fs.generic_signature_index();
2411     new_index = find_new_index(cur_index);
2412     if (new_index != 0) {
2413       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2414         ("field-generic_signature change: %d to %d", cur_index, new_index));
2415       fs.set_generic_signature_index(new_index);
2416     }
2417   } // end for each field
2418 
2419   // Update constant pool indices in the inner classes info to use
2420   // new constant indices as needed. The inner classes info is a
2421   // quadruple:
2422   // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
2423   InnerClassesIterator iter(scratch_class);
2424   for (; !iter.done(); iter.next()) {
2425     int cur_index = iter.inner_class_info_index();
2426     if (cur_index == 0) {
2427       continue;  // JVM spec. allows null inner class refs so skip it
2428     }
2429     int new_index = find_new_index(cur_index);
2430     if (new_index != 0) {
2431       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2432         ("inner_class_info change: %d to %d", cur_index, new_index));
2433       iter.set_inner_class_info_index(new_index);
2434     }
2435     cur_index = iter.outer_class_info_index();
2436     new_index = find_new_index(cur_index);
2437     if (new_index != 0) {
2438       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2439         ("outer_class_info change: %d to %d", cur_index, new_index));
2440       iter.set_outer_class_info_index(new_index);
2441     }
2442     cur_index = iter.inner_name_index();
2443     new_index = find_new_index(cur_index);
2444     if (new_index != 0) {
2445       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2446         ("inner_name change: %d to %d", cur_index, new_index));
2447       iter.set_inner_name_index(new_index);
2448     }
2449   } // end for each inner class
2450 
2451   // Attach each method in klass to the new constant pool and update
2452   // to use new constant pool indices as needed:
2453   Array<Method*>* methods = scratch_class->methods();
2454   for (i = methods->length() - 1; i >= 0; i--) {
2455     methodHandle method(THREAD, methods->at(i));
2456     method->set_constants(scratch_cp());
2457 
2458     int new_index = find_new_index(method->name_index());
2459     if (new_index != 0) {
2460       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2461         ("method-name_index change: %d to %d", method->name_index(),
2462         new_index));
2463       method->set_name_index(new_index);
2464     }
2465     new_index = find_new_index(method->signature_index());
2466     if (new_index != 0) {
2467       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2468         ("method-signature_index change: %d to %d",
2469         method->signature_index(), new_index));
2470       method->set_signature_index(new_index);
2471     }
2472     new_index = find_new_index(method->generic_signature_index());
2473     if (new_index != 0) {
2474       RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2475         ("method-generic_signature_index change: %d to %d",
2476         method->generic_signature_index(), new_index));
2477       method->set_generic_signature_index(new_index);
2478     }
2479 
2480     // Update constant pool indices in the method's checked exception
2481     // table to use new constant indices as needed.
2482     int cext_length = method->checked_exceptions_length();
2483     if (cext_length > 0) {
2484       CheckedExceptionElement * cext_table =
2485         method->checked_exceptions_start();
2486       for (int j = 0; j < cext_length; j++) {
2487         int cur_index = cext_table[j].class_cp_index;
2488         int new_index = find_new_index(cur_index);
2489         if (new_index != 0) {
2490           RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2491             ("cext-class_cp_index change: %d to %d", cur_index, new_index));
2492           cext_table[j].class_cp_index = (u2)new_index;
2493         }
2494       } // end for each checked exception table entry
2495     } // end if there are checked exception table entries
2496 
2497     // Update each catch type index in the method's exception table
2498     // to use new constant pool indices as needed. The exception table
2499     // holds quadruple entries of the form:
2500     //   (beg_bci, end_bci, handler_bci, klass_index)
2501 
2502     ExceptionTable ex_table(method());
2503     int ext_length = ex_table.length();
2504 
2505     for (int j = 0; j < ext_length; j ++) {
2506       int cur_index = ex_table.catch_type_index(j);
2507       int new_index = find_new_index(cur_index);
2508       if (new_index != 0) {
2509         RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2510           ("ext-klass_index change: %d to %d", cur_index, new_index));
2511         ex_table.set_catch_type_index(j, new_index);
2512       }
2513     } // end for each exception table entry
2514 
2515     // Update constant pool indices in the method's local variable
2516     // table to use new constant indices as needed. The local variable
2517     // table hold sextuple entries of the form:
2518     // (start_pc, length, name_index, descriptor_index, signature_index, slot)
2519     int lvt_length = method->localvariable_table_length();
2520     if (lvt_length > 0) {
2521       LocalVariableTableElement * lv_table =
2522         method->localvariable_table_start();
2523       for (int j = 0; j < lvt_length; j++) {
2524         int cur_index = lv_table[j].name_cp_index;
2525         int new_index = find_new_index(cur_index);
2526         if (new_index != 0) {
2527           RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2528             ("lvt-name_cp_index change: %d to %d", cur_index, new_index));
2529           lv_table[j].name_cp_index = (u2)new_index;
2530         }
2531         cur_index = lv_table[j].descriptor_cp_index;
2532         new_index = find_new_index(cur_index);
2533         if (new_index != 0) {
2534           RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2535             ("lvt-descriptor_cp_index change: %d to %d", cur_index,
2536             new_index));
2537           lv_table[j].descriptor_cp_index = (u2)new_index;
2538         }
2539         cur_index = lv_table[j].signature_cp_index;
2540         new_index = find_new_index(cur_index);
2541         if (new_index != 0) {
2542           RC_TRACE_WITH_THREAD(0x00080000, THREAD,
2543             ("lvt-signature_cp_index change: %d to %d", cur_index, new_index));
2544           lv_table[j].signature_cp_index = (u2)new_index;
2545         }
2546       } // end for each local variable table entry
2547     } // end if there are local variable table entries
2548 
2549     rewrite_cp_refs_in_stack_map_table(method, THREAD);
2550   } // end for each method
2551 } // end set_new_constant_pool()
2552 
2553 
2554 void VM_RedefineClasses::adjust_array_vtable(Klass* k_oop) {
2555   ArrayKlass* ak = ArrayKlass::cast(k_oop);
2556   bool trace_name_printed = false;
2557   ak->vtable()->adjust_method_entries(_matching_old_methods,
2558                                       _matching_new_methods,
2559                                       _matching_methods_length,
2560                                       &trace_name_printed);
2561 }
2562 
// Unevolving classes may point to methods of the_class directly
// from their constant pool caches, itables, and/or vtables. We
// use the SystemDictionary::classes_do() facility and this helper
// to fix up these pointers.
//
// Note: We currently don't support updating the vtable in
// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp.
//
// Note: initiating_loader is required by the classes_do() callback
// signature but is not used by this helper.
void VM_RedefineClasses::adjust_cpool_cache_and_vtable(Klass* k_oop,
       ClassLoaderData* initiating_loader,
       TRAPS) {
  Klass *k = k_oop;
  // Only InstanceKlasses have cpool caches, itables and embedded vtables
  // to fix up; other klass kinds are left untouched.
  if (k->oop_is_instance()) {
    HandleMark hm(THREAD);
    InstanceKlass *ik = (InstanceKlass *) k;

    // HotSpot specific optimization! HotSpot does not currently
    // support delegation from the bootstrap class loader to a
    // user-defined class loader. This means that if the bootstrap
    // class loader is the initiating class loader, then it will also
    // be the defining class loader. This also means that classes
    // loaded by the bootstrap class loader cannot refer to classes
    // loaded by a user-defined class loader. Note: a user-defined
    // class loader can delegate to the bootstrap class loader.
    //
    // If the current class being redefined has a user-defined class
    // loader as its defining class loader, then we can skip all
    // classes loaded by the bootstrap class loader.
    bool is_user_defined =
           InstanceKlass::cast(_the_class_oop)->class_loader() != NULL;
    if (is_user_defined && ik->class_loader() == NULL) {
      return;
    }

    // If the class being redefined is java.lang.Object, we need to fix all
    // array class vtables also
    if (_the_class_oop == SystemDictionary::Object_klass()) {
      ik->array_klasses_do(adjust_array_vtable);
    }

    // This is a very busy routine. We don't want too much tracing
    // printed out.
    bool trace_name_printed = false;

    // Very noisy: only enable this call if you are trying to determine
    // that a specific class gets found by this routine.
    // RC_TRACE macro has an embedded ResourceMark
    // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
    //   ("adjust check: name=%s", ik->external_name()));
    // trace_name_printed = true;

    // Fix the vtable embedded in the_class and subclasses of the_class,
    // if one exists. We discard scratch_class and we don't keep an
    // InstanceKlass around to hold obsolete methods so we don't have
    // any other InstanceKlass embedded vtables to update. The vtable
    // holds the Method*s for virtual (but not final) methods.
    if (ik->vtable_length() > 0 && ik->is_subtype_of(_the_class_oop)) {
      // ik->vtable() creates a wrapper object; rm cleans it up
      ResourceMark rm(THREAD);
      ik->vtable()->adjust_method_entries(_matching_old_methods,
                                          _matching_new_methods,
                                          _matching_methods_length,
                                          &trace_name_printed);
    }

    // If the current class has an itable and we are either redefining an
    // interface or if the current class is a subclass of the_class, then
    // we potentially have to fix the itable. If we are redefining an
    // interface, then we have to call adjust_method_entries() for
    // every InstanceKlass that has an itable since there isn't a
    // subclass relationship between an interface and an InstanceKlass.
    if (ik->itable_length() > 0 && (_the_class_oop->is_interface()
        || ik->is_subclass_of(_the_class_oop))) {
      // ik->itable() creates a wrapper object; rm cleans it up
      ResourceMark rm(THREAD);
      ik->itable()->adjust_method_entries(_matching_old_methods,
                                          _matching_new_methods,
                                          _matching_methods_length,
                                          &trace_name_printed);
    }

    // The constant pools in other classes (other_cp) can refer to
    // methods in the_class. We have to update method information in
    // other_cp's cache. If other_cp has a previous version, then we
    // have to repeat the process for each previous version. The
    // constant pool cache holds the Method*s for non-virtual
    // methods and for virtual, final methods.
    //
    // Special case: if the current class is the_class, then new_cp
    // has already been attached to the_class and old_cp has already
    // been added as a previous version. The new_cp doesn't have any
    // cached references to old methods so it doesn't need to be
    // updated. We can simply start with the previous version(s) in
    // that case.
    constantPoolHandle other_cp;
    ConstantPoolCache* cp_cache;

    if (k_oop != _the_class_oop) {
      // this klass' constant pool cache may need adjustment
      other_cp = constantPoolHandle(ik->constants());
      cp_cache = other_cp->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(_matching_old_methods,
                                        _matching_new_methods,
                                        _matching_methods_length,
                                        &trace_name_printed);
      }
    }
    {
      ResourceMark rm(THREAD);
      // PreviousVersionInfo objects returned via PreviousVersionWalker
      // contain a GrowableArray of handles. We have to clean up the
      // GrowableArray _after_ the PreviousVersionWalker destructor
      // has destroyed the handles.
      {
        // the previous versions' constant pool caches may need adjustment
        PreviousVersionWalker pvw(ik);
        for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
             pv_info != NULL; pv_info = pvw.next_previous_version()) {
          other_cp = pv_info->prev_constant_pool_handle();
          cp_cache = other_cp->cache();
          if (cp_cache != NULL) {
            cp_cache->adjust_method_entries(_matching_old_methods,
                                            _matching_new_methods,
                                            _matching_methods_length,
                                            &trace_name_printed);
          }
        }
      } // pvw is cleaned up
    } // rm is cleaned up
  }
}
2694 
2695 void VM_RedefineClasses::update_jmethod_ids() {
2696   for (int j = 0; j < _matching_methods_length; ++j) {
2697     Method* old_method = _matching_old_methods[j];
2698     jmethodID jmid = old_method->find_jmethod_id_or_null();
2699     if (jmid != NULL) {
2700       // There is a jmethodID, change it to point to the new method
2701       methodHandle new_method_h(_matching_new_methods[j]);
2702       Method::change_method_associated_with_jmethod_id(jmid, new_method_h());
2703       assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
2704              "should be replaced");
2705     }
2706   }
2707 }
2708 
2709 void VM_RedefineClasses::check_methods_and_mark_as_obsolete(
2710        BitMap *emcp_methods, int * emcp_method_count_p) {
2711   *emcp_method_count_p = 0;
2712   int obsolete_count = 0;
2713   int old_index = 0;
2714   for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
2715     Method* old_method = _matching_old_methods[j];
2716     Method* new_method = _matching_new_methods[j];
2717     Method* old_array_method;
2718 
2719     // Maintain an old_index into the _old_methods array by skipping
2720     // deleted methods
2721     while ((old_array_method = _old_methods->at(old_index)) != old_method) {
2722       ++old_index;
2723     }
2724 
2725     if (MethodComparator::methods_EMCP(old_method, new_method)) {
2726       // The EMCP definition from JSR-163 requires the bytecodes to be
2727       // the same with the exception of constant pool indices which may
2728       // differ. However, the constants referred to by those indices
2729       // must be the same.
2730       //
2731       // We use methods_EMCP() for comparison since constant pool
2732       // merging can remove duplicate constant pool entries that were
2733       // present in the old method and removed from the rewritten new
2734       // method. A faster binary comparison function would consider the
2735       // old and new methods to be different when they are actually
2736       // EMCP.
2737       //
2738       // The old and new methods are EMCP and you would think that we
2739       // could get rid of one of them here and now and save some space.
2740       // However, the concept of EMCP only considers the bytecodes and
2741       // the constant pool entries in the comparison. Other things,
2742       // e.g., the line number table (LNT) or the local variable table
2743       // (LVT) don't count in the comparison. So the new (and EMCP)
2744       // method can have a new LNT that we need so we can't just
2745       // overwrite the new method with the old method.
2746       //
2747       // When this routine is called, we have already attached the new
2748       // methods to the_class so the old methods are effectively
2749       // overwritten. However, if an old method is still executing,
2750       // then the old method cannot be collected until sometime after
2751       // the old method call has returned. So the overwriting of old
2752       // methods by new methods will save us space except for those
2753       // (hopefully few) old methods that are still executing.
2754       //
2755       // A method refers to a ConstMethod* and this presents another
2756       // possible avenue to space savings. The ConstMethod* in the
2757       // new method contains possibly new attributes (LNT, LVT, etc).
2758       // At first glance, it seems possible to save space by replacing
2759       // the ConstMethod* in the old method with the ConstMethod*
2760       // from the new method. The old and new methods would share the
2761       // same ConstMethod* and we would save the space occupied by
2762       // the old ConstMethod*. However, the ConstMethod* contains
2763       // a back reference to the containing method. Sharing the
2764       // ConstMethod* between two methods could lead to confusion in
2765       // the code that uses the back reference. This would lead to
2766       // brittle code that could be broken in non-obvious ways now or
2767       // in the future.
2768       //
2769       // Another possibility is to copy the ConstMethod* from the new
2770       // method to the old method and then overwrite the new method with
2771       // the old method. Since the ConstMethod* contains the bytecodes
2772       // for the method embedded in the oop, this option would change
2773       // the bytecodes out from under any threads executing the old
2774       // method and make the thread's bcp invalid. Since EMCP requires
2775       // that the bytecodes be the same modulo constant pool indices, it
2776       // is straight forward to compute the correct new bcp in the new
2777       // ConstMethod* from the old bcp in the old ConstMethod*. The
2778       // time consuming part would be searching all the frames in all
2779       // of the threads to find all of the calls to the old method.
2780       //
2781       // It looks like we will have to live with the limited savings
2782       // that we get from effectively overwriting the old methods
2783       // when the new methods are attached to the_class.
2784 
2785       // track which methods are EMCP for add_previous_version() call
2786       emcp_methods->set_bit(old_index);
2787       (*emcp_method_count_p)++;
2788 
2789       // An EMCP method is _not_ obsolete. An obsolete method has a
2790       // different jmethodID than the current method. An EMCP method
2791       // has the same jmethodID as the current method. Having the
2792       // same jmethodID for all EMCP versions of a method allows for
2793       // a consistent view of the EMCP methods regardless of which
2794       // EMCP method you happen to have in hand. For example, a
2795       // breakpoint set in one EMCP method will work for all EMCP
2796       // versions of the method including the current one.
2797     } else {
2798       // mark obsolete methods as such
2799       old_method->set_is_obsolete();
2800       obsolete_count++;
2801 
2802       // obsolete methods need a unique idnum
2803       u2 num = InstanceKlass::cast(_the_class_oop)->next_method_idnum();
2804       if (num != ConstMethod::UNSET_IDNUM) {
2805 //      u2 old_num = old_method->method_idnum();
2806         old_method->set_method_idnum(num);
2807 // TO DO: attach obsolete annotations to obsolete method's new idnum
2808       }
2809       // With tracing we try not to "yack" too much. The position of
2810       // this trace assumes there are fewer obsolete methods than
2811       // EMCP methods.
2812       RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
2813         old_method->name()->as_C_string(),
2814         old_method->signature()->as_C_string()));
2815     }
2816     old_method->set_is_old();
2817   }
2818   for (int i = 0; i < _deleted_methods_length; ++i) {
2819     Method* old_method = _deleted_methods[i];
2820 
2821     assert(old_method->vtable_index() < 0,
2822            "cannot delete methods with vtable entries");;
2823 
2824     // Mark all deleted methods as old and obsolete
2825     old_method->set_is_old();
2826     old_method->set_is_obsolete();
2827     ++obsolete_count;
2828     // With tracing we try not to "yack" too much. The position of
2829     // this trace assumes there are fewer obsolete methods than
2830     // EMCP methods.
2831     RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
2832                           old_method->name()->as_C_string(),
2833                           old_method->signature()->as_C_string()));
2834   }
2835   assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(),
2836     "sanity check");
2837   RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", *emcp_method_count_p,
2838     obsolete_count));
2839 }
2840 
2841 // This internal class transfers the native function registration from old methods
2842 // to new methods.  It is designed to handle both the simple case of unchanged
2843 // native methods and the complex cases of native method prefixes being added and/or
2844 // removed.
2845 // It expects only to be used during the VM_RedefineClasses op (a safepoint).
2846 //
2847 // This class is used after the new methods have been installed in "the_class".
2848 //
2849 // So, for example, the following must be handled.  Where 'm' is a method and
2850 // a number followed by an underscore is a prefix.
2851 //
2852 //                                      Old Name    New Name
2853 // Simple transfer to new method        m       ->  m
2854 // Add prefix                           m       ->  1_m
2855 // Remove prefix                        1_m     ->  m
2856 // Simultaneous add of prefixes         m       ->  3_2_1_m
2857 // Simultaneous removal of prefixes     3_2_1_m ->  m
2858 // Simultaneous add and remove          1_m     ->  2_m
2859 // Same, caused by prefix removal only  3_2_1_m ->  3_2_m
2860 //
2861 class TransferNativeFunctionRegistration {
2862  private:
2863   instanceKlassHandle the_class;
2864   int prefix_count;
2865   char** prefixes;
2866 
  // Recursively search the binary tree of possibly prefixed method names.
  // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilient to agents not cleaning up intermediate methods.
  // Branch at each depth in the binary tree is:
  //    (1) without the prefix.
  //    (2) with the prefix.
  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  //
  // Returns the first matching native method found, or NULL if this
  // whole branch of the search yields nothing. Resource allocation in
  // this method relies on the caller's ResourceMark (see
  // strip_and_search_for_new_native()).
  Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
                                     Symbol* signature) {
    // probe() looks the name up without creating a new symbol; a NULL
    // result presumably means no class ever declared a method with this
    // name, so this branch cannot match.
    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
    if (name_symbol != NULL) {
      Method* method = the_class()->lookup_method(name_symbol, signature);
      if (method != NULL) {
        // Even if prefixed, intermediate methods must exist.
        if (method->is_native()) {
          // Wahoo, we found a (possibly prefixed) version of the method, return it.
          return method;
        }
        if (depth < prefix_count) {
          // Try applying further prefixes (other than this one).
          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
          if (method != NULL) {
            return method; // found
          }

          // Try adding this prefix to the method name and see if it matches
          // another method name.
          char* prefix = prefixes[depth];
          size_t prefix_len = strlen(prefix);
          size_t trial_len = name_len + prefix_len;
          // resource-allocated scratch buffer holding "<prefix><name>"
          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
          strcpy(trial_name_str, prefix);
          strcat(trial_name_str, name_str);
          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
                                            signature);
          if (method != NULL) {
            // If found along this branch, it was prefixed, mark as such
            method->set_is_prefixed_native();
            return method; // found
          }
        }
      }
    }
    return NULL;  // This whole branch bore nothing
  }
2912 
2913   // Return the method name with old prefixes stripped away.
2914   char* method_name_without_prefixes(Method* method) {
2915     Symbol* name = method->name();
2916     char* name_str = name->as_utf8();
2917 
2918     // Old prefixing may be defunct, strip prefixes, if any.
2919     for (int i = prefix_count-1; i >= 0; i--) {
2920       char* prefix = prefixes[i];
2921       size_t prefix_len = strlen(prefix);
2922       if (strncmp(prefix, name_str, prefix_len) == 0) {
2923         name_str += prefix_len;
2924       }
2925     }
2926     return name_str;
2927   }
2928 
2929   // Strip any prefixes off the old native method, then try to find a
2930   // (possibly prefixed) new native that matches it.
2931   Method* strip_and_search_for_new_native(Method* method) {
2932     ResourceMark rm;
2933     char* name_str = method_name_without_prefixes(method);
2934     return search_prefix_name_space(0, name_str, strlen(name_str),
2935                                     method->signature());
2936   }
2937 
2938  public:
2939 
2940   // Construct a native method transfer processor for this class.
2941   TransferNativeFunctionRegistration(instanceKlassHandle _the_class) {
2942     assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
2943 
2944     the_class = _the_class;
2945     prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
2946   }
2947 
2948   // Attempt to transfer any of the old or deleted methods that are native
2949   void transfer_registrations(Method** old_methods, int methods_length) {
2950     for (int j = 0; j < methods_length; j++) {
2951       Method* old_method = old_methods[j];
2952 
2953       if (old_method->is_native() && old_method->has_native_function()) {
2954         Method* new_method = strip_and_search_for_new_native(old_method);
2955         if (new_method != NULL) {
2956           // Actually set the native function in the new method.
2957           // Redefine does not send events (except CFLH), certainly not this
2958           // behind the scenes re-registration.
2959           new_method->set_native_function(old_method->native_function(),
2960                               !Method::native_bind_event_is_interesting);
2961         }
2962       }
2963     }
2964   }
2965 };
2966 
2967 // Don't lose the association between a native method and its JNI function.
2968 void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle the_class) {
2969   TransferNativeFunctionRegistration transfer(the_class);
2970   transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
2971   transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
2972 }
2973 
2974 // Deoptimize all compiled code that depends on this class.
2975 //
2976 // If the can_redefine_classes capability is obtained in the onload
2977 // phase then the compiler has recorded all dependencies from startup.
2978 // In that case we need only deoptimize and throw away all compiled code
2979 // that depends on the class.
2980 //
2981 // If can_redefine_classes is obtained sometime after the onload
2982 // phase then the dependency information may be incomplete. In that case
2983 // the first call to RedefineClasses causes all compiled code to be
2984 // thrown away. As can_redefine_classes has been obtained then
2985 // all future compilations will record dependencies so second and
2986 // subsequent calls to RedefineClasses need only throw away code
2987 // that depends on the class.
2988 //
void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
  assert_locked_or_safepoint(Compile_lock);

  // All dependencies have been recorded from startup or this is a second or
  // subsequent use of RedefineClasses: only code that depends on this class
  // needs to be discarded.
  if (JvmtiExport::all_dependencies_are_recorded()) {
    Universe::flush_evol_dependents_on(k_h);
  } else {
    // Dependency information may be incomplete (can_redefine_classes was
    // obtained after the onload phase), so conservatively discard everything.
    CodeCache::mark_all_nmethods_for_deoptimization();

    ResourceMark rm(THREAD);
    DeoptimizationMarker dm;

    // Deoptimize all activations depending on marked nmethods
    Deoptimization::deoptimize_dependents();

    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
    CodeCache::make_marked_nmethods_not_entrant();

    // From now on we know that the dependency information is complete
    JvmtiExport::set_all_dependencies_are_recorded(true);
  }
}
3012 
3013 void VM_RedefineClasses::compute_added_deleted_matching_methods() {
3014   Method* old_method;
3015   Method* new_method;
3016 
3017   _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
3018   _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
3019   _added_methods        = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
3020   _deleted_methods      = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
3021 
3022   _matching_methods_length = 0;
3023   _deleted_methods_length  = 0;
3024   _added_methods_length    = 0;
3025 
3026   int nj = 0;
3027   int oj = 0;
3028   while (true) {
3029     if (oj >= _old_methods->length()) {
3030       if (nj >= _new_methods->length()) {
3031         break; // we've looked at everything, done
3032       }
3033       // New method at the end
3034       new_method = _new_methods->at(nj);
3035       _added_methods[_added_methods_length++] = new_method;
3036       ++nj;
3037     } else if (nj >= _new_methods->length()) {
3038       // Old method, at the end, is deleted
3039       old_method = _old_methods->at(oj);
3040       _deleted_methods[_deleted_methods_length++] = old_method;
3041       ++oj;
3042     } else {
3043       old_method = _old_methods->at(oj);
3044       new_method = _new_methods->at(nj);
3045       if (old_method->name() == new_method->name()) {
3046         if (old_method->signature() == new_method->signature()) {
3047           _matching_old_methods[_matching_methods_length  ] = old_method;
3048           _matching_new_methods[_matching_methods_length++] = new_method;
3049           ++nj;
3050           ++oj;
3051         } else {
3052           // added overloaded have already been moved to the end,
3053           // so this is a deleted overloaded method
3054           _deleted_methods[_deleted_methods_length++] = old_method;
3055           ++oj;
3056         }
3057       } else { // names don't match
3058         if (old_method->name()->fast_compare(new_method->name()) > 0) {
3059           // new method
3060           _added_methods[_added_methods_length++] = new_method;
3061           ++nj;
3062         } else {
3063           // deleted method
3064           _deleted_methods[_deleted_methods_length++] = old_method;
3065           ++oj;
3066         }
3067       }
3068     }
3069   }
3070   assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
3071   assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
3072 }
3073 
3074 
3075 
3076 // Install the redefinition of a class:
3077 //    - house keeping (flushing breakpoints and caches, deoptimizing
3078 //      dependent compiled code)
3079 //    - replacing parts in the_class with parts from scratch_class
3080 //    - adding a weak reference to track the obsolete but interesting
3081 //      parts of the_class
3082 //    - adjusting constant pool caches and vtables in other classes
3083 //      that refer to methods in the_class. These adjustments use the
3084 //      SystemDictionary::classes_do() facility which only allows
3085 //      a helper method to be specified. The interesting parameters
3086 //      that we would like to pass to the helper method are saved in
3087 //      static global fields in the VM operation.
void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
       Klass* scratch_class_oop, TRAPS) {

  HandleMark hm(THREAD);   // make sure handles from this call are freed
  RC_TIMER_START(_timer_rsc_phase1);

  instanceKlassHandle scratch_class(scratch_class_oop);

  // Resolve the JNI handle to the live klass being redefined.
  oop the_class_mirror = JNIHandles::resolve_non_null(the_jclass);
  Klass* the_class_oop = java_lang_Class::as_Klass(the_class_mirror);
  instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);

  // Remove all breakpoints in methods of this class
  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class_oop);

  if (the_class_oop == Universe::reflect_invoke_cache()->klass()) {
    // We are redefining java.lang.reflect.Method. Method.invoke() is
    // cached and users of the cache care about each active version of
    // the method so we have to track this previous version.
    // Do this before methods get switched
    Universe::reflect_invoke_cache()->add_previous_version(
      the_class->method_with_idnum(Universe::reflect_invoke_cache()->method_idnum()));
  }

  // Deoptimize all compiled code that depends on this class
  flush_dependent_code(the_class, THREAD);

  // Compute the matching/added/deleted method partition between the two
  // versions and carry jmethodIDs over to the new methods.
  _old_methods = the_class->methods();
  _new_methods = scratch_class->methods();
  _the_class_oop = the_class_oop;
  compute_added_deleted_matching_methods();
  update_jmethod_ids();

  // Attach new constant pool to the original klass. The original
  // klass still refers to the old constant pool (for now).
  scratch_class->constants()->set_pool_holder(the_class());

#if 0
  // In theory, with constant pool merging in place we should be able
  // to save space by using the new, merged constant pool in place of
  // the old constant pool(s). By "pool(s)" I mean the constant pool in
  // the klass version we are replacing now and any constant pool(s) in
  // previous versions of klass. Nice theory, doesn't work in practice.
  // When this code is enabled, even simple programs throw NullPointer
  // exceptions. I'm guessing that this is caused by some constant pool
  // cache difference between the new, merged constant pool and the
  // constant pool that was just being used by the klass. I'm keeping
  // this code around to archive the idea, but the code has to remain
  // disabled for now.

  // Attach each old method to the new constant pool. This can be
  // done here since we are past the bytecode verification and
  // constant pool optimization phases.
  for (int i = _old_methods->length() - 1; i >= 0; i--) {
    Method* method = _old_methods->at(i);
    method->set_constants(scratch_class->constants());
  }

  {
    // walk all previous versions of the klass
    InstanceKlass *ik = (InstanceKlass *)the_class();
    PreviousVersionWalker pvw(ik);
    instanceKlassHandle ikh;
    do {
      ikh = pvw.next_previous_version();
      if (!ikh.is_null()) {
        ik = ikh();

        // attach previous version of klass to the new constant pool
        ik->set_constants(scratch_class->constants());

        // Attach each method in the previous version of klass to the
        // new constant pool
        Array<Method*>* prev_methods = ik->methods();
        for (int i = prev_methods->length() - 1; i >= 0; i--) {
          Method* method = prev_methods->at(i);
          method->set_constants(scratch_class->constants());
        }
      }
    } while (!ikh.is_null());
  }
#endif

  // Replace methods and constantpool
  the_class->set_methods(_new_methods);
  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
                                          // and to be able to undo operation easily.

  ConstantPool* old_constants = the_class->constants();
  the_class->set_constants(scratch_class->constants());
  scratch_class->set_constants(old_constants);  // See the previous comment.
#if 0
  // We are swapping the guts of "the new class" with the guts of "the
  // class". Since the old constant pool has just been attached to "the
  // new class", it seems logical to set the pool holder in the old
  // constant pool also. However, doing this will change the observable
  // class hierarchy for any old methods that are still executing. A
  // method can query the identity of its "holder" and this query uses
  // the method's constant pool link to find the holder. The change in
  // holding class from "the class" to "the new class" can confuse
  // things.
  //
  // Setting the old constant pool's holder will also cause
  // verification done during vtable initialization below to fail.
  // During vtable initialization, the vtable's class is verified to be
  // a subtype of the method's holder. The vtable's class is "the
  // class" and the method's holder is gotten from the constant pool
  // link in the method itself. For "the class"'s directly implemented
  // methods, the method holder is "the class" itself (as gotten from
  // the new constant pool). The check works fine in this case. The
  // check also works fine for methods inherited from super classes.
  //
  // Miranda methods are a little more complicated. A miranda method is
  // provided by an interface when the class implementing the interface
  // does not provide its own method.  These interfaces are implemented
  // internally as an InstanceKlass. These special instanceKlasses
  // share the constant pool of the class that "implements" the
  // interface. By sharing the constant pool, the method holder of a
  // miranda method is the class that "implements" the interface. In a
  // non-redefine situation, the subtype check works fine. However, if
  // the old constant pool's pool holder is modified, then the check
  // fails because there is no class hierarchy relationship between the
  // vtable's class and "the new class".

  old_constants->set_pool_holder(scratch_class());
#endif

  // track which methods are EMCP for add_previous_version() call below
  BitMap emcp_methods(_old_methods->length());
  int emcp_method_count = 0;
  emcp_methods.clear();  // clears 0..(length() - 1)
  check_methods_and_mark_as_obsolete(&emcp_methods, &emcp_method_count);
  transfer_old_native_function_registrations(the_class);

  // The class file bytes from before any retransformable agents mucked
  // with them was cached on the scratch class, move to the_class.
  // Note: we still want to do this if nothing needed caching since it
  // should get cleared in the_class too.
  if (the_class->get_cached_class_file_bytes() == 0) {
    // the_class doesn't have a cache yet so copy it
    the_class->set_cached_class_file(
      scratch_class->get_cached_class_file_bytes(),
      scratch_class->get_cached_class_file_len());
  }
#ifndef PRODUCT
  else {
    // both classes already cache the bytes; they must agree
    assert(the_class->get_cached_class_file_bytes() ==
      scratch_class->get_cached_class_file_bytes(), "cache ptrs must match");
    assert(the_class->get_cached_class_file_len() ==
      scratch_class->get_cached_class_file_len(), "cache lens must match");
  }
#endif

  // Replace inner_classes
  Array<u2>* old_inner_classes = the_class->inner_classes();
  the_class->set_inner_classes(scratch_class->inner_classes());
  scratch_class->set_inner_classes(old_inner_classes);

  // Initialize the vtable and interface table after
  // methods have been rewritten
  {
    ResourceMark rm(THREAD);
    // no exception should happen here since we explicitly
    // do not check loader constraints.
    // compare_and_normalize_class_versions has already checked:
    //  - classloaders unchanged, signatures unchanged
    //  - all instanceKlasses for redefined classes reused & contents updated
    the_class->vtable()->initialize_vtable(false, THREAD);
    the_class->itable()->initialize_itable(false, THREAD);
    assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception");
  }

  // Leave arrays of jmethodIDs and itable index cache unchanged

  // Copy the "source file name" attribute from new class version
  the_class->set_source_file_name(scratch_class->source_file_name());

  // Copy the "source debug extension" attribute from new class version
  the_class->set_source_debug_extension(
    scratch_class->source_debug_extension(),
    scratch_class->source_debug_extension() == NULL ? 0 :
    (int)strlen(scratch_class->source_debug_extension()));

  // Use of javac -g could be different in the old and the new
  if (scratch_class->access_flags().has_localvariable_table() !=
      the_class->access_flags().has_localvariable_table()) {

    AccessFlags flags = the_class->access_flags();
    if (scratch_class->access_flags().has_localvariable_table()) {
      flags.set_has_localvariable_table();
    } else {
      flags.clear_has_localvariable_table();
    }
    the_class->set_access_flags(flags);
  }

  // Since there is currently no rewriting of type annotations indexes
  // into the CP, we null out type annotations on scratch_class before
  // we swap annotations with the_class rather than facing the
  // possibility of shipping annotations with broken indexes to
  // Java-land.
  Annotations* new_annotations = scratch_class->annotations();
  if (new_annotations != NULL) {
    Annotations* new_type_annotations = new_annotations->type_annotations();
    if (new_type_annotations != NULL) {
      MetadataFactory::free_metadata(scratch_class->class_loader_data(), new_type_annotations);
      new_annotations->set_type_annotations(NULL);
    }
  }
  // Swap annotation fields values
  Annotations* old_annotations = the_class->annotations();
  the_class->set_annotations(scratch_class->annotations());
  scratch_class->set_annotations(old_annotations);

  // Replace minor version number of class file
  u2 old_minor_version = the_class->minor_version();
  the_class->set_minor_version(scratch_class->minor_version());
  scratch_class->set_minor_version(old_minor_version);

  // Replace major version number of class file
  u2 old_major_version = the_class->major_version();
  the_class->set_major_version(scratch_class->major_version());
  scratch_class->set_major_version(old_major_version);

  // Replace CP indexes for class and name+type of enclosing method
  u2 old_class_idx  = the_class->enclosing_method_class_index();
  u2 old_method_idx = the_class->enclosing_method_method_index();
  the_class->set_enclosing_method_indices(
    scratch_class->enclosing_method_class_index(),
    scratch_class->enclosing_method_method_index());
  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);

  // keep track of previous versions of this class
  the_class->add_previous_version(scratch_class, &emcp_methods,
    emcp_method_count);

  RC_TIMER_STOP(_timer_rsc_phase1);
  RC_TIMER_START(_timer_rsc_phase2);

  // Adjust constantpool caches and vtables for all classes
  // that reference methods of the evolved class.
  SystemDictionary::classes_do(adjust_cpool_cache_and_vtable, THREAD);

  // Fix Resolution Error table also to remove old constant pools
  SystemDictionary::delete_resolution_error(old_constants);

  if (the_class->oop_map_cache() != NULL) {
    // Flush references to any obsolete methods from the oop map cache
    // so that obsolete methods are not pinned.
    the_class->oop_map_cache()->flush_obsolete_entries();
  }

  // increment the classRedefinedCount field in the_class and in any
  // direct and indirect subclasses of the_class
  increment_class_counter((InstanceKlass *)the_class(), THREAD);

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE_WITH_THREAD(0x00000001, THREAD,
    ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
    the_class->external_name(),
    java_lang_Class::classRedefinedCount(the_class_mirror),
    os::available_memory() >> 10));

  RC_TIMER_STOP(_timer_rsc_phase2);
} // end redefine_single_class()
3354 
3355 
3356 // Increment the classRedefinedCount field in the specific InstanceKlass
3357 // and in all direct and indirect subclasses.
3358 void VM_RedefineClasses::increment_class_counter(InstanceKlass *ik, TRAPS) {
3359   oop class_mirror = ik->java_mirror();
3360   Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
3361   int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
3362   java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
3363 
3364   if (class_oop != _the_class_oop) {
3365     // _the_class_oop count is printed at end of redefine_single_class()
3366     RC_TRACE_WITH_THREAD(0x00000008, THREAD,
3367       ("updated count in subclass=%s to %d", ik->external_name(), new_count));
3368   }
3369 
3370   for (Klass *subk = ik->subklass(); subk != NULL;
3371        subk = subk->next_sibling()) {
3372     if (subk->oop_is_instance()) {
3373       // Only update instanceKlasses
3374       InstanceKlass *subik = (InstanceKlass*)subk;
3375       // recursively do subclasses of the current subclass
3376       increment_class_counter(subik, THREAD);
3377     }
3378   }
3379 }
3380 
3381 void VM_RedefineClasses::check_class(Klass* k_oop,
3382                                      ClassLoaderData* initiating_loader,
3383                                      TRAPS) {
3384   Klass *k = k_oop;
3385   if (k->oop_is_instance()) {
3386     HandleMark hm(THREAD);
3387     InstanceKlass *ik = (InstanceKlass *) k;
3388     bool no_old_methods = true;  // be optimistic
3389     ResourceMark rm(THREAD);
3390 
3391     // a vtable should never contain old or obsolete methods
3392     if (ik->vtable_length() > 0 &&
3393         !ik->vtable()->check_no_old_or_obsolete_entries()) {
3394       if (RC_TRACE_ENABLED(0x00004000)) {
3395         RC_TRACE_WITH_THREAD(0x00004000, THREAD,
3396           ("klassVtable::check_no_old_or_obsolete_entries failure"
3397            " -- OLD or OBSOLETE method found -- class: %s",
3398            ik->signature_name()));
3399         ik->vtable()->dump_vtable();
3400       }
3401       no_old_methods = false;
3402     }
3403 
3404     // an itable should never contain old or obsolete methods
3405     if (ik->itable_length() > 0 &&
3406         !ik->itable()->check_no_old_or_obsolete_entries()) {
3407       if (RC_TRACE_ENABLED(0x00004000)) {
3408         RC_TRACE_WITH_THREAD(0x00004000, THREAD,
3409           ("klassItable::check_no_old_or_obsolete_entries failure"
3410            " -- OLD or OBSOLETE method found -- class: %s",
3411            ik->signature_name()));
3412         ik->itable()->dump_itable();
3413       }
3414       no_old_methods = false;
3415     }
3416 
3417     // the constant pool cache should never contain old or obsolete methods
3418     if (ik->constants() != NULL &&
3419         ik->constants()->cache() != NULL &&
3420         !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
3421       if (RC_TRACE_ENABLED(0x00004000)) {
3422         RC_TRACE_WITH_THREAD(0x00004000, THREAD,
3423           ("cp-cache::check_no_old_or_obsolete_entries failure"
3424            " -- OLD or OBSOLETE method found -- class: %s",
3425            ik->signature_name()));
3426         ik->constants()->cache()->dump_cache();
3427       }
3428       no_old_methods = false;
3429     }
3430 
3431     if (!no_old_methods) {
3432       if (RC_TRACE_ENABLED(0x00004000)) {
3433         dump_methods();
3434       } else {
3435         tty->print_cr("INFO: use the '-XX:TraceRedefineClasses=16384' option "
3436           "to see more info about the following guarantee() failure.");
3437       }
3438       guarantee(false, "OLD and/or OBSOLETE method(s) found");
3439     }
3440   }
3441 }
3442 
3443 void VM_RedefineClasses::dump_methods() {
3444   int j;
3445   RC_TRACE(0x00004000, ("_old_methods --"));
3446   for (j = 0; j < _old_methods->length(); ++j) {
3447     Method* m = _old_methods->at(j);
3448     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
3449     m->access_flags().print_on(tty);
3450     tty->print(" --  ");
3451     m->print_name(tty);
3452     tty->cr();
3453   }
3454   RC_TRACE(0x00004000, ("_new_methods --"));
3455   for (j = 0; j < _new_methods->length(); ++j) {
3456     Method* m = _new_methods->at(j);
3457     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
3458     m->access_flags().print_on(tty);
3459     tty->print(" --  ");
3460     m->print_name(tty);
3461     tty->cr();
3462   }
3463   RC_TRACE(0x00004000, ("_matching_(old/new)_methods --"));
3464   for (j = 0; j < _matching_methods_length; ++j) {
3465     Method* m = _matching_old_methods[j];
3466     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
3467     m->access_flags().print_on(tty);
3468     tty->print(" --  ");
3469     m->print_name(tty);
3470     tty->cr();
3471     m = _matching_new_methods[j];
3472     RC_TRACE_NO_CR(0x00004000, ("      (%5d)  ", m->vtable_index()));
3473     m->access_flags().print_on(tty);
3474     tty->cr();
3475   }
3476   RC_TRACE(0x00004000, ("_deleted_methods --"));
3477   for (j = 0; j < _deleted_methods_length; ++j) {
3478     Method* m = _deleted_methods[j];
3479     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
3480     m->access_flags().print_on(tty);
3481     tty->print(" --  ");
3482     m->print_name(tty);
3483     tty->cr();
3484   }
3485   RC_TRACE(0x00004000, ("_added_methods --"));
3486   for (j = 0; j < _added_methods_length; ++j) {
3487     Method* m = _added_methods[j];
3488     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
3489     m->access_flags().print_on(tty);
3490     tty->print(" --  ");
3491     m->print_name(tty);
3492     tty->cr();
3493   }
3494 }