1 /*
   2  * Copyright (c) 2003, 2015, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/metadataOnStackMark.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/verifier.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "gc/shared/gcLocker.hpp"
  32 #include "interpreter/oopMapCache.hpp"
  33 #include "interpreter/rewriter.hpp"
  34 #include "memory/metadataFactory.hpp"
  35 #include "memory/metaspaceShared.hpp"
  36 #include "memory/universe.inline.hpp"
  37 #include "oops/fieldStreams.hpp"
  38 #include "oops/klassVtable.hpp"
  39 #include "oops/oop.inline.hpp"
  40 #include "prims/jvmtiImpl.hpp"
  41 #include "prims/jvmtiRedefineClasses.hpp"
  42 #include "prims/methodComparator.hpp"
  43 #include "runtime/deoptimization.hpp"
  44 #include "runtime/relocator.hpp"
  45 #include "utilities/bitMap.inline.hpp"
  46 #include "utilities/events.hpp"
  47 
  48 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  49 
// Static scratch state for the redefinition currently in progress.
// A VM_RedefineClasses operation runs to completion before another can
// start, so one set of statics suffices.

// Old/new method arrays for the class currently being redefined.
Array<Method*>* VM_RedefineClasses::_old_methods = NULL;
Array<Method*>* VM_RedefineClasses::_new_methods = NULL;
// Parallel arrays pairing up old methods with their new counterparts,
// plus the methods that only exist on one side.
Method**  VM_RedefineClasses::_matching_old_methods = NULL;
Method**  VM_RedefineClasses::_matching_new_methods = NULL;
Method**  VM_RedefineClasses::_deleted_methods      = NULL;
Method**  VM_RedefineClasses::_added_methods        = NULL;
int         VM_RedefineClasses::_matching_methods_length = 0;
int         VM_RedefineClasses::_deleted_methods_length  = 0;
int         VM_RedefineClasses::_added_methods_length    = 0;
// The class being redefined; reset to NULL in doit_epilogue() so stale
// values do not show up in error printing.
Klass*      VM_RedefineClasses::_the_class_oop = NULL;
  60 
  61 
  62 VM_RedefineClasses::VM_RedefineClasses(jint class_count,
  63                                        const jvmtiClassDefinition *class_defs,
  64                                        JvmtiClassLoadKind class_load_kind) {
  65   _class_count = class_count;
  66   _class_defs = class_defs;
  67   _class_load_kind = class_load_kind;
  68   _res = JVMTI_ERROR_NONE;
  69 }
  70 
  71 bool VM_RedefineClasses::doit_prologue() {
  72   if (_class_count == 0) {
  73     _res = JVMTI_ERROR_NONE;
  74     return false;
  75   }
  76   if (_class_defs == NULL) {
  77     _res = JVMTI_ERROR_NULL_POINTER;
  78     return false;
  79   }
  80   for (int i = 0; i < _class_count; i++) {
  81     if (_class_defs[i].klass == NULL) {
  82       _res = JVMTI_ERROR_INVALID_CLASS;
  83       return false;
  84     }
  85     if (_class_defs[i].class_byte_count == 0) {
  86       _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
  87       return false;
  88     }
  89     if (_class_defs[i].class_bytes == NULL) {
  90       _res = JVMTI_ERROR_NULL_POINTER;
  91       return false;
  92     }
  93   }
  94 
  95   // Start timer after all the sanity checks; not quite accurate, but
  96   // better than adding a bunch of stop() calls.
  97   RC_TIMER_START(_timer_vm_op_prologue);
  98 
  99   // We first load new class versions in the prologue, because somewhere down the
 100   // call chain it is required that the current thread is a Java thread.
 101   _res = load_new_class_versions(Thread::current());
 102   if (_res != JVMTI_ERROR_NONE) {
 103     // free any successfully created classes, since none are redefined
 104     for (int i = 0; i < _class_count; i++) {
 105       if (_scratch_classes[i] != NULL) {
 106         ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
 107         // Free the memory for this class at class unloading time.  Not before
 108         // because CMS might think this is still live.
 109         cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
 110       }
 111     }
 112     // Free os::malloc allocated memory in load_new_class_version.
 113     os::free(_scratch_classes);
 114     RC_TIMER_STOP(_timer_vm_op_prologue);
 115     return false;
 116   }
 117 
 118   RC_TIMER_STOP(_timer_vm_op_prologue);
 119   return true;
 120 }
 121 
// The redefinition itself; runs as the VM operation's doit() phase
// (per the comment below, during the safepoint).
void VM_RedefineClasses::doit() {
  Thread *thread = Thread::current();

  if (UseSharedSpaces) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
      RC_TRACE_WITH_THREAD(0x00000001, thread,
        ("failed to remap shared readonly space to readwrite, private"));
      // The request cannot proceed without a writable shared space.
      _res = JVMTI_ERROR_INTERNAL;
      return;
    }
  }

  // Mark methods seen on stack and everywhere else so old methods are not
  // cleaned up if they're on the stack.
  MetadataOnStackMark md_on_stack(true);
  HandleMark hm(thread);   // make sure any handles created are deleted
                           // before the stack walk again.

  // Redefine each requested class against its scratch (new-version) class
  // loaded in doit_prologue().
  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
  }

  // Clean out MethodData pointing to old Method*
  // Have to do this after all classes are redefined and all methods that
  // are redefined are marked as old.
  MethodDataCleaner clean_weak_method_links;
  ClassLoaderDataGraph::classes_do(&clean_weak_method_links);

  // Disable any dependent concurrent compilations
  SystemDictionary::notice_modification();

  // Set flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::set_has_redefined_a_class();

  // check_class() is optionally called for product bits, but is
  // always called for non-product bits.
  // Note the #ifdef: in product builds the tracing test gates the whole
  // scope; in non-product builds the braces are unconditional.
#ifdef PRODUCT
  if (RC_TRACE_ENABLED(0x00004000)) {
#endif
    RC_TRACE_WITH_THREAD(0x00004000, thread, ("calling check_class"));
    CheckClass check_class(thread);
    ClassLoaderDataGraph::classes_do(&check_class);
#ifdef PRODUCT
  }
#endif
}
 173 
 174 void VM_RedefineClasses::doit_epilogue() {
 175   // Free os::malloc allocated memory.
 176   os::free(_scratch_classes);
 177 
 178   // Reset the_class_oop to null for error printing.
 179   _the_class_oop = NULL;
 180 
 181   if (RC_TRACE_ENABLED(0x00000004)) {
 182     // Used to have separate timers for "doit" and "all", but the timer
 183     // overhead skewed the measurements.
 184     jlong doit_time = _timer_rsc_phase1.milliseconds() +
 185                       _timer_rsc_phase2.milliseconds();
 186     jlong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;
 187 
 188     RC_TRACE(0x00000004, ("vm_op: all=" UINT64_FORMAT
 189       "  prologue=" UINT64_FORMAT "  doit=" UINT64_FORMAT, all_time,
 190       _timer_vm_op_prologue.milliseconds(), doit_time));
 191     RC_TRACE(0x00000004,
 192       ("redefine_single_class: phase1=" UINT64_FORMAT "  phase2=" UINT64_FORMAT,
 193        _timer_rsc_phase1.milliseconds(), _timer_rsc_phase2.milliseconds()));
 194   }
 195 }
 196 
 197 bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
 198   // classes for primitives cannot be redefined
 199   if (java_lang_Class::is_primitive(klass_mirror)) {
 200     return false;
 201   }
 202   Klass* the_class_oop = java_lang_Class::as_Klass(klass_mirror);
 203   // classes for arrays cannot be redefined
 204   if (the_class_oop == NULL || !the_class_oop->oop_is_instance()) {
 205     return false;
 206   }
 207   return true;
 208 }
 209 
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
// direct CP entries, there is just the current entry to append. For
// indirect and double-indirect CP entries, there are zero or more
// referenced CP entries along with the current entry to append.
// Indirect and double-indirect CP entries are handled by recursive
// calls to append_entry() as needed. The referenced CP entries are
// always appended to *merge_cp_p before the referee CP entry. These
// referenced CP entries may already exist in *merge_cp_p in which case
// there is nothing extra to append and only the current entry is
// appended.
//
// Whenever the merged entry lands at a different index than scratch_i,
// map_index() records the scratch_i -> merged-index mapping so later
// rewriting can fix up references.
void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
       TRAPS) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    case JVM_CONSTANT_Class:
    {
      // revert the copy to JVM_CONSTANT_UnresolvedClass
      (*merge_cp_p)->unresolved_klass_at_put(*merge_cp_length_p,
        scratch_cp->klass_name_at(scratch_i));

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // two-slot entry: advance the merged length by two
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // Symbol*s so this entry can be directly appended.
    case JVM_CONSTANT_String:      // fall through

    // These were indirect CP entries, but they have been changed into
    // Symbol*s so these entries can be directly appended.
    case JVM_CONSTANT_UnresolvedClass:  // fall through
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      // Append (or find) the referenced name and signature entries first.
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
                                                         merge_cp_length_p, THREAD);

      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
                                                              merge_cp_p, merge_cp_length_p,
                                                              THREAD);

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_name_ref_i and new_signature_ref_i will both be 0.
      // In that case, all we are appending is the current entry.
      if (new_name_ref_i != name_ref_i) {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d name_ref_index change: %d to %d",
          *merge_cp_length_p, name_ref_i, new_name_ref_i));
      }
      if (new_signature_ref_i != signature_ref_i) {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
          *merge_cp_length_p, signature_ref_i, new_signature_ref_i));
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // Resolve the class and NameAndType references before appending
      // the member reference itself.
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      // entry_name is only used for tracing below
      const char *entry_name = NULL;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d",
          entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i));
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        RC_TRACE(0x00080000,
          ("%s entry@%d name_and_type_index changed: %d to %d",
          entry_name, *merge_cp_length_p, name_and_type_ref_i,
          new_name_and_type_ref_i));
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodType:
    {
      int ref_i = scratch_cp->method_type_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        RC_TRACE(0x00080000,
                 ("MethodType entry@%d ref_index change: %d to %d",
                  *merge_cp_length_p, ref_i, new_ref_i));
      }
      (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodHandle:
    {
      int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i);
      int ref_i = scratch_cp->method_handle_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        RC_TRACE(0x00080000,
                 ("MethodHandle entry@%d ref_index change: %d to %d",
                  *merge_cp_length_p, ref_i, new_ref_i));
      }
      (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_InvokeDynamic:
    {
      // Index of the bootstrap specifier in the operands array
      int old_bs_i = scratch_cp->invoke_dynamic_bootstrap_specifier_index(scratch_i);
      int new_bs_i = find_or_append_operand(scratch_cp, old_bs_i, merge_cp_p,
                                            merge_cp_length_p, THREAD);
      // The bootstrap method NameAndType_info index
      int old_ref_i = scratch_cp->invoke_dynamic_name_and_type_ref_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_bs_i != old_bs_i) {
        RC_TRACE(0x00080000,
                 ("InvokeDynamic entry@%d bootstrap_method_attr_index change: %d to %d",
                  *merge_cp_length_p, old_bs_i, new_bs_i));
      }
      if (new_ref_i != old_ref_i) {
        RC_TRACE(0x00080000,
                 ("InvokeDynamic entry@%d name_and_type_index change: %d to %d",
                  *merge_cp_length_p, old_ref_i, new_ref_i));
      }

      (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be here, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
    // here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb: the dead local keeps the offending tag value
      // visible (e.g. in a debugger) when the guarantee below fires
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
 478 
 479 
 480 int VM_RedefineClasses::find_or_append_indirect_entry(constantPoolHandle scratch_cp,
 481       int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {
 482 
 483   int new_ref_i = ref_i;
 484   bool match = (ref_i < *merge_cp_length_p) &&
 485                scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i, THREAD);
 486 
 487   if (!match) {
 488     // forward reference in *merge_cp_p or not a direct match
 489     int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p, THREAD);
 490     if (found_i != 0) {
 491       guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
 492       // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
 493       new_ref_i = found_i;
 494       map_index(scratch_cp, ref_i, found_i);
 495     } else {
 496       // no match found so we have to append this entry to *merge_cp_p
 497       append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p, THREAD);
 498       // The above call to append_entry() can only append one entry
 499       // so the post call query of *merge_cp_length_p is only for
 500       // the sake of consistency.
 501       new_ref_i = *merge_cp_length_p - 1;
 502     }
 503   }
 504 
 505   return new_ref_i;
 506 } // end find_or_append_indirect_entry()
 507 
 508 
// Append a bootstrap specifier into the merge_cp operands that is semantically equal
// to the scratch_cp operands bootstrap specifier passed by the old_bs_i index.
// Recursively append new merge_cp entries referenced by the new bootstrap specifier.
void VM_RedefineClasses::append_operand(constantPoolHandle scratch_cp, int old_bs_i,
       constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {

  // First make sure the bootstrap method's CP entry exists in the merged pool.
  int old_ref_i = scratch_cp->operand_bootstrap_method_ref_index_at(old_bs_i);
  int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                merge_cp_length_p, THREAD);
  if (new_ref_i != old_ref_i) {
    RC_TRACE(0x00080000,
             ("operands entry@%d bootstrap method ref_index change: %d to %d",
              _operands_cur_length, old_ref_i, new_ref_i));
  }

  Array<u2>* merge_ops = (*merge_cp_p)->operands();
  // The new specifier is appended at the current end of the merged operands.
  int new_bs_i = _operands_cur_length;
  // We have _operands_cur_length == 0 when the merge_cp operands is empty yet.
  // However, the operand_offset_at(0) was set in the extend_operands() call.
  int new_base = (new_bs_i == 0) ? (*merge_cp_p)->operand_offset_at(0)
                                 : (*merge_cp_p)->operand_next_offset_at(new_bs_i - 1);
  int argc     = scratch_cp->operand_argument_count_at(old_bs_i);

  // Record the specifier's offset, then lay it out as:
  // bootstrap method ref index, argument count, then the argument indexes.
  ConstantPool::operand_offset_at_put(merge_ops, _operands_cur_length, new_base);
  merge_ops->at_put(new_base++, new_ref_i);
  merge_ops->at_put(new_base++, argc);

  for (int i = 0; i < argc; i++) {
    // Each static argument is itself a CP entry that must exist in the
    // merged pool before its index can be stored.
    int old_arg_ref_i = scratch_cp->operand_argument_index_at(old_bs_i, i);
    int new_arg_ref_i = find_or_append_indirect_entry(scratch_cp, old_arg_ref_i, merge_cp_p,
                                                      merge_cp_length_p, THREAD);
    merge_ops->at_put(new_base++, new_arg_ref_i);
    if (new_arg_ref_i != old_arg_ref_i) {
      RC_TRACE(0x00080000,
               ("operands entry@%d bootstrap method argument ref_index change: %d to %d",
                _operands_cur_length, old_arg_ref_i, new_arg_ref_i));
    }
  }
  if (old_bs_i != _operands_cur_length) {
    // The bootstrap specifier in *merge_cp_p is at a different index than
    // that in scratch_cp so we need to map the index values.
    map_operand_index(old_bs_i, new_bs_i);
  }
  _operands_cur_length++;
} // end append_operand()
 554 
 555 
 556 int VM_RedefineClasses::find_or_append_operand(constantPoolHandle scratch_cp,
 557       int old_bs_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {
 558 
 559   int new_bs_i = old_bs_i; // bootstrap specifier index
 560   bool match = (old_bs_i < _operands_cur_length) &&
 561                scratch_cp->compare_operand_to(old_bs_i, *merge_cp_p, old_bs_i, THREAD);
 562 
 563   if (!match) {
 564     // forward reference in *merge_cp_p or not a direct match
 565     int found_i = scratch_cp->find_matching_operand(old_bs_i, *merge_cp_p,
 566                                                     _operands_cur_length, THREAD);
 567     if (found_i != -1) {
 568       guarantee(found_i != old_bs_i, "compare_operand_to() and find_matching_operand() disagree");
 569       // found a matching operand somewhere else in *merge_cp_p so just need a mapping
 570       new_bs_i = found_i;
 571       map_operand_index(old_bs_i, found_i);
 572     } else {
 573       // no match found so we have to append this bootstrap specifier to *merge_cp_p
 574       append_operand(scratch_cp, old_bs_i, merge_cp_p, merge_cp_length_p, THREAD);
 575       new_bs_i = _operands_cur_length - 1;
 576     }
 577   }
 578   return new_bs_i;
 579 } // end find_or_append_operand()
 580 
 581 
 582 void VM_RedefineClasses::finalize_operands_merge(constantPoolHandle merge_cp, TRAPS) {
 583   if (merge_cp->operands() == NULL) {
 584     return;
 585   }
 586   // Shrink the merge_cp operands
 587   merge_cp->shrink_operands(_operands_cur_length, CHECK);
 588 
 589   if (RC_TRACE_ENABLED(0x00040000)) {
 590     // don't want to loop unless we are tracing
 591     int count = 0;
 592     for (int i = 1; i < _operands_index_map_p->length(); i++) {
 593       int value = _operands_index_map_p->at(i);
 594       if (value != -1) {
 595         RC_TRACE_WITH_THREAD(0x00040000, THREAD,
 596           ("operands_index_map[%d]: old=%d new=%d", count, i, value));
 597         count++;
 598       }
 599     }
 600   }
 601   // Clean-up
 602   _operands_index_map_p = NULL;
 603   _operands_cur_length = 0;
 604   _operands_index_map_count = 0;
 605 } // end finalize_operands_merge()
 606 
 607 
 608 jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
 609              instanceKlassHandle the_class,
 610              instanceKlassHandle scratch_class) {
 611   int i;
 612 
 613   // Check superclasses, or rather their names, since superclasses themselves can be
 614   // requested to replace.
 615   // Check for NULL superclass first since this might be java.lang.Object
 616   if (the_class->super() != scratch_class->super() &&
 617       (the_class->super() == NULL || scratch_class->super() == NULL ||
 618        the_class->super()->name() !=
 619        scratch_class->super()->name())) {
 620     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
 621   }
 622 
 623   // Check if the number, names and order of directly implemented interfaces are the same.
 624   // I think in principle we should just check if the sets of names of directly implemented
 625   // interfaces are the same, i.e. the order of declaration (which, however, if changed in the
 626   // .java file, also changes in .class file) should not matter. However, comparing sets is
 627   // technically a bit more difficult, and, more importantly, I am not sure at present that the
 628   // order of interfaces does not matter on the implementation level, i.e. that the VM does not
 629   // rely on it somewhere.
 630   Array<Klass*>* k_interfaces = the_class->local_interfaces();
 631   Array<Klass*>* k_new_interfaces = scratch_class->local_interfaces();
 632   int n_intfs = k_interfaces->length();
 633   if (n_intfs != k_new_interfaces->length()) {
 634     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
 635   }
 636   for (i = 0; i < n_intfs; i++) {
 637     if (k_interfaces->at(i)->name() !=
 638         k_new_interfaces->at(i)->name()) {
 639       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
 640     }
 641   }
 642 
 643   // Check whether class is in the error init state.
 644   if (the_class->is_in_error_state()) {
 645     // TBD #5057930: special error code is needed in 1.6
 646     return JVMTI_ERROR_INVALID_CLASS;
 647   }
 648 
 649   // Check whether class modifiers are the same.
 650   jushort old_flags = (jushort) the_class->access_flags().get_flags();
 651   jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
 652   if (old_flags != new_flags) {
 653     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
 654   }
 655 
 656   // Check if the number, names, types and order of fields declared in these classes
 657   // are the same.
 658   JavaFieldStream old_fs(the_class);
 659   JavaFieldStream new_fs(scratch_class);
 660   for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
 661     // access
 662     old_flags = old_fs.access_flags().as_short();
 663     new_flags = new_fs.access_flags().as_short();
 664     if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
 665       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
 666     }
 667     // offset
 668     if (old_fs.offset() != new_fs.offset()) {
 669       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
 670     }
 671     // name and signature
 672     Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
 673     Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
 674     Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
 675     Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
 676     if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
 677       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
 678     }
 679   }
 680 
 681   // If both streams aren't done then we have a differing number of
 682   // fields.
 683   if (!old_fs.done() || !new_fs.done()) {
 684     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
 685   }
 686 
 687   // Do a parallel walk through the old and new methods. Detect
 688   // cases where they match (exist in both), have been added in
 689   // the new methods, or have been deleted (exist only in the
 690   // old methods).  The class file parser places methods in order
 691   // by method name, but does not order overloaded methods by
 692   // signature.  In order to determine what fate befell the methods,
 693   // this code places the overloaded new methods that have matching
 694   // old methods in the same order as the old methods and places
 695   // new overloaded methods at the end of overloaded methods of
 696   // that name. The code for this order normalization is adapted
 697   // from the algorithm used in InstanceKlass::find_method().
 698   // Since we are swapping out of order entries as we find them,
 699   // we only have to search forward through the overloaded methods.
 700   // Methods which are added and have the same name as an existing
 701   // method (but different signature) will be put at the end of
 702   // the methods with that name, and the name mismatch code will
 703   // handle them.
 704   Array<Method*>* k_old_methods(the_class->methods());
 705   Array<Method*>* k_new_methods(scratch_class->methods());
 706   int n_old_methods = k_old_methods->length();
 707   int n_new_methods = k_new_methods->length();
 708   Thread* thread = Thread::current();
 709 
 710   int ni = 0;
 711   int oi = 0;
 712   while (true) {
 713     Method* k_old_method;
 714     Method* k_new_method;
 715     enum { matched, added, deleted, undetermined } method_was = undetermined;
 716 
 717     if (oi >= n_old_methods) {
 718       if (ni >= n_new_methods) {
 719         break; // we've looked at everything, done
 720       }
 721       // New method at the end
 722       k_new_method = k_new_methods->at(ni);
 723       method_was = added;
 724     } else if (ni >= n_new_methods) {
 725       // Old method, at the end, is deleted
 726       k_old_method = k_old_methods->at(oi);
 727       method_was = deleted;
 728     } else {
 729       // There are more methods in both the old and new lists
 730       k_old_method = k_old_methods->at(oi);
 731       k_new_method = k_new_methods->at(ni);
 732       if (k_old_method->name() != k_new_method->name()) {
 733         // Methods are sorted by method name, so a mismatch means added
 734         // or deleted
 735         if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
 736           method_was = added;
 737         } else {
 738           method_was = deleted;
 739         }
 740       } else if (k_old_method->signature() == k_new_method->signature()) {
 741         // Both the name and signature match
 742         method_was = matched;
 743       } else {
 744         // The name matches, but the signature doesn't, which means we have to
 745         // search forward through the new overloaded methods.
 746         int nj;  // outside the loop for post-loop check
 747         for (nj = ni + 1; nj < n_new_methods; nj++) {
 748           Method* m = k_new_methods->at(nj);
 749           if (k_old_method->name() != m->name()) {
 750             // reached another method name so no more overloaded methods
 751             method_was = deleted;
 752             break;
 753           }
 754           if (k_old_method->signature() == m->signature()) {
 755             // found a match so swap the methods
 756             k_new_methods->at_put(ni, m);
 757             k_new_methods->at_put(nj, k_new_method);
 758             k_new_method = m;
 759             method_was = matched;
 760             break;
 761           }
 762         }
 763 
 764         if (nj >= n_new_methods) {
 765           // reached the end without a match; so method was deleted
 766           method_was = deleted;
 767         }
 768       }
 769     }
 770 
 771     switch (method_was) {
 772     case matched:
 773       // methods match, be sure modifiers do too
 774       old_flags = (jushort) k_old_method->access_flags().get_flags();
 775       new_flags = (jushort) k_new_method->access_flags().get_flags();
 776       if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
 777         return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
 778       }
 779       {
 780         u2 new_num = k_new_method->method_idnum();
 781         u2 old_num = k_old_method->method_idnum();
 782         if (new_num != old_num) {
 783           Method* idnum_owner = scratch_class->method_with_idnum(old_num);
 784           if (idnum_owner != NULL) {
 785             // There is already a method assigned this idnum -- switch them
 786             // Take current and original idnum from the new_method
 787             idnum_owner->set_method_idnum(new_num);
 788             idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
 789           }
 790           // Take current and original idnum from the old_method
 791           k_new_method->set_method_idnum(old_num);
 792           k_new_method->set_orig_method_idnum(k_old_method->orig_method_idnum());
 793           if (thread->has_pending_exception()) {
 794             return JVMTI_ERROR_OUT_OF_MEMORY;
 795           }
 796         }
 797       }
 798       RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]",
 799                             k_new_method->name_and_sig_as_C_string(), ni,
 800                             k_old_method->name_and_sig_as_C_string(), oi));
 801       // advance to next pair of methods
 802       ++oi;
 803       ++ni;
 804       break;
 805     case added:
 806       // method added, see if it is OK
 807       new_flags = (jushort) k_new_method->access_flags().get_flags();
 808       if ((new_flags & JVM_ACC_PRIVATE) == 0
 809            // hack: private should be treated as final, but alas
 810           || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
 811          ) {
 812         // new methods must be private
 813         return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
 814       }
 815       {
 816         u2 num = the_class->next_method_idnum();
 817         if (num == ConstMethod::UNSET_IDNUM) {
 818           // cannot add any more methods
 819           return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
 820         }
 821         u2 new_num = k_new_method->method_idnum();
 822         Method* idnum_owner = scratch_class->method_with_idnum(num);
 823         if (idnum_owner != NULL) {
 824           // There is already a method assigned this idnum -- switch them
 825           // Take current and original idnum from the new_method
 826           idnum_owner->set_method_idnum(new_num);
 827           idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
 828         }
 829         k_new_method->set_method_idnum(num);
 830         k_new_method->set_orig_method_idnum(num);
 831         if (thread->has_pending_exception()) {
 832           return JVMTI_ERROR_OUT_OF_MEMORY;
 833         }
 834       }
 835       RC_TRACE(0x00008000, ("Method added: new: %s [%d]",
 836                             k_new_method->name_and_sig_as_C_string(), ni));
 837       ++ni; // advance to next new method
 838       break;
 839     case deleted:
 840       // method deleted, see if it is OK
 841       old_flags = (jushort) k_old_method->access_flags().get_flags();
 842       if ((old_flags & JVM_ACC_PRIVATE) == 0
 843            // hack: private should be treated as final, but alas
 844           || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
 845          ) {
 846         // deleted methods must be private
 847         return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
 848       }
 849       RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]",
 850                             k_old_method->name_and_sig_as_C_string(), oi));
 851       ++oi; // advance to next old method
 852       break;
 853     default:
 854       ShouldNotReachHere();
 855     }
 856   }
 857 
 858   return JVMTI_ERROR_NONE;
 859 }
 860 
 861 
 862 // Find new constant pool index value for old constant pool index value
 863 // by seaching the index map. Returns zero (0) if there is no mapped
 864 // value for the old constant pool index.
 865 int VM_RedefineClasses::find_new_index(int old_index) {
 866   if (_index_map_count == 0) {
 867     // map is empty so nothing can be found
 868     return 0;
 869   }
 870 
 871   if (old_index < 1 || old_index >= _index_map_p->length()) {
 872     // The old_index is out of range so it is not mapped. This should
 873     // not happen in regular constant pool merging use, but it can
 874     // happen if a corrupt annotation is processed.
 875     return 0;
 876   }
 877 
 878   int value = _index_map_p->at(old_index);
 879   if (value == -1) {
 880     // the old_index is not mapped
 881     return 0;
 882   }
 883 
 884   return value;
 885 } // end find_new_index()
 886 
 887 
 888 // Find new bootstrap specifier index value for old bootstrap specifier index
 889 // value by seaching the index map. Returns unused index (-1) if there is
 890 // no mapped value for the old bootstrap specifier index.
 891 int VM_RedefineClasses::find_new_operand_index(int old_index) {
 892   if (_operands_index_map_count == 0) {
 893     // map is empty so nothing can be found
 894     return -1;
 895   }
 896 
 897   if (old_index == -1 || old_index >= _operands_index_map_p->length()) {
 898     // The old_index is out of range so it is not mapped.
 899     // This should not happen in regular constant pool merging use.
 900     return -1;
 901   }
 902 
 903   int value = _operands_index_map_p->at(old_index);
 904   if (value == -1) {
 905     // the old_index is not mapped
 906     return -1;
 907   }
 908 
 909   return value;
 910 } // end find_new_operand_index()
 911 
 912 
 913 // Returns true if the current mismatch is due to a resolved/unresolved
 914 // class pair. Otherwise, returns false.
 915 bool VM_RedefineClasses::is_unresolved_class_mismatch(constantPoolHandle cp1,
 916        int index1, constantPoolHandle cp2, int index2) {
 917 
 918   jbyte t1 = cp1->tag_at(index1).value();
 919   if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
 920     return false;  // wrong entry type; not our special case
 921   }
 922 
 923   jbyte t2 = cp2->tag_at(index2).value();
 924   if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
 925     return false;  // wrong entry type; not our special case
 926   }
 927 
 928   if (t1 == t2) {
 929     return false;  // not a mismatch; not our special case
 930   }
 931 
 932   char *s1 = cp1->klass_name_at(index1)->as_C_string();
 933   char *s2 = cp2->klass_name_at(index2)->as_C_string();
 934   if (strcmp(s1, s2) != 0) {
 935     return false;  // strings don't match; not our special case
 936   }
 937 
 938   return true;  // made it through the gauntlet; this is our special case
 939 } // end is_unresolved_class_mismatch()
 940 
 941 
// Parse, verify, and prepare the new (scratch) class version for each
// entry in _class_defs. Each parsed scratch class is stored in
// _scratch_classes[i]; on any error this returns early with a JVMTI
// error code and the caller deallocates the scratch classes created so
// far. Returns JVMTI_ERROR_NONE when every class loads cleanly.
jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {

  // For consistency allocate memory using os::malloc wrapper.
  _scratch_classes = (Klass**)
    os::malloc(sizeof(Klass*) * _class_count, mtClass);
  if (_scratch_classes == NULL) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }
  // Zero initialize the _scratch_classes array.
  for (int i = 0; i < _class_count; i++) {
    _scratch_classes[i] = NULL;
  }

  ResourceMark rm(THREAD);

  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  // state can only be NULL if the current thread is exiting which
  // should not happen since we're trying to do a RedefineClasses
  guarantee(state != NULL, "exiting thread calling load_new_class_versions");
  for (int i = 0; i < _class_count; i++) {
    // Create HandleMark so that any handles created while loading new class
    // versions are deleted. Constant pools are deallocated while merging
    // constant pools
    HandleMark hm(THREAD);

    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives cannot be redefined
    if (!is_modifiable_class(mirror)) {
      return JVMTI_ERROR_UNMODIFIABLE_CLASS;
    }
    Klass* the_class_oop = java_lang_Class::as_Klass(mirror);
    instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
    Symbol*  the_class_sym = the_class->name();

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), _class_load_kind,
      os::available_memory() >> 10));

    // Wrap the agent-supplied class bytes in a stream for the parser.
    ClassFileStream st((u1*) _class_defs[i].class_bytes,
      _class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__");

    // Parse the stream.
    Handle the_class_loader(THREAD, the_class->class_loader());
    Handle protection_domain(THREAD, the_class->protection_domain());
    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(&the_class, _class_load_kind);

    Klass* k = SystemDictionary::parse_stream(the_class_sym,
                                                the_class_loader,
                                                protection_domain,
                                                &st,
                                                THREAD);
    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    instanceKlassHandle scratch_class (THREAD, k);

    // Need to clean up allocated InstanceKlass if there's an error so assign
    // the result here. Caller deallocates all the scratch classes in case of
    // an error.
    _scratch_classes[i] = k;

    // Translate any parser exception into the corresponding JVMTI error.
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'",
        ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;

      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(&the_class, &scratch_class, state);
      Verifier::verify(
        scratch_class, Verifier::ThrowException, true, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Merge the old and new constant pools; failures here are either
    // allocation failures or internal robustness-check failures.
    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("merge_cp_and_rewrite exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    if (VerifyMergedCPBytecodes) {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(&the_class, &scratch_class, state);
        Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD,
          ("verify_byte_codes post merge-CP exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Rewrite the scratch class's bytecodes and link its methods; either
    // step can fail (e.g. with an OutOfMemoryError, mapped below).
    Rewriter::rewrite(scratch_class, THREAD);
    if (!HAS_PENDING_EXCEPTION) {
      scratch_class->link_methods(THREAD);
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("Rewriter::rewrite or link_methods exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), os::available_memory() >> 10));
  }

  return JVMTI_ERROR_NONE;
}
1150 
1151 
1152 // Map old_index to new_index as needed. scratch_cp is only needed
1153 // for RC_TRACE() calls.
1154 void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
1155        int old_index, int new_index) {
1156   if (find_new_index(old_index) != 0) {
1157     // old_index is already mapped
1158     return;
1159   }
1160 
1161   if (old_index == new_index) {
1162     // no mapping is needed
1163     return;
1164   }
1165 
1166   _index_map_p->at_put(old_index, new_index);
1167   _index_map_count++;
1168 
1169   RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
1170     scratch_cp->tag_at(old_index).value(), old_index, new_index));
1171 } // end map_index()
1172 
1173 
1174 // Map old_index to new_index as needed.
1175 void VM_RedefineClasses::map_operand_index(int old_index, int new_index) {
1176   if (find_new_operand_index(old_index) != -1) {
1177     // old_index is already mapped
1178     return;
1179   }
1180 
1181   if (old_index == new_index) {
1182     // no mapping is needed
1183     return;
1184   }
1185 
1186   _operands_index_map_p->at_put(old_index, new_index);
1187   _operands_index_map_count++;
1188 
1189   RC_TRACE(0x00040000, ("mapped bootstrap specifier at index %d to %d", old_index, new_index));
1190 } // end map_index()
1191 
1192 
1193 // Merge old_cp and scratch_cp and return the results of the merge via
1194 // merge_cp_p. The number of entries in *merge_cp_p is returned via
1195 // merge_cp_length_p. The entries in old_cp occupy the same locations
1196 // in *merge_cp_p. Also creates a map of indices from entries in
1197 // scratch_cp to the corresponding entry in *merge_cp_p. Index map
1198 // entries are only created for entries in scratch_cp that occupy a
1199 // different location in *merged_cp_p.
// Returns false if a robustness check fails, or zero/false via CHECK_0
// if an exception becomes pending during the merge; returns true on a
// successful merge.
bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
       constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
       int *merge_cp_length_p, TRAPS) {

  if (merge_cp_p == NULL) {
    assert(false, "caller must provide scratch constantPool");
    return false; // robustness
  }
  if (merge_cp_length_p == NULL) {
    assert(false, "caller must provide scratch CP length");
    return false; // robustness
  }
  // Worst case we need old_cp->length() + scratch_cp()->length(),
  // but the caller might be smart so make sure we have at least
  // the minimum.
  if ((*merge_cp_p)->length() < old_cp->length()) {
    assert(false, "merge area too small");
    return false; // robustness
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
    scratch_cp->length()));

  {
    // Pass 0:
    // The old_cp is copied to *merge_cp_p; this means that any code
    // using old_cp does not have to change. This work looks like a
    // perfect fit for ConstantPool*::copy_cp_to(), but we need to
    // handle one special case:
    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
    // This will make verification happy.

    int old_i;  // index into old_cp

    // index zero (0) is not used in constantPools
    for (old_i = 1; old_i < old_cp->length(); old_i++) {
      // leave debugging crumb
      jbyte old_tag = old_cp->tag_at(old_i).value();
      switch (old_tag) {
      case JVM_CONSTANT_Class:
      case JVM_CONSTANT_UnresolvedClass:
        // revert the copy to JVM_CONSTANT_UnresolvedClass
        // May be resolving while calling this so do the same for
        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
        (*merge_cp_p)->unresolved_klass_at_put(old_i,
          old_cp->klass_name_at(old_i));
        break;

      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // just copy the entry to *merge_cp_p, but double and long take
        // two constant pool entries
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        old_i++;
        break;

      default:
        // just copy the entry to *merge_cp_p
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        break;
      }
    } // end for each old_cp entry

    // Carry over the operand (bootstrap specifier) arrays as well.
    ConstantPool::copy_operands(old_cp, *merge_cp_p, CHECK_0);
    (*merge_cp_p)->extend_operands(scratch_cp, CHECK_0);

    // We don't need to sanity check that *merge_cp_length_p is within
    // *merge_cp_p bounds since we have the minimum on-entry check above.
    (*merge_cp_length_p) = old_i;
  }

  // merge_cp_len should be the same as old_cp->length() at this point
  // so this trace message is really a "warm-and-breathing" message.
  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));

  int scratch_i;  // index into scratch_cp
  {
    // Pass 1a:
    // Compare scratch_cp entries to the old_cp entries that we have
    // already copied to *merge_cp_p. In this pass, we are eliminating
    // exact duplicates (matching entry at same index) so we only
    // compare entries in the common indice range.
    int increment = 1;
    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
        scratch_i, CHECK_0);
      if (match) {
        // found a match at the same index so nothing more to do
        continue;
      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
                                              *merge_cp_p, scratch_i)) {
        // The mismatch in compare_entry_to() above is because of a
        // resolved versus unresolved class entry at the same index
        // with the same string value. Since Pass 0 reverted any
        // class entries to unresolved class entries in *merge_cp_p,
        // we go with the unresolved class entry.
        continue;
      }

      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
        CHECK_0);
      if (found_i != 0) {
        guarantee(found_i != scratch_i,
          "compare_entry_to() and find_matching_entry() do not agree");

        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // The find_matching_entry() call above could fail to find a match
      // due to a resolved versus unresolved class or string entry situation
      // like we solved above with the is_unresolved_*_mismatch() calls.
      // However, we would have to call is_unresolved_*_mismatch() over
      // all of *merge_cp_p (potentially) and that doesn't seem to be
      // worth the time.

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }
  }

  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
    *merge_cp_length_p, scratch_i, _index_map_count));

  if (scratch_i < scratch_cp->length()) {
    // Pass 1b:
    // old_cp is smaller than scratch_cp so there are entries in
    // scratch_cp that we have not yet processed. We take care of
    // those now.
    int increment = 1;
    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      int found_i =
        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
      if (found_i != 0) {
        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }

    RC_TRACE_WITH_THREAD(0x00020000, THREAD,
      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
      *merge_cp_length_p, scratch_i, _index_map_count));
  }
  // Complete the merge of the operand (bootstrap specifier) arrays.
  finalize_operands_merge(*merge_cp_p, THREAD);

  return true;
} // end merge_constant_pools()
1386 
1387 
1388 // Scoped object to clean up the constant pool(s) created for merging
1389 class MergeCPCleaner {
1390   ClassLoaderData*   _loader_data;
1391   ConstantPool*      _cp;
1392   ConstantPool*      _scratch_cp;
1393  public:
1394   MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
1395                  _loader_data(loader_data), _cp(merge_cp), _scratch_cp(NULL) {}
1396   ~MergeCPCleaner() {
1397     _loader_data->add_to_deallocate_list(_cp);
1398     if (_scratch_cp != NULL) {
1399       _loader_data->add_to_deallocate_list(_scratch_cp);
1400     }
1401   }
1402   void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
1403 };
1404 
// Merge constant pools between the_class and scratch_class and
// potentially rewrite bytecodes in scratch_class to use the merged
// constant pool.
//
// On success, scratch_class may get its constant pool replaced by a
// shrunken copy of the merged pool; any pool that becomes garbage is
// queued for deallocation by the MergeCPCleaner scoped object.
// Returns JVMTI_ERROR_NONE on success, JVMTI_ERROR_OUT_OF_MEMORY on
// allocation failure (via the CHECK_ macros), or JVMTI_ERROR_INTERNAL
// if the merge or a robustness check fails.
jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
             instanceKlassHandle the_class, instanceKlassHandle scratch_class,
             TRAPS) {
  // worst case merged constant pool length is old and new combined
  int merge_cp_length = the_class->constants()->length()
        + scratch_class->constants()->length();

  // Constant pools are not easily reused so we allocate a new one
  // each time.
  // merge_cp is created unsafe for concurrent GC processing.  It
  // should be marked safe before discarding it. Even though
  // garbage,  if it crosses a card boundary, it may be scanned
  // in order to find the start of the first complete object on the card.
  ClassLoaderData* loader_data = the_class->class_loader_data();
  ConstantPool* merge_cp_oop =
    ConstantPool::allocate(loader_data,
                           merge_cp_length,
                           CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);

  HandleMark hm(THREAD);  // make sure handles are cleared before
                          // MergeCPCleaner clears out merge_cp_oop
  constantPoolHandle merge_cp(THREAD, merge_cp_oop);

  // Get constants() from the old class because it could have been rewritten
  // while we were at a safepoint allocating a new constant pool.
  constantPoolHandle old_cp(THREAD, the_class->constants());
  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());

  // If the length changed, the class was redefined out from under us. Return
  // an error.
  if (merge_cp_length != the_class->constants()->length()
         + scratch_class->constants()->length()) {
    return JVMTI_ERROR_INTERNAL;
  }

  // Update the version number of the constant pool
  merge_cp->increment_and_save_version(old_cp->version());

  ResourceMark rm(THREAD);
  _index_map_count = 0;
  // one slot per scratch_cp entry; -1 marks an unmapped index
  _index_map_p = new intArray(scratch_cp->length(), -1);

  // parallel bookkeeping for the constant pool operand arrays
  _operands_cur_length = ConstantPool::operand_array_length(old_cp->operands());
  _operands_index_map_count = 0;
  _operands_index_map_p = new intArray(
    ConstantPool::operand_array_length(scratch_cp->operands()), -1);

  // reference to the cp holder is needed for copy_operands()
  merge_cp->set_pool_holder(scratch_class());
  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
                  &merge_cp_length, THREAD);
  merge_cp->set_pool_holder(NULL);

  if (!result) {
    // The merge can fail due to memory allocation failure or due
    // to robustness checks.
    return JVMTI_ERROR_INTERNAL;
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));

  if (_index_map_count == 0) {
    // there is nothing to map between the new and merged constant pools

    if (old_cp->length() == scratch_cp->length()) {
      // The old and new constant pools are the same length and the
      // index map is empty. This means that the three constant pools
      // are equivalent (but not the same). Unfortunately, the new
      // constant pool has not gone through link resolution nor have
      // the new class bytecodes gone through constant pool cache
      // rewriting so we can't use the old constant pool with the new
      // class.

      // toss the merged constant pool at return
    } else if (old_cp->length() < scratch_cp->length()) {
      // The old constant pool has fewer entries than the new constant
      // pool and the index map is empty. This means the new constant
      // pool is a superset of the old constant pool. However, the old
      // class bytecodes have already gone through constant pool cache
      // rewriting so we can't use the new constant pool with the old
      // class.

      // toss the merged constant pool at return
    } else {
      // The old constant pool has more entries than the new constant
      // pool and the index map is empty. This means that both the old
      // and merged constant pools are supersets of the new constant
      // pool.

      // Replace the new constant pool with a shrunken copy of the
      // merged constant pool
      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                            CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
      // The new constant pool replaces scratch_cp so have cleaner clean it up.
      // It can't be cleaned up while there are handles to it.
      cp_cleaner.add_scratch_cp(scratch_cp());
    }
  } else {
    if (RC_TRACE_ENABLED(0x00040000)) {
      // don't want to loop unless we are tracing
      int count = 0;
      for (int i = 1; i < _index_map_p->length(); i++) {
        int value = _index_map_p->at(i);

        if (value != -1) {
          RC_TRACE_WITH_THREAD(0x00040000, THREAD,
            ("index_map[%d]: old=%d new=%d", count, i, value));
          count++;
        }
      }
    }

    // We have entries mapped between the new and merged constant pools
    // so we have to rewrite some constant pool references.
    if (!rewrite_cp_refs(scratch_class, THREAD)) {
      return JVMTI_ERROR_INTERNAL;
    }

    // Replace the new constant pool with a shrunken copy of the
    // merged constant pool so now the rewritten bytecodes have
    // valid references; the previous new constant pool will get
    // GCed.
    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                          CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
    // The new constant pool replaces scratch_cp so have cleaner clean it up.
    // It can't be cleaned up while there are handles to it.
    cp_cleaner.add_scratch_cp(scratch_cp());
  }

  return JVMTI_ERROR_NONE;
} // end merge_cp_and_rewrite()
1541 
1542 
// Rewrite constant pool references in klass scratch_class.
//
// Visits every structure in scratch_class that can embed a constant
// pool index -- method bytecodes, each flavor of (type) annotations,
// the source file name index and the generic signature index -- and
// remaps any index that was relocated by the constant pool merge
// (find_new_index() returns 0 for indexes that did not move).
// Returns false as soon as any sub-rewrite fails.
bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
       TRAPS) {

  // rewrite constant pool references in the methods:
  if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the class_annotations:
  if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the fields_annotations:
  if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_annotations:
  if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_parameter_annotations:
  if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
         THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_default_annotations:
  if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
         THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the class_type_annotations:
  if (!rewrite_cp_refs_in_class_type_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the fields_type_annotations:
  if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_type_annotations:
  if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class, THREAD)) {
    // propagate failure back to caller
    return false;
  }

  // There can be type annotations in the Code part of a method_info attribute.
  // These annotations are not accessible, even by reflection.
  // Currently they are not even parsed by the ClassFileParser.
  // If runtime access is added they will also need to be rewritten.

  // rewrite source file name index:
  u2 source_file_name_idx = scratch_class->source_file_name_index();
  if (source_file_name_idx != 0) {
    u2 new_source_file_name_idx = find_new_index(source_file_name_idx);
    // a zero result means the index did not move, so leave it alone
    if (new_source_file_name_idx != 0) {
      scratch_class->set_source_file_name_index(new_source_file_name_idx);
    }
  }

  // rewrite class generic signature index:
  u2 generic_signature_index = scratch_class->generic_signature_index();
  if (generic_signature_index != 0) {
    u2 new_generic_signature_index = find_new_index(generic_signature_index);
    // a zero result means the index did not move, so leave it alone
    if (new_generic_signature_index != 0) {
      scratch_class->set_generic_signature_index(new_generic_signature_index);
    }
  }

  return true;
} // end rewrite_cp_refs()
1628 
// Rewrite constant pool references in the methods.
//
// Each method is rewritten by rewrite_cp_refs_in_method(). When that
// call had to replace a method (ldc -> ldc_w growth), the replacement
// is installed in the methods array -- even if an exception is pending,
// because the original is already on the deallocation list. Returns
// false, after clearing any pending exception, if rewriting any
// method fails.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
       instanceKlassHandle scratch_class, TRAPS) {

  Array<Method*>* methods = scratch_class->methods();

  if (methods == NULL || methods->length() == 0) {
    // no methods so nothing to do
    return true;
  }

  // rewrite constant pool references in the methods:
  for (int i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    methodHandle new_method;
    rewrite_cp_refs_in_method(method, &new_method, THREAD);
    if (!new_method.is_null()) {
      // the method has been replaced so save the new method version
      // even in the case of an exception.  original method is on the
      // deallocation list.
      methods->at_put(i, new_method());
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("rewrite_cp_refs_in_method exception: '%s'", ex_name->as_C_string()));
      // Need to clear pending exception here as the super caller sets
      // the JVMTI_ERROR_INTERNAL if the returned value is false.
      CLEAR_PENDING_EXCEPTION;
      return false;
    }
  }

  return true;
}
1665 
1666 
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// Walks the bytecodes and patches every constant pool index that
// find_new_index() reports as remapped. If an ldc's new index no
// longer fits in one byte, the instruction must grow into an ldc_w;
// the Relocator then produces a new, larger Method* which is returned
// through new_method_p so the caller can install it. Otherwise
// *new_method_p is left as a null handle and patching happens in place.
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the Method*, then the bytecodes will also move which
  // will likely cause a crash. We create a No_Safepoint_Verifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  No_Safepoint_Verifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(method(), bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      case Bytecodes::_ldc:
      {
        // ldc carries a one-byte constant pool index
        int cp_index = *(bcp + 1);
        int new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
              bcp, cp_index, new_index));
            *(bcp + 1) = new_index;
          } else {
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
              Bytecodes::name(c), bcp, cp_index, new_index));
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4]; // max instruction size is 4 bytes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, NULL /* no RelocatorListener needed */);
            methodHandle m;
            {
              // inserting space can allocate, which may safepoint, so
              // the No_Safepoint_Verifier must be paused around it
              Pause_No_Safepoint_Verifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, CHECK);
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray      : // fall through
      case Bytecodes::_checkcast      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_instanceof     : // fall through
      case Bytecodes::_invokedynamic  : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial  : // fall through
      case Bytecodes::_invokestatic   : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_ldc_w          : // fall through
      case Bytecodes::_ldc2_w         : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new            : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_putstatic      :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        int new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
            bcp, cp_index, new_index));
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the ConstantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and ConstantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
    }
  } // end for each bytecode

  // We also need to rewrite the parameter name indexes, if there is
  // method parameter data present
  if(method->has_method_parameters()) {
    const int len = method->method_parameters_length();
    MethodParametersElement* elem = method->method_parameters_start();

    for (int i = 0; i < len; i++) {
      const u2 cp_index = elem[i].name_cp_index;
      const u2 new_cp_index = find_new_index(cp_index);
      // a zero result means the index did not move, so leave it alone
      if (new_cp_index != 0) {
        elem[i].name_cp_index = new_cp_index;
      }
    }
  }
} // end rewrite_cp_refs_in_method()
1811 
1812 
1813 // Rewrite constant pool references in the class_annotations field.
1814 bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
1815        instanceKlassHandle scratch_class, TRAPS) {
1816 
1817   AnnotationArray* class_annotations = scratch_class->class_annotations();
1818   if (class_annotations == NULL || class_annotations->length() == 0) {
1819     // no class_annotations so nothing to do
1820     return true;
1821   }
1822 
1823   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1824     ("class_annotations length=%d", class_annotations->length()));
1825 
1826   int byte_i = 0;  // byte index into class_annotations
1827   return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
1828            THREAD);
1829 }
1830 
1831 
1832 // Rewrite constant pool references in an annotations typeArray. This
1833 // "structure" is adapted from the RuntimeVisibleAnnotations_attribute
1834 // that is described in section 4.8.15 of the 2nd-edition of the VM spec:
1835 //
1836 // annotations_typeArray {
1837 //   u2 num_annotations;
1838 //   annotation annotations[num_annotations];
1839 // }
1840 //
1841 bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
1842        AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
1843 
1844   if ((byte_i_ref + 2) > annotations_typeArray->length()) {
1845     // not enough room for num_annotations field
1846     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1847       ("length() is too small for num_annotations field"));
1848     return false;
1849   }
1850 
1851   u2 num_annotations = Bytes::get_Java_u2((address)
1852                          annotations_typeArray->adr_at(byte_i_ref));
1853   byte_i_ref += 2;
1854 
1855   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1856     ("num_annotations=%d", num_annotations));
1857 
1858   int calc_num_annotations = 0;
1859   for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
1860     if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
1861            byte_i_ref, THREAD)) {
1862       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1863         ("bad annotation_struct at %d", calc_num_annotations));
1864       // propagate failure back to caller
1865       return false;
1866     }
1867   }
1868   assert(num_annotations == calc_num_annotations, "sanity check");
1869 
1870   return true;
1871 } // end rewrite_cp_refs_in_annotations_typeArray()
1872 
1873 
// Rewrite constant pool references in the annotation struct portion of
// an annotations_typeArray. This "structure" is from section 4.8.15 of
// the 2nd-edition of the VM spec:
//
// struct annotation {
//   u2 type_index;
//   u2 num_element_value_pairs;
//   {
//     u2 element_name_index;
//     element_value value;
//   } element_value_pairs[num_element_value_pairs];
// }
//
// byte_i_ref is advanced past everything consumed. Returns false on a
// truncated encoding or a bad nested element_value.
bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
    // not enough room for smallest annotation_struct
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for annotation_struct"));
    return false;
  }

  // rewrite_cp_ref_in_annotation_data() advances byte_i_ref past the u2
  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
                    byte_i_ref, "mapped old type_index=%d", THREAD);

  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
                                 annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
    ("type_index=%d  num_element_value_pairs=%d", type_index,
    num_element_value_pairs));

  int calc_num_element_value_pairs = 0;
  for (; calc_num_element_value_pairs < num_element_value_pairs;
       calc_num_element_value_pairs++) {
    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
      // not enough room for another element_name_index, let alone
      // the rest of another component
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("length() is too small for element_name_index"));
      return false;
    }

    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old element_name_index=%d", THREAD);

    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("element_name_index=%d", element_name_index));

    // the element_value may itself nest annotations and arrays
    if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
           byte_i_ref, THREAD)) {
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("bad element_value at %d", calc_num_element_value_pairs));
      // propagate failure back to caller
      return false;
    }
  } // end for each component
  assert(num_element_value_pairs == calc_num_element_value_pairs,
    "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotation_struct()
1938 
1939 
1940 // Rewrite a constant pool reference at the current position in
1941 // annotations_typeArray if needed. Returns the original constant
1942 // pool reference if a rewrite was not needed or the new constant
1943 // pool reference if a rewrite was needed.
1944 PRAGMA_DIAG_PUSH
1945 PRAGMA_FORMAT_NONLITERAL_IGNORED
1946 u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
1947      AnnotationArray* annotations_typeArray, int &byte_i_ref,
1948      const char * trace_mesg, TRAPS) {
1949 
1950   address cp_index_addr = (address)
1951     annotations_typeArray->adr_at(byte_i_ref);
1952   u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
1953   u2 new_cp_index = find_new_index(old_cp_index);
1954   if (new_cp_index != 0) {
1955     RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
1956     Bytes::put_Java_u2(cp_index_addr, new_cp_index);
1957     old_cp_index = new_cp_index;
1958   }
1959   byte_i_ref += 2;
1960   return old_cp_index;
1961 }
1962 PRAGMA_DIAG_POP
1963 
1964 
// Rewrite constant pool references in the element_value portion of an
// annotations_typeArray. This "structure" is from section 4.8.15.1 of
// the 2nd-edition of the VM spec:
//
// struct element_value {
//   u1 tag;
//   union {
//     u2 const_value_index;
//     {
//       u2 type_name_index;
//       u2 const_name_index;
//     } enum_const_value;
//     u2 class_info_index;
//     annotation annotation_value;
//     struct {
//       u2 num_values;
//       element_value values[num_values];
//     } array_value;
//   } value;
// }
//
// The tag selects which union field follows; '@' and '[' recurse into
// nested annotation / element_value structures. byte_i_ref is advanced
// past everything consumed. Returns false on a truncated encoding or
// an unrecognized tag.
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for a tag"));
    return false;
  }

  u1 tag = annotations_typeArray->at(byte_i_ref);
  byte_i_ref++;
  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case 'B':  // byte
    case 'C':  // char
    case 'D':  // double
    case 'F':  // float
    case 'I':  // int
    case 'J':  // long
    case 'S':  // short
    case 'Z':  // boolean

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a const_value_index"));
        return false;
      }

      // rewrites the index in place and advances byte_i_ref
      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "mapped old const_value_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("const_value_index=%d", const_value_index));
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a enum_const_value"));
        return false;
      }

      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "mapped old type_name_index=%d", THREAD);

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old const_name_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("type_name_index=%d  const_name_index=%d", type_name_index,
        const_name_index));
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a class_info_index"));
        return false;
      }

      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old class_info_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("class_info_index=%d", class_info_index));
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
             byte_i_ref, THREAD)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case '[':
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a num_values field"));
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("num_values=%d", num_values));

      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        // recurse for each array element
        if (!rewrite_cp_refs_in_element_value(
               annotations_typeArray, byte_i_ref, THREAD)) {
          RC_TRACE_WITH_THREAD(0x02000000, THREAD,
            ("bad nested element_value at %d", calc_num_values));
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("bad tag=0x%x", tag));
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
2122 
2123 
2124 // Rewrite constant pool references in a fields_annotations field.
2125 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
2126        instanceKlassHandle scratch_class, TRAPS) {
2127 
2128   Array<AnnotationArray*>* fields_annotations = scratch_class->fields_annotations();
2129 
2130   if (fields_annotations == NULL || fields_annotations->length() == 0) {
2131     // no fields_annotations so nothing to do
2132     return true;
2133   }
2134 
2135   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2136     ("fields_annotations length=%d", fields_annotations->length()));
2137 
2138   for (int i = 0; i < fields_annotations->length(); i++) {
2139     AnnotationArray* field_annotations = fields_annotations->at(i);
2140     if (field_annotations == NULL || field_annotations->length() == 0) {
2141       // this field does not have any annotations so skip it
2142       continue;
2143     }
2144 
2145     int byte_i = 0;  // byte index into field_annotations
2146     if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
2147            THREAD)) {
2148       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2149         ("bad field_annotations at %d", i));
2150       // propagate failure back to caller
2151       return false;
2152     }
2153   }
2154 
2155   return true;
2156 } // end rewrite_cp_refs_in_fields_annotations()
2157 
2158 
2159 // Rewrite constant pool references in a methods_annotations field.
2160 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
2161        instanceKlassHandle scratch_class, TRAPS) {
2162 
2163   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2164     Method* m = scratch_class->methods()->at(i);
2165     AnnotationArray* method_annotations = m->constMethod()->method_annotations();
2166 
2167     if (method_annotations == NULL || method_annotations->length() == 0) {
2168       // this method does not have any annotations so skip it
2169       continue;
2170     }
2171 
2172     int byte_i = 0;  // byte index into method_annotations
2173     if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
2174            THREAD)) {
2175       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2176         ("bad method_annotations at %d", i));
2177       // propagate failure back to caller
2178       return false;
2179     }
2180   }
2181 
2182   return true;
2183 } // end rewrite_cp_refs_in_methods_annotations()
2184 
2185 
2186 // Rewrite constant pool references in a methods_parameter_annotations
2187 // field. This "structure" is adapted from the
2188 // RuntimeVisibleParameterAnnotations_attribute described in section
2189 // 4.8.17 of the 2nd-edition of the VM spec:
2190 //
2191 // methods_parameter_annotations_typeArray {
2192 //   u1 num_parameters;
2193 //   {
2194 //     u2 num_annotations;
2195 //     annotation annotations[num_annotations];
2196 //   } parameter_annotations[num_parameters];
2197 // }
2198 //
2199 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
2200        instanceKlassHandle scratch_class, TRAPS) {
2201 
2202   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2203     Method* m = scratch_class->methods()->at(i);
2204     AnnotationArray* method_parameter_annotations = m->constMethod()->parameter_annotations();
2205     if (method_parameter_annotations == NULL
2206         || method_parameter_annotations->length() == 0) {
2207       // this method does not have any parameter annotations so skip it
2208       continue;
2209     }
2210 
2211     if (method_parameter_annotations->length() < 1) {
2212       // not enough room for a num_parameters field
2213       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2214         ("length() is too small for a num_parameters field at %d", i));
2215       return false;
2216     }
2217 
2218     int byte_i = 0;  // byte index into method_parameter_annotations
2219 
2220     u1 num_parameters = method_parameter_annotations->at(byte_i);
2221     byte_i++;
2222 
2223     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2224       ("num_parameters=%d", num_parameters));
2225 
2226     int calc_num_parameters = 0;
2227     for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
2228       if (!rewrite_cp_refs_in_annotations_typeArray(
2229              method_parameter_annotations, byte_i, THREAD)) {
2230         RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2231           ("bad method_parameter_annotations at %d", calc_num_parameters));
2232         // propagate failure back to caller
2233         return false;
2234       }
2235     }
2236     assert(num_parameters == calc_num_parameters, "sanity check");
2237   }
2238 
2239   return true;
2240 } // end rewrite_cp_refs_in_methods_parameter_annotations()
2241 
2242 
2243 // Rewrite constant pool references in a methods_default_annotations
2244 // field. This "structure" is adapted from the AnnotationDefault_attribute
2245 // that is described in section 4.8.19 of the 2nd-edition of the VM spec:
2246 //
2247 // methods_default_annotations_typeArray {
2248 //   element_value default_value;
2249 // }
2250 //
2251 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
2252        instanceKlassHandle scratch_class, TRAPS) {
2253 
2254   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2255     Method* m = scratch_class->methods()->at(i);
2256     AnnotationArray* method_default_annotations = m->constMethod()->default_annotations();
2257     if (method_default_annotations == NULL
2258         || method_default_annotations->length() == 0) {
2259       // this method does not have any default annotations so skip it
2260       continue;
2261     }
2262 
2263     int byte_i = 0;  // byte index into method_default_annotations
2264 
2265     if (!rewrite_cp_refs_in_element_value(
2266            method_default_annotations, byte_i, THREAD)) {
2267       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2268         ("bad default element_value at %d", i));
2269       // propagate failure back to caller
2270       return false;
2271     }
2272   }
2273 
2274   return true;
2275 } // end rewrite_cp_refs_in_methods_default_annotations()
2276 
2277 
2278 // Rewrite constant pool references in a class_type_annotations field.
2279 bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
2280        instanceKlassHandle scratch_class, TRAPS) {
2281 
2282   AnnotationArray* class_type_annotations = scratch_class->class_type_annotations();
2283   if (class_type_annotations == NULL || class_type_annotations->length() == 0) {
2284     // no class_type_annotations so nothing to do
2285     return true;
2286   }
2287 
2288   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2289     ("class_type_annotations length=%d", class_type_annotations->length()));
2290 
2291   int byte_i = 0;  // byte index into class_type_annotations
2292   return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations,
2293       byte_i, "ClassFile", THREAD);
2294 } // end rewrite_cp_refs_in_class_type_annotations()
2295 
2296 
2297 // Rewrite constant pool references in a fields_type_annotations field.
2298 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(
2299        instanceKlassHandle scratch_class, TRAPS) {
2300 
2301   Array<AnnotationArray*>* fields_type_annotations = scratch_class->fields_type_annotations();
2302   if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) {
2303     // no fields_type_annotations so nothing to do
2304     return true;
2305   }
2306 
2307   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2308     ("fields_type_annotations length=%d", fields_type_annotations->length()));
2309 
2310   for (int i = 0; i < fields_type_annotations->length(); i++) {
2311     AnnotationArray* field_type_annotations = fields_type_annotations->at(i);
2312     if (field_type_annotations == NULL || field_type_annotations->length() == 0) {
2313       // this field does not have any annotations so skip it
2314       continue;
2315     }
2316 
2317     int byte_i = 0;  // byte index into field_type_annotations
2318     if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations,
2319            byte_i, "field_info", THREAD)) {
2320       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2321         ("bad field_type_annotations at %d", i));
2322       // propagate failure back to caller
2323       return false;
2324     }
2325   }
2326 
2327   return true;
2328 } // end rewrite_cp_refs_in_fields_type_annotations()
2329 
2330 
2331 // Rewrite constant pool references in a methods_type_annotations field.
2332 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations(
2333        instanceKlassHandle scratch_class, TRAPS) {
2334 
2335   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2336     Method* m = scratch_class->methods()->at(i);
2337     AnnotationArray* method_type_annotations = m->constMethod()->type_annotations();
2338 
2339     if (method_type_annotations == NULL || method_type_annotations->length() == 0) {
2340       // this method does not have any annotations so skip it
2341       continue;
2342     }
2343 
2344     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2345         ("methods type_annotations length=%d", method_type_annotations->length()));
2346 
2347     int byte_i = 0;  // byte index into method_type_annotations
2348     if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations,
2349            byte_i, "method_info", THREAD)) {
2350       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2351         ("bad method_type_annotations at %d", i));
2352       // propagate failure back to caller
2353       return false;
2354     }
2355   }
2356 
2357   return true;
2358 } // end rewrite_cp_refs_in_methods_type_annotations()
2359 
2360 
2361 // Rewrite constant pool references in a type_annotations
2362 // field. This "structure" is adapted from the
2363 // RuntimeVisibleTypeAnnotations_attribute described in
2364 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2365 //
2366 // type_annotations_typeArray {
2367 //   u2              num_annotations;
2368 //   type_annotation annotations[num_annotations];
2369 // }
2370 //
2371 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray(
2372        AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2373        const char * location_mesg, TRAPS) {
2374 
2375   if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2376     // not enough room for num_annotations field
2377     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2378       ("length() is too small for num_annotations field"));
2379     return false;
2380   }
2381 
2382   u2 num_annotations = Bytes::get_Java_u2((address)
2383                          type_annotations_typeArray->adr_at(byte_i_ref));
2384   byte_i_ref += 2;
2385 
2386   RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2387     ("num_type_annotations=%d", num_annotations));
2388 
2389   int calc_num_annotations = 0;
2390   for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2391     if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray,
2392            byte_i_ref, location_mesg, THREAD)) {
2393       RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2394         ("bad type_annotation_struct at %d", calc_num_annotations));
2395       // propagate failure back to caller
2396       return false;
2397     }
2398   }
2399   assert(num_annotations == calc_num_annotations, "sanity check");
2400 
2401   if (byte_i_ref != type_annotations_typeArray->length()) {
2402     RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2403       ("read wrong amount of bytes at end of processing "
2404        "type_annotations_typeArray (%d of %d bytes were read)",
2405        byte_i_ref, type_annotations_typeArray->length()));
2406     return false;
2407   }
2408 
2409   return true;
2410 } // end rewrite_cp_refs_in_type_annotations_typeArray()
2411 
2412 
2413 // Rewrite constant pool references in a type_annotation
2414 // field. This "structure" is adapted from the
2415 // RuntimeVisibleTypeAnnotations_attribute described in
2416 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2417 //
2418 // type_annotation {
2419 //   u1 target_type;
2420 //   union {
2421 //     type_parameter_target;
2422 //     supertype_target;
2423 //     type_parameter_bound_target;
2424 //     empty_target;
2425 //     method_formal_parameter_target;
2426 //     throws_target;
2427 //     localvar_target;
2428 //     catch_target;
2429 //     offset_target;
2430 //     type_argument_target;
2431 //   } target_info;
2432 //   type_path target_path;
2433 //   annotation anno;
2434 // }
2435 //
2436 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct(
2437        AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2438        const char * location_mesg, TRAPS) {
2439 
2440   if (!skip_type_annotation_target(type_annotations_typeArray,
2441          byte_i_ref, location_mesg, THREAD)) {
2442     return false;
2443   }
2444 
2445   if (!skip_type_annotation_type_path(type_annotations_typeArray,
2446          byte_i_ref, THREAD)) {
2447     return false;
2448   }
2449 
2450   if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray,
2451          byte_i_ref, THREAD)) {
2452     return false;
2453   }
2454 
2455   return true;
2456 } // end rewrite_cp_refs_in_type_annotation_struct()
2457 
2458 
2459 // Read, verify and skip over the target_type and target_info part
2460 // so that rewriting can continue in the later parts of the struct.
2461 //
2462 // u1 target_type;
2463 // union {
2464 //   type_parameter_target;
2465 //   supertype_target;
2466 //   type_parameter_bound_target;
2467 //   empty_target;
2468 //   method_formal_parameter_target;
2469 //   throws_target;
2470 //   localvar_target;
2471 //   catch_target;
2472 //   offset_target;
2473 //   type_argument_target;
2474 // } target_info;
2475 //
bool VM_RedefineClasses::skip_type_annotation_target(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
       const char * location_mesg, TRAPS) {

  // Every read below is bounds-checked against length() before bytes are
  // consumed; byte_i_ref is only advanced past data that is present.
  // Returns false (without ShouldNotReachHere in product) on truncated
  // or unrecognized input so the caller can reject the redefinition.
  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
    // not enough room for a target_type let alone the rest of a type_annotation
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for a target_type"));
    return false;
  }

  u1 target_type = type_annotations_typeArray->at(byte_i_ref);
  byte_i_ref += 1;
  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("target_type=0x%.2x", target_type));
  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("location=%s", location_mesg));

  // Skip over target_info; the union member is selected by target_type
  // and none of the target_info contents contain constant pool indices,
  // so they only need to be skipped, not rewritten.
  switch (target_type) {
    case 0x00:
    // kind: type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x01:
    // kind: type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_target {
      //   u1 type_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a type_parameter_target"));
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("type_parameter_target: type_parameter_index=%d",
         type_parameter_index));
    } break;

    case 0x10:
    // kind: type in extends clause of class or interface declaration
    //       (including the direct superclass of an anonymous class declaration),
    //       or in implements clause of interface declaration
    // location: ClassFile

    {
      // struct:
      // supertype_target {
      //   u2 supertype_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a supertype_target"));
        return false;
      }

      u2 supertype_index = Bytes::get_Java_u2((address)
                             type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("supertype_target: supertype_index=%d", supertype_index));
    } break;

    case 0x11:
    // kind: type in bound of type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x12:
    // kind: type in bound of type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_bound_target {
      //   u1 type_parameter_index;
      //   u1 bound_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a type_parameter_bound_target"));
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;
      u1 bound_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d",
         type_parameter_index, bound_index));
    } break;

    case 0x13:
    // kind: type in field declaration
    // location: field_info
    case 0x14:
    // kind: return type of method, or type of newly constructed object
    // location: method_info
    case 0x15:
    // kind: receiver type of method or constructor
    // location: method_info

    {
      // struct:
      // empty_target {
      // }
      //
      // no target_info bytes to skip for these target_types
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("empty_target"));
    } break;

    case 0x16:
    // kind: type in formal parameter declaration of method, constructor, or lambda expression
    // location: method_info

    {
      // struct:
      // formal_parameter_target {
      //   u1 formal_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a formal_parameter_target"));
        return false;
      }

      u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("formal_parameter_target: formal_parameter_index=%d",
         formal_parameter_index));
    } break;

    case 0x17:
    // kind: type in throws clause of method or constructor
    // location: method_info

    {
      // struct:
      // throws_target {
      //   u2 throws_type_index
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a throws_target"));
        return false;
      }

      u2 throws_type_index = Bytes::get_Java_u2((address)
                               type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("throws_target: throws_type_index=%d", throws_type_index));
    } break;

    case 0x40:
    // kind: type in local variable declaration
    // location: Code
    case 0x41:
    // kind: type in resource variable declaration
    // location: Code

    {
      // struct:
      // localvar_target {
      //   u2 table_length;
      //   struct {
      //     u2 start_pc;
      //     u2 length;
      //     u2 index;
      //   } table[table_length];
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        // not enough room for a table_length let alone the rest of a localvar_target
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a localvar_target table_length"));
        return false;
      }

      u2 table_length = Bytes::get_Java_u2((address)
                          type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("localvar_target: table_length=%d", table_length));

      int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry
      int table_size = table_length * table_struct_size;

      // validate the full table before skipping it so byte_i_ref cannot
      // be advanced past the end of the typeArray
      if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) {
        // not enough room for a table
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a table array of length %d", table_length));
        return false;
      }

      // Skip over table
      byte_i_ref += table_size;
    } break;

    case 0x42:
    // kind: type in exception parameter declaration
    // location: Code

    {
      // struct:
      // catch_target {
      //   u2 exception_table_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a catch_target"));
        return false;
      }

      u2 exception_table_index = Bytes::get_Java_u2((address)
                                   type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("catch_target: exception_table_index=%d", exception_table_index));
    } break;

    case 0x43:
    // kind: type in instanceof expression
    // location: Code
    case 0x44:
    // kind: type in new expression
    // location: Code
    case 0x45:
    // kind: type in method reference expression using ::new
    // location: Code
    case 0x46:
    // kind: type in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // offset_target {
      //   u2 offset;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a offset_target"));
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("offset_target: offset=%d", offset));
    } break;

    case 0x47:
    // kind: type in cast expression
    // location: Code
    case 0x48:
    // kind: type argument for generic constructor in new expression or
    //       explicit constructor invocation statement
    // location: Code
    case 0x49:
    // kind: type argument for generic method in method invocation expression
    // location: Code
    case 0x4A:
    // kind: type argument for generic constructor in method reference expression using ::new
    // location: Code
    case 0x4B:
    // kind: type argument for generic method in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // type_argument_target {
      //   u2 offset;
      //   u1 type_argument_index;
      // }
      //
      if ((byte_i_ref + 3) > type_annotations_typeArray->length()) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a type_argument_target"));
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("type_argument_target: offset=%d, type_argument_index=%d",
         offset, type_argument_index));
    } break;

    default:
      // target_type values not handled above are not part of the format
      // this code understands; a debug build fails fast, a product build
      // rejects the input and lets the caller report the failure
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("unknown target_type"));
#ifdef ASSERT
      ShouldNotReachHere();
#endif
      return false;
  }

  return true;
} // end skip_type_annotation_target()
2799 
2800 
2801 // Read, verify and skip over the type_path part so that rewriting
2802 // can continue in the later parts of the struct.
2803 //
2804 // type_path {
2805 //   u1 path_length;
2806 //   {
2807 //     u1 type_path_kind;
2808 //     u1 type_argument_index;
2809 //   } path[path_length];
2810 // }
2811 //
bool VM_RedefineClasses::skip_type_annotation_type_path(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
    // not enough room for a path_length let alone the rest of the type_path
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for a type_path"));
    return false;
  }

  u1 path_length = type_annotations_typeArray->at(byte_i_ref);
  byte_i_ref += 1;

  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
    ("type_path: path_length=%d", path_length));

  // walk the path entries, verifying each one before skipping it
  int calc_path_length = 0;
  for (; calc_path_length < path_length; calc_path_length++) {
    if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) {
      // not enough room for a path
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("length() is too small for path entry %d of %d",
         calc_path_length, path_length));
      return false;
    }

    u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref);
    byte_i_ref += 1;
    u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
    byte_i_ref += 1;

    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d",
       calc_path_length, type_path_kind, type_argument_index));

    // reject an out-of-range type_path_kind, or a non-zero
    // type_argument_index when the kind is not 3 (type argument)
    if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) {
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("inconsistent type_path values"));
      return false;
    }
  }
  assert(path_length == calc_path_length, "sanity check");

  return true;
} // end skip_type_annotation_type_path()
2858 
2859 
2860 // Rewrite constant pool references in the method's stackmap table.
2861 // These "structures" are adapted from the StackMapTable_attribute that
2862 // is described in section 4.8.4 of the 6.0 version of the VM spec
2863 // (dated 2005.10.26):
2864 // file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
2865 //
2866 // stack_map {
2867 //   u2 number_of_entries;
2868 //   stack_map_frame entries[number_of_entries];
2869 // }
2870 //
2871 void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
2872        methodHandle method, TRAPS) {
2873 
2874   if (!method->has_stackmap_table()) {
2875     return;
2876   }
2877 
2878   AnnotationArray* stackmap_data = method->stackmap_data();
2879   address stackmap_p = (address)stackmap_data->adr_at(0);
2880   address stackmap_end = stackmap_p + stackmap_data->length();
2881 
2882   assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
2883   u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
2884   stackmap_p += 2;
2885 
2886   RC_TRACE_WITH_THREAD(0x04000000, THREAD,
2887     ("number_of_entries=%u", number_of_entries));
2888 
2889   // walk through each stack_map_frame
2890   u2 calc_number_of_entries = 0;
2891   for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
2892     // The stack_map_frame structure is a u1 frame_type followed by
2893     // 0 or more bytes of data:
2894     //
2895     // union stack_map_frame {
2896     //   same_frame;
2897     //   same_locals_1_stack_item_frame;
2898     //   same_locals_1_stack_item_frame_extended;
2899     //   chop_frame;
2900     //   same_frame_extended;
2901     //   append_frame;
2902     //   full_frame;
2903     // }
2904 
2905     assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
2906     u1 frame_type = *stackmap_p;
2907     stackmap_p++;
2908 
2909     // same_frame {
2910     //   u1 frame_type = SAME; /* 0-63 */
2911     // }
2912     if (frame_type <= 63) {
2913       // nothing more to do for same_frame
2914     }
2915 
2916     // same_locals_1_stack_item_frame {
2917     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
2918     //   verification_type_info stack[1];
2919     // }
2920     else if (frame_type >= 64 && frame_type <= 127) {
2921       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2922         calc_number_of_entries, frame_type, THREAD);
2923     }
2924 
2925     // reserved for future use
2926     else if (frame_type >= 128 && frame_type <= 246) {
2927       // nothing more to do for reserved frame_types
2928     }
2929 
2930     // same_locals_1_stack_item_frame_extended {
2931     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
2932     //   u2 offset_delta;
2933     //   verification_type_info stack[1];
2934     // }
2935     else if (frame_type == 247) {
2936       stackmap_p += 2;
2937       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2938         calc_number_of_entries, frame_type, THREAD);
2939     }
2940 
2941     // chop_frame {
2942     //   u1 frame_type = CHOP; /* 248-250 */
2943     //   u2 offset_delta;
2944     // }
2945     else if (frame_type >= 248 && frame_type <= 250) {
2946       stackmap_p += 2;
2947     }
2948 
2949     // same_frame_extended {
2950     //   u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/
2951     //   u2 offset_delta;
2952     // }
2953     else if (frame_type == 251) {
2954       stackmap_p += 2;
2955     }
2956 
2957     // append_frame {
2958     //   u1 frame_type = APPEND; /* 252-254 */
2959     //   u2 offset_delta;
2960     //   verification_type_info locals[frame_type - 251];
2961     // }
2962     else if (frame_type >= 252 && frame_type <= 254) {
2963       assert(stackmap_p + 2 <= stackmap_end,
2964         "no room for offset_delta");
2965       stackmap_p += 2;
2966       u1 len = frame_type - 251;
2967       for (u1 i = 0; i < len; i++) {
2968         rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2969           calc_number_of_entries, frame_type, THREAD);
2970       }
2971     }
2972 
2973     // full_frame {
2974     //   u1 frame_type = FULL_FRAME; /* 255 */
2975     //   u2 offset_delta;
2976     //   u2 number_of_locals;
2977     //   verification_type_info locals[number_of_locals];
2978     //   u2 number_of_stack_items;
2979     //   verification_type_info stack[number_of_stack_items];
2980     // }
2981     else if (frame_type == 255) {
2982       assert(stackmap_p + 2 + 2 <= stackmap_end,
2983         "no room for smallest full_frame");
2984       stackmap_p += 2;
2985 
2986       u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
2987       stackmap_p += 2;
2988 
2989       for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
2990         rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2991           calc_number_of_entries, frame_type, THREAD);
2992       }
2993 
2994       // Use the largest size for the number_of_stack_items, but only get
2995       // the right number of bytes.
2996       u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
2997       stackmap_p += 2;
2998 
2999       for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
3000         rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3001           calc_number_of_entries, frame_type, THREAD);
3002       }
3003     }
3004   } // end while there is a stack_map_frame
3005   assert(number_of_entries == calc_number_of_entries, "sanity check");
3006 } // end rewrite_cp_refs_in_stack_map_table()
3007 
3008 
3009 // Rewrite constant pool references in the verification type info
3010 // portion of the method's stackmap table. These "structures" are
3011 // adapted from the StackMapTable_attribute that is described in
3012 // section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26):
3013 // file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
3014 //
3015 // The verification_type_info structure is a u1 tag followed by 0 or
3016 // more bytes of data:
3017 //
3018 // union verification_type_info {
3019 //   Top_variable_info;
3020 //   Integer_variable_info;
3021 //   Float_variable_info;
3022 //   Long_variable_info;
3023 //   Double_variable_info;
3024 //   Null_variable_info;
3025 //   UninitializedThis_variable_info;
3026 //   Object_variable_info;
3027 //   Uninitialized_variable_info;
3028 // }
3029 //
3030 void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
3031        address& stackmap_p_ref, address stackmap_end, u2 frame_i,
3032        u1 frame_type, TRAPS) {
3033 
3034   assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
3035   u1 tag = *stackmap_p_ref;
3036   stackmap_p_ref++;
3037 
3038   switch (tag) {
3039   // Top_variable_info {
3040   //   u1 tag = ITEM_Top; /* 0 */
3041   // }
3042   // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
3043   case 0:  // fall through
3044 
3045   // Integer_variable_info {
3046   //   u1 tag = ITEM_Integer; /* 1 */
3047   // }
3048   case ITEM_Integer:  // fall through
3049 
3050   // Float_variable_info {
3051   //   u1 tag = ITEM_Float; /* 2 */
3052   // }
3053   case ITEM_Float:  // fall through
3054 
3055   // Double_variable_info {
3056   //   u1 tag = ITEM_Double; /* 3 */
3057   // }
3058   case ITEM_Double:  // fall through
3059 
3060   // Long_variable_info {
3061   //   u1 tag = ITEM_Long; /* 4 */
3062   // }
3063   case ITEM_Long:  // fall through
3064 
3065   // Null_variable_info {
3066   //   u1 tag = ITEM_Null; /* 5 */
3067   // }
3068   case ITEM_Null:  // fall through
3069 
3070   // UninitializedThis_variable_info {
3071   //   u1 tag = ITEM_UninitializedThis; /* 6 */
3072   // }
3073   case ITEM_UninitializedThis:
3074     // nothing more to do for the above tag types
3075     break;
3076 
3077   // Object_variable_info {
3078   //   u1 tag = ITEM_Object; /* 7 */
3079   //   u2 cpool_index;
3080   // }
3081   case ITEM_Object:
3082   {
3083     assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
3084     u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
3085     u2 new_cp_index = find_new_index(cpool_index);
3086     if (new_cp_index != 0) {
3087       RC_TRACE_WITH_THREAD(0x04000000, THREAD,
3088         ("mapped old cpool_index=%d", cpool_index));
3089       Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
3090       cpool_index = new_cp_index;
3091     }
3092     stackmap_p_ref += 2;
3093 
3094     RC_TRACE_WITH_THREAD(0x04000000, THREAD,
3095       ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i,
3096       frame_type, cpool_index));
3097   } break;
3098 
3099   // Uninitialized_variable_info {
3100   //   u1 tag = ITEM_Uninitialized; /* 8 */
3101   //   u2 offset;
3102   // }
3103   case ITEM_Uninitialized:
3104     assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3105     stackmap_p_ref += 2;
3106     break;
3107 
3108   default:
3109     RC_TRACE_WITH_THREAD(0x04000000, THREAD,
3110       ("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag));
3111     ShouldNotReachHere();
3112     break;
3113   } // end switch (tag)
3114 } // end rewrite_cp_refs_in_verification_type_info()
3115 
3116 
// Change the constant pool associated with klass scratch_class to
// scratch_cp. scratch_cp_length elements are copied from scratch_cp
// to a smaller constant pool and the smaller constant pool is
// associated with scratch_class. All constant pool indices recorded
// in the klass (fields, inner classes, methods and their attribute
// tables) are remapped via find_new_index() as needed.
// On allocation/copy failure the pending exception is left for the
// caller to handle.
void VM_RedefineClasses::set_new_constant_pool(
       ClassLoaderData* loader_data,
       instanceKlassHandle scratch_class, constantPoolHandle scratch_cp,
       int scratch_cp_length, TRAPS) {
  assert(scratch_cp->length() >= scratch_cp_length, "sanity check");

  // scratch_cp is a merged constant pool and has enough space for a
  // worst case merge situation. We want to associate the minimum
  // sized constant pool with the klass to save space.
  ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
  constantPoolHandle smaller_cp(THREAD, cp);

  // preserve version() value in the smaller copy
  int version = scratch_cp->version();
  assert(version != 0, "sanity check");
  smaller_cp->set_version(version);

  // attach klass to new constant pool
  // reference to the cp holder is needed for copy_operands()
  smaller_cp->set_pool_holder(scratch_class());

  // Copy entries [1, scratch_cp_length); constant pool slot 0 is unused.
  scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    // Exception is handled in the caller
    loader_data->add_to_deallocate_list(smaller_cp());
    return;
  }
  scratch_cp = smaller_cp;

  // attach new constant pool to klass
  scratch_class->set_constants(scratch_cp());

  int i;  // for portability

  // update each field in klass to use new constant pool indices as needed
  // (find_new_index() returns 0 when the index was not remapped)
  for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) {
    jshort cur_index = fs.name_index();
    jshort new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-name_index change: %d to %d", cur_index, new_index));
      fs.set_name_index(new_index);
    }
    cur_index = fs.signature_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-signature_index change: %d to %d", cur_index, new_index));
      fs.set_signature_index(new_index);
    }
    cur_index = fs.initval_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-initval_index change: %d to %d", cur_index, new_index));
      fs.set_initval_index(new_index);
    }
    cur_index = fs.generic_signature_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-generic_signature change: %d to %d", cur_index, new_index));
      fs.set_generic_signature_index(new_index);
    }
  } // end for each field

  // Update constant pool indices in the inner classes info to use
  // new constant indices as needed. The inner classes info is a
  // quadruple:
  // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
  InnerClassesIterator iter(scratch_class);
  for (; !iter.done(); iter.next()) {
    int cur_index = iter.inner_class_info_index();
    if (cur_index == 0) {
      continue;  // JVM spec. allows null inner class refs so skip it
    }
    int new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("inner_class_info change: %d to %d", cur_index, new_index));
      iter.set_inner_class_info_index(new_index);
    }
    cur_index = iter.outer_class_info_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("outer_class_info change: %d to %d", cur_index, new_index));
      iter.set_outer_class_info_index(new_index);
    }
    cur_index = iter.inner_name_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("inner_name change: %d to %d", cur_index, new_index));
      iter.set_inner_name_index(new_index);
    }
  } // end for each inner class

  // Attach each method in klass to the new constant pool and update
  // to use new constant pool indices as needed:
  Array<Method*>* methods = scratch_class->methods();
  for (i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    method->set_constants(scratch_cp());

    int new_index = find_new_index(method->name_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-name_index change: %d to %d", method->name_index(),
        new_index));
      method->set_name_index(new_index);
    }
    new_index = find_new_index(method->signature_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-signature_index change: %d to %d",
        method->signature_index(), new_index));
      method->set_signature_index(new_index);
    }
    new_index = find_new_index(method->generic_signature_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-generic_signature_index change: %d to %d",
        method->generic_signature_index(), new_index));
      method->set_generic_signature_index(new_index);
    }

    // Update constant pool indices in the method's checked exception
    // table to use new constant indices as needed.
    int cext_length = method->checked_exceptions_length();
    if (cext_length > 0) {
      CheckedExceptionElement * cext_table =
        method->checked_exceptions_start();
      for (int j = 0; j < cext_length; j++) {
        int cur_index = cext_table[j].class_cp_index;
        int new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("cext-class_cp_index change: %d to %d", cur_index, new_index));
          cext_table[j].class_cp_index = (u2)new_index;
        }
      } // end for each checked exception table entry
    } // end if there are checked exception table entries

    // Update each catch type index in the method's exception table
    // to use new constant pool indices as needed. The exception table
    // holds quadruple entries of the form:
    //   (beg_bci, end_bci, handler_bci, klass_index)

    ExceptionTable ex_table(method());
    int ext_length = ex_table.length();

    for (int j = 0; j < ext_length; j ++) {
      int cur_index = ex_table.catch_type_index(j);
      int new_index = find_new_index(cur_index);
      if (new_index != 0) {
        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
          ("ext-klass_index change: %d to %d", cur_index, new_index));
        ex_table.set_catch_type_index(j, new_index);
      }
    } // end for each exception table entry

    // Update constant pool indices in the method's local variable
    // table to use new constant indices as needed. The local variable
    // table hold sextuple entries of the form:
    // (start_pc, length, name_index, descriptor_index, signature_index, slot)
    int lvt_length = method->localvariable_table_length();
    if (lvt_length > 0) {
      LocalVariableTableElement * lv_table =
        method->localvariable_table_start();
      for (int j = 0; j < lvt_length; j++) {
        int cur_index = lv_table[j].name_cp_index;
        int new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-name_cp_index change: %d to %d", cur_index, new_index));
          lv_table[j].name_cp_index = (u2)new_index;
        }
        cur_index = lv_table[j].descriptor_cp_index;
        new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-descriptor_cp_index change: %d to %d", cur_index,
            new_index));
          lv_table[j].descriptor_cp_index = (u2)new_index;
        }
        cur_index = lv_table[j].signature_cp_index;
        new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-signature_cp_index change: %d to %d", cur_index, new_index));
          lv_table[j].signature_cp_index = (u2)new_index;
        }
      } // end for each local variable table entry
    } // end if there are local variable table entries

    // The StackMapTable attribute also embeds constant pool indices.
    rewrite_cp_refs_in_stack_map_table(method, THREAD);
  } // end for each method
} // end set_new_constant_pool()
3320 
3321 
// Unevolving classes may point to methods of the_class directly
// from their constant pool caches, itables, and/or vtables. We
// use the ClassLoaderDataGraph::classes_do() facility and this helper
// to fix up these pointers.

// Adjust cpools and vtables closure
//
// Called once per loaded klass; redirects any stale Method* references
// to the_class's methods (via the adjust_method_entries() family).
// trace_name_printed is passed by reference so the klass name is
// printed at most once per klass across all the adjustment calls.
void VM_RedefineClasses::AdjustCpoolCacheAndVtable::do_klass(Klass* k) {

  // This is a very busy routine. We don't want too much tracing
  // printed out.
  bool trace_name_printed = false;
  InstanceKlass *the_class = InstanceKlass::cast(_the_class_oop);

  // Very noisy: only enable this call if you are trying to determine
  // that a specific class gets found by this routine.
  // RC_TRACE macro has an embedded ResourceMark
  // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
  //   ("adjust check: name=%s", k->external_name()));
  // trace_name_printed = true;

  // If the class being redefined is java.lang.Object, we need to fix all
  // array class vtables also
  if (k->oop_is_array() && _the_class_oop == SystemDictionary::Object_klass()) {
    k->vtable()->adjust_method_entries(the_class, &trace_name_printed);

  } else if (k->oop_is_instance()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // HotSpot specific optimization! HotSpot does not currently
    // support delegation from the bootstrap class loader to a
    // user-defined class loader. This means that if the bootstrap
    // class loader is the initiating class loader, then it will also
    // be the defining class loader. This also means that classes
    // loaded by the bootstrap class loader cannot refer to classes
    // loaded by a user-defined class loader. Note: a user-defined
    // class loader can delegate to the bootstrap class loader.
    //
    // If the current class being redefined has a user-defined class
    // loader as its defining class loader, then we can skip all
    // classes loaded by the bootstrap class loader.
    bool is_user_defined =
           InstanceKlass::cast(_the_class_oop)->class_loader() != NULL;
    if (is_user_defined && ik->class_loader() == NULL) {
      return;
    }

    // Fix the vtable embedded in the_class and subclasses of the_class,
    // if one exists. We discard scratch_class and we don't keep an
    // InstanceKlass around to hold obsolete methods so we don't have
    // any other InstanceKlass embedded vtables to update. The vtable
    // holds the Method*s for virtual (but not final) methods.
    // Default methods, or concrete methods in interfaces are stored
    // in the vtable, so if an interface changes we need to check
    // adjust_method_entries() for every InstanceKlass, which will also
    // adjust the default method vtable indices.
    // We also need to adjust any default method entries that are
    // not yet in the vtable, because the vtable setup is in progress.
    // This must be done after we adjust the default_methods and
    // default_vtable_indices for methods already in the vtable.
    // If redefining Unsafe, walk all the vtables looking for entries.
    if (ik->vtable_length() > 0 && (_the_class_oop->is_interface()
        || _the_class_oop == SystemDictionary::misc_Unsafe_klass()
        || ik->is_subtype_of(_the_class_oop))) {
      // ik->vtable() creates a wrapper object; rm cleans it up
      ResourceMark rm(_thread);

      ik->vtable()->adjust_method_entries(the_class, &trace_name_printed);
      ik->adjust_default_methods(the_class, &trace_name_printed);
    }

    // If the current class has an itable and we are either redefining an
    // interface or if the current class is a subclass of the_class, then
    // we potentially have to fix the itable. If we are redefining an
    // interface, then we have to call adjust_method_entries() for
    // every InstanceKlass that has an itable since there isn't a
    // subclass relationship between an interface and an InstanceKlass.
    // If redefining Unsafe, walk all the itables looking for entries.
    if (ik->itable_length() > 0 && (_the_class_oop->is_interface()
        || _the_class_oop == SystemDictionary::misc_Unsafe_klass()
        || ik->is_subclass_of(_the_class_oop))) {
      // ik->itable() creates a wrapper object; rm cleans it up
      ResourceMark rm(_thread);

      ik->itable()->adjust_method_entries(the_class, &trace_name_printed);
    }

    // The constant pools in other classes (other_cp) can refer to
    // methods in the_class. We have to update method information in
    // other_cp's cache. If other_cp has a previous version, then we
    // have to repeat the process for each previous version. The
    // constant pool cache holds the Method*s for non-virtual
    // methods and for virtual, final methods.
    //
    // Special case: if the current class is the_class, then new_cp
    // has already been attached to the_class and old_cp has already
    // been added as a previous version. The new_cp doesn't have any
    // cached references to old methods so it doesn't need to be
    // updated. We can simply start with the previous version(s) in
    // that case.
    constantPoolHandle other_cp;
    ConstantPoolCache* cp_cache;

    if (ik != _the_class_oop) {
      // this klass' constant pool cache may need adjustment
      other_cp = constantPoolHandle(ik->constants());
      cp_cache = other_cp->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(the_class, &trace_name_printed);
      }
    }

    // the previous versions' constant pool caches may need adjustment
    for (InstanceKlass* pv_node = ik->previous_versions();
         pv_node != NULL;
         pv_node = pv_node->previous_versions()) {
      cp_cache = pv_node->constants()->cache();
      if (cp_cache != NULL) {
        // note: a previous version's cache is adjusted against the
        // previous version klass itself (pv_node), not the_class
        cp_cache->adjust_method_entries(pv_node, &trace_name_printed);
      }
    }
  }
}
3445 
3446 // Clean method data for this class
3447 void VM_RedefineClasses::MethodDataCleaner::do_klass(Klass* k) {
3448   if (k->oop_is_instance()) {
3449     InstanceKlass *ik = InstanceKlass::cast(k);
3450     // Clean MethodData of this class's methods so they don't refer to
3451     // old methods that are no longer running.
3452     Array<Method*>* methods = ik->methods();
3453     int num_methods = methods->length();
3454     for (int index = 0; index < num_methods; ++index) {
3455       if (methods->at(index)->method_data() != NULL) {
3456         methods->at(index)->method_data()->clean_weak_method_links();
3457       }
3458     }
3459   }
3460 }
3461 
3462 void VM_RedefineClasses::update_jmethod_ids() {
3463   for (int j = 0; j < _matching_methods_length; ++j) {
3464     Method* old_method = _matching_old_methods[j];
3465     jmethodID jmid = old_method->find_jmethod_id_or_null();
3466     if (jmid != NULL) {
3467       // There is a jmethodID, change it to point to the new method
3468       methodHandle new_method_h(_matching_new_methods[j]);
3469       Method::change_method_associated_with_jmethod_id(jmid, new_method_h());
3470       assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
3471              "should be replaced");
3472     }
3473   }
3474 }
3475 
3476 int VM_RedefineClasses::check_methods_and_mark_as_obsolete() {
3477   int emcp_method_count = 0;
3478   int obsolete_count = 0;
3479   int old_index = 0;
3480   for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
3481     Method* old_method = _matching_old_methods[j];
3482     Method* new_method = _matching_new_methods[j];
3483     Method* old_array_method;
3484 
3485     // Maintain an old_index into the _old_methods array by skipping
3486     // deleted methods
3487     while ((old_array_method = _old_methods->at(old_index)) != old_method) {
3488       ++old_index;
3489     }
3490 
3491     if (MethodComparator::methods_EMCP(old_method, new_method)) {
3492       // The EMCP definition from JSR-163 requires the bytecodes to be
3493       // the same with the exception of constant pool indices which may
3494       // differ. However, the constants referred to by those indices
3495       // must be the same.
3496       //
3497       // We use methods_EMCP() for comparison since constant pool
3498       // merging can remove duplicate constant pool entries that were
3499       // present in the old method and removed from the rewritten new
3500       // method. A faster binary comparison function would consider the
3501       // old and new methods to be different when they are actually
3502       // EMCP.
3503       //
3504       // The old and new methods are EMCP and you would think that we
3505       // could get rid of one of them here and now and save some space.
3506       // However, the concept of EMCP only considers the bytecodes and
3507       // the constant pool entries in the comparison. Other things,
3508       // e.g., the line number table (LNT) or the local variable table
3509       // (LVT) don't count in the comparison. So the new (and EMCP)
3510       // method can have a new LNT that we need so we can't just
3511       // overwrite the new method with the old method.
3512       //
3513       // When this routine is called, we have already attached the new
3514       // methods to the_class so the old methods are effectively
3515       // overwritten. However, if an old method is still executing,
3516       // then the old method cannot be collected until sometime after
3517       // the old method call has returned. So the overwriting of old
3518       // methods by new methods will save us space except for those
3519       // (hopefully few) old methods that are still executing.
3520       //
3521       // A method refers to a ConstMethod* and this presents another
3522       // possible avenue to space savings. The ConstMethod* in the
3523       // new method contains possibly new attributes (LNT, LVT, etc).
3524       // At first glance, it seems possible to save space by replacing
3525       // the ConstMethod* in the old method with the ConstMethod*
3526       // from the new method. The old and new methods would share the
3527       // same ConstMethod* and we would save the space occupied by
3528       // the old ConstMethod*. However, the ConstMethod* contains
3529       // a back reference to the containing method. Sharing the
3530       // ConstMethod* between two methods could lead to confusion in
3531       // the code that uses the back reference. This would lead to
3532       // brittle code that could be broken in non-obvious ways now or
3533       // in the future.
3534       //
3535       // Another possibility is to copy the ConstMethod* from the new
3536       // method to the old method and then overwrite the new method with
3537       // the old method. Since the ConstMethod* contains the bytecodes
3538       // for the method embedded in the oop, this option would change
3539       // the bytecodes out from under any threads executing the old
3540       // method and make the thread's bcp invalid. Since EMCP requires
3541       // that the bytecodes be the same modulo constant pool indices, it
3542       // is straight forward to compute the correct new bcp in the new
3543       // ConstMethod* from the old bcp in the old ConstMethod*. The
3544       // time consuming part would be searching all the frames in all
3545       // of the threads to find all of the calls to the old method.
3546       //
3547       // It looks like we will have to live with the limited savings
3548       // that we get from effectively overwriting the old methods
3549       // when the new methods are attached to the_class.
3550 
3551       // Count number of methods that are EMCP.  The method will be marked
3552       // old but not obsolete if it is EMCP.
3553       emcp_method_count++;
3554 
3555       // An EMCP method is _not_ obsolete. An obsolete method has a
3556       // different jmethodID than the current method. An EMCP method
3557       // has the same jmethodID as the current method. Having the
3558       // same jmethodID for all EMCP versions of a method allows for
3559       // a consistent view of the EMCP methods regardless of which
3560       // EMCP method you happen to have in hand. For example, a
3561       // breakpoint set in one EMCP method will work for all EMCP
3562       // versions of the method including the current one.
3563     } else {
3564       // mark obsolete methods as such
3565       old_method->set_is_obsolete();
3566       obsolete_count++;
3567 
3568       // obsolete methods need a unique idnum so they become new entries in
3569       // the jmethodID cache in InstanceKlass
3570       assert(old_method->method_idnum() == new_method->method_idnum(), "must match");
3571       u2 num = InstanceKlass::cast(_the_class_oop)->next_method_idnum();
3572       if (num != ConstMethod::UNSET_IDNUM) {
3573         old_method->set_method_idnum(num);
3574       }
3575 
3576       // With tracing we try not to "yack" too much. The position of
3577       // this trace assumes there are fewer obsolete methods than
3578       // EMCP methods.
3579       RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
3580         old_method->name()->as_C_string(),
3581         old_method->signature()->as_C_string()));
3582     }
3583     old_method->set_is_old();
3584   }
3585   for (int i = 0; i < _deleted_methods_length; ++i) {
3586     Method* old_method = _deleted_methods[i];
3587 
3588     assert(!old_method->has_vtable_index(),
3589            "cannot delete methods with vtable entries");;
3590 
3591     // Mark all deleted methods as old, obsolete and deleted
3592     old_method->set_is_deleted();
3593     old_method->set_is_old();
3594     old_method->set_is_obsolete();
3595     ++obsolete_count;
3596     // With tracing we try not to "yack" too much. The position of
3597     // this trace assumes there are fewer obsolete methods than
3598     // EMCP methods.
3599     RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
3600                           old_method->name()->as_C_string(),
3601                           old_method->signature()->as_C_string()));
3602   }
3603   assert((emcp_method_count + obsolete_count) == _old_methods->length(),
3604     "sanity check");
3605   RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", emcp_method_count,
3606     obsolete_count));
3607   return emcp_method_count;
3608 }
3609 
3610 // This internal class transfers the native function registration from old methods
3611 // to new methods.  It is designed to handle both the simple case of unchanged
3612 // native methods and the complex cases of native method prefixes being added and/or
3613 // removed.
3614 // It expects only to be used during the VM_RedefineClasses op (a safepoint).
3615 //
3616 // This class is used after the new methods have been installed in "the_class".
3617 //
3618 // So, for example, the following must be handled.  Where 'm' is a method and
3619 // a number followed by an underscore is a prefix.
3620 //
3621 //                                      Old Name    New Name
3622 // Simple transfer to new method        m       ->  m
3623 // Add prefix                           m       ->  1_m
3624 // Remove prefix                        1_m     ->  m
3625 // Simultaneous add of prefixes         m       ->  3_2_1_m
3626 // Simultaneous removal of prefixes     3_2_1_m ->  m
3627 // Simultaneous add and remove          1_m     ->  2_m
3628 // Same, caused by prefix removal only  3_2_1_m ->  3_2_m
3629 //
class TransferNativeFunctionRegistration {
 private:
  instanceKlassHandle the_class;  // class whose methods are being redefined
  int prefix_count;               // number of agent-declared native prefixes
  char** prefixes;                // the prefixes, ordered by agent

  // Recursively search the binary tree of possibly prefixed method names.
  // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilent to agents not cleaning up intermediate methods.
  // Branch at each depth in the binary tree is:
  //    (1) without the prefix.
  //    (2) with the prefix.
  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  // Returns the matching native Method*, or NULL if this branch has none.
  Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
                                     Symbol* signature) {
    // probe() only finds symbols that already exist; a name that was never
    // interned cannot name a method, so NULL means "no method here".
    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
    if (name_symbol != NULL) {
      Method* method = the_class()->lookup_method(name_symbol, signature);
      if (method != NULL) {
        // Even if prefixed, intermediate methods must exist.
        if (method->is_native()) {
          // Wahoo, we found a (possibly prefixed) version of the method, return it.
          return method;
        }
        if (depth < prefix_count) {
          // Try applying further prefixes (other than this one).
          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
          if (method != NULL) {
            return method; // found
          }

          // Try adding this prefix to the method name and see if it matches
          // another method name.
          char* prefix = prefixes[depth];
          size_t prefix_len = strlen(prefix);
          size_t trial_len = name_len + prefix_len;
          // +1 for the NUL terminator; resource area memory, freed by the
          // caller's ResourceMark.
          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
          strcpy(trial_name_str, prefix);
          strcat(trial_name_str, name_str);
          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
                                            signature);
          if (method != NULL) {
            // If found along this branch, it was prefixed, mark as such
            method->set_is_prefixed_native();
            return method; // found
          }
        }
      }
    }
    return NULL;  // This whole branch bore nothing
  }

  // Return the method name with old prefixes stripped away.
  // The returned pointer aliases the UTF8 copy of the name, advanced past
  // any leading prefixes (stripped outermost-first, i.e. highest index first).
  char* method_name_without_prefixes(Method* method) {
    Symbol* name = method->name();
    char* name_str = name->as_utf8();

    // Old prefixing may be defunct, strip prefixes, if any.
    for (int i = prefix_count-1; i >= 0; i--) {
      char* prefix = prefixes[i];
      size_t prefix_len = strlen(prefix);
      if (strncmp(prefix, name_str, prefix_len) == 0) {
        name_str += prefix_len;
      }
    }
    return name_str;
  }

  // Strip any prefixes off the old native method, then try to find a
  // (possibly prefixed) new native that matches it.
  Method* strip_and_search_for_new_native(Method* method) {
    ResourceMark rm;  // reclaims name copies and trial strings built below
    char* name_str = method_name_without_prefixes(method);
    return search_prefix_name_space(0, name_str, strlen(name_str),
                                    method->signature());
  }

 public:

  // Construct a native method transfer processor for this class.
  // Must run inside the VM_RedefineClasses safepoint.
  TransferNativeFunctionRegistration(instanceKlassHandle _the_class) {
    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");

    the_class = _the_class;
    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  }

  // Attempt to transfer any of the old or deleted methods that are native
  void transfer_registrations(Method** old_methods, int methods_length) {
    for (int j = 0; j < methods_length; j++) {
      Method* old_method = old_methods[j];

      // Only methods with a registered native function need transferring.
      if (old_method->is_native() && old_method->has_native_function()) {
        Method* new_method = strip_and_search_for_new_native(old_method);
        if (new_method != NULL) {
          // Actually set the native function in the new method.
          // Redefine does not send events (except CFLH), certainly not this
          // behind the scenes re-registration.
          new_method->set_native_function(old_method->native_function(),
                              !Method::native_bind_event_is_interesting);
        }
      }
    }
  }
};
3735 
// Don't lose the association between a native method and its JNI function.
// Covers both deleted methods and methods that survive the redefine
// (matching old methods) whose new counterpart must inherit the binding.
void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle the_class) {
  TransferNativeFunctionRegistration transfer(the_class);
  transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
  transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
}
3742 
// Deoptimize all compiled code that depends on this class.
//
// If the can_redefine_classes capability is obtained in the onload
// phase then the compiler has recorded all dependencies from startup.
// In that case we need only deoptimize and throw away all compiled code
// that depends on the class.
//
// If can_redefine_classes is obtained sometime after the onload
// phase then the dependency information may be incomplete. In that case
// the first call to RedefineClasses causes all compiled code to be
// thrown away. As can_redefine_classes has been obtained then
// all future compilations will record dependencies so second and
// subsequent calls to RedefineClasses need only throw away code
// that depends on the class.
//
void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
  assert_locked_or_safepoint(Compile_lock);

  // All dependencies have been recorded from startup or this is a second or
  // subsequent use of RedefineClasses
  if (JvmtiExport::all_dependencies_are_recorded()) {
    // Cheap path: only flush nmethods with a recorded evol dependency on k_h.
    CodeCache::flush_evol_dependents_on(k_h);
  } else {
    // Expensive path: dependency info may be incomplete, so mark everything.
    CodeCache::mark_all_nmethods_for_deoptimization();

    // THREAD is only used for the resource allocations below.
    ResourceMark rm(THREAD);
    DeoptimizationMarker dm;

    // Deoptimize all activations depending on marked nmethods
    Deoptimization::deoptimize_dependents();

    // Make the dependent methods not entrant
    CodeCache::make_marked_nmethods_not_entrant();

    // From now on we know that the dependency information is complete
    JvmtiExport::set_all_dependencies_are_recorded(true);
  }
}
3781 
// Partition _old_methods and _new_methods into matching, added and
// deleted sets using a two-pointer merge over both arrays.
// NOTE(review): the merge presumes both arrays are ordered consistently
// with Symbol::fast_compare (with added overloads moved to the end of
// the new array, per the inline comment below) -- confirm against the
// method sorting performed earlier in the redefine pipeline.
void VM_RedefineClasses::compute_added_deleted_matching_methods() {
  Method* old_method;
  Method* new_method;

  // Allocate for the worst case: all old methods deleted/matching,
  // all new methods added.
  _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _added_methods        = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
  _deleted_methods      = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());

  _matching_methods_length = 0;
  _deleted_methods_length  = 0;
  _added_methods_length    = 0;

  int nj = 0;  // cursor into _new_methods
  int oj = 0;  // cursor into _old_methods
  while (true) {
    if (oj >= _old_methods->length()) {
      if (nj >= _new_methods->length()) {
        break; // we've looked at everything, done
      }
      // New method at the end
      new_method = _new_methods->at(nj);
      _added_methods[_added_methods_length++] = new_method;
      ++nj;
    } else if (nj >= _new_methods->length()) {
      // Old method, at the end, is deleted
      old_method = _old_methods->at(oj);
      _deleted_methods[_deleted_methods_length++] = old_method;
      ++oj;
    } else {
      old_method = _old_methods->at(oj);
      new_method = _new_methods->at(nj);
      if (old_method->name() == new_method->name()) {
        if (old_method->signature() == new_method->signature()) {
          // Same name and same signature: the methods match.
          _matching_old_methods[_matching_methods_length  ] = old_method;
          _matching_new_methods[_matching_methods_length++] = new_method;
          ++nj;
          ++oj;
        } else {
          // added overloaded have already been moved to the end,
          // so this is a deleted overloaded method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      } else { // names don't match
        if (old_method->name()->fast_compare(new_method->name()) > 0) {
          // new method
          _added_methods[_added_methods_length++] = new_method;
          ++nj;
        } else {
          // deleted method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      }
    }
  }
  // Every old method is matched or deleted; every new method is matched
  // or added.
  assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
  assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
}
3842 
3843 
3844 void VM_RedefineClasses::swap_annotations(instanceKlassHandle the_class,
3845                                           instanceKlassHandle scratch_class) {
3846   // Swap annotation fields values
3847   Annotations* old_annotations = the_class->annotations();
3848   the_class->set_annotations(scratch_class->annotations());
3849   scratch_class->set_annotations(old_annotations);
3850 }
3851 
3852 
// Install the redefinition of a class:
//    - house keeping (flushing breakpoints and caches, deoptimizing
//      dependent compiled code)
//    - replacing parts in the_class with parts from scratch_class
//    - adding a weak reference to track the obsolete but interesting
//      parts of the_class
//    - adjusting constant pool caches and vtables in other classes
//      that refer to methods in the_class. These adjustments use the
//      ClassLoaderDataGraph::classes_do() facility which only allows
//      a helper method to be specified. The interesting parameters
//      that we would like to pass to the helper method are saved in
//      static global fields in the VM operation.
void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
       Klass* scratch_class_oop, TRAPS) {

  HandleMark hm(THREAD);   // make sure handles from this call are freed
  RC_TIMER_START(_timer_rsc_phase1);

  instanceKlassHandle scratch_class(scratch_class_oop);

  oop the_class_mirror = JNIHandles::resolve_non_null(the_jclass);
  Klass* the_class_oop = java_lang_Class::as_Klass(the_class_mirror);
  instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);

  // Remove all breakpoints in methods of this class
  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class_oop);

  // Deoptimize all compiled code that depends on this class
  flush_dependent_code(the_class, THREAD);

  // Stash the method arrays and the class being redefined in VM-op
  // fields so the helper methods and class-walking closures below can
  // reach them.
  _old_methods = the_class->methods();
  _new_methods = scratch_class->methods();
  _the_class_oop = the_class_oop;
  compute_added_deleted_matching_methods();
  update_jmethod_ids();

  // Attach new constant pool to the original klass. The original
  // klass still refers to the old constant pool (for now).
  scratch_class->constants()->set_pool_holder(the_class());

#if 0
  // In theory, with constant pool merging in place we should be able
  // to save space by using the new, merged constant pool in place of
  // the old constant pool(s). By "pool(s)" I mean the constant pool in
  // the klass version we are replacing now and any constant pool(s) in
  // previous versions of klass. Nice theory, doesn't work in practice.
  // When this code is enabled, even simple programs throw NullPointer
  // exceptions. I'm guessing that this is caused by some constant pool
  // cache difference between the new, merged constant pool and the
  // constant pool that was just being used by the klass. I'm keeping
  // this code around to archive the idea, but the code has to remain
  // disabled for now.

  // Attach each old method to the new constant pool. This can be
  // done here since we are past the bytecode verification and
  // constant pool optimization phases.
  for (int i = _old_methods->length() - 1; i >= 0; i--) {
    Method* method = _old_methods->at(i);
    method->set_constants(scratch_class->constants());
  }

  {
    // walk all previous versions of the klass
    InstanceKlass *ik = (InstanceKlass *)the_class();
    PreviousVersionWalker pvw(ik);
    instanceKlassHandle ikh;
    do {
      ikh = pvw.next_previous_version();
      if (!ikh.is_null()) {
        ik = ikh();

        // attach previous version of klass to the new constant pool
        ik->set_constants(scratch_class->constants());

        // Attach each method in the previous version of klass to the
        // new constant pool
        Array<Method*>* prev_methods = ik->methods();
        for (int i = prev_methods->length() - 1; i >= 0; i--) {
          Method* method = prev_methods->at(i);
          method->set_constants(scratch_class->constants());
        }
      }
    } while (!ikh.is_null());
  }
#endif

  // Replace methods and constantpool
  the_class->set_methods(_new_methods);
  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
                                          // and to be able to undo operation easily.

  ConstantPool* old_constants = the_class->constants();
  the_class->set_constants(scratch_class->constants());
  scratch_class->set_constants(old_constants);  // See the previous comment.
#if 0
  // We are swapping the guts of "the new class" with the guts of "the
  // class". Since the old constant pool has just been attached to "the
  // new class", it seems logical to set the pool holder in the old
  // constant pool also. However, doing this will change the observable
  // class hierarchy for any old methods that are still executing. A
  // method can query the identity of its "holder" and this query uses
  // the method's constant pool link to find the holder. The change in
  // holding class from "the class" to "the new class" can confuse
  // things.
  //
  // Setting the old constant pool's holder will also cause
  // verification done during vtable initialization below to fail.
  // During vtable initialization, the vtable's class is verified to be
  // a subtype of the method's holder. The vtable's class is "the
  // class" and the method's holder is gotten from the constant pool
  // link in the method itself. For "the class"'s directly implemented
  // methods, the method holder is "the class" itself (as gotten from
  // the new constant pool). The check works fine in this case. The
  // check also works fine for methods inherited from super classes.
  //
  // Miranda methods are a little more complicated. A miranda method is
  // provided by an interface when the class implementing the interface
  // does not provide its own method.  These interfaces are implemented
  // internally as an InstanceKlass. These special instanceKlasses
  // share the constant pool of the class that "implements" the
  // interface. By sharing the constant pool, the method holder of a
  // miranda method is the class that "implements" the interface. In a
  // non-redefine situation, the subtype check works fine. However, if
  // the old constant pool's pool holder is modified, then the check
  // fails because there is no class hierarchy relationship between the
  // vtable's class and "the new class".

  old_constants->set_pool_holder(scratch_class());
#endif

  // track number of methods that are EMCP for add_previous_version() call below
  int emcp_method_count = check_methods_and_mark_as_obsolete();
  transfer_old_native_function_registrations(the_class);

  // The class file bytes from before any retransformable agents mucked
  // with them was cached on the scratch class, move to the_class.
  // Note: we still want to do this if nothing needed caching since it
  // should get cleared in the_class too.
  if (the_class->get_cached_class_file_bytes() == 0) {
    // the_class doesn't have a cache yet so copy it
    the_class->set_cached_class_file(scratch_class->get_cached_class_file());
  }
  else if (scratch_class->get_cached_class_file_bytes() !=
           the_class->get_cached_class_file_bytes()) {
    // The same class can be present twice in the scratch classes list or there
    // are multiple concurrent RetransformClasses calls on different threads.
    // In such cases we have to deallocate scratch_class cached_class_file.
    os::free(scratch_class->get_cached_class_file());
  }

  // NULL out in scratch class to not delete twice.  The class to be redefined
  // always owns these bytes.
  scratch_class->set_cached_class_file(NULL);

  // Replace inner_classes
  Array<u2>* old_inner_classes = the_class->inner_classes();
  the_class->set_inner_classes(scratch_class->inner_classes());
  scratch_class->set_inner_classes(old_inner_classes);

  // Initialize the vtable and interface table after
  // methods have been rewritten
  {
    ResourceMark rm(THREAD);
    // no exception should happen here since we explicitly
    // do not check loader constraints.
    // compare_and_normalize_class_versions has already checked:
    //  - classloaders unchanged, signatures unchanged
    //  - all instanceKlasses for redefined classes reused & contents updated
    the_class->vtable()->initialize_vtable(false, THREAD);
    the_class->itable()->initialize_itable(false, THREAD);
    assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception");
  }

  // Leave arrays of jmethodIDs and itable index cache unchanged

  // Copy the "source file name" attribute from new class version
  the_class->set_source_file_name_index(
    scratch_class->source_file_name_index());

  // Copy the "source debug extension" attribute from new class version
  the_class->set_source_debug_extension(
    scratch_class->source_debug_extension(),
    scratch_class->source_debug_extension() == NULL ? 0 :
    (int)strlen(scratch_class->source_debug_extension()));

  // Use of javac -g could be different in the old and the new
  if (scratch_class->access_flags().has_localvariable_table() !=
      the_class->access_flags().has_localvariable_table()) {

    AccessFlags flags = the_class->access_flags();
    if (scratch_class->access_flags().has_localvariable_table()) {
      flags.set_has_localvariable_table();
    } else {
      flags.clear_has_localvariable_table();
    }
    the_class->set_access_flags(flags);
  }

  swap_annotations(the_class, scratch_class);

  // Replace minor version number of class file
  u2 old_minor_version = the_class->minor_version();
  the_class->set_minor_version(scratch_class->minor_version());
  scratch_class->set_minor_version(old_minor_version);

  // Replace major version number of class file
  u2 old_major_version = the_class->major_version();
  the_class->set_major_version(scratch_class->major_version());
  scratch_class->set_major_version(old_major_version);

  // Replace CP indexes for class and name+type of enclosing method
  u2 old_class_idx  = the_class->enclosing_method_class_index();
  u2 old_method_idx = the_class->enclosing_method_method_index();
  the_class->set_enclosing_method_indices(
    scratch_class->enclosing_method_class_index(),
    scratch_class->enclosing_method_method_index());
  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);

  the_class->set_has_been_redefined();

  // keep track of previous versions of this class
  the_class->add_previous_version(scratch_class, emcp_method_count);

  RC_TIMER_STOP(_timer_rsc_phase1);
  RC_TIMER_START(_timer_rsc_phase2);

  // Adjust constantpool caches and vtables for all classes
  // that reference methods of the evolved class.
  AdjustCpoolCacheAndVtable adjust_cpool_cache_and_vtable(THREAD);
  ClassLoaderDataGraph::classes_do(&adjust_cpool_cache_and_vtable);

  // JSR-292 support
  MemberNameTable* mnt = the_class->member_names();
  if (mnt != NULL) {
    bool trace_name_printed = false;
    mnt->adjust_method_entries(the_class(), &trace_name_printed);
  }

  if (the_class->oop_map_cache() != NULL) {
    // Flush references to any obsolete methods from the oop map cache
    // so that obsolete methods are not pinned.
    the_class->oop_map_cache()->flush_obsolete_entries();
  }

  // increment the classRedefinedCount field in the_class and in any
  // direct and indirect subclasses of the_class
  increment_class_counter((InstanceKlass *)the_class(), THREAD);

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE_WITH_THREAD(0x00000001, THREAD,
    ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
    the_class->external_name(),
    java_lang_Class::classRedefinedCount(the_class_mirror),
    os::available_memory() >> 10));

  {
    ResourceMark rm(THREAD);
    Events::log_redefinition(THREAD, "redefined class name=%s, count=%d",
                             the_class->external_name(),
                             java_lang_Class::classRedefinedCount(the_class_mirror));

  }
  RC_TIMER_STOP(_timer_rsc_phase2);
} // end redefine_single_class()
4118 
4119 
4120 // Increment the classRedefinedCount field in the specific InstanceKlass
4121 // and in all direct and indirect subclasses.
4122 void VM_RedefineClasses::increment_class_counter(InstanceKlass *ik, TRAPS) {
4123   oop class_mirror = ik->java_mirror();
4124   Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
4125   int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
4126   java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
4127 
4128   if (class_oop != _the_class_oop) {
4129     // _the_class_oop count is printed at end of redefine_single_class()
4130     RC_TRACE_WITH_THREAD(0x00000008, THREAD,
4131       ("updated count in subclass=%s to %d", ik->external_name(), new_count));
4132   }
4133 
4134   for (Klass *subk = ik->subklass(); subk != NULL;
4135        subk = subk->next_sibling()) {
4136     if (subk->oop_is_instance()) {
4137       // Only update instanceKlasses
4138       InstanceKlass *subik = (InstanceKlass*)subk;
4139       // recursively do subclasses of the current subclass
4140       increment_class_counter(subik, THREAD);
4141     }
4142   }
4143 }
4144 
// Post-redefinition sanity closure: verify that no vtable, itable or
// constant pool cache entry of klass k still refers to an old or
// obsolete method; guarantee()-fails (aborting the VM) if one is found.
void VM_RedefineClasses::CheckClass::do_klass(Klass* k) {
  bool no_old_methods = true;  // be optimistic

  // Both array and instance classes have vtables.
  // a vtable should never contain old or obsolete methods
  ResourceMark rm(_thread);
  if (k->vtable_length() > 0 &&
      !k->vtable()->check_no_old_or_obsolete_entries()) {
    if (RC_TRACE_ENABLED(0x00004000)) {
      RC_TRACE_WITH_THREAD(0x00004000, _thread,
        ("klassVtable::check_no_old_or_obsolete_entries failure"
         " -- OLD or OBSOLETE method found -- class: %s",
         k->signature_name()));
      k->vtable()->dump_vtable();
    }
    no_old_methods = false;
  }

  // itables and cp-caches only exist for instance classes.
  if (k->oop_is_instance()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // an itable should never contain old or obsolete methods
    if (ik->itable_length() > 0 &&
        !ik->itable()->check_no_old_or_obsolete_entries()) {
      if (RC_TRACE_ENABLED(0x00004000)) {
        RC_TRACE_WITH_THREAD(0x00004000, _thread,
          ("klassItable::check_no_old_or_obsolete_entries failure"
           " -- OLD or OBSOLETE method found -- class: %s",
           ik->signature_name()));
        ik->itable()->dump_itable();
      }
      no_old_methods = false;
    }

    // the constant pool cache should never contain non-deleted old or obsolete methods
    if (ik->constants() != NULL &&
        ik->constants()->cache() != NULL &&
        !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
      if (RC_TRACE_ENABLED(0x00004000)) {
        RC_TRACE_WITH_THREAD(0x00004000, _thread,
          ("cp-cache::check_no_old_or_obsolete_entries failure"
           " -- OLD or OBSOLETE method found -- class: %s",
           ik->signature_name()));
        ik->constants()->cache()->dump_cache();
      }
      no_old_methods = false;
    }
  }

  // print and fail guarantee if old methods are found.
  if (!no_old_methods) {
    if (RC_TRACE_ENABLED(0x00004000)) {
      dump_methods();
    } else {
      tty->print_cr("INFO: use the '-XX:TraceRedefineClasses=16384' option "
        "to see more info about the following guarantee() failure.");
    }
    guarantee(false, "OLD and/or OBSOLETE method(s) found");
  }
}
4206 
4207 
4208 void VM_RedefineClasses::dump_methods() {
4209   int j;
4210   RC_TRACE(0x00004000, ("_old_methods --"));
4211   for (j = 0; j < _old_methods->length(); ++j) {
4212     Method* m = _old_methods->at(j);
4213     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
4214     m->access_flags().print_on(tty);
4215     tty->print(" --  ");
4216     m->print_name(tty);
4217     tty->cr();
4218   }
4219   RC_TRACE(0x00004000, ("_new_methods --"));
4220   for (j = 0; j < _new_methods->length(); ++j) {
4221     Method* m = _new_methods->at(j);
4222     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
4223     m->access_flags().print_on(tty);
4224     tty->print(" --  ");
4225     m->print_name(tty);
4226     tty->cr();
4227   }
4228   RC_TRACE(0x00004000, ("_matching_(old/new)_methods --"));
4229   for (j = 0; j < _matching_methods_length; ++j) {
4230     Method* m = _matching_old_methods[j];
4231     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
4232     m->access_flags().print_on(tty);
4233     tty->print(" --  ");
4234     m->print_name(tty);
4235     tty->cr();
4236     m = _matching_new_methods[j];
4237     RC_TRACE_NO_CR(0x00004000, ("      (%5d)  ", m->vtable_index()));
4238     m->access_flags().print_on(tty);
4239     tty->cr();
4240   }
4241   RC_TRACE(0x00004000, ("_deleted_methods --"));
4242   for (j = 0; j < _deleted_methods_length; ++j) {
4243     Method* m = _deleted_methods[j];
4244     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
4245     m->access_flags().print_on(tty);
4246     tty->print(" --  ");
4247     m->print_name(tty);
4248     tty->cr();
4249   }
4250   RC_TRACE(0x00004000, ("_added_methods --"));
4251   for (j = 0; j < _added_methods_length; ++j) {
4252     Method* m = _added_methods[j];
4253     RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
4254     m->access_flags().print_on(tty);
4255     tty->print(" --  ");
4256     m->print_name(tty);
4257     tty->cr();
4258   }
4259 }
4260 
4261 void VM_RedefineClasses::print_on_error(outputStream* st) const {
4262   VM_Operation::print_on_error(st);
4263   if (_the_class_oop != NULL) {
4264     ResourceMark rm;
4265     st->print_cr(", redefining class %s", _the_class_oop->external_name());
4266   }
4267 }