1 /*
   2  * Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "aot/aotLoader.hpp"
  27 #include "classfile/classFileStream.hpp"
  28 #include "classfile/metadataOnStackMark.hpp"
  29 #include "classfile/systemDictionary.hpp"
  30 #include "classfile/verifier.hpp"
  31 #include "code/codeCache.hpp"
  32 #include "compiler/compileBroker.hpp"
  33 #include "interpreter/oopMapCache.hpp"
  34 #include "interpreter/rewriter.hpp"
  35 #include "logging/logStream.hpp"
  36 #include "memory/metadataFactory.hpp"
  37 #include "memory/metaspaceShared.hpp"
  38 #include "memory/resourceArea.hpp"
  39 #include "memory/universe.hpp"
  40 #include "oops/fieldStreams.hpp"
  41 #include "oops/klassVtable.hpp"
  42 #include "oops/oop.inline.hpp"
  43 #include "prims/jvmtiImpl.hpp"
  44 #include "prims/jvmtiRedefineClasses.hpp"
  45 #include "prims/jvmtiThreadState.inline.hpp"
  46 #include "prims/resolvedMethodTable.hpp"
  47 #include "prims/methodComparator.hpp"
  48 #include "runtime/deoptimization.hpp"
  49 #include "runtime/handles.inline.hpp"
  50 #include "runtime/jniHandles.inline.hpp"
  51 #include "runtime/relocator.hpp"
  52 #include "runtime/safepointVerifiers.hpp"
  53 #include "utilities/bitMap.inline.hpp"
  54 #include "utilities/events.hpp"
  55 
// Scratch state used while matching old and new method arrays during a
// redefinition. NOTE(review): these are static and therefore shared by
// all redefinition operations — presumably safe because VM operations
// are serialized; confirm against the VM operation framework.
Array<Method*>* VM_RedefineClasses::_old_methods = NULL;
Array<Method*>* VM_RedefineClasses::_new_methods = NULL;
Method**  VM_RedefineClasses::_matching_old_methods = NULL;
Method**  VM_RedefineClasses::_matching_new_methods = NULL;
Method**  VM_RedefineClasses::_deleted_methods      = NULL;
Method**  VM_RedefineClasses::_added_methods        = NULL;
int         VM_RedefineClasses::_matching_methods_length = 0;
int         VM_RedefineClasses::_deleted_methods_length  = 0;
int         VM_RedefineClasses::_added_methods_length    = 0;
// The class currently being redefined; reset to NULL in doit_epilogue()
// so error printing does not see a stale value.
Klass*      VM_RedefineClasses::_the_class = NULL;
  66 
  67 
  68 VM_RedefineClasses::VM_RedefineClasses(jint class_count,
  69                                        const jvmtiClassDefinition *class_defs,
  70                                        JvmtiClassLoadKind class_load_kind) {
  71   _class_count = class_count;
  72   _class_defs = class_defs;
  73   _class_load_kind = class_load_kind;
  74   _any_class_has_resolved_methods = false;
  75   _res = JVMTI_ERROR_NONE;
  76 }
  77 
  78 static inline InstanceKlass* get_ik(jclass def) {
  79   oop mirror = JNIHandles::resolve_non_null(def);
  80   return InstanceKlass::cast(java_lang_Class::as_Klass(mirror));
  81 }
  82 
// If any of the classes are being redefined, wait
// Parallel constant pool merging leads to indeterminate constant pools.
//
// On return every class in _class_defs is marked as being redefined by
// this operation; unlock_classes() clears the marks.
void VM_RedefineClasses::lock_classes() {
  MutexLocker ml(RedefineClasses_lock);
  bool has_redefined;
  do {
    has_redefined = false;
    // Go through classes each time until none are being redefined.
    // After a wait() the lock was released, so the whole set must be
    // re-scanned from the start.
    for (int i = 0; i < _class_count; i++) {
      if (get_ik(_class_defs[i].klass)->is_being_redefined()) {
        RedefineClasses_lock->wait();
        has_redefined = true;
        break;  // for loop
      }
    }
  } while (has_redefined);
  // Claim all requested classes while still holding the lock.
  for (int i = 0; i < _class_count; i++) {
    get_ik(_class_defs[i].klass)->set_is_being_redefined(true);
  }
  RedefineClasses_lock->notify_all();
}
 104 
 105 void VM_RedefineClasses::unlock_classes() {
 106   MutexLocker ml(RedefineClasses_lock);
 107   for (int i = 0; i < _class_count; i++) {
 108     assert(get_ik(_class_defs[i].klass)->is_being_redefined(),
 109            "should be being redefined to get here");
 110     get_ik(_class_defs[i].klass)->set_is_being_redefined(false);
 111   }
 112   RedefineClasses_lock->notify_all();
 113 }
 114 
// Validate the redefinition request and load (but do not yet install)
// the new class versions. Runs in the requesting JavaThread before the
// VM operation reaches its safepoint. Returns false, with _res holding
// the JVMTI error code, to cancel the operation.
bool VM_RedefineClasses::doit_prologue() {
  if (_class_count == 0) {
    // Nothing to do; not an error per the JVMTI spec.
    _res = JVMTI_ERROR_NONE;
    return false;
  }
  if (_class_defs == NULL) {
    _res = JVMTI_ERROR_NULL_POINTER;
    return false;
  }
  // Per-definition sanity checks before any heavy lifting.
  for (int i = 0; i < _class_count; i++) {
    if (_class_defs[i].klass == NULL) {
      _res = JVMTI_ERROR_INVALID_CLASS;
      return false;
    }
    if (_class_defs[i].class_byte_count == 0) {
      _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
      return false;
    }
    if (_class_defs[i].class_bytes == NULL) {
      _res = JVMTI_ERROR_NULL_POINTER;
      return false;
    }

    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives and arrays and vm anonymous classes cannot be redefined
    // check here so following code can assume these classes are InstanceKlass
    if (!is_modifiable_class(mirror)) {
      _res = JVMTI_ERROR_UNMODIFIABLE_CLASS;
      return false;
    }
  }

  // Start timer after all the sanity checks; not quite accurate, but
  // better than adding a bunch of stop() calls.
  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_vm_op_prologue.start();
  }

  lock_classes();
  // We first load new class versions in the prologue, because somewhere down the
  // call chain it is required that the current thread is a Java thread.
  _res = load_new_class_versions(Thread::current());
  if (_res != JVMTI_ERROR_NONE) {
    // free any successfully created classes, since none are redefined
    for (int i = 0; i < _class_count; i++) {
      if (_scratch_classes[i] != NULL) {
        ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
        // Free the memory for this class at class unloading time.  Not before
        // because CMS might think this is still live.
        InstanceKlass* ik = get_ik(_class_defs[i].klass);
        if (ik->get_cached_class_file() == _scratch_classes[i]->get_cached_class_file()) {
          // Don't double-free cached_class_file copied from the original class if error.
          _scratch_classes[i]->set_cached_class_file(NULL);
        }
        cld->add_to_deallocate_list(InstanceKlass::cast(_scratch_classes[i]));
      }
    }
    // Free os::malloc allocated memory in load_new_class_version.
    os::free(_scratch_classes);
    // NOTE(review): stop() is called unconditionally although start()
    // above runs only when timer logging is enabled — confirm that
    // stopping an un-started elapsedTimer is harmless.
    _timer_vm_op_prologue.stop();
    unlock_classes();
    return false;
  }

  _timer_vm_op_prologue.stop();
  return true;
}
 182 
// Install the new class versions loaded in doit_prologue(). Presumably
// executes at a safepoint as part of the VM operation framework (the
// comments below refer to "the doit() phase of the safepoint") — so no
// Java threads are running while classes are swapped.
void VM_RedefineClasses::doit() {
  Thread *thread = Thread::current();

#if INCLUDE_CDS
  if (UseSharedSpaces) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
      log_info(redefine, class, load)("failed to remap shared readonly space to readwrite, private");
      _res = JVMTI_ERROR_INTERNAL;
      return;
    }
  }
#endif

  // Mark methods seen on stack and everywhere else so old methods are not
  // cleaned up if they're on the stack.
  MetadataOnStackMark md_on_stack(true);
  HandleMark hm(thread);   // make sure any handles created are deleted
                           // before the stack walk again.

  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
  }

  // Clean out MethodData pointing to old Method*
  // Have to do this after all classes are redefined and all methods that
  // are redefined are marked as old.
  MethodDataCleaner clean_weak_method_links;
  ClassLoaderDataGraph::classes_do(&clean_weak_method_links);

  // JSR-292 support
  if (_any_class_has_resolved_methods) {
    bool trace_name_printed = false;
    ResolvedMethodTable::adjust_method_entries(&trace_name_printed);
  }

  // Disable any dependent concurrent compilations
  SystemDictionary::notice_modification();

  // Set flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::set_has_redefined_a_class();

  // check_class() is optionally called for product bits, but is
  // always called for non-product bits.
  // (The #ifdef PRODUCT pair below guards only the 'if' statement and
  // its closing brace, so the calls in between are unconditional in
  // non-product builds.)
#ifdef PRODUCT
  if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
#endif
    log_trace(redefine, class, obsolete, metadata)("calling check_class");
    CheckClass check_class(thread);
    ClassLoaderDataGraph::classes_do(&check_class);
#ifdef PRODUCT
  }
#endif

  // Clean up any metadata now unreferenced while MetadataOnStackMark is set.
  ClassLoaderDataGraph::clean_deallocate_lists(false);
}
 244 
// Post-operation cleanup in the requesting thread: release the
// per-class redefinition marks taken in doit_prologue(), free the
// scratch-class array, and report timing if enabled.
void VM_RedefineClasses::doit_epilogue() {
  unlock_classes();

  // Free os::malloc allocated memory.
  os::free(_scratch_classes);

  // Reset the_class to null for error printing.
  _the_class = NULL;

  if (log_is_enabled(Info, redefine, class, timer)) {
    // Used to have separate timers for "doit" and "all", but the timer
    // overhead skewed the measurements.
    julong doit_time = _timer_rsc_phase1.milliseconds() +
                       _timer_rsc_phase2.milliseconds();
    julong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;

    log_info(redefine, class, timer)
      ("vm_op: all=" JULONG_FORMAT "  prologue=" JULONG_FORMAT "  doit=" JULONG_FORMAT,
       all_time, (julong)_timer_vm_op_prologue.milliseconds(), doit_time);
    log_info(redefine, class, timer)
      ("redefine_single_class: phase1=" JULONG_FORMAT "  phase2=" JULONG_FORMAT,
       (julong)_timer_rsc_phase1.milliseconds(), (julong)_timer_rsc_phase2.milliseconds());
  }
}
 269 
 270 bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
 271   // classes for primitives cannot be redefined
 272   if (java_lang_Class::is_primitive(klass_mirror)) {
 273     return false;
 274   }
 275   Klass* k = java_lang_Class::as_Klass(klass_mirror);
 276   // classes for arrays cannot be redefined
 277   if (k == NULL || !k->is_instance_klass()) {
 278     return false;
 279   }
 280 
 281   // Cannot redefine or retransform an anonymous class.
 282   if (InstanceKlass::cast(k)->is_anonymous()) {
 283     return false;
 284   }
 285   return true;
 286 }
 287 
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
// direct CP entries, there is just the current entry to append. For
// indirect and double-indirect CP entries, there are zero or more
// referenced CP entries along with the current entry to append.
// Indirect and double-indirect CP entries are handled by recursive
// calls to append_entry() as needed. The referenced CP entries are
// always appended to *merge_cp_p before the referee CP entry. These
// referenced CP entries may already exist in *merge_cp_p in which case
// there is nothing extra to append and only the current entry is
// appended.
//
//   scratch_i         - index of the entry to append in scratch_cp
//   merge_cp_p        - (in/out) the merged constant pool being built
//   merge_cp_length_p - (in/out) logical end of *merge_cp_p; advanced
//                       by one (two for Long/Double) per appended entry
void VM_RedefineClasses::append_entry(const constantPoolHandle& scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
       TRAPS) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    //
    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_Class:
    case JVM_CONSTANT_UnresolvedClass:
    {
      // Append (or find) the referenced name Utf8 entry first.
      int name_i = scratch_cp->klass_name_index_at(scratch_i);
      int new_name_i = find_or_append_indirect_entry(scratch_cp, name_i, merge_cp_p,
                                                     merge_cp_length_p, THREAD);

      if (new_name_i != name_i) {
        log_trace(redefine, class, constantpool)
          ("Class entry@%d name_index change: %d to %d",
           *merge_cp_length_p, name_i, new_name_i);
      }

      (*merge_cp_p)->temp_unresolved_klass_at_put(*merge_cp_length_p, new_name_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // 8-byte constants occupy two constant pool slots (JVMS 4.4.5).
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // Symbol*s so this entry can be directly appended.
    case JVM_CONSTANT_String:      // fall through
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
                                                         merge_cp_length_p, THREAD);

      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
                                                              merge_cp_p, merge_cp_length_p,
                                                              THREAD);

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_name_ref_i and new_signature_ref_i will both be 0.
      // In that case, all we are appending is the current entry.
      if (new_name_ref_i != name_ref_i) {
        log_trace(redefine, class, constantpool)
          ("NameAndType entry@%d name_ref_index change: %d to %d",
           *merge_cp_length_p, name_ref_i, new_name_ref_i);
      }
      if (new_signature_ref_i != signature_ref_i) {
        log_trace(redefine, class, constantpool)
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
           *merge_cp_length_p, signature_ref_i, new_signature_ref_i);
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // Both the referenced Class entry and the referenced NameAndType
      // entry must be present in the merged pool before the referee.
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      const char *entry_name = NULL;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        log_trace(redefine, class, constantpool)
          ("%s entry@%d class_index changed: %d to %d", entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i);
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        log_trace(redefine, class, constantpool)
          ("%s entry@%d name_and_type_index changed: %d to %d",
           entry_name, *merge_cp_length_p, name_and_type_ref_i, new_name_and_type_ref_i);
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodType:
    {
      int ref_i = scratch_cp->method_type_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        log_trace(redefine, class, constantpool)
          ("MethodType entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
      }
      (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodHandle:
    {
      // ref_kind is a small direct value (JVM_REF_*), only ref_i is an
      // index into the constant pool and needs remapping.
      int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i);
      int ref_i = scratch_cp->method_handle_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        log_trace(redefine, class, constantpool)
          ("MethodHandle entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
      }
      (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_Dynamic:  // fall through
    case JVM_CONSTANT_InvokeDynamic:
    {
      // Index of the bootstrap specifier in the operands array
      int old_bs_i = scratch_cp->invoke_dynamic_bootstrap_specifier_index(scratch_i);
      int new_bs_i = find_or_append_operand(scratch_cp, old_bs_i, merge_cp_p,
                                            merge_cp_length_p, THREAD);
      // The bootstrap method NameAndType_info index
      int old_ref_i = scratch_cp->invoke_dynamic_name_and_type_ref_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_bs_i != old_bs_i) {
        log_trace(redefine, class, constantpool)
          ("Dynamic entry@%d bootstrap_method_attr_index change: %d to %d",
           *merge_cp_length_p, old_bs_i, new_bs_i);
      }
      if (new_ref_i != old_ref_i) {
        log_trace(redefine, class, constantpool)
          ("Dynamic entry@%d name_and_type_index change: %d to %d", *merge_cp_length_p, old_ref_i, new_ref_i);
      }

      // Dynamic and InvokeDynamic share layout but must keep their tag.
      if (scratch_cp->tag_at(scratch_i).is_dynamic_constant())
        (*merge_cp_p)->dynamic_constant_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      else
        (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
    // here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      // (bad_value is intentionally "unused"; it exists so the bad tag
      // is visible in a core file / debugger when we abort below)
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
 562 
 563 
// Return the index in *merge_cp_p of an entry equal to scratch_cp[ref_i],
// appending it (recursively, via append_entry()) if no equal entry is
// already present. Records an index mapping whenever the merged index
// differs from ref_i.
int VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
      int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {

  int new_ref_i = ref_i;
  // Fast path: the entry at the very same index already matches.
  bool match = (ref_i < *merge_cp_length_p) &&
               scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i, THREAD);

  if (!match) {
    // forward reference in *merge_cp_p or not a direct match
    // (find_matching_entry() returns 0 for "not found" — constant pool
    // index 0 is never a valid entry.)
    int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p, THREAD);
    if (found_i != 0) {
      guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
      // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
      new_ref_i = found_i;
      map_index(scratch_cp, ref_i, found_i);
    } else {
      // no match found so we have to append this entry to *merge_cp_p
      append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p, THREAD);
      // The above call to append_entry() can only append one entry
      // so the post call query of *merge_cp_length_p is only for
      // the sake of consistency.
      new_ref_i = *merge_cp_length_p - 1;
    }
  }

  return new_ref_i;
} // end find_or_append_indirect_entry()
 591 
 592 
// Append a bootstrap specifier into the merge_cp operands that is semantically equal
// to the scratch_cp operands bootstrap specifier passed by the old_bs_i index.
// Recursively append new merge_cp entries referenced by the new bootstrap specifier.
void VM_RedefineClasses::append_operand(const constantPoolHandle& scratch_cp, int old_bs_i,
       constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {

  // First make sure the bootstrap method's MethodHandle entry exists
  // in the merged constant pool.
  int old_ref_i = scratch_cp->operand_bootstrap_method_ref_index_at(old_bs_i);
  int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                merge_cp_length_p, THREAD);
  if (new_ref_i != old_ref_i) {
    log_trace(redefine, class, constantpool)
      ("operands entry@%d bootstrap method ref_index change: %d to %d", _operands_cur_length, old_ref_i, new_ref_i);
  }

  Array<u2>* merge_ops = (*merge_cp_p)->operands();
  int new_bs_i = _operands_cur_length;
  // We have _operands_cur_length == 0 when the merge_cp operands is empty yet.
  // However, the operand_offset_at(0) was set in the extend_operands() call.
  int new_base = (new_bs_i == 0) ? (*merge_cp_p)->operand_offset_at(0)
                                 : (*merge_cp_p)->operand_next_offset_at(new_bs_i - 1);
  int argc     = scratch_cp->operand_argument_count_at(old_bs_i);

  // Specifier layout at new_base: bootstrap method ref, argument count,
  // then argc argument CP indices.
  ConstantPool::operand_offset_at_put(merge_ops, _operands_cur_length, new_base);
  merge_ops->at_put(new_base++, new_ref_i);
  merge_ops->at_put(new_base++, argc);

  for (int i = 0; i < argc; i++) {
    int old_arg_ref_i = scratch_cp->operand_argument_index_at(old_bs_i, i);
    int new_arg_ref_i = find_or_append_indirect_entry(scratch_cp, old_arg_ref_i, merge_cp_p,
                                                      merge_cp_length_p, THREAD);
    merge_ops->at_put(new_base++, new_arg_ref_i);
    if (new_arg_ref_i != old_arg_ref_i) {
      log_trace(redefine, class, constantpool)
        ("operands entry@%d bootstrap method argument ref_index change: %d to %d",
         _operands_cur_length, old_arg_ref_i, new_arg_ref_i);
    }
  }
  if (old_bs_i != _operands_cur_length) {
    // The bootstrap specifier in *merge_cp_p is at a different index than
    // that in scratch_cp so we need to map the index values.
    map_operand_index(old_bs_i, new_bs_i);
  }
  _operands_cur_length++;
} // end append_operand()
 637 
 638 
// Return the index in the merged operands array of a bootstrap
// specifier equal to scratch_cp's specifier old_bs_i, appending it via
// append_operand() when no equal specifier exists yet.
int VM_RedefineClasses::find_or_append_operand(const constantPoolHandle& scratch_cp,
      int old_bs_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {

  int new_bs_i = old_bs_i; // bootstrap specifier index
  // Fast path: the specifier at the very same index already matches.
  bool match = (old_bs_i < _operands_cur_length) &&
               scratch_cp->compare_operand_to(old_bs_i, *merge_cp_p, old_bs_i, THREAD);

  if (!match) {
    // forward reference in *merge_cp_p or not a direct match
    // (unlike constant pool entries, operand index 0 is valid, so the
    // "not found" sentinel here is -1 rather than 0)
    int found_i = scratch_cp->find_matching_operand(old_bs_i, *merge_cp_p,
                                                    _operands_cur_length, THREAD);
    if (found_i != -1) {
      guarantee(found_i != old_bs_i, "compare_operand_to() and find_matching_operand() disagree");
      // found a matching operand somewhere else in *merge_cp_p so just need a mapping
      new_bs_i = found_i;
      map_operand_index(old_bs_i, found_i);
    } else {
      // no match found so we have to append this bootstrap specifier to *merge_cp_p
      append_operand(scratch_cp, old_bs_i, merge_cp_p, merge_cp_length_p, THREAD);
      new_bs_i = _operands_cur_length - 1;
    }
  }
  return new_bs_i;
} // end find_or_append_operand()
 663 
 664 
// Trim the merged operands array to its final length, optionally trace
// the operand index mappings, and reset the per-merge operand state.
void VM_RedefineClasses::finalize_operands_merge(const constantPoolHandle& merge_cp, TRAPS) {
  if (merge_cp->operands() == NULL) {
    return;
  }
  // Shrink the merge_cp operands
  merge_cp->shrink_operands(_operands_cur_length, CHECK);

  if (log_is_enabled(Trace, redefine, class, constantpool)) {
    // don't want to loop unless we are tracing
    // (the map is sparse: -1 marks unmapped slots; count numbers only
    // the mapped entries in the output)
    int count = 0;
    for (int i = 1; i < _operands_index_map_p->length(); i++) {
      int value = _operands_index_map_p->at(i);
      if (value != -1) {
        log_trace(redefine, class, constantpool)("operands_index_map[%d]: old=%d new=%d", count, i, value);
        count++;
      }
    }
  }
  // Clean-up
  _operands_index_map_p = NULL;
  _operands_cur_length = 0;
  _operands_index_map_count = 0;
} // end finalize_operands_merge()
 688 
 689 // Symbol* comparator for qsort
 690 // The caller must have an active ResourceMark.
 691 static int symcmp(const void* a, const void* b) {
 692   char* astr = (*(Symbol**)a)->as_C_string();
 693   char* bstr = (*(Symbol**)b)->as_C_string();
 694   return strcmp(astr, bstr);
 695 }
 696 
 697 static jvmtiError check_nest_attributes(InstanceKlass* the_class,
 698                                         InstanceKlass* scratch_class) {
 699   // Check whether the class NestHost attribute has been changed.
 700   Thread* thread = Thread::current();
 701   ResourceMark rm(thread);
 702   JvmtiThreadState *state = JvmtiThreadState::state_for((JavaThread*)thread);
 703   u2 the_nest_host_idx = the_class->nest_host_index();
 704   u2 scr_nest_host_idx = scratch_class->nest_host_index();
 705 
 706   if (the_nest_host_idx != 0 && scr_nest_host_idx != 0) {
 707     Symbol* the_sym = the_class->constants()->klass_name_at(the_nest_host_idx);
 708     Symbol* scr_sym = scratch_class->constants()->klass_name_at(scr_nest_host_idx);
 709     if (the_sym != scr_sym) {
 710       log_trace(redefine, class, nestmates)
 711         ("redefined class %s attribute change error: NestHost class: %s replaced with: %s",
 712          the_class->external_name(), the_sym->as_C_string(), scr_sym->as_C_string());
 713       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
 714     }
 715   } else if ((the_nest_host_idx == 0) ^ (scr_nest_host_idx == 0)) {
 716     const char* action_str = (the_nest_host_idx != 0) ? "removed" : "added";
 717     log_trace(redefine, class, nestmates)
 718       ("redefined class %s attribute change error: NestHost attribute %s",
 719        the_class->external_name(), action_str);
 720     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
 721   }
 722 
 723   // Check whether the class NestMembers attribute has been changed.
 724   Array<u2>* the_nest_members = the_class->nest_members();
 725   Array<u2>* scr_nest_members = scratch_class->nest_members();
 726   bool the_members_exists = the_nest_members != Universe::the_empty_short_array();
 727   bool scr_members_exists = scr_nest_members != Universe::the_empty_short_array();
 728 
 729   int members_len = the_nest_members->length();
 730   if (the_members_exists && scr_members_exists) {
 731     if (members_len != scr_nest_members->length()) {
 732       log_trace(redefine, class, nestmates)
 733         ("redefined class %s attribute change error: NestMember len=%d changed to len=%d",
 734          the_class->external_name(), members_len, scr_nest_members->length());
 735       return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
 736     }
 737 
 738     // The order of entries in the NestMembers array is not specified so we
 739     // have to explicitly check for the same contents. We do this by copying
 740     // the referenced symbols into their own arrays, sorting them and then
 741     // comparing each element pair.
 742 
 743     Symbol** the_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, members_len);
 744     Symbol** scr_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, members_len);
 745 
 746     if (the_syms == NULL || scr_syms == NULL) {
 747       return JVMTI_ERROR_OUT_OF_MEMORY;
 748     }
 749 
 750     for (int i = 0; i < members_len; i++) {
 751       int the_cp_index = the_nest_members->at(i);
 752       int scr_cp_index = scr_nest_members->at(i);
 753       the_syms[i] = the_class->constants()->klass_name_at(the_cp_index);
 754       scr_syms[i] = scratch_class->constants()->klass_name_at(scr_cp_index);
 755     }
 756 
 757     qsort(the_syms, members_len, sizeof(Symbol*), symcmp);
 758     qsort(scr_syms, members_len, sizeof(Symbol*), symcmp);
 759 
 760     for (int i = 0; i < members_len; i++) {
 761       if (the_syms[i] != scr_syms[i]) {
 762         log_trace(redefine, class, nestmates)
 763           ("redefined class %s attribute change error: NestMembers[%d]: %s changed to %s",
 764            the_class->external_name(), i, the_syms[i]->as_C_string(), scr_syms[i]->as_C_string());
 765         return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
 766       }
 767     }
 768   } else if (the_members_exists ^ scr_members_exists) {
 769     const char* action_str = (the_members_exists) ? "removed" : "added";
 770     log_trace(redefine, class, nestmates)
 771       ("redefined class %s attribute change error: NestMembers attribute %s",
 772        the_class->external_name(), action_str);
 773     return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
 774   }
 775 
 776   return JVMTI_ERROR_NONE;
 777 }
 778 
// Compare the_class (the currently loaded version) with scratch_class
// (the proposed replacement) and verify that only redefinition-safe
// changes were made: same superclass name, same directly implemented
// interfaces, same class modifiers, identical field schema, and no
// method additions/deletions other than private static/final methods.
// As a side effect, scratch_class is "normalized": overloaded new
// methods are reordered to match the order of their matching old
// methods, and method idnums are adjusted so a matched method keeps
// the idnum of its old version.
// Returns JVMTI_ERROR_NONE when the versions are compatible, otherwise
// the JVMTI error code describing the first unsupported change found.
jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
             InstanceKlass* the_class,
             InstanceKlass* scratch_class) {
  int i;

  // Check superclasses, or rather their names, since superclasses themselves can be
  // requested to replace.
  // Check for NULL superclass first since this might be java.lang.Object
  if (the_class->super() != scratch_class->super() &&
      (the_class->super() == NULL || scratch_class->super() == NULL ||
       the_class->super()->name() !=
       scratch_class->super()->name())) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }

  // Check if the number, names and order of directly implemented interfaces are the same.
  // I think in principle we should just check if the sets of names of directly implemented
  // interfaces are the same, i.e. the order of declaration (which, however, if changed in the
  // .java file, also changes in .class file) should not matter. However, comparing sets is
  // technically a bit more difficult, and, more importantly, I am not sure at present that the
  // order of interfaces does not matter on the implementation level, i.e. that the VM does not
  // rely on it somewhere.
  Array<Klass*>* k_interfaces = the_class->local_interfaces();
  Array<Klass*>* k_new_interfaces = scratch_class->local_interfaces();
  int n_intfs = k_interfaces->length();
  if (n_intfs != k_new_interfaces->length()) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }
  for (i = 0; i < n_intfs; i++) {
    if (k_interfaces->at(i)->name() !=
        k_new_interfaces->at(i)->name()) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
    }
  }

  // Check whether class is in the error init state.
  if (the_class->is_in_error_state()) {
    // TBD #5057930: special error code is needed in 1.6
    return JVMTI_ERROR_INVALID_CLASS;
  }

  // Check whether the nest-related attributes have been changed.
  jvmtiError err = check_nest_attributes(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether class modifiers are the same.
  jushort old_flags = (jushort) the_class->access_flags().get_flags();
  jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
  if (old_flags != new_flags) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
  }

  // Check if the number, names, types and order of fields declared in these classes
  // are the same.
  JavaFieldStream old_fs(the_class);
  JavaFieldStream new_fs(scratch_class);
  for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
    // access
    old_flags = old_fs.access_flags().as_short();
    new_flags = new_fs.access_flags().as_short();
    // Any difference in a recognized field modifier is a schema change.
    if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // offset
    if (old_fs.offset() != new_fs.offset()) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // name and signature
    Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
    Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
    Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
    Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
    if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
  }

  // If both streams aren't done then we have a differing number of
  // fields.
  if (!old_fs.done() || !new_fs.done()) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
  }

  // Do a parallel walk through the old and new methods. Detect
  // cases where they match (exist in both), have been added in
  // the new methods, or have been deleted (exist only in the
  // old methods).  The class file parser places methods in order
  // by method name, but does not order overloaded methods by
  // signature.  In order to determine what fate befell the methods,
  // this code places the overloaded new methods that have matching
  // old methods in the same order as the old methods and places
  // new overloaded methods at the end of overloaded methods of
  // that name. The code for this order normalization is adapted
  // from the algorithm used in InstanceKlass::find_method().
  // Since we are swapping out of order entries as we find them,
  // we only have to search forward through the overloaded methods.
  // Methods which are added and have the same name as an existing
  // method (but different signature) will be put at the end of
  // the methods with that name, and the name mismatch code will
  // handle them.
  Array<Method*>* k_old_methods(the_class->methods());
  Array<Method*>* k_new_methods(scratch_class->methods());
  int n_old_methods = k_old_methods->length();
  int n_new_methods = k_new_methods->length();
  Thread* thread = Thread::current();

  int ni = 0;
  int oi = 0;
  while (true) {
    Method* k_old_method;
    Method* k_new_method;
    enum { matched, added, deleted, undetermined } method_was = undetermined;

    if (oi >= n_old_methods) {
      if (ni >= n_new_methods) {
        break; // we've looked at everything, done
      }
      // New method at the end
      k_new_method = k_new_methods->at(ni);
      method_was = added;
    } else if (ni >= n_new_methods) {
      // Old method, at the end, is deleted
      k_old_method = k_old_methods->at(oi);
      method_was = deleted;
    } else {
      // There are more methods in both the old and new lists
      k_old_method = k_old_methods->at(oi);
      k_new_method = k_new_methods->at(ni);
      if (k_old_method->name() != k_new_method->name()) {
        // Methods are sorted by method name, so a mismatch means added
        // or deleted
        if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
          method_was = added;
        } else {
          method_was = deleted;
        }
      } else if (k_old_method->signature() == k_new_method->signature()) {
        // Both the name and signature match
        method_was = matched;
      } else {
        // The name matches, but the signature doesn't, which means we have to
        // search forward through the new overloaded methods.
        int nj;  // outside the loop for post-loop check
        for (nj = ni + 1; nj < n_new_methods; nj++) {
          Method* m = k_new_methods->at(nj);
          if (k_old_method->name() != m->name()) {
            // reached another method name so no more overloaded methods
            method_was = deleted;
            break;
          }
          if (k_old_method->signature() == m->signature()) {
            // found a match so swap the methods
            k_new_methods->at_put(ni, m);
            k_new_methods->at_put(nj, k_new_method);
            k_new_method = m;
            method_was = matched;
            break;
          }
        }

        if (nj >= n_new_methods) {
          // reached the end without a match; so method was deleted
          method_was = deleted;
        }
      }
    }

    switch (method_was) {
    case matched:
      // methods match, be sure modifiers do too
      old_flags = (jushort) k_old_method->access_flags().get_flags();
      new_flags = (jushort) k_new_method->access_flags().get_flags();
      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
      }
      {
        // A matched new method keeps the idnum of its old counterpart.
        // If another scratch method already owns that idnum, swap the
        // idnums so both stay unique.
        u2 new_num = k_new_method->method_idnum();
        u2 old_num = k_old_method->method_idnum();
        if (new_num != old_num) {
          Method* idnum_owner = scratch_class->method_with_idnum(old_num);
          if (idnum_owner != NULL) {
            // There is already a method assigned this idnum -- switch them
            // Take current and original idnum from the new_method
            idnum_owner->set_method_idnum(new_num);
            idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
          }
          // Take current and original idnum from the old_method
          k_new_method->set_method_idnum(old_num);
          k_new_method->set_orig_method_idnum(k_old_method->orig_method_idnum());
          // A pending exception here is treated as an allocation failure.
          if (thread->has_pending_exception()) {
            return JVMTI_ERROR_OUT_OF_MEMORY;
          }
        }
      }
      log_trace(redefine, class, normalize)
        ("Method matched: new: %s [%d] == old: %s [%d]",
         k_new_method->name_and_sig_as_C_string(), ni, k_old_method->name_and_sig_as_C_string(), oi);
      // advance to next pair of methods
      ++oi;
      ++ni;
      break;
    case added:
      // method added, see if it is OK
      new_flags = (jushort) k_new_method->access_flags().get_flags();
      if ((new_flags & JVM_ACC_PRIVATE) == 0
           // hack: private should be treated as final, but alas
          || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
         ) {
        // new methods must be private
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
      }
      {
        // Added methods get a fresh idnum from the old class so existing
        // idnums are not disturbed.
        u2 num = the_class->next_method_idnum();
        if (num == ConstMethod::UNSET_IDNUM) {
          // cannot add any more methods
          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
        }
        u2 new_num = k_new_method->method_idnum();
        Method* idnum_owner = scratch_class->method_with_idnum(num);
        if (idnum_owner != NULL) {
          // There is already a method assigned this idnum -- switch them
          // Take current and original idnum from the new_method
          idnum_owner->set_method_idnum(new_num);
          idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
        }
        k_new_method->set_method_idnum(num);
        k_new_method->set_orig_method_idnum(num);
        // A pending exception here is treated as an allocation failure.
        if (thread->has_pending_exception()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        }
      }
      log_trace(redefine, class, normalize)
        ("Method added: new: %s [%d]", k_new_method->name_and_sig_as_C_string(), ni);
      ++ni; // advance to next new method
      break;
    case deleted:
      // method deleted, see if it is OK
      old_flags = (jushort) k_old_method->access_flags().get_flags();
      if ((old_flags & JVM_ACC_PRIVATE) == 0
           // hack: private should be treated as final, but alas
          || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
         ) {
        // deleted methods must be private
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
      }
      log_trace(redefine, class, normalize)
        ("Method deleted: old: %s [%d]", k_old_method->name_and_sig_as_C_string(), oi);
      ++oi; // advance to next old method
      break;
    default:
      ShouldNotReachHere();
    }
  }

  return JVMTI_ERROR_NONE;
}
1037 
1038 
1039 // Find new constant pool index value for old constant pool index value
1040 // by seaching the index map. Returns zero (0) if there is no mapped
1041 // value for the old constant pool index.
1042 int VM_RedefineClasses::find_new_index(int old_index) {
1043   if (_index_map_count == 0) {
1044     // map is empty so nothing can be found
1045     return 0;
1046   }
1047 
1048   if (old_index < 1 || old_index >= _index_map_p->length()) {
1049     // The old_index is out of range so it is not mapped. This should
1050     // not happen in regular constant pool merging use, but it can
1051     // happen if a corrupt annotation is processed.
1052     return 0;
1053   }
1054 
1055   int value = _index_map_p->at(old_index);
1056   if (value == -1) {
1057     // the old_index is not mapped
1058     return 0;
1059   }
1060 
1061   return value;
1062 } // end find_new_index()
1063 
1064 
1065 // Find new bootstrap specifier index value for old bootstrap specifier index
1066 // value by seaching the index map. Returns unused index (-1) if there is
1067 // no mapped value for the old bootstrap specifier index.
1068 int VM_RedefineClasses::find_new_operand_index(int old_index) {
1069   if (_operands_index_map_count == 0) {
1070     // map is empty so nothing can be found
1071     return -1;
1072   }
1073 
1074   if (old_index == -1 || old_index >= _operands_index_map_p->length()) {
1075     // The old_index is out of range so it is not mapped.
1076     // This should not happen in regular constant pool merging use.
1077     return -1;
1078   }
1079 
1080   int value = _operands_index_map_p->at(old_index);
1081   if (value == -1) {
1082     // the old_index is not mapped
1083     return -1;
1084   }
1085 
1086   return value;
1087 } // end find_new_operand_index()
1088 
1089 
1090 // Returns true if the current mismatch is due to a resolved/unresolved
1091 // class pair. Otherwise, returns false.
1092 bool VM_RedefineClasses::is_unresolved_class_mismatch(const constantPoolHandle& cp1,
1093        int index1, const constantPoolHandle& cp2, int index2) {
1094 
1095   jbyte t1 = cp1->tag_at(index1).value();
1096   if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
1097     return false;  // wrong entry type; not our special case
1098   }
1099 
1100   jbyte t2 = cp2->tag_at(index2).value();
1101   if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
1102     return false;  // wrong entry type; not our special case
1103   }
1104 
1105   if (t1 == t2) {
1106     return false;  // not a mismatch; not our special case
1107   }
1108 
1109   char *s1 = cp1->klass_name_at(index1)->as_C_string();
1110   char *s2 = cp2->klass_name_at(index2)->as_C_string();
1111   if (strcmp(s1, s2) != 0) {
1112     return false;  // strings don't match; not our special case
1113   }
1114 
1115   return true;  // made it through the gauntlet; this is our special case
1116 } // end is_unresolved_class_mismatch()
1117 
1118 
// Load and prepare the scratch (new) class versions for all classes
// being redefined. For each class this:
//  - parses the new class bytes (with the class-being-redefined state
//    set so the ClassFileLoadHook machinery can redirect to it),
//  - links the old class if it is not yet linked,
//  - checks that the change is a supported redefinition
//    (compare_and_normalize_class_versions),
//  - verifies the new bytecodes, merges the constant pools, and
//    rewrites and links the new methods.
// The parsed classes are stored in _scratch_classes; on error the
// caller deallocates whatever was stored there and the returned JVMTI
// error code describes the failure.
jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {

  // For consistency allocate memory using os::malloc wrapper.
  _scratch_classes = (InstanceKlass**)
    os::malloc(sizeof(InstanceKlass*) * _class_count, mtClass);
  if (_scratch_classes == NULL) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }
  // Zero initialize the _scratch_classes array.
  for (int i = 0; i < _class_count; i++) {
    _scratch_classes[i] = NULL;
  }

  ResourceMark rm(THREAD);

  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  // state can only be NULL if the current thread is exiting which
  // should not happen since we're trying to do a RedefineClasses
  guarantee(state != NULL, "exiting thread calling load_new_class_versions");
  for (int i = 0; i < _class_count; i++) {
    // Create HandleMark so that any handles created while loading new class
    // versions are deleted. Constant pools are deallocated while merging
    // constant pools
    HandleMark hm(THREAD);
    InstanceKlass* the_class = get_ik(_class_defs[i].klass);
    Symbol*  the_class_sym = the_class->name();

    log_debug(redefine, class, load)
      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
       the_class->external_name(), _class_load_kind, os::available_memory() >> 10);

    // Stream over the class bytes supplied by the agent for this class.
    ClassFileStream st((u1*)_class_defs[i].class_bytes,
                       _class_defs[i].class_byte_count,
                       "__VM_RedefineClasses__",
                       ClassFileStream::verify);

    // Parse the stream.
    Handle the_class_loader(THREAD, the_class->class_loader());
    Handle protection_domain(THREAD, the_class->protection_domain());
    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(the_class, _class_load_kind);

    InstanceKlass* scratch_class = SystemDictionary::parse_stream(
                                                      the_class_sym,
                                                      the_class_loader,
                                                      protection_domain,
                                                      &st,
                                                      THREAD);
    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    // Need to clean up allocated InstanceKlass if there's an error so assign
    // the result here. Caller deallocates all the scratch classes in case of
    // an error.
    _scratch_classes[i] = scratch_class;

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("parse_stream exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;

      // Map the parse exception to the corresponding JVMTI error code.
      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        log_info(redefine, class, load, exceptions)("link_class exception: '%s'", ex_name->as_C_string());
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(the_class, scratch_class, state);
      Verifier::verify(
        scratch_class, Verifier::ThrowException, true, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("verify_byte_codes exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("merge_cp_and_rewrite exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    if (VerifyMergedCPBytecodes) {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(the_class, scratch_class, state);
        Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        log_info(redefine, class, load, exceptions)
          ("verify_byte_codes post merge-CP exception: '%s'", ex_name->as_C_string());
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Rewrite bytecodes against the merged constant pool, then link the
    // new methods (skipped if rewriting already posted an exception).
    Rewriter::rewrite(scratch_class, THREAD);
    if (!HAS_PENDING_EXCEPTION) {
      scratch_class->link_methods(THREAD);
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)
        ("Rewriter::rewrite or link_methods exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    log_debug(redefine, class, load)
      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)", the_class->external_name(), os::available_memory() >> 10);
  }

  return JVMTI_ERROR_NONE;
}
1306 
1307 
1308 // Map old_index to new_index as needed. scratch_cp is only needed
1309 // for log calls.
1310 void VM_RedefineClasses::map_index(const constantPoolHandle& scratch_cp,
1311        int old_index, int new_index) {
1312   if (find_new_index(old_index) != 0) {
1313     // old_index is already mapped
1314     return;
1315   }
1316 
1317   if (old_index == new_index) {
1318     // no mapping is needed
1319     return;
1320   }
1321 
1322   _index_map_p->at_put(old_index, new_index);
1323   _index_map_count++;
1324 
1325   log_trace(redefine, class, constantpool)
1326     ("mapped tag %d at index %d to %d", scratch_cp->tag_at(old_index).value(), old_index, new_index);
1327 } // end map_index()
1328 
1329 
// Map old_index to new_index as needed.
// Records a bootstrap specifier index mapping in _operands_index_map_p
// unless old_index is already mapped or the mapping is the identity.
void VM_RedefineClasses::map_operand_index(int old_index, int new_index) {
  if (find_new_operand_index(old_index) != -1) {
    // old_index is already mapped
    return;
  }

  if (old_index == new_index) {
    // no mapping is needed
    return;
  }

  _operands_index_map_p->at_put(old_index, new_index);
  _operands_index_map_count++;

  log_trace(redefine, class, constantpool)("mapped bootstrap specifier at index %d to %d", old_index, new_index);
} // end map_operand_index()
1347 
1348 
// Merge old_cp and scratch_cp and return the results of the merge via
// merge_cp_p. The number of entries in *merge_cp_p is returned via
// merge_cp_length_p. The entries in old_cp occupy the same locations
// in *merge_cp_p. Also creates a map of indices from entries in
// scratch_cp to the corresponding entry in *merge_cp_p. Index map
// entries are only created for entries in scratch_cp that occupy a
// different location in *merge_cp_p.
//
// Returns false (without throwing) on robustness-check failures;
// allocation failures inside the copy/append helpers propagate via
// the TRAPS/CHECK_0 mechanism (CHECK_0 returns false on a pending
// exception).
bool VM_RedefineClasses::merge_constant_pools(const constantPoolHandle& old_cp,
       const constantPoolHandle& scratch_cp, constantPoolHandle *merge_cp_p,
       int *merge_cp_length_p, TRAPS) {

  if (merge_cp_p == NULL) {
    assert(false, "caller must provide scratch constantPool");
    return false; // robustness
  }
  if (merge_cp_length_p == NULL) {
    assert(false, "caller must provide scratch CP length");
    return false; // robustness
  }
  // Worst case we need old_cp->length() + scratch_cp()->length(),
  // but the caller might be smart so make sure we have at least
  // the minimum.
  if ((*merge_cp_p)->length() < old_cp->length()) {
    assert(false, "merge area too small");
    return false; // robustness
  }

  log_info(redefine, class, constantpool)("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(), scratch_cp->length());

  {
    // Pass 0:
    // The old_cp is copied to *merge_cp_p; this means that any code
    // using old_cp does not have to change. This work looks like a
    // perfect fit for ConstantPool*::copy_cp_to(), but we need to
    // handle one special case:
    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
    // This will make verification happy.

    int old_i;  // index into old_cp

    // index zero (0) is not used in constantPools
    for (old_i = 1; old_i < old_cp->length(); old_i++) {
      // leave debugging crumb
      jbyte old_tag = old_cp->tag_at(old_i).value();
      switch (old_tag) {
      case JVM_CONSTANT_Class:
      case JVM_CONSTANT_UnresolvedClass:
        // revert the copy to JVM_CONSTANT_UnresolvedClass
        // May be resolving while calling this so do the same for
        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
        (*merge_cp_p)->temp_unresolved_klass_at_put(old_i,
          old_cp->klass_name_index_at(old_i));
        break;

      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // just copy the entry to *merge_cp_p, but double and long take
        // two constant pool entries
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        old_i++;
        break;

      default:
        // just copy the entry to *merge_cp_p
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        break;
      }
    } // end for each old_cp entry

    // Copy the BootstrapMethods operands array, then grow it to make
    // room for scratch_cp's operands as well.
    ConstantPool::copy_operands(old_cp, *merge_cp_p, CHECK_0);
    (*merge_cp_p)->extend_operands(scratch_cp, CHECK_0);

    // We don't need to sanity check that *merge_cp_length_p is within
    // *merge_cp_p bounds since we have the minimum on-entry check above.
    (*merge_cp_length_p) = old_i;
  }

  // merge_cp_len should be the same as old_cp->length() at this point
  // so this trace message is really a "warm-and-breathing" message.
  log_debug(redefine, class, constantpool)("after pass 0: merge_cp_len=%d", *merge_cp_length_p);

  int scratch_i;  // index into scratch_cp
  {
    // Pass 1a:
    // Compare scratch_cp entries to the old_cp entries that we have
    // already copied to *merge_cp_p. In this pass, we are eliminating
    // exact duplicates (matching entry at same index) so we only
    // compare entries in the common indice range.
    int increment = 1;
    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
        scratch_i, CHECK_0);
      if (match) {
        // found a match at the same index so nothing more to do
        continue;
      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
                                              *merge_cp_p, scratch_i)) {
        // The mismatch in compare_entry_to() above is because of a
        // resolved versus unresolved class entry at the same index
        // with the same string value. Since Pass 0 reverted any
        // class entries to unresolved class entries in *merge_cp_p,
        // we go with the unresolved class entry.
        continue;
      }

      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
        CHECK_0);
      if (found_i != 0) {
        guarantee(found_i != scratch_i,
          "compare_entry_to() and find_matching_entry() do not agree");

        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // The find_matching_entry() call above could fail to find a match
      // due to a resolved versus unresolved class or string entry situation
      // like we solved above with the is_unresolved_*_mismatch() calls.
      // However, we would have to call is_unresolved_*_mismatch() over
      // all of *merge_cp_p (potentially) and that doesn't seem to be
      // worth the time.

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }
  }

  log_debug(redefine, class, constantpool)
    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
     *merge_cp_length_p, scratch_i, _index_map_count);

  if (scratch_i < scratch_cp->length()) {
    // Pass 1b:
    // old_cp is smaller than scratch_cp so there are entries in
    // scratch_cp that we have not yet processed. We take care of
    // those now.
    int increment = 1;
    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      int found_i =
        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
      if (found_i != 0) {
        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }

    log_debug(redefine, class, constantpool)
      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
       *merge_cp_length_p, scratch_i, _index_map_count);
  }
  // Shrink/patch the merged operands array now that all entries are in.
  // Passes THREAD (not CHECK_0) so any pending exception is left for
  // the caller to notice.
  finalize_operands_merge(*merge_cp_p, THREAD);

  return true;
} // end merge_constant_pools()
1539 
1540 
1541 // Scoped object to clean up the constant pool(s) created for merging
1542 class MergeCPCleaner {
1543   ClassLoaderData*   _loader_data;
1544   ConstantPool*      _cp;
1545   ConstantPool*      _scratch_cp;
1546  public:
1547   MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
1548                  _loader_data(loader_data), _cp(merge_cp), _scratch_cp(NULL) {}
1549   ~MergeCPCleaner() {
1550     _loader_data->add_to_deallocate_list(_cp);
1551     if (_scratch_cp != NULL) {
1552       _loader_data->add_to_deallocate_list(_scratch_cp);
1553     }
1554   }
1555   void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
1556 };
1557 
// Merge constant pools between the_class and scratch_class and
// potentially rewrite bytecodes in scratch_class to use the merged
// constant pool.
//
// Returns JVMTI_ERROR_NONE on success; JVMTI_ERROR_OUT_OF_MEMORY if
// pool allocation fails; JVMTI_ERROR_INTERNAL if the merge or the
// bytecode rewrite fails, or if the_class was redefined concurrently.
jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
             InstanceKlass* the_class, InstanceKlass* scratch_class,
             TRAPS) {
  // worst case merged constant pool length is old and new combined
  int merge_cp_length = the_class->constants()->length()
        + scratch_class->constants()->length();

  // Constant pools are not easily reused so we allocate a new one
  // each time.
  // merge_cp is created unsafe for concurrent GC processing.  It
  // should be marked safe before discarding it. Even though
  // garbage,  if it crosses a card boundary, it may be scanned
  // in order to find the start of the first complete object on the card.
  ClassLoaderData* loader_data = the_class->class_loader_data();
  ConstantPool* merge_cp_oop =
    ConstantPool::allocate(loader_data,
                           merge_cp_length,
                           CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);

  HandleMark hm(THREAD);  // make sure handles are cleared before
                          // MergeCPCleaner clears out merge_cp_oop
  constantPoolHandle merge_cp(THREAD, merge_cp_oop);

  // Get constants() from the old class because it could have been rewritten
  // while we were at a safepoint allocating a new constant pool.
  constantPoolHandle old_cp(THREAD, the_class->constants());
  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());

  // If the length changed, the class was redefined out from under us. Return
  // an error.
  if (merge_cp_length != the_class->constants()->length()
         + scratch_class->constants()->length()) {
    return JVMTI_ERROR_INTERNAL;
  }

  // Update the version number of the constant pools (may keep scratch_cp)
  merge_cp->increment_and_save_version(old_cp->version());
  scratch_cp->increment_and_save_version(old_cp->version());

  ResourceMark rm(THREAD);
  // _index_map_p maps each scratch_cp index to its (possibly moved)
  // index in the merged pool; -1 means "no mapping / unchanged".
  _index_map_count = 0;
  _index_map_p = new intArray(scratch_cp->length(), scratch_cp->length(), -1);

  // Same idea for the BootstrapMethods operands array.
  _operands_cur_length = ConstantPool::operand_array_length(old_cp->operands());
  _operands_index_map_count = 0;
  int operands_index_map_len = ConstantPool::operand_array_length(scratch_cp->operands());
  _operands_index_map_p = new intArray(operands_index_map_len, operands_index_map_len, -1);

  // reference to the cp holder is needed for copy_operands()
  merge_cp->set_pool_holder(scratch_class);
  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
                  &merge_cp_length, THREAD);
  merge_cp->set_pool_holder(NULL);

  if (!result) {
    // The merge can fail due to memory allocation failure or due
    // to robustness checks.
    return JVMTI_ERROR_INTERNAL;
  }

  log_info(redefine, class, constantpool)("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count);

  if (_index_map_count == 0) {
    // there is nothing to map between the new and merged constant pools

    if (old_cp->length() == scratch_cp->length()) {
      // The old and new constant pools are the same length and the
      // index map is empty. This means that the three constant pools
      // are equivalent (but not the same). Unfortunately, the new
      // constant pool has not gone through link resolution nor have
      // the new class bytecodes gone through constant pool cache
      // rewriting so we can't use the old constant pool with the new
      // class.

      // toss the merged constant pool at return
    } else if (old_cp->length() < scratch_cp->length()) {
      // The old constant pool has fewer entries than the new constant
      // pool and the index map is empty. This means the new constant
      // pool is a superset of the old constant pool. However, the old
      // class bytecodes have already gone through constant pool cache
      // rewriting so we can't use the new constant pool with the old
      // class.

      // toss the merged constant pool at return
    } else {
      // The old constant pool has more entries than the new constant
      // pool and the index map is empty. This means that both the old
      // and merged constant pools are supersets of the new constant
      // pool.

      // Replace the new constant pool with a shrunken copy of the
      // merged constant pool
      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                            CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
      // The new constant pool replaces scratch_cp so have cleaner clean it up.
      // It can't be cleaned up while there are handles to it.
      cp_cleaner.add_scratch_cp(scratch_cp());
    }
  } else {
    if (log_is_enabled(Trace, redefine, class, constantpool)) {
      // don't want to loop unless we are tracing
      int count = 0;
      for (int i = 1; i < _index_map_p->length(); i++) {
        int value = _index_map_p->at(i);

        if (value != -1) {
          log_trace(redefine, class, constantpool)("index_map[%d]: old=%d new=%d", count, i, value);
          count++;
        }
      }
    }

    // We have entries mapped between the new and merged constant pools
    // so we have to rewrite some constant pool references.
    if (!rewrite_cp_refs(scratch_class, THREAD)) {
      return JVMTI_ERROR_INTERNAL;
    }

    // Replace the new constant pool with a shrunken copy of the
    // merged constant pool so now the rewritten bytecodes have
    // valid references; the previous new constant pool will get
    // GCed.
    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                          CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
    // The new constant pool replaces scratch_cp so have cleaner clean it up.
    // It can't be cleaned up while there are handles to it.
    cp_cleaner.add_scratch_cp(scratch_cp());
  }

  return JVMTI_ERROR_NONE;
} // end merge_cp_and_rewrite()
1693 
1694 
1695 // Rewrite constant pool references in klass scratch_class.
1696 bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class,
1697        TRAPS) {
1698 
1699   // rewrite constant pool references in the nest attributes:
1700   if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
1701     // propagate failure back to caller
1702     return false;
1703   }
1704 
1705   // rewrite constant pool references in the methods:
1706   if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
1707     // propagate failure back to caller
1708     return false;
1709   }
1710 
1711   // rewrite constant pool references in the class_annotations:
1712   if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
1713     // propagate failure back to caller
1714     return false;
1715   }
1716 
1717   // rewrite constant pool references in the fields_annotations:
1718   if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
1719     // propagate failure back to caller
1720     return false;
1721   }
1722 
1723   // rewrite constant pool references in the methods_annotations:
1724   if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
1725     // propagate failure back to caller
1726     return false;
1727   }
1728 
1729   // rewrite constant pool references in the methods_parameter_annotations:
1730   if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
1731          THREAD)) {
1732     // propagate failure back to caller
1733     return false;
1734   }
1735 
1736   // rewrite constant pool references in the methods_default_annotations:
1737   if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
1738          THREAD)) {
1739     // propagate failure back to caller
1740     return false;
1741   }
1742 
1743   // rewrite constant pool references in the class_type_annotations:
1744   if (!rewrite_cp_refs_in_class_type_annotations(scratch_class, THREAD)) {
1745     // propagate failure back to caller
1746     return false;
1747   }
1748 
1749   // rewrite constant pool references in the fields_type_annotations:
1750   if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class, THREAD)) {
1751     // propagate failure back to caller
1752     return false;
1753   }
1754 
1755   // rewrite constant pool references in the methods_type_annotations:
1756   if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class, THREAD)) {
1757     // propagate failure back to caller
1758     return false;
1759   }
1760 
1761   // There can be type annotations in the Code part of a method_info attribute.
1762   // These annotations are not accessible, even by reflection.
1763   // Currently they are not even parsed by the ClassFileParser.
1764   // If runtime access is added they will also need to be rewritten.
1765 
1766   // rewrite source file name index:
1767   u2 source_file_name_idx = scratch_class->source_file_name_index();
1768   if (source_file_name_idx != 0) {
1769     u2 new_source_file_name_idx = find_new_index(source_file_name_idx);
1770     if (new_source_file_name_idx != 0) {
1771       scratch_class->set_source_file_name_index(new_source_file_name_idx);
1772     }
1773   }
1774 
1775   // rewrite class generic signature index:
1776   u2 generic_signature_index = scratch_class->generic_signature_index();
1777   if (generic_signature_index != 0) {
1778     u2 new_generic_signature_index = find_new_index(generic_signature_index);
1779     if (new_generic_signature_index != 0) {
1780       scratch_class->set_generic_signature_index(new_generic_signature_index);
1781     }
1782   }
1783 
1784   return true;
1785 } // end rewrite_cp_refs()
1786 
1787 // Rewrite constant pool references in the NestHost and NestMembers attributes.
1788 bool VM_RedefineClasses::rewrite_cp_refs_in_nest_attributes(
1789        InstanceKlass* scratch_class) {
1790 
1791   u2 cp_index = scratch_class->nest_host_index();
1792   if (cp_index != 0) {
1793     scratch_class->set_nest_host_index(find_new_index(cp_index));
1794   }
1795   Array<u2>* nest_members = scratch_class->nest_members();
1796   for (int i = 0; i < nest_members->length(); i++) {
1797     u2 cp_index = nest_members->at(i);
1798     nest_members->at_put(i, find_new_index(cp_index));
1799   }
1800   return true;
1801 }
1802 
// Rewrite constant pool references in the methods.
// Returns false (with the pending exception cleared) if rewriting any
// method raised an exception; the caller turns that into
// JVMTI_ERROR_INTERNAL.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
       InstanceKlass* scratch_class, TRAPS) {

  Array<Method*>* methods = scratch_class->methods();

  if (methods == NULL || methods->length() == 0) {
    // no methods so nothing to do
    return true;
  }

  // rewrite constant pool references in the methods:
  for (int i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    methodHandle new_method;
    rewrite_cp_refs_in_method(method, &new_method, THREAD);
    if (!new_method.is_null()) {
      // the method has been replaced so save the new method version
      // even in the case of an exception.  original method is on the
      // deallocation list.
      methods->at_put(i, new_method());
    }
    // Check for a pending exception only after installing any
    // replacement method, so the class never keeps a stale Method*.
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("rewrite_cp_refs_in_method exception: '%s'", ex_name->as_C_string());
      // Need to clear pending exception here as the super caller sets
      // the JVMTI_ERROR_INTERNAL if the returned value is false.
      CLEAR_PENDING_EXCEPTION;
      return false;
    }
  }

  return true;
}
1837 
1838 
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// If an ldc has to grow into an ldc_w, the method is re-created via the
// Relocator and the replacement is returned through *new_method_p
// (otherwise *new_method_p stays null). Exceptions from the Relocator
// propagate via TRAPS/CHECK.
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the Method*, then the bytecodes will also move which
  // will likely cause a crash. We create a NoSafepointVerifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  NoSafepointVerifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(method(), bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      case Bytecodes::_ldc:
      {
        // ldc has a one-byte constant pool index operand
        int cp_index = *(bcp + 1);
        int new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            log_trace(redefine, class, constantpool)
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), p2i(bcp), cp_index, new_index);
            *(bcp + 1) = new_index;
          } else {
            log_trace(redefine, class, constantpool)
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), p2i(bcp), cp_index, new_index);
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4]; // max instruction size is 4 bytes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, NULL /* no RelocatorListener needed */);
            methodHandle m;
            {
              // The Relocator allocates, so safepoints are possible
              // inside this scope; the verifier is paused for it.
              PauseNoSafepointVerifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, CHECK);
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray      : // fall through
      case Bytecodes::_checkcast      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_getstatic     : // fall through
      case Bytecodes::_instanceof     : // fall through
      case Bytecodes::_invokedynamic  : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial  : // fall through
      case Bytecodes::_invokestatic   : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_ldc_w          : // fall through
      case Bytecodes::_ldc2_w         : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new            : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_putstatic      :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        int new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          log_trace(redefine, class, constantpool)
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),p2i(bcp), cp_index, new_index);
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the ConstantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and ConstantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
      default:
        break;
    }
  } // end for each bytecode

  // We also need to rewrite the parameter name indexes, if there is
  // method parameter data present
  if(method->has_method_parameters()) {
    const int len = method->method_parameters_length();
    MethodParametersElement* elem = method->method_parameters_start();

    for (int i = 0; i < len; i++) {
      const u2 cp_index = elem[i].name_cp_index;
      const u2 new_cp_index = find_new_index(cp_index);
      if (new_cp_index != 0) {
        elem[i].name_cp_index = new_cp_index;
      }
    }
  }
} // end rewrite_cp_refs_in_method()
1982 
1983 
1984 // Rewrite constant pool references in the class_annotations field.
1985 bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
1986        InstanceKlass* scratch_class, TRAPS) {
1987 
1988   AnnotationArray* class_annotations = scratch_class->class_annotations();
1989   if (class_annotations == NULL || class_annotations->length() == 0) {
1990     // no class_annotations so nothing to do
1991     return true;
1992   }
1993 
1994   log_debug(redefine, class, annotation)("class_annotations length=%d", class_annotations->length());
1995 
1996   int byte_i = 0;  // byte index into class_annotations
1997   return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
1998            THREAD);
1999 }
2000 
2001 
2002 // Rewrite constant pool references in an annotations typeArray. This
2003 // "structure" is adapted from the RuntimeVisibleAnnotations_attribute
2004 // that is described in section 4.8.15 of the 2nd-edition of the VM spec:
2005 //
2006 // annotations_typeArray {
2007 //   u2 num_annotations;
2008 //   annotation annotations[num_annotations];
2009 // }
2010 //
2011 bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
2012        AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
2013 
2014   if ((byte_i_ref + 2) > annotations_typeArray->length()) {
2015     // not enough room for num_annotations field
2016     log_debug(redefine, class, annotation)("length() is too small for num_annotations field");
2017     return false;
2018   }
2019 
2020   u2 num_annotations = Bytes::get_Java_u2((address)
2021                          annotations_typeArray->adr_at(byte_i_ref));
2022   byte_i_ref += 2;
2023 
2024   log_debug(redefine, class, annotation)("num_annotations=%d", num_annotations);
2025 
2026   int calc_num_annotations = 0;
2027   for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2028     if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
2029            byte_i_ref, THREAD)) {
2030       log_debug(redefine, class, annotation)("bad annotation_struct at %d", calc_num_annotations);
2031       // propagate failure back to caller
2032       return false;
2033     }
2034   }
2035   assert(num_annotations == calc_num_annotations, "sanity check");
2036 
2037   return true;
2038 } // end rewrite_cp_refs_in_annotations_typeArray()
2039 
2040 
// Rewrite constant pool references in the annotation struct portion of
// an annotations_typeArray. This "structure" is from section 4.8.15 of
// the 2nd-edition of the VM spec:
//
// struct annotation {
//   u2 type_index;
//   u2 num_element_value_pairs;
//   {
//     u2 element_name_index;
//     element_value value;
//   } element_value_pairs[num_element_value_pairs];
// }
//
// byte_i_ref is advanced past the entire annotation struct. Returns
// false on a truncated attribute or a bad nested element_value.
bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
  // Need at least type_index (u2) + num_element_value_pairs (u2).
  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
    // not enough room for smallest annotation_struct
    log_debug(redefine, class, annotation)("length() is too small for annotation_struct");
    return false;
  }

  // Rewrites in place (if mapped) and advances byte_i_ref by 2.
  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
                    byte_i_ref, "type_index", THREAD);

  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
                                 annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  log_debug(redefine, class, annotation)
    ("type_index=%d  num_element_value_pairs=%d", type_index, num_element_value_pairs);

  int calc_num_element_value_pairs = 0;
  for (; calc_num_element_value_pairs < num_element_value_pairs;
       calc_num_element_value_pairs++) {
    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
      // not enough room for another element_name_index, let alone
      // the rest of another component
      log_debug(redefine, class, annotation)("length() is too small for element_name_index");
      return false;
    }

    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "element_name_index", THREAD);

    log_debug(redefine, class, annotation)("element_name_index=%d", element_name_index);

    // Recursively handle the (possibly nested) element_value that
    // follows the element_name_index.
    if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
           byte_i_ref, THREAD)) {
      log_debug(redefine, class, annotation)("bad element_value at %d", calc_num_element_value_pairs);
      // propagate failure back to caller
      return false;
    }
  } // end for each component
  assert(num_element_value_pairs == calc_num_element_value_pairs,
    "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotation_struct()
2100 
2101 
2102 // Rewrite a constant pool reference at the current position in
2103 // annotations_typeArray if needed. Returns the original constant
2104 // pool reference if a rewrite was not needed or the new constant
2105 // pool reference if a rewrite was needed.
2106 u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
2107      AnnotationArray* annotations_typeArray, int &byte_i_ref,
2108      const char * trace_mesg, TRAPS) {
2109 
2110   address cp_index_addr = (address)
2111     annotations_typeArray->adr_at(byte_i_ref);
2112   u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
2113   u2 new_cp_index = find_new_index(old_cp_index);
2114   if (new_cp_index != 0) {
2115     log_debug(redefine, class, annotation)("mapped old %s=%d", trace_mesg, old_cp_index);
2116     Bytes::put_Java_u2(cp_index_addr, new_cp_index);
2117     old_cp_index = new_cp_index;
2118   }
2119   byte_i_ref += 2;
2120   return old_cp_index;
2121 }
2122 
2123 
// Rewrite constant pool references in the element_value portion of an
// annotations_typeArray. This "structure" is from section 4.8.15.1 of
// the 2nd-edition of the VM spec:
//
// struct element_value {
//   u1 tag;
//   union {
//     u2 const_value_index;
//     {
//       u2 type_name_index;
//       u2 const_name_index;
//     } enum_const_value;
//     u2 class_info_index;
//     annotation annotation_value;
//     struct {
//       u2 num_values;
//       element_value values[num_values];
//     } array_value;
//   } value;
// }
//
// On success, byte_i_ref is advanced just past the element_value that
// was parsed and true is returned. On failure (truncated data or an
// unknown tag), false is returned and byte_i_ref may point into the
// middle of the partially consumed value.
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    log_debug(redefine, class, annotation)("length() is too small for a tag");
    return false;
  }

  u1 tag = annotations_typeArray->at(byte_i_ref);
  byte_i_ref++;
  log_debug(redefine, class, annotation)("tag='%c'", tag);

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case 'B':  // byte
    case 'C':  // char
    case 'D':  // double
    case 'F':  // float
    case 'I':  // int
    case 'J':  // long
    case 'S':  // short
    case 'Z':  // boolean

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        log_debug(redefine, class, annotation)("length() is too small for a const_value_index");
        return false;
      }

      // rewrite_cp_ref_in_annotation_data() advances byte_i_ref by 2.
      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "const_value_index", THREAD);

      log_debug(redefine, class, annotation)("const_value_index=%d", const_value_index);
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value
        log_debug(redefine, class, annotation)("length() is too small for a enum_const_value");
        return false;
      }

      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "type_name_index", THREAD);

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "const_name_index", THREAD);

      log_debug(redefine, class, annotation)
        ("type_name_index=%d  const_name_index=%d", type_name_index, const_name_index);
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        log_debug(redefine, class, annotation)("length() is too small for a class_info_index");
        return false;
      }

      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "class_info_index", THREAD);

      log_debug(redefine, class, annotation)("class_info_index=%d", class_info_index);
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
             byte_i_ref, THREAD)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case '[':
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        log_debug(redefine, class, annotation)("length() is too small for a num_values field");
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      log_debug(redefine, class, annotation)("num_values=%d", num_values);

      // Recurse once per array element; each recursive call advances
      // byte_i_ref past the element it consumed.
      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        if (!rewrite_cp_refs_in_element_value(
               annotations_typeArray, byte_i_ref, THREAD)) {
          log_debug(redefine, class, annotation)("bad nested element_value at %d", calc_num_values);
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      log_debug(redefine, class, annotation)("bad tag=0x%x", tag);
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
2272 
2273 
2274 // Rewrite constant pool references in a fields_annotations field.
2275 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
2276        InstanceKlass* scratch_class, TRAPS) {
2277 
2278   Array<AnnotationArray*>* fields_annotations = scratch_class->fields_annotations();
2279 
2280   if (fields_annotations == NULL || fields_annotations->length() == 0) {
2281     // no fields_annotations so nothing to do
2282     return true;
2283   }
2284 
2285   log_debug(redefine, class, annotation)("fields_annotations length=%d", fields_annotations->length());
2286 
2287   for (int i = 0; i < fields_annotations->length(); i++) {
2288     AnnotationArray* field_annotations = fields_annotations->at(i);
2289     if (field_annotations == NULL || field_annotations->length() == 0) {
2290       // this field does not have any annotations so skip it
2291       continue;
2292     }
2293 
2294     int byte_i = 0;  // byte index into field_annotations
2295     if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
2296            THREAD)) {
2297       log_debug(redefine, class, annotation)("bad field_annotations at %d", i);
2298       // propagate failure back to caller
2299       return false;
2300     }
2301   }
2302 
2303   return true;
2304 } // end rewrite_cp_refs_in_fields_annotations()
2305 
2306 
2307 // Rewrite constant pool references in a methods_annotations field.
2308 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
2309        InstanceKlass* scratch_class, TRAPS) {
2310 
2311   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2312     Method* m = scratch_class->methods()->at(i);
2313     AnnotationArray* method_annotations = m->constMethod()->method_annotations();
2314 
2315     if (method_annotations == NULL || method_annotations->length() == 0) {
2316       // this method does not have any annotations so skip it
2317       continue;
2318     }
2319 
2320     int byte_i = 0;  // byte index into method_annotations
2321     if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
2322            THREAD)) {
2323       log_debug(redefine, class, annotation)("bad method_annotations at %d", i);
2324       // propagate failure back to caller
2325       return false;
2326     }
2327   }
2328 
2329   return true;
2330 } // end rewrite_cp_refs_in_methods_annotations()
2331 
2332 
2333 // Rewrite constant pool references in a methods_parameter_annotations
2334 // field. This "structure" is adapted from the
2335 // RuntimeVisibleParameterAnnotations_attribute described in section
2336 // 4.8.17 of the 2nd-edition of the VM spec:
2337 //
2338 // methods_parameter_annotations_typeArray {
2339 //   u1 num_parameters;
2340 //   {
2341 //     u2 num_annotations;
2342 //     annotation annotations[num_annotations];
2343 //   } parameter_annotations[num_parameters];
2344 // }
2345 //
2346 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
2347        InstanceKlass* scratch_class, TRAPS) {
2348 
2349   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2350     Method* m = scratch_class->methods()->at(i);
2351     AnnotationArray* method_parameter_annotations = m->constMethod()->parameter_annotations();
2352     if (method_parameter_annotations == NULL
2353         || method_parameter_annotations->length() == 0) {
2354       // this method does not have any parameter annotations so skip it
2355       continue;
2356     }
2357 
2358     if (method_parameter_annotations->length() < 1) {
2359       // not enough room for a num_parameters field
2360       log_debug(redefine, class, annotation)("length() is too small for a num_parameters field at %d", i);
2361       return false;
2362     }
2363 
2364     int byte_i = 0;  // byte index into method_parameter_annotations
2365 
2366     u1 num_parameters = method_parameter_annotations->at(byte_i);
2367     byte_i++;
2368 
2369     log_debug(redefine, class, annotation)("num_parameters=%d", num_parameters);
2370 
2371     int calc_num_parameters = 0;
2372     for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
2373       if (!rewrite_cp_refs_in_annotations_typeArray(
2374              method_parameter_annotations, byte_i, THREAD)) {
2375         log_debug(redefine, class, annotation)("bad method_parameter_annotations at %d", calc_num_parameters);
2376         // propagate failure back to caller
2377         return false;
2378       }
2379     }
2380     assert(num_parameters == calc_num_parameters, "sanity check");
2381   }
2382 
2383   return true;
2384 } // end rewrite_cp_refs_in_methods_parameter_annotations()
2385 
2386 
2387 // Rewrite constant pool references in a methods_default_annotations
2388 // field. This "structure" is adapted from the AnnotationDefault_attribute
2389 // that is described in section 4.8.19 of the 2nd-edition of the VM spec:
2390 //
2391 // methods_default_annotations_typeArray {
2392 //   element_value default_value;
2393 // }
2394 //
2395 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
2396        InstanceKlass* scratch_class, TRAPS) {
2397 
2398   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2399     Method* m = scratch_class->methods()->at(i);
2400     AnnotationArray* method_default_annotations = m->constMethod()->default_annotations();
2401     if (method_default_annotations == NULL
2402         || method_default_annotations->length() == 0) {
2403       // this method does not have any default annotations so skip it
2404       continue;
2405     }
2406 
2407     int byte_i = 0;  // byte index into method_default_annotations
2408 
2409     if (!rewrite_cp_refs_in_element_value(
2410            method_default_annotations, byte_i, THREAD)) {
2411       log_debug(redefine, class, annotation)("bad default element_value at %d", i);
2412       // propagate failure back to caller
2413       return false;
2414     }
2415   }
2416 
2417   return true;
2418 } // end rewrite_cp_refs_in_methods_default_annotations()
2419 
2420 
2421 // Rewrite constant pool references in a class_type_annotations field.
2422 bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
2423        InstanceKlass* scratch_class, TRAPS) {
2424 
2425   AnnotationArray* class_type_annotations = scratch_class->class_type_annotations();
2426   if (class_type_annotations == NULL || class_type_annotations->length() == 0) {
2427     // no class_type_annotations so nothing to do
2428     return true;
2429   }
2430 
2431   log_debug(redefine, class, annotation)("class_type_annotations length=%d", class_type_annotations->length());
2432 
2433   int byte_i = 0;  // byte index into class_type_annotations
2434   return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations,
2435       byte_i, "ClassFile", THREAD);
2436 } // end rewrite_cp_refs_in_class_type_annotations()
2437 
2438 
2439 // Rewrite constant pool references in a fields_type_annotations field.
2440 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(
2441        InstanceKlass* scratch_class, TRAPS) {
2442 
2443   Array<AnnotationArray*>* fields_type_annotations = scratch_class->fields_type_annotations();
2444   if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) {
2445     // no fields_type_annotations so nothing to do
2446     return true;
2447   }
2448 
2449   log_debug(redefine, class, annotation)("fields_type_annotations length=%d", fields_type_annotations->length());
2450 
2451   for (int i = 0; i < fields_type_annotations->length(); i++) {
2452     AnnotationArray* field_type_annotations = fields_type_annotations->at(i);
2453     if (field_type_annotations == NULL || field_type_annotations->length() == 0) {
2454       // this field does not have any annotations so skip it
2455       continue;
2456     }
2457 
2458     int byte_i = 0;  // byte index into field_type_annotations
2459     if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations,
2460            byte_i, "field_info", THREAD)) {
2461       log_debug(redefine, class, annotation)("bad field_type_annotations at %d", i);
2462       // propagate failure back to caller
2463       return false;
2464     }
2465   }
2466 
2467   return true;
2468 } // end rewrite_cp_refs_in_fields_type_annotations()
2469 
2470 
2471 // Rewrite constant pool references in a methods_type_annotations field.
2472 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations(
2473        InstanceKlass* scratch_class, TRAPS) {
2474 
2475   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2476     Method* m = scratch_class->methods()->at(i);
2477     AnnotationArray* method_type_annotations = m->constMethod()->type_annotations();
2478 
2479     if (method_type_annotations == NULL || method_type_annotations->length() == 0) {
2480       // this method does not have any annotations so skip it
2481       continue;
2482     }
2483 
2484     log_debug(redefine, class, annotation)("methods type_annotations length=%d", method_type_annotations->length());
2485 
2486     int byte_i = 0;  // byte index into method_type_annotations
2487     if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations,
2488            byte_i, "method_info", THREAD)) {
2489       log_debug(redefine, class, annotation)("bad method_type_annotations at %d", i);
2490       // propagate failure back to caller
2491       return false;
2492     }
2493   }
2494 
2495   return true;
2496 } // end rewrite_cp_refs_in_methods_type_annotations()
2497 
2498 
2499 // Rewrite constant pool references in a type_annotations
2500 // field. This "structure" is adapted from the
2501 // RuntimeVisibleTypeAnnotations_attribute described in
2502 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2503 //
2504 // type_annotations_typeArray {
2505 //   u2              num_annotations;
2506 //   type_annotation annotations[num_annotations];
2507 // }
2508 //
2509 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray(
2510        AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2511        const char * location_mesg, TRAPS) {
2512 
2513   if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2514     // not enough room for num_annotations field
2515     log_debug(redefine, class, annotation)("length() is too small for num_annotations field");
2516     return false;
2517   }
2518 
2519   u2 num_annotations = Bytes::get_Java_u2((address)
2520                          type_annotations_typeArray->adr_at(byte_i_ref));
2521   byte_i_ref += 2;
2522 
2523   log_debug(redefine, class, annotation)("num_type_annotations=%d", num_annotations);
2524 
2525   int calc_num_annotations = 0;
2526   for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2527     if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray,
2528            byte_i_ref, location_mesg, THREAD)) {
2529       log_debug(redefine, class, annotation)("bad type_annotation_struct at %d", calc_num_annotations);
2530       // propagate failure back to caller
2531       return false;
2532     }
2533   }
2534   assert(num_annotations == calc_num_annotations, "sanity check");
2535 
2536   if (byte_i_ref != type_annotations_typeArray->length()) {
2537     log_debug(redefine, class, annotation)
2538       ("read wrong amount of bytes at end of processing type_annotations_typeArray (%d of %d bytes were read)",
2539        byte_i_ref, type_annotations_typeArray->length());
2540     return false;
2541   }
2542 
2543   return true;
2544 } // end rewrite_cp_refs_in_type_annotations_typeArray()
2545 
2546 
2547 // Rewrite constant pool references in a type_annotation
2548 // field. This "structure" is adapted from the
2549 // RuntimeVisibleTypeAnnotations_attribute described in
2550 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2551 //
2552 // type_annotation {
2553 //   u1 target_type;
2554 //   union {
2555 //     type_parameter_target;
2556 //     supertype_target;
2557 //     type_parameter_bound_target;
2558 //     empty_target;
2559 //     method_formal_parameter_target;
2560 //     throws_target;
2561 //     localvar_target;
2562 //     catch_target;
2563 //     offset_target;
2564 //     type_argument_target;
2565 //   } target_info;
2566 //   type_path target_path;
2567 //   annotation anno;
2568 // }
2569 //
2570 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct(
2571        AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2572        const char * location_mesg, TRAPS) {
2573 
2574   if (!skip_type_annotation_target(type_annotations_typeArray,
2575          byte_i_ref, location_mesg, THREAD)) {
2576     return false;
2577   }
2578 
2579   if (!skip_type_annotation_type_path(type_annotations_typeArray,
2580          byte_i_ref, THREAD)) {
2581     return false;
2582   }
2583 
2584   if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray,
2585          byte_i_ref, THREAD)) {
2586     return false;
2587   }
2588 
2589   return true;
2590 } // end rewrite_cp_refs_in_type_annotation_struct()
2591 
2592 
// Read, verify and skip over the target_type and target_info part
// so that rewriting can continue in the later parts of the struct.
//
// u1 target_type;
// union {
//   type_parameter_target;
//   supertype_target;
//   type_parameter_bound_target;
//   empty_target;
//   method_formal_parameter_target;
//   throws_target;
//   localvar_target;
//   catch_target;
//   offset_target;
//   type_argument_target;
// } target_info;
//
// On success, byte_i_ref is advanced past the target_type and its
// variable-length target_info and true is returned. On failure
// (truncated data or an unknown target_type) false is returned after
// logging. No constant pool references live in target_info, so this
// is pure skipping with bounds checks.
bool VM_RedefineClasses::skip_type_annotation_target(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
       const char * location_mesg, TRAPS) {

  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
    // not enough room for a target_type let alone the rest of a type_annotation
    log_debug(redefine, class, annotation)("length() is too small for a target_type");
    return false;
  }

  u1 target_type = type_annotations_typeArray->at(byte_i_ref);
  byte_i_ref += 1;
  log_debug(redefine, class, annotation)("target_type=0x%.2x", target_type);
  log_debug(redefine, class, annotation)("location=%s", location_mesg);

  // Skip over target_info
  switch (target_type) {
    case 0x00:
    // kind: type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x01:
    // kind: type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_target {
      //   u1 type_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_parameter_target");
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)("type_parameter_target: type_parameter_index=%d", type_parameter_index);
    } break;

    case 0x10:
    // kind: type in extends clause of class or interface declaration
    //       (including the direct superclass of an anonymous class declaration),
    //       or in implements clause of interface declaration
    // location: ClassFile

    {
      // struct:
      // supertype_target {
      //   u2 supertype_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a supertype_target");
        return false;
      }

      u2 supertype_index = Bytes::get_Java_u2((address)
                             type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("supertype_target: supertype_index=%d", supertype_index);
    } break;

    case 0x11:
    // kind: type in bound of type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x12:
    // kind: type in bound of type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_bound_target {
      //   u1 type_parameter_index;
      //   u1 bound_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_parameter_bound_target");
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;
      u1 bound_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d", type_parameter_index, bound_index);
    } break;

    case 0x13:
    // kind: type in field declaration
    // location: field_info
    case 0x14:
    // kind: return type of method, or type of newly constructed object
    // location: method_info
    case 0x15:
    // kind: receiver type of method or constructor
    // location: method_info

    {
      // struct:
      // empty_target {
      // }
      //
      // Nothing to consume for an empty_target.
      log_debug(redefine, class, annotation)("empty_target");
    } break;

    case 0x16:
    // kind: type in formal parameter declaration of method, constructor, or lambda expression
    // location: method_info

    {
      // struct:
      // formal_parameter_target {
      //   u1 formal_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a formal_parameter_target");
        return false;
      }

      u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("formal_parameter_target: formal_parameter_index=%d", formal_parameter_index);
    } break;

    case 0x17:
    // kind: type in throws clause of method or constructor
    // location: method_info

    {
      // struct:
      // throws_target {
      //   u2 throws_type_index
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a throws_target");
        return false;
      }

      u2 throws_type_index = Bytes::get_Java_u2((address)
                               type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("throws_target: throws_type_index=%d", throws_type_index);
    } break;

    case 0x40:
    // kind: type in local variable declaration
    // location: Code
    case 0x41:
    // kind: type in resource variable declaration
    // location: Code

    {
      // struct:
      // localvar_target {
      //   u2 table_length;
      //   struct {
      //     u2 start_pc;
      //     u2 length;
      //     u2 index;
      //   } table[table_length];
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        // not enough room for a table_length let alone the rest of a localvar_target
        log_debug(redefine, class, annotation)("length() is too small for a localvar_target table_length");
        return false;
      }

      u2 table_length = Bytes::get_Java_u2((address)
                          type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("localvar_target: table_length=%d", table_length);

      int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry
      // table_length is a u2 (<= 65535) so table_size cannot overflow int.
      int table_size = table_length * table_struct_size;

      if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) {
        // not enough room for a table
        log_debug(redefine, class, annotation)("length() is too small for a table array of length %d", table_length);
        return false;
      }

      // Skip over table
      byte_i_ref += table_size;
    } break;

    case 0x42:
    // kind: type in exception parameter declaration
    // location: Code

    {
      // struct:
      // catch_target {
      //   u2 exception_table_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a catch_target");
        return false;
      }

      u2 exception_table_index = Bytes::get_Java_u2((address)
                                   type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("catch_target: exception_table_index=%d", exception_table_index);
    } break;

    case 0x43:
    // kind: type in instanceof expression
    // location: Code
    case 0x44:
    // kind: type in new expression
    // location: Code
    case 0x45:
    // kind: type in method reference expression using ::new
    // location: Code
    case 0x46:
    // kind: type in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // offset_target {
      //   u2 offset;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a offset_target");
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("offset_target: offset=%d", offset);
    } break;

    case 0x47:
    // kind: type in cast expression
    // location: Code
    case 0x48:
    // kind: type argument for generic constructor in new expression or
    //       explicit constructor invocation statement
    // location: Code
    case 0x49:
    // kind: type argument for generic method in method invocation expression
    // location: Code
    case 0x4A:
    // kind: type argument for generic constructor in method reference expression using ::new
    // location: Code
    case 0x4B:
    // kind: type argument for generic method in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // type_argument_target {
      //   u2 offset;
      //   u1 type_argument_index;
      // }
      //
      if ((byte_i_ref + 3) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_argument_target");
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("type_argument_target: offset=%d, type_argument_index=%d", offset, type_argument_index);
    } break;

    default:
      // An unknown target_type means we cannot determine the size of
      // target_info, so parsing cannot continue safely.
      log_debug(redefine, class, annotation)("unknown target_type");
#ifdef ASSERT
      ShouldNotReachHere();
#endif
      return false;
  }

  return true;
} // end skip_type_annotation_target()
2910 
2911 
// Read, verify and skip over the type_path part so that rewriting
// can continue in the later parts of the struct.
//
// type_path {
//   u1 path_length;
//   {
//     u1 type_path_kind;
//     u1 type_argument_index;
//   } path[path_length];
// }
//
// On success, byte_i_ref is advanced past the type_path and true is
// returned. Returns false (after logging) if the data is truncated or
// a path entry violates the JVMS consistency rules.
bool VM_RedefineClasses::skip_type_annotation_type_path(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
    // not enough room for a path_length let alone the rest of the type_path
    log_debug(redefine, class, annotation)("length() is too small for a type_path");
    return false;
  }

  u1 path_length = type_annotations_typeArray->at(byte_i_ref);
  byte_i_ref += 1;

  log_debug(redefine, class, annotation)("type_path: path_length=%d", path_length);

  int calc_path_length = 0;
  for (; calc_path_length < path_length; calc_path_length++) {
    if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) {
      // not enough room for a path entry (u1 type_path_kind + u1 type_argument_index)
      log_debug(redefine, class, annotation)
        ("length() is too small for path entry %d of %d", calc_path_length, path_length);
      return false;
    }

    u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref);
    byte_i_ref += 1;
    u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
    byte_i_ref += 1;

    log_debug(redefine, class, annotation)
      ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d",
       calc_path_length, type_path_kind, type_argument_index);

    // JVMS 4.7.20.2: type_path_kind must be 0-3, and type_argument_index
    // must be 0 unless type_path_kind is 3 (type argument of a
    // parameterized type).
    if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) {
      log_debug(redefine, class, annotation)("inconsistent type_path values");
      return false;
    }
  }
  assert(path_length == calc_path_length, "sanity check");

  return true;
} // end skip_type_annotation_type_path()
2965 
2966 
2967 // Rewrite constant pool references in the method's stackmap table.
2968 // These "structures" are adapted from the StackMapTable_attribute that
// is described in section 4.7.4 of the Java Virtual Machine
// Specification (JVMS), "The StackMapTable Attribute".
2972 //
2973 // stack_map {
2974 //   u2 number_of_entries;
2975 //   stack_map_frame entries[number_of_entries];
2976 // }
2977 //
2978 void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
2979        const methodHandle& method, TRAPS) {
2980 
2981   if (!method->has_stackmap_table()) {
2982     return;
2983   }
2984 
2985   AnnotationArray* stackmap_data = method->stackmap_data();
2986   address stackmap_p = (address)stackmap_data->adr_at(0);
2987   address stackmap_end = stackmap_p + stackmap_data->length();
2988 
2989   assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
2990   u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
2991   stackmap_p += 2;
2992 
2993   log_debug(redefine, class, stackmap)("number_of_entries=%u", number_of_entries);
2994 
2995   // walk through each stack_map_frame
2996   u2 calc_number_of_entries = 0;
2997   for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
2998     // The stack_map_frame structure is a u1 frame_type followed by
2999     // 0 or more bytes of data:
3000     //
3001     // union stack_map_frame {
3002     //   same_frame;
3003     //   same_locals_1_stack_item_frame;
3004     //   same_locals_1_stack_item_frame_extended;
3005     //   chop_frame;
3006     //   same_frame_extended;
3007     //   append_frame;
3008     //   full_frame;
3009     // }
3010 
3011     assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
3012     u1 frame_type = *stackmap_p;
3013     stackmap_p++;
3014 
3015     // same_frame {
3016     //   u1 frame_type = SAME; /* 0-63 */
3017     // }
3018     if (frame_type <= 63) {
3019       // nothing more to do for same_frame
3020     }
3021 
3022     // same_locals_1_stack_item_frame {
3023     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
3024     //   verification_type_info stack[1];
3025     // }
3026     else if (frame_type >= 64 && frame_type <= 127) {
3027       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3028         calc_number_of_entries, frame_type, THREAD);
3029     }
3030 
3031     // reserved for future use
3032     else if (frame_type >= 128 && frame_type <= 246) {
3033       // nothing more to do for reserved frame_types
3034     }
3035 
3036     // same_locals_1_stack_item_frame_extended {
3037     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
3038     //   u2 offset_delta;
3039     //   verification_type_info stack[1];
3040     // }
3041     else if (frame_type == 247) {
3042       stackmap_p += 2;
3043       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3044         calc_number_of_entries, frame_type, THREAD);
3045     }
3046 
3047     // chop_frame {
3048     //   u1 frame_type = CHOP; /* 248-250 */
3049     //   u2 offset_delta;
3050     // }
3051     else if (frame_type >= 248 && frame_type <= 250) {
3052       stackmap_p += 2;
3053     }
3054 
3055     // same_frame_extended {
3056     //   u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/
3057     //   u2 offset_delta;
3058     // }
3059     else if (frame_type == 251) {
3060       stackmap_p += 2;
3061     }
3062 
3063     // append_frame {
3064     //   u1 frame_type = APPEND; /* 252-254 */
3065     //   u2 offset_delta;
3066     //   verification_type_info locals[frame_type - 251];
3067     // }
3068     else if (frame_type >= 252 && frame_type <= 254) {
3069       assert(stackmap_p + 2 <= stackmap_end,
3070         "no room for offset_delta");
3071       stackmap_p += 2;
3072       u1 len = frame_type - 251;
3073       for (u1 i = 0; i < len; i++) {
3074         rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3075           calc_number_of_entries, frame_type, THREAD);
3076       }
3077     }
3078 
3079     // full_frame {
3080     //   u1 frame_type = FULL_FRAME; /* 255 */
3081     //   u2 offset_delta;
3082     //   u2 number_of_locals;
3083     //   verification_type_info locals[number_of_locals];
3084     //   u2 number_of_stack_items;
3085     //   verification_type_info stack[number_of_stack_items];
3086     // }
3087     else if (frame_type == 255) {
3088       assert(stackmap_p + 2 + 2 <= stackmap_end,
3089         "no room for smallest full_frame");
3090       stackmap_p += 2;
3091 
3092       u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
3093       stackmap_p += 2;
3094 
3095       for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
3096         rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3097           calc_number_of_entries, frame_type, THREAD);
3098       }
3099 
3100       // Use the largest size for the number_of_stack_items, but only get
3101       // the right number of bytes.
3102       u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
3103       stackmap_p += 2;
3104 
3105       for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
3106         rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3107           calc_number_of_entries, frame_type, THREAD);
3108       }
3109     }
3110   } // end while there is a stack_map_frame
3111   assert(number_of_entries == calc_number_of_entries, "sanity check");
3112 } // end rewrite_cp_refs_in_stack_map_table()
3113 
3114 
3115 // Rewrite constant pool references in the verification type info
3116 // portion of the method's stackmap table. These "structures" are
3117 // adapted from the StackMapTable_attribute that is described in
// section 4.7.4 of the Java Virtual Machine Specification (JVMS),
// "The StackMapTable Attribute".
3120 //
3121 // The verification_type_info structure is a u1 tag followed by 0 or
3122 // more bytes of data:
3123 //
3124 // union verification_type_info {
3125 //   Top_variable_info;
3126 //   Integer_variable_info;
3127 //   Float_variable_info;
3128 //   Long_variable_info;
3129 //   Double_variable_info;
3130 //   Null_variable_info;
3131 //   UninitializedThis_variable_info;
3132 //   Object_variable_info;
3133 //   Uninitialized_variable_info;
3134 // }
3135 //
void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
       u1 frame_type, TRAPS) {

  // stackmap_p_ref is advanced past the verification_type_info entry
  // that is parsed here; frame_i and frame_type are used only for
  // logging.
  assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
  u1 tag = *stackmap_p_ref;
  stackmap_p_ref++;

  switch (tag) {
  // Top_variable_info {
  //   u1 tag = ITEM_Top; /* 0 */
  // }
  // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
  case 0:  // fall through

  // Integer_variable_info {
  //   u1 tag = ITEM_Integer; /* 1 */
  // }
  case ITEM_Integer:  // fall through

  // Float_variable_info {
  //   u1 tag = ITEM_Float; /* 2 */
  // }
  case ITEM_Float:  // fall through

  // Double_variable_info {
  //   u1 tag = ITEM_Double; /* 3 */
  // }
  case ITEM_Double:  // fall through

  // Long_variable_info {
  //   u1 tag = ITEM_Long; /* 4 */
  // }
  case ITEM_Long:  // fall through

  // Null_variable_info {
  //   u1 tag = ITEM_Null; /* 5 */
  // }
  case ITEM_Null:  // fall through

  // UninitializedThis_variable_info {
  //   u1 tag = ITEM_UninitializedThis; /* 6 */
  // }
  case ITEM_UninitializedThis:
    // nothing more to do for the above tag types
    break;

  // Object_variable_info {
  //   u1 tag = ITEM_Object; /* 7 */
  //   u2 cpool_index;
  // }
  // ITEM_Object is the only verification type that carries a constant
  // pool reference, so it is the only case that may need rewriting.
  case ITEM_Object:
  {
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
    u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
    u2 new_cp_index = find_new_index(cpool_index);
    if (new_cp_index != 0) {
      // the cp entry was relocated by constant pool merging; rewrite
      // the index in place in the stackmap data
      log_debug(redefine, class, stackmap)("mapped old cpool_index=%d", cpool_index);
      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
      cpool_index = new_cp_index;
    }
    stackmap_p_ref += 2;

    log_debug(redefine, class, stackmap)
      ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
  } break;

  // Uninitialized_variable_info {
  //   u1 tag = ITEM_Uninitialized; /* 8 */
  //   u2 offset;
  // }
  case ITEM_Uninitialized:
    // no cp reference here; just verify there is room and skip the
    // two offset bytes
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
    stackmap_p_ref += 2;
    break;

  default:
    log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
    ShouldNotReachHere();
    break;
  } // end switch (tag)
} // end rewrite_cp_refs_in_verification_type_info()
3218 
3219 
// Change the constant pool associated with klass scratch_class to
// scratch_cp. scratch_cp_length elements are copied from scratch_cp
// to a smaller constant pool and the smaller constant pool is
// associated with scratch_class.
3224 void VM_RedefineClasses::set_new_constant_pool(
3225        ClassLoaderData* loader_data,
3226        InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
3227        int scratch_cp_length, TRAPS) {
3228   assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
3229 
3230   // scratch_cp is a merged constant pool and has enough space for a
3231   // worst case merge situation. We want to associate the minimum
3232   // sized constant pool with the klass to save space.
3233   ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
3234   constantPoolHandle smaller_cp(THREAD, cp);
3235 
3236   // preserve version() value in the smaller copy
3237   int version = scratch_cp->version();
3238   assert(version != 0, "sanity check");
3239   smaller_cp->set_version(version);
3240 
3241   // attach klass to new constant pool
3242   // reference to the cp holder is needed for copy_operands()
3243   smaller_cp->set_pool_holder(scratch_class);
3244 
3245   scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
3246   if (HAS_PENDING_EXCEPTION) {
3247     // Exception is handled in the caller
3248     loader_data->add_to_deallocate_list(smaller_cp());
3249     return;
3250   }
3251   scratch_cp = smaller_cp;
3252 
3253   // attach new constant pool to klass
3254   scratch_class->set_constants(scratch_cp());
3255   scratch_cp->initialize_unresolved_klasses(loader_data, CHECK);
3256 
3257   int i;  // for portability
3258 
3259   // update each field in klass to use new constant pool indices as needed
3260   for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) {
3261     jshort cur_index = fs.name_index();
3262     jshort new_index = find_new_index(cur_index);
3263     if (new_index != 0) {
3264       log_trace(redefine, class, constantpool)("field-name_index change: %d to %d", cur_index, new_index);
3265       fs.set_name_index(new_index);
3266     }
3267     cur_index = fs.signature_index();
3268     new_index = find_new_index(cur_index);
3269     if (new_index != 0) {
3270       log_trace(redefine, class, constantpool)("field-signature_index change: %d to %d", cur_index, new_index);
3271       fs.set_signature_index(new_index);
3272     }
3273     cur_index = fs.initval_index();
3274     new_index = find_new_index(cur_index);
3275     if (new_index != 0) {
3276       log_trace(redefine, class, constantpool)("field-initval_index change: %d to %d", cur_index, new_index);
3277       fs.set_initval_index(new_index);
3278     }
3279     cur_index = fs.generic_signature_index();
3280     new_index = find_new_index(cur_index);
3281     if (new_index != 0) {
3282       log_trace(redefine, class, constantpool)("field-generic_signature change: %d to %d", cur_index, new_index);
3283       fs.set_generic_signature_index(new_index);
3284     }
3285   } // end for each field
3286 
3287   // Update constant pool indices in the inner classes info to use
3288   // new constant indices as needed. The inner classes info is a
3289   // quadruple:
3290   // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
3291   InnerClassesIterator iter(scratch_class);
3292   for (; !iter.done(); iter.next()) {
3293     int cur_index = iter.inner_class_info_index();
3294     if (cur_index == 0) {
3295       continue;  // JVM spec. allows null inner class refs so skip it
3296     }
3297     int new_index = find_new_index(cur_index);
3298     if (new_index != 0) {
3299       log_trace(redefine, class, constantpool)("inner_class_info change: %d to %d", cur_index, new_index);
3300       iter.set_inner_class_info_index(new_index);
3301     }
3302     cur_index = iter.outer_class_info_index();
3303     new_index = find_new_index(cur_index);
3304     if (new_index != 0) {
3305       log_trace(redefine, class, constantpool)("outer_class_info change: %d to %d", cur_index, new_index);
3306       iter.set_outer_class_info_index(new_index);
3307     }
3308     cur_index = iter.inner_name_index();
3309     new_index = find_new_index(cur_index);
3310     if (new_index != 0) {
3311       log_trace(redefine, class, constantpool)("inner_name change: %d to %d", cur_index, new_index);
3312       iter.set_inner_name_index(new_index);
3313     }
3314   } // end for each inner class
3315 
3316   // Attach each method in klass to the new constant pool and update
3317   // to use new constant pool indices as needed:
3318   Array<Method*>* methods = scratch_class->methods();
3319   for (i = methods->length() - 1; i >= 0; i--) {
3320     methodHandle method(THREAD, methods->at(i));
3321     method->set_constants(scratch_cp());
3322 
3323     int new_index = find_new_index(method->name_index());
3324     if (new_index != 0) {
3325       log_trace(redefine, class, constantpool)
3326         ("method-name_index change: %d to %d", method->name_index(), new_index);
3327       method->set_name_index(new_index);
3328     }
3329     new_index = find_new_index(method->signature_index());
3330     if (new_index != 0) {
3331       log_trace(redefine, class, constantpool)
3332         ("method-signature_index change: %d to %d", method->signature_index(), new_index);
3333       method->set_signature_index(new_index);
3334     }
3335     new_index = find_new_index(method->generic_signature_index());
3336     if (new_index != 0) {
3337       log_trace(redefine, class, constantpool)
3338         ("method-generic_signature_index change: %d to %d", method->generic_signature_index(), new_index);
3339       method->set_generic_signature_index(new_index);
3340     }
3341 
3342     // Update constant pool indices in the method's checked exception
3343     // table to use new constant indices as needed.
3344     int cext_length = method->checked_exceptions_length();
3345     if (cext_length > 0) {
3346       CheckedExceptionElement * cext_table =
3347         method->checked_exceptions_start();
3348       for (int j = 0; j < cext_length; j++) {
3349         int cur_index = cext_table[j].class_cp_index;
3350         int new_index = find_new_index(cur_index);
3351         if (new_index != 0) {
3352           log_trace(redefine, class, constantpool)("cext-class_cp_index change: %d to %d", cur_index, new_index);
3353           cext_table[j].class_cp_index = (u2)new_index;
3354         }
3355       } // end for each checked exception table entry
3356     } // end if there are checked exception table entries
3357 
3358     // Update each catch type index in the method's exception table
3359     // to use new constant pool indices as needed. The exception table
3360     // holds quadruple entries of the form:
3361     //   (beg_bci, end_bci, handler_bci, klass_index)
3362 
3363     ExceptionTable ex_table(method());
3364     int ext_length = ex_table.length();
3365 
3366     for (int j = 0; j < ext_length; j ++) {
3367       int cur_index = ex_table.catch_type_index(j);
3368       int new_index = find_new_index(cur_index);
3369       if (new_index != 0) {
3370         log_trace(redefine, class, constantpool)("ext-klass_index change: %d to %d", cur_index, new_index);
3371         ex_table.set_catch_type_index(j, new_index);
3372       }
3373     } // end for each exception table entry
3374 
3375     // Update constant pool indices in the method's local variable
3376     // table to use new constant indices as needed. The local variable
3377     // table hold sextuple entries of the form:
3378     // (start_pc, length, name_index, descriptor_index, signature_index, slot)
3379     int lvt_length = method->localvariable_table_length();
3380     if (lvt_length > 0) {
3381       LocalVariableTableElement * lv_table =
3382         method->localvariable_table_start();
3383       for (int j = 0; j < lvt_length; j++) {
3384         int cur_index = lv_table[j].name_cp_index;
3385         int new_index = find_new_index(cur_index);
3386         if (new_index != 0) {
3387           log_trace(redefine, class, constantpool)("lvt-name_cp_index change: %d to %d", cur_index, new_index);
3388           lv_table[j].name_cp_index = (u2)new_index;
3389         }
3390         cur_index = lv_table[j].descriptor_cp_index;
3391         new_index = find_new_index(cur_index);
3392         if (new_index != 0) {
3393           log_trace(redefine, class, constantpool)("lvt-descriptor_cp_index change: %d to %d", cur_index, new_index);
3394           lv_table[j].descriptor_cp_index = (u2)new_index;
3395         }
3396         cur_index = lv_table[j].signature_cp_index;
3397         new_index = find_new_index(cur_index);
3398         if (new_index != 0) {
3399           log_trace(redefine, class, constantpool)("lvt-signature_cp_index change: %d to %d", cur_index, new_index);
3400           lv_table[j].signature_cp_index = (u2)new_index;
3401         }
3402       } // end for each local variable table entry
3403     } // end if there are local variable table entries
3404 
3405     rewrite_cp_refs_in_stack_map_table(method, THREAD);
3406   } // end for each method
3407 } // end set_new_constant_pool()
3408 
3409 
3410 // Unevolving classes may point to methods of the_class directly
3411 // from their constant pool caches, itables, and/or vtables. We
3412 // use the ClassLoaderDataGraph::classes_do() facility and this helper
3413 // to fix up these pointers.
3414 
3415 // Adjust cpools and vtables closure
void VM_RedefineClasses::AdjustCpoolCacheAndVtable::do_klass(Klass* k) {

  // Called for every klass in the ClassLoaderDataGraph; fix up any
  // cpool cache, vtable or itable entries in k that still point to
  // old methods of _the_class.

  // This is a very busy routine. We don't want too much tracing
  // printed out.
  bool trace_name_printed = false;
  InstanceKlass *the_class = InstanceKlass::cast(_the_class);

  // If the class being redefined is java.lang.Object, we need to fix all
  // array class vtables also
  if (k->is_array_klass() && _the_class == SystemDictionary::Object_klass()) {
    k->vtable().adjust_method_entries(the_class, &trace_name_printed);

  } else if (k->is_instance_klass()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // HotSpot specific optimization! HotSpot does not currently
    // support delegation from the bootstrap class loader to a
    // user-defined class loader. This means that if the bootstrap
    // class loader is the initiating class loader, then it will also
    // be the defining class loader. This also means that classes
    // loaded by the bootstrap class loader cannot refer to classes
    // loaded by a user-defined class loader. Note: a user-defined
    // class loader can delegate to the bootstrap class loader.
    //
    // If the current class being redefined has a user-defined class
    // loader as its defining class loader, then we can skip all
    // classes loaded by the bootstrap class loader.
    bool is_user_defined = (_the_class->class_loader() != NULL);
    if (is_user_defined && ik->class_loader() == NULL) {
      return;
    }

    // Fix the vtable embedded in the_class and subclasses of the_class,
    // if one exists. We discard scratch_class and we don't keep an
    // InstanceKlass around to hold obsolete methods so we don't have
    // any other InstanceKlass embedded vtables to update. The vtable
    // holds the Method*s for virtual (but not final) methods.
    // Default methods, or concrete methods in interfaces are stored
    // in the vtable, so if an interface changes we need to check
    // adjust_method_entries() for every InstanceKlass, which will also
    // adjust the default method vtable indices.
    // We also need to adjust any default method entries that are
    // not yet in the vtable, because the vtable setup is in progress.
    // This must be done after we adjust the default_methods and
    // default_vtable_indices for methods already in the vtable.
    // If redefining Unsafe, walk all the vtables looking for entries.
    if (ik->vtable_length() > 0 && (_the_class->is_interface()
        || _the_class == SystemDictionary::internal_Unsafe_klass()
        || ik->is_subtype_of(_the_class))) {
      // ik->vtable() creates a wrapper object; rm cleans it up
      ResourceMark rm(_thread);

      ik->vtable().adjust_method_entries(the_class, &trace_name_printed);
      ik->adjust_default_methods(the_class, &trace_name_printed);
    }

    // If the current class has an itable and we are either redefining an
    // interface or if the current class is a subclass of the_class, then
    // we potentially have to fix the itable. If we are redefining an
    // interface, then we have to call adjust_method_entries() for
    // every InstanceKlass that has an itable since there isn't a
    // subclass relationship between an interface and an InstanceKlass.
    // If redefining Unsafe, walk all the itables looking for entries.
    if (ik->itable_length() > 0 && (_the_class->is_interface()
        || _the_class == SystemDictionary::internal_Unsafe_klass()
        || ik->is_subclass_of(_the_class))) {
      ResourceMark rm(_thread);
      ik->itable().adjust_method_entries(the_class, &trace_name_printed);
    }

    // The constant pools in other classes (other_cp) can refer to
    // methods in the_class. We have to update method information in
    // other_cp's cache. If other_cp has a previous version, then we
    // have to repeat the process for each previous version. The
    // constant pool cache holds the Method*s for non-virtual
    // methods and for virtual, final methods.
    //
    // Special case: if the current class is the_class, then new_cp
    // has already been attached to the_class and old_cp has already
    // been added as a previous version. The new_cp doesn't have any
    // cached references to old methods so it doesn't need to be
    // updated. We can simply start with the previous version(s) in
    // that case.
    constantPoolHandle other_cp;
    ConstantPoolCache* cp_cache;

    if (ik != _the_class) {
      // this klass' constant pool cache may need adjustment
      other_cp = constantPoolHandle(ik->constants());
      cp_cache = other_cp->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(the_class, &trace_name_printed);
      }
    }

    // the previous versions' constant pool caches may need adjustment
    // Note: the adjustment target here is pv_node itself, not
    // the_class as in the cases above.
    for (InstanceKlass* pv_node = ik->previous_versions();
         pv_node != NULL;
         pv_node = pv_node->previous_versions()) {
      cp_cache = pv_node->constants()->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(pv_node, &trace_name_printed);
      }
    }
  }
}
3523 
3524 // Clean method data for this class
3525 void VM_RedefineClasses::MethodDataCleaner::do_klass(Klass* k) {
3526   if (k->is_instance_klass()) {
3527     InstanceKlass *ik = InstanceKlass::cast(k);
3528     // Clean MethodData of this class's methods so they don't refer to
3529     // old methods that are no longer running.
3530     Array<Method*>* methods = ik->methods();
3531     int num_methods = methods->length();
3532     for (int index = 0; index < num_methods; ++index) {
3533       if (methods->at(index)->method_data() != NULL) {
3534         methods->at(index)->method_data()->clean_weak_method_links();
3535       }
3536     }
3537   }
3538 }
3539 
3540 void VM_RedefineClasses::update_jmethod_ids() {
3541   for (int j = 0; j < _matching_methods_length; ++j) {
3542     Method* old_method = _matching_old_methods[j];
3543     jmethodID jmid = old_method->find_jmethod_id_or_null();
3544     if (jmid != NULL) {
3545       // There is a jmethodID, change it to point to the new method
3546       methodHandle new_method_h(_matching_new_methods[j]);
3547       Method::change_method_associated_with_jmethod_id(jmid, new_method_h());
3548       assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
3549              "should be replaced");
3550     }
3551   }
3552 }
3553 
3554 int VM_RedefineClasses::check_methods_and_mark_as_obsolete() {
3555   int emcp_method_count = 0;
3556   int obsolete_count = 0;
3557   int old_index = 0;
3558   for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
3559     Method* old_method = _matching_old_methods[j];
3560     Method* new_method = _matching_new_methods[j];
3561     Method* old_array_method;
3562 
3563     // Maintain an old_index into the _old_methods array by skipping
3564     // deleted methods
3565     while ((old_array_method = _old_methods->at(old_index)) != old_method) {
3566       ++old_index;
3567     }
3568 
3569     if (MethodComparator::methods_EMCP(old_method, new_method)) {
3570       // The EMCP definition from JSR-163 requires the bytecodes to be
3571       // the same with the exception of constant pool indices which may
3572       // differ. However, the constants referred to by those indices
3573       // must be the same.
3574       //
3575       // We use methods_EMCP() for comparison since constant pool
3576       // merging can remove duplicate constant pool entries that were
3577       // present in the old method and removed from the rewritten new
3578       // method. A faster binary comparison function would consider the
3579       // old and new methods to be different when they are actually
3580       // EMCP.
3581       //
3582       // The old and new methods are EMCP and you would think that we
3583       // could get rid of one of them here and now and save some space.
3584       // However, the concept of EMCP only considers the bytecodes and
3585       // the constant pool entries in the comparison. Other things,
3586       // e.g., the line number table (LNT) or the local variable table
3587       // (LVT) don't count in the comparison. So the new (and EMCP)
3588       // method can have a new LNT that we need so we can't just
3589       // overwrite the new method with the old method.
3590       //
3591       // When this routine is called, we have already attached the new
3592       // methods to the_class so the old methods are effectively
3593       // overwritten. However, if an old method is still executing,
3594       // then the old method cannot be collected until sometime after
3595       // the old method call has returned. So the overwriting of old
3596       // methods by new methods will save us space except for those
3597       // (hopefully few) old methods that are still executing.
3598       //
3599       // A method refers to a ConstMethod* and this presents another
3600       // possible avenue to space savings. The ConstMethod* in the
3601       // new method contains possibly new attributes (LNT, LVT, etc).
3602       // At first glance, it seems possible to save space by replacing
3603       // the ConstMethod* in the old method with the ConstMethod*
3604       // from the new method. The old and new methods would share the
3605       // same ConstMethod* and we would save the space occupied by
3606       // the old ConstMethod*. However, the ConstMethod* contains
3607       // a back reference to the containing method. Sharing the
3608       // ConstMethod* between two methods could lead to confusion in
3609       // the code that uses the back reference. This would lead to
3610       // brittle code that could be broken in non-obvious ways now or
3611       // in the future.
3612       //
3613       // Another possibility is to copy the ConstMethod* from the new
3614       // method to the old method and then overwrite the new method with
3615       // the old method. Since the ConstMethod* contains the bytecodes
3616       // for the method embedded in the oop, this option would change
3617       // the bytecodes out from under any threads executing the old
3618       // method and make the thread's bcp invalid. Since EMCP requires
3619       // that the bytecodes be the same modulo constant pool indices, it
3620       // is straight forward to compute the correct new bcp in the new
3621       // ConstMethod* from the old bcp in the old ConstMethod*. The
3622       // time consuming part would be searching all the frames in all
3623       // of the threads to find all of the calls to the old method.
3624       //
3625       // It looks like we will have to live with the limited savings
3626       // that we get from effectively overwriting the old methods
3627       // when the new methods are attached to the_class.
3628 
3629       // Count number of methods that are EMCP.  The method will be marked
3630       // old but not obsolete if it is EMCP.
3631       emcp_method_count++;
3632 
3633       // An EMCP method is _not_ obsolete. An obsolete method has a
3634       // different jmethodID than the current method. An EMCP method
3635       // has the same jmethodID as the current method. Having the
3636       // same jmethodID for all EMCP versions of a method allows for
3637       // a consistent view of the EMCP methods regardless of which
3638       // EMCP method you happen to have in hand. For example, a
3639       // breakpoint set in one EMCP method will work for all EMCP
3640       // versions of the method including the current one.
3641     } else {
3642       // mark obsolete methods as such
3643       old_method->set_is_obsolete();
3644       obsolete_count++;
3645 
3646       // obsolete methods need a unique idnum so they become new entries in
3647       // the jmethodID cache in InstanceKlass
3648       assert(old_method->method_idnum() == new_method->method_idnum(), "must match");
3649       u2 num = InstanceKlass::cast(_the_class)->next_method_idnum();
3650       if (num != ConstMethod::UNSET_IDNUM) {
3651         old_method->set_method_idnum(num);
3652       }
3653 
3654       // With tracing we try not to "yack" too much. The position of
3655       // this trace assumes there are fewer obsolete methods than
3656       // EMCP methods.
3657       if (log_is_enabled(Trace, redefine, class, obsolete, mark)) {
3658         ResourceMark rm;
3659         log_trace(redefine, class, obsolete, mark)
3660           ("mark %s(%s) as obsolete", old_method->name()->as_C_string(), old_method->signature()->as_C_string());
3661       }
3662     }
3663     old_method->set_is_old();
3664   }
3665   for (int i = 0; i < _deleted_methods_length; ++i) {
3666     Method* old_method = _deleted_methods[i];
3667 
3668     assert(!old_method->has_vtable_index(),
3669            "cannot delete methods with vtable entries");;
3670 
3671     // Mark all deleted methods as old, obsolete and deleted
3672     old_method->set_is_deleted();
3673     old_method->set_is_old();
3674     old_method->set_is_obsolete();
3675     ++obsolete_count;
3676     // With tracing we try not to "yack" too much. The position of
3677     // this trace assumes there are fewer obsolete methods than
3678     // EMCP methods.
3679     if (log_is_enabled(Trace, redefine, class, obsolete, mark)) {
3680       ResourceMark rm;
3681       log_trace(redefine, class, obsolete, mark)
3682         ("mark deleted %s(%s) as obsolete", old_method->name()->as_C_string(), old_method->signature()->as_C_string());
3683     }
3684   }
3685   assert((emcp_method_count + obsolete_count) == _old_methods->length(),
3686     "sanity check");
3687   log_trace(redefine, class, obsolete, mark)("EMCP_cnt=%d, obsolete_cnt=%d", emcp_method_count, obsolete_count);
3688   return emcp_method_count;
3689 }
3690 
3691 // This internal class transfers the native function registration from old methods
3692 // to new methods.  It is designed to handle both the simple case of unchanged
3693 // native methods and the complex cases of native method prefixes being added and/or
3694 // removed.
3695 // It expects only to be used during the VM_RedefineClasses op (a safepoint).
3696 //
3697 // This class is used after the new methods have been installed in "the_class".
3698 //
3699 // So, for example, the following must be handled.  Where 'm' is a method and
3700 // a number followed by an underscore is a prefix.
3701 //
3702 //                                      Old Name    New Name
3703 // Simple transfer to new method        m       ->  m
3704 // Add prefix                           m       ->  1_m
3705 // Remove prefix                        1_m     ->  m
3706 // Simultaneous add of prefixes         m       ->  3_2_1_m
3707 // Simultaneous removal of prefixes     3_2_1_m ->  m
3708 // Simultaneous add and remove          1_m     ->  2_m
3709 // Same, caused by prefix removal only  3_2_1_m ->  3_2_m
3710 //
class TransferNativeFunctionRegistration {
 private:
  InstanceKlass* the_class;  // the class being redefined; searched for (possibly prefixed) new natives
  int prefix_count;          // number of native method prefixes currently registered with JVMTI
  char** prefixes;           // the registered prefixes, obtained from JvmtiExport in the constructor

  // Recursively search the binary tree of possibly prefixed method names.
  // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilient to agents not cleaning up intermediate methods.
  // Branch at each depth in the binary tree is:
  //    (1) without the prefix.
  //    (2) with the prefix.
  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  // Returns the matching native Method*, or NULL if no (possibly prefixed)
  // native version of the name exists in the_class.
  Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
                                     Symbol* signature) {
    // probe() only looks up an existing symbol -- if the name was never
    // interned, no method with that name can exist, so don't create one.
    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
    if (name_symbol != NULL) {
      Method* method = the_class->lookup_method(name_symbol, signature);
      if (method != NULL) {
        // Even if prefixed, intermediate methods must exist.
        if (method->is_native()) {
          // Wahoo, we found a (possibly prefixed) version of the method, return it.
          return method;
        }
        if (depth < prefix_count) {
          // Try applying further prefixes (other than this one).
          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
          if (method != NULL) {
            return method; // found
          }

          // Try adding this prefix to the method name and see if it matches
          // another method name.
          char* prefix = prefixes[depth];
          size_t prefix_len = strlen(prefix);
          size_t trial_len = name_len + prefix_len;
          // Resource-allocated; released by the ResourceMark set up in
          // strip_and_search_for_new_native().
          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
          strcpy(trial_name_str, prefix);
          strcat(trial_name_str, name_str);
          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
                                            signature);
          if (method != NULL) {
            // If found along this branch, it was prefixed, mark as such
            method->set_is_prefixed_native();
            return method; // found
          }
        }
      }
    }
    return NULL;  // This whole branch bore nothing
  }

  // Return the method name with old prefixes stripped away.
  // Walks the prefixes in reverse registration order, stripping each one
  // that matches the front of the (remaining) name in turn.
  char* method_name_without_prefixes(Method* method) {
    Symbol* name = method->name();
    char* name_str = name->as_utf8();

    // Old prefixing may be defunct, strip prefixes, if any.
    for (int i = prefix_count-1; i >= 0; i--) {
      char* prefix = prefixes[i];
      size_t prefix_len = strlen(prefix);
      if (strncmp(prefix, name_str, prefix_len) == 0) {
        name_str += prefix_len;
      }
    }
    return name_str;
  }

  // Strip any prefixes off the old native method, then try to find a
  // (possibly prefixed) new native that matches it.
  Method* strip_and_search_for_new_native(Method* method) {
    ResourceMark rm;
    char* name_str = method_name_without_prefixes(method);
    return search_prefix_name_space(0, name_str, strlen(name_str),
                                    method->signature());
  }

 public:

  // Construct a native method transfer processor for this class.
  // Must run at a safepoint since it is part of the redefine VM operation.
  TransferNativeFunctionRegistration(InstanceKlass* _the_class) {
    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");

    the_class = _the_class;
    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  }

  // Attempt to transfer any of the old or deleted methods that are native
  void transfer_registrations(Method** old_methods, int methods_length) {
    for (int j = 0; j < methods_length; j++) {
      Method* old_method = old_methods[j];

      // Only methods that currently have a registered native function
      // need their registration carried over.
      if (old_method->is_native() && old_method->has_native_function()) {
        Method* new_method = strip_and_search_for_new_native(old_method);
        if (new_method != NULL) {
          // Actually set the native function in the new method.
          // Redefine does not send events (except CFLH), certainly not this
          // behind the scenes re-registration.
          new_method->set_native_function(old_method->native_function(),
                              !Method::native_bind_event_is_interesting);
        }
      }
    }
  }
};
3816 
3817 // Don't lose the association between a native method and its JNI function.
3818 void VM_RedefineClasses::transfer_old_native_function_registrations(InstanceKlass* the_class) {
3819   TransferNativeFunctionRegistration transfer(the_class);
3820   transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
3821   transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
3822 }
3823 
3824 // Deoptimize all compiled code that depends on this class.
3825 //
3826 // If the can_redefine_classes capability is obtained in the onload
3827 // phase then the compiler has recorded all dependencies from startup.
3828 // In that case we need only deoptimize and throw away all compiled code
3829 // that depends on the class.
3830 //
3831 // If can_redefine_classes is obtained sometime after the onload
3832 // phase then the dependency information may be incomplete. In that case
3833 // the first call to RedefineClasses causes all compiled code to be
3834 // thrown away. As can_redefine_classes has been obtained then
3835 // all future compilations will record dependencies so second and
3836 // subsequent calls to RedefineClasses need only throw away code
3837 // that depends on the class.
3838 //
3839 void VM_RedefineClasses::flush_dependent_code(InstanceKlass* ik, TRAPS) {
3840   assert_locked_or_safepoint(Compile_lock);
3841 
3842   // All dependencies have been recorded from startup or this is a second or
3843   // subsequent use of RedefineClasses
3844   if (JvmtiExport::all_dependencies_are_recorded()) {
3845     CodeCache::flush_evol_dependents_on(ik);
3846   } else {
3847     CodeCache::mark_all_nmethods_for_deoptimization();
3848 
3849     ResourceMark rm(THREAD);
3850     DeoptimizationMarker dm;
3851 
3852     // Deoptimize all activations depending on marked nmethods
3853     Deoptimization::deoptimize_dependents();
3854 
3855     // Make the dependent methods not entrant
3856     CodeCache::make_marked_nmethods_not_entrant();
3857 
3858     // From now on we know that the dependency information is complete
3859     JvmtiExport::set_all_dependencies_are_recorded(true);
3860   }
3861 }
3862 
// Partition _old_methods and _new_methods into three resource-allocated
// arrays: methods present in both versions (_matching_old/new_methods),
// methods only in the old version (_deleted_methods) and methods only in
// the new version (_added_methods). The merge-style walk below assumes
// both arrays are sorted consistently with Symbol::fast_compare on the
// method name -- TODO confirm against the sort performed by the caller.
void VM_RedefineClasses::compute_added_deleted_matching_methods() {
  Method* old_method;
  Method* new_method;

  // Worst-case sizing: every old method deleted, every new method added.
  _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _added_methods        = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
  _deleted_methods      = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());

  _matching_methods_length = 0;
  _deleted_methods_length  = 0;
  _added_methods_length    = 0;

  // nj/oj are the cursors into the new and old method arrays respectively.
  int nj = 0;
  int oj = 0;
  while (true) {
    if (oj >= _old_methods->length()) {
      if (nj >= _new_methods->length()) {
        break; // we've looked at everything, done
      }
      // New method at the end
      new_method = _new_methods->at(nj);
      _added_methods[_added_methods_length++] = new_method;
      ++nj;
    } else if (nj >= _new_methods->length()) {
      // Old method, at the end, is deleted
      old_method = _old_methods->at(oj);
      _deleted_methods[_deleted_methods_length++] = old_method;
      ++oj;
    } else {
      old_method = _old_methods->at(oj);
      new_method = _new_methods->at(nj);
      if (old_method->name() == new_method->name()) {
        if (old_method->signature() == new_method->signature()) {
          // Same name and signature: the methods match; advance both cursors.
          _matching_old_methods[_matching_methods_length  ] = old_method;
          _matching_new_methods[_matching_methods_length++] = new_method;
          ++nj;
          ++oj;
        } else {
          // added overloaded have already been moved to the end,
          // so this is a deleted overloaded method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      } else { // names don't match
        if (old_method->name()->fast_compare(new_method->name()) > 0) {
          // new method
          _added_methods[_added_methods_length++] = new_method;
          ++nj;
        } else {
          // deleted method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      }
    }
  }
  // Every old method is either matching or deleted; every new method is
  // either matching or added.
  assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
  assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
}
3923 
3924 
3925 void VM_RedefineClasses::swap_annotations(InstanceKlass* the_class,
3926                                           InstanceKlass* scratch_class) {
3927   // Swap annotation fields values
3928   Annotations* old_annotations = the_class->annotations();
3929   the_class->set_annotations(scratch_class->annotations());
3930   scratch_class->set_annotations(old_annotations);
3931 }
3932 
3933 
3934 // Install the redefinition of a class:
3935 //    - house keeping (flushing breakpoints and caches, deoptimizing
3936 //      dependent compiled code)
3937 //    - replacing parts in the_class with parts from scratch_class
3938 //    - adding a weak reference to track the obsolete but interesting
3939 //      parts of the_class
3940 //    - adjusting constant pool caches and vtables in other classes
3941 //      that refer to methods in the_class. These adjustments use the
3942 //      ClassLoaderDataGraph::classes_do() facility which only allows
3943 //      a helper method to be specified. The interesting parameters
3944 //      that we would like to pass to the helper method are saved in
3945 //      static global fields in the VM operation.
void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
       InstanceKlass* scratch_class, TRAPS) {

  HandleMark hm(THREAD);   // make sure handles from this call are freed

  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_rsc_phase1.start();
  }

  InstanceKlass* the_class = get_ik(the_jclass);

  // Remove all breakpoints in methods of this class
  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class);

  // Deoptimize all compiled code that depends on this class
  flush_dependent_code(the_class, THREAD);

  // Save the old/new method arrays and the class in VM-operation fields so
  // helper methods (and the classes_do() callback below) can reach them.
  _old_methods = the_class->methods();
  _new_methods = scratch_class->methods();
  _the_class = the_class;
  compute_added_deleted_matching_methods();
  update_jmethod_ids();

  _any_class_has_resolved_methods = the_class->has_resolved_methods() || _any_class_has_resolved_methods;

  // Attach new constant pool to the original klass. The original
  // klass still refers to the old constant pool (for now).
  scratch_class->constants()->set_pool_holder(the_class);

#if 0
  // In theory, with constant pool merging in place we should be able
  // to save space by using the new, merged constant pool in place of
  // the old constant pool(s). By "pool(s)" I mean the constant pool in
  // the klass version we are replacing now and any constant pool(s) in
  // previous versions of klass. Nice theory, doesn't work in practice.
  // When this code is enabled, even simple programs throw NullPointer
  // exceptions. I'm guessing that this is caused by some constant pool
  // cache difference between the new, merged constant pool and the
  // constant pool that was just being used by the klass. I'm keeping
  // this code around to archive the idea, but the code has to remain
  // disabled for now.

  // Attach each old method to the new constant pool. This can be
  // done here since we are past the bytecode verification and
  // constant pool optimization phases.
  for (int i = _old_methods->length() - 1; i >= 0; i--) {
    Method* method = _old_methods->at(i);
    method->set_constants(scratch_class->constants());
  }

  // NOTE: this doesn't work because you can redefine the same class in two
  // threads, each getting their own constant pool data appended to the
  // original constant pool.  In order for the new methods to work when they
  // become old methods, they need to keep their updated copy of the constant pool.

  {
    // walk all previous versions of the klass
    InstanceKlass *ik = the_class;
    PreviousVersionWalker pvw(ik);
    do {
      ik = pvw.next_previous_version();
      if (ik != NULL) {

        // attach previous version of klass to the new constant pool
        ik->set_constants(scratch_class->constants());

        // Attach each method in the previous version of klass to the
        // new constant pool
        Array<Method*>* prev_methods = ik->methods();
        for (int i = prev_methods->length() - 1; i >= 0; i--) {
          Method* method = prev_methods->at(i);
          method->set_constants(scratch_class->constants());
        }
      }
    } while (ik != NULL);
  }
#endif

  // Replace methods and constantpool
  the_class->set_methods(_new_methods);
  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
                                          // and to be able to undo operation easily.

  // Swap the method orderings along with the method arrays they describe.
  Array<int>* old_ordering = the_class->method_ordering();
  the_class->set_method_ordering(scratch_class->method_ordering());
  scratch_class->set_method_ordering(old_ordering);

  ConstantPool* old_constants = the_class->constants();
  the_class->set_constants(scratch_class->constants());
  scratch_class->set_constants(old_constants);  // See the previous comment.
#if 0
  // We are swapping the guts of "the new class" with the guts of "the
  // class". Since the old constant pool has just been attached to "the
  // new class", it seems logical to set the pool holder in the old
  // constant pool also. However, doing this will change the observable
  // class hierarchy for any old methods that are still executing. A
  // method can query the identity of its "holder" and this query uses
  // the method's constant pool link to find the holder. The change in
  // holding class from "the class" to "the new class" can confuse
  // things.
  //
  // Setting the old constant pool's holder will also cause
  // verification done during vtable initialization below to fail.
  // During vtable initialization, the vtable's class is verified to be
  // a subtype of the method's holder. The vtable's class is "the
  // class" and the method's holder is gotten from the constant pool
  // link in the method itself. For "the class"'s directly implemented
  // methods, the method holder is "the class" itself (as gotten from
  // the new constant pool). The check works fine in this case. The
  // check also works fine for methods inherited from super classes.
  //
  // Miranda methods are a little more complicated. A miranda method is
  // provided by an interface when the class implementing the interface
  // does not provide its own method.  These interfaces are implemented
  // internally as an InstanceKlass. These special instanceKlasses
  // share the constant pool of the class that "implements" the
  // interface. By sharing the constant pool, the method holder of a
  // miranda method is the class that "implements" the interface. In a
  // non-redefine situation, the subtype check works fine. However, if
  // the old constant pool's pool holder is modified, then the check
  // fails because there is no class hierarchy relationship between the
  // vtable's class and "the new class".

  old_constants->set_pool_holder(scratch_class());
#endif

  // track number of methods that are EMCP for add_previous_version() call below
  int emcp_method_count = check_methods_and_mark_as_obsolete();
  transfer_old_native_function_registrations(the_class);

  // The class file bytes from before any retransformable agents mucked
  // with them was cached on the scratch class, move to the_class.
  // Note: we still want to do this if nothing needed caching since it
  // should get cleared in the_class too.
  if (the_class->get_cached_class_file() == 0) {
    // the_class doesn't have a cache yet so copy it
    the_class->set_cached_class_file(scratch_class->get_cached_class_file());
  }
  else if (scratch_class->get_cached_class_file() !=
           the_class->get_cached_class_file()) {
    // The same class can be present twice in the scratch classes list or there
    // are multiple concurrent RetransformClasses calls on different threads.
    // In such cases we have to deallocate scratch_class cached_class_file.
    os::free(scratch_class->get_cached_class_file());
  }

  // NULL out in scratch class to not delete twice.  The class to be redefined
  // always owns these bytes.
  scratch_class->set_cached_class_file(NULL);

  // Replace inner_classes
  Array<u2>* old_inner_classes = the_class->inner_classes();
  the_class->set_inner_classes(scratch_class->inner_classes());
  scratch_class->set_inner_classes(old_inner_classes);

  // Initialize the vtable and interface table after
  // methods have been rewritten
  // no exception should happen here since we explicitly
  // do not check loader constraints.
  // compare_and_normalize_class_versions has already checked:
  //  - classloaders unchanged, signatures unchanged
  //  - all instanceKlasses for redefined classes reused & contents updated
  the_class->vtable().initialize_vtable(false, THREAD);
  the_class->itable().initialize_itable(false, THREAD);
  // Only a ThreadDeath exception is tolerated here; anything else would
  // indicate the normalization checks above were insufficient.
  assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception");

  // Leave arrays of jmethodIDs and itable index cache unchanged

  // Copy the "source file name" attribute from new class version
  the_class->set_source_file_name_index(
    scratch_class->source_file_name_index());

  // Copy the "source debug extension" attribute from new class version
  the_class->set_source_debug_extension(
    scratch_class->source_debug_extension(),
    scratch_class->source_debug_extension() == NULL ? 0 :
    (int)strlen(scratch_class->source_debug_extension()));

  // Use of javac -g could be different in the old and the new
  if (scratch_class->access_flags().has_localvariable_table() !=
      the_class->access_flags().has_localvariable_table()) {

    AccessFlags flags = the_class->access_flags();
    if (scratch_class->access_flags().has_localvariable_table()) {
      flags.set_has_localvariable_table();
    } else {
      flags.clear_has_localvariable_table();
    }
    the_class->set_access_flags(flags);
  }

  swap_annotations(the_class, scratch_class);

  // Replace minor version number of class file
  u2 old_minor_version = the_class->minor_version();
  the_class->set_minor_version(scratch_class->minor_version());
  scratch_class->set_minor_version(old_minor_version);

  // Replace major version number of class file
  u2 old_major_version = the_class->major_version();
  the_class->set_major_version(scratch_class->major_version());
  scratch_class->set_major_version(old_major_version);

  // Replace CP indexes for class and name+type of enclosing method
  u2 old_class_idx  = the_class->enclosing_method_class_index();
  u2 old_method_idx = the_class->enclosing_method_method_index();
  the_class->set_enclosing_method_indices(
    scratch_class->enclosing_method_class_index(),
    scratch_class->enclosing_method_method_index());
  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);

  // Replace fingerprint data
  the_class->set_has_passed_fingerprint_check(scratch_class->has_passed_fingerprint_check());
  the_class->store_fingerprint(scratch_class->get_stored_fingerprint());

  the_class->set_has_been_redefined();

  if (!the_class->should_be_initialized()) {
    // Class was already initialized, so AOT has only seen the original version.
    // We need to let AOT look at it again.
    AOTLoader::load_for_klass(the_class, THREAD);
  }

  // keep track of previous versions of this class
  the_class->add_previous_version(scratch_class, emcp_method_count);

  // Note: start() above was guarded by log_is_enabled; stop() here is
  // unconditional (matching the phase2 stop below).
  _timer_rsc_phase1.stop();
  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_rsc_phase2.start();
  }

  // Adjust constantpool caches and vtables for all classes
  // that reference methods of the evolved class.
  AdjustCpoolCacheAndVtable adjust_cpool_cache_and_vtable(THREAD);
  ClassLoaderDataGraph::classes_do(&adjust_cpool_cache_and_vtable);

  if (the_class->oop_map_cache() != NULL) {
    // Flush references to any obsolete methods from the oop map cache
    // so that obsolete methods are not pinned.
    the_class->oop_map_cache()->flush_obsolete_entries();
  }

  increment_class_counter((InstanceKlass *)the_class, THREAD);
  {
    ResourceMark rm(THREAD);
    // increment the classRedefinedCount field in the_class and in any
    // direct and indirect subclasses of the_class
    log_info(redefine, class, load)
      ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
       the_class->external_name(), java_lang_Class::classRedefinedCount(the_class->java_mirror()), os::available_memory() >> 10);
    Events::log_redefinition(THREAD, "redefined class name=%s, count=%d",
                             the_class->external_name(),
                             java_lang_Class::classRedefinedCount(the_class->java_mirror()));

  }
  _timer_rsc_phase2.stop();
} // end redefine_single_class()
4204 
4205 
4206 // Increment the classRedefinedCount field in the specific InstanceKlass
4207 // and in all direct and indirect subclasses.
4208 void VM_RedefineClasses::increment_class_counter(InstanceKlass *ik, TRAPS) {
4209   oop class_mirror = ik->java_mirror();
4210   Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
4211   int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
4212   java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
4213 
4214   if (class_oop != _the_class) {
4215     // _the_class count is printed at end of redefine_single_class()
4216     log_debug(redefine, class, subclass)("updated count in subclass=%s to %d", ik->external_name(), new_count);
4217   }
4218 
4219   for (Klass *subk = ik->subklass(); subk != NULL;
4220        subk = subk->next_sibling()) {
4221     if (subk->is_instance_klass()) {
4222       // Only update instanceKlasses
4223       InstanceKlass *subik = InstanceKlass::cast(subk);
4224       // recursively do subclasses of the current subclass
4225       increment_class_counter(subik, THREAD);
4226     }
4227   }
4228 }
4229 
// Verify that, after redefinition, no vtable, itable or constant pool cache
// entry in klass 'k' still refers to an old or obsolete method. On failure,
// dump the offending table(s) at trace level and then hit a guarantee().
void VM_RedefineClasses::CheckClass::do_klass(Klass* k) {
  bool no_old_methods = true;  // be optimistic

  // Both array and instance classes have vtables.
  // a vtable should never contain old or obsolete methods
  ResourceMark rm(_thread);
  if (k->vtable_length() > 0 &&
      !k->vtable().check_no_old_or_obsolete_entries()) {
    if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
      log_trace(redefine, class, obsolete, metadata)
        ("klassVtable::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
         k->signature_name());
      k->vtable().dump_vtable();
    }
    no_old_methods = false;
  }

  // itables and constant pool caches only exist on instance klasses.
  if (k->is_instance_klass()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // an itable should never contain old or obsolete methods
    if (ik->itable_length() > 0 &&
        !ik->itable().check_no_old_or_obsolete_entries()) {
      if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
        log_trace(redefine, class, obsolete, metadata)
          ("klassItable::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
           ik->signature_name());
        ik->itable().dump_itable();
      }
      no_old_methods = false;
    }

    // the constant pool cache should never contain non-deleted old or obsolete methods
    if (ik->constants() != NULL &&
        ik->constants()->cache() != NULL &&
        !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
      if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
        log_trace(redefine, class, obsolete, metadata)
          ("cp-cache::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
           ik->signature_name());
        ik->constants()->cache()->dump_cache();
      }
      no_old_methods = false;
    }
  }

  // print and fail guarantee if old methods are found.
  if (!no_old_methods) {
    if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
      dump_methods();
    } else {
      log_trace(redefine, class)("Use the '-Xlog:redefine+class*:' option "
        "to see more info about the following guarantee() failure.");
    }
    guarantee(false, "OLD and/or OBSOLETE method(s) found");
  }
}
4288 
4289 
4290 void VM_RedefineClasses::dump_methods() {
4291   int j;
4292   log_trace(redefine, class, dump)("_old_methods --");
4293   for (j = 0; j < _old_methods->length(); ++j) {
4294     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4295     Method* m = _old_methods->at(j);
4296     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4297     m->access_flags().print_on(&log_stream);
4298     log_stream.print(" --  ");
4299     m->print_name(&log_stream);
4300     log_stream.cr();
4301   }
4302   log_trace(redefine, class, dump)("_new_methods --");
4303   for (j = 0; j < _new_methods->length(); ++j) {
4304     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4305     Method* m = _new_methods->at(j);
4306     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4307     m->access_flags().print_on(&log_stream);
4308     log_stream.print(" --  ");
4309     m->print_name(&log_stream);
4310     log_stream.cr();
4311   }
4312   log_trace(redefine, class, dump)("_matching_methods --");
4313   for (j = 0; j < _matching_methods_length; ++j) {
4314     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4315     Method* m = _matching_old_methods[j];
4316     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4317     m->access_flags().print_on(&log_stream);
4318     log_stream.print(" --  ");
4319     m->print_name();
4320     log_stream.cr();
4321 
4322     m = _matching_new_methods[j];
4323     log_stream.print("      (%5d)  ", m->vtable_index());
4324     m->access_flags().print_on(&log_stream);
4325     log_stream.cr();
4326   }
4327   log_trace(redefine, class, dump)("_deleted_methods --");
4328   for (j = 0; j < _deleted_methods_length; ++j) {
4329     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4330     Method* m = _deleted_methods[j];
4331     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4332     m->access_flags().print_on(&log_stream);
4333     log_stream.print(" --  ");
4334     m->print_name(&log_stream);
4335     log_stream.cr();
4336   }
4337   log_trace(redefine, class, dump)("_added_methods --");
4338   for (j = 0; j < _added_methods_length; ++j) {
4339     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4340     Method* m = _added_methods[j];
4341     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4342     m->access_flags().print_on(&log_stream);
4343     log_stream.print(" --  ");
4344     m->print_name(&log_stream);
4345     log_stream.cr();
4346   }
4347 }
4348 
4349 void VM_RedefineClasses::print_on_error(outputStream* st) const {
4350   VM_Operation::print_on_error(st);
4351   if (_the_class != NULL) {
4352     ResourceMark rm;
4353     st->print_cr(", redefining class %s", _the_class->external_name());
4354   }
4355 }